| code (stringlengths 5–1.03M) | repo_name (stringlengths 5–90) | path (stringlengths 4–158) | license (stringclasses, 15 values) | size (int64 5–1.03M) | n_ast_errors (int64 0–53.9k) | ast_max_depth (int64 2–4.17k) | n_whitespaces (int64 0–365k) | n_ast_nodes (int64 3–317k) | n_ast_terminals (int64 1–171k) | n_ast_nonterminals (int64 1–146k) | loc (int64 -1–37.3k) | cycloplexity (int64 -1–1.31k) |
|---|---|---|---|---|---|---|---|---|---|---|---|---|
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE StandaloneDeriving #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE UndecidableInstances #-}
{-# LANGUAGE OverlappingInstances #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE ScopedTypeVariables #-}
-- | Types used throughout the Extras package
--
module Control.Distributed.Process.Extras.Internal.Types
( -- * Tagging
Tag
, TagPool
, newTagPool
, getTag
-- * Addressing
, Linkable(..)
, Killable(..)
, Resolvable(..)
, Routable(..)
, Addressable
, sendToRecipient
, Recipient(..)
, RegisterSelf(..)
-- * Interactions
, whereisRemote
, resolveOrDie
, CancelWait(..)
, Channel
, Shutdown(..)
, ExitReason(..)
, ServerDisconnected(..)
, NFSerializable
-- remote table
, __remoteTable
) where
import Control.Concurrent.MVar
( MVar
, newMVar
, modifyMVar
)
import Control.DeepSeq (NFData, ($!!))
import Control.Distributed.Process hiding (send)
import qualified Control.Distributed.Process as P
( send
, unsafeSend
, unsafeNSend
)
import Control.Distributed.Process.Closure
( remotable
, mkClosure
, functionTDict
)
import Control.Distributed.Process.Serializable
import Data.Binary
import Data.Typeable (Typeable)
import GHC.Generics
--------------------------------------------------------------------------------
-- API --
--------------------------------------------------------------------------------
-- | Introduces a class that brings NFData into scope along with Serializable,
-- such that we can force evaluation. Intended for use with the UnsafePrimitives
-- module (which wraps "Control.Distributed.Process.UnsafePrimitives"), and
-- guarantees evaluatedness in terms of @NFData@. Please note that we /cannot/
-- guarantee that an @NFData@ instance will behave the same way as a @Binary@
-- one with regard to evaluation, so it is still possible to introduce unexpected
-- behaviour by using /unsafe/ primitives in this way.
--
class (NFData a, Serializable a) => NFSerializable a
instance (NFData a, Serializable a) => NFSerializable a
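-- A minimal sketch of how a (hypothetical) user-defined message type picks up
-- 'NFSerializable' through the catch-all instance above: supply 'Binary' and
-- 'NFData' instances (here via their 'Generic' defaults) and the constraint holds.
--
-- > data Ping = Ping deriving (Eq, Show, Typeable, Generic)
-- > instance Binary Ping
-- > instance NFData Ping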
-- | Tags provide uniqueness for messages, so that they can be
-- matched with their response.
type Tag = Int
-- | Generates unique 'Tag' for messages and response pairs.
-- Each process that depends, directly or indirectly, on
-- the call mechanisms in "Control.Distributed.Process.Global.Call"
-- should have at most one TagPool on which to draw unique message
-- tags.
type TagPool = MVar Tag
-- | Create a new per-process source of unique
-- message identifiers.
newTagPool :: Process TagPool
newTagPool = liftIO $ newMVar 0
-- | Extract a new identifier from a 'TagPool'.
getTag :: TagPool -> Process Tag
getTag tp = liftIO $ modifyMVar tp (\tag -> return (tag+1,tag))
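-- A minimal usage sketch, assuming some surrounding 'Process' computation:
--
-- > tagExample :: Process (Tag, Tag)
-- > tagExample = do
-- >   pool <- newTagPool
-- >   t1   <- getTag pool  -- yields 0
-- >   t2   <- getTag pool  -- yields 1
-- >   return (t1, t2)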
-- | Wait cancellation message.
data CancelWait = CancelWait
deriving (Eq, Show, Typeable, Generic)
instance Binary CancelWait where
instance NFData CancelWait where
-- | Simple representation of a channel.
type Channel a = (SendPort a, ReceivePort a)
-- | Used internally in whereisOrStart. Sent as (RegisterSelf,ProcessId).
data RegisterSelf = RegisterSelf
deriving (Typeable, Generic)
instance Binary RegisterSelf where
instance NFData RegisterSelf where
-- | A ubiquitous /shutdown signal/ that can be used
-- to maintain a consistent shutdown/stop protocol for
-- any process that wishes to handle it.
data Shutdown = Shutdown
deriving (Typeable, Generic, Show, Eq)
instance Binary Shutdown where
instance NFData Shutdown where
-- | Provides a /reason/ for process termination.
data ExitReason =
ExitNormal -- ^ indicates normal exit
| ExitShutdown -- ^ normal response to a 'Shutdown'
| ExitOther !String -- ^ abnormal (error) shutdown
deriving (Typeable, Generic, Eq, Show)
instance Binary ExitReason where
instance NFData ExitReason where
-- | A simple means of mapping to a receiver.
data Recipient =
Pid !ProcessId
| Registered !String
| RemoteRegistered !String !NodeId
-- | ProcReg !ProcessId !String
-- | RemoteProcReg NodeId String
-- | GlobalReg String
deriving (Typeable, Generic, Show, Eq)
instance Binary Recipient where
-- useful exit reasons
-- | Given when a server is unobtainable.
data ServerDisconnected = ServerDisconnected !DiedReason
deriving (Typeable, Generic)
instance Binary ServerDisconnected where
instance NFData ServerDisconnected where
$(remotable ['whereis])
-- | A synchronous version of 'whereis', this relies on 'call'
-- to perform the relevant monitoring of the remote node.
whereisRemote :: NodeId -> String -> Process (Maybe ProcessId)
whereisRemote node name =
call $(functionTDict 'whereis) node ($(mkClosure 'whereis) name)
sendToRecipient :: (Serializable m) => Recipient -> m -> Process ()
sendToRecipient (Pid p) m = P.send p m
sendToRecipient (Registered s) m = nsend s m
sendToRecipient (RemoteRegistered s n) m = nsendRemote n s m
unsafeSendToRecipient :: (NFSerializable m) => Recipient -> m -> Process ()
unsafeSendToRecipient (Pid p) m = P.unsafeSend p $!! m
unsafeSendToRecipient (Registered s) m = P.unsafeNSend s $!! m
unsafeSendToRecipient (RemoteRegistered s n) m = nsendRemote n s m
baseAddressableErrorMessage :: (Routable a) => a -> String
baseAddressableErrorMessage _ = "CannotResolveAddressable"
-- | Class of things to which a @Process@ can /link/ itself.
class Linkable a where
-- | Create a /link/ with the supplied object.
linkTo :: a -> Process ()
-- | Class of things that can be resolved to a 'ProcessId'.
--
class Resolvable a where
-- | Resolve the reference to a process id, or @Nothing@ if resolution fails
resolve :: a -> Process (Maybe ProcessId)
-- | Class of things that can be killed (or instructed to exit).
class Killable a where
killProc :: a -> String -> Process ()
exitProc :: (Serializable m) => a -> m -> Process ()
instance Killable ProcessId where
killProc = kill
exitProc = exit
instance Resolvable r => Killable r where
killProc r s = resolve r >>= maybe (return ()) (flip kill $ s)
exitProc r m = resolve r >>= maybe (return ()) (flip exit $ m)
-- | Provides a unified API for addressing processes.
--
class Routable a where
-- | Send a message to the target asynchronously
sendTo :: (Serializable m) => a -> m -> Process ()
-- | Send some @NFData@ message to the target asynchronously,
-- forcing evaluation (i.e., @deepseq@) beforehand.
unsafeSendTo :: (NFSerializable m) => a -> m -> Process ()
-- | Unresolvable @Addressable@ Message
unresolvableMessage :: a -> String
unresolvableMessage = baseAddressableErrorMessage
instance (Resolvable a) => Routable a where
sendTo a m = do
mPid <- resolve a
maybe (die (unresolvableMessage a))
(\p -> P.send p m)
mPid
unsafeSendTo a m = do
mPid <- resolve a
maybe (die (unresolvableMessage a))
(\p -> P.unsafeSend p $!! m)
mPid
-- | Unresolvable Addressable Message
unresolvableMessage = baseAddressableErrorMessage
instance Resolvable Recipient where
resolve (Pid p) = return (Just p)
resolve (Registered n) = whereis n
resolve (RemoteRegistered s n) = whereisRemote n s
instance Routable Recipient where
sendTo = sendToRecipient
unsafeSendTo = unsafeSendToRecipient
unresolvableMessage (Pid p) = unresolvableMessage p
unresolvableMessage (Registered n) = unresolvableMessage n
unresolvableMessage (RemoteRegistered s n) = unresolvableMessage (n, s)
instance Resolvable ProcessId where
resolve p = return (Just p)
instance Routable ProcessId where
sendTo = P.send
unsafeSendTo pid msg = P.unsafeSend pid $!! msg
unresolvableMessage p = "CannotResolvePid[" ++ (show p) ++ "]"
instance Resolvable String where
resolve = whereis
instance Routable String where
sendTo = nsend
unsafeSendTo name msg = P.unsafeNSend name $!! msg
unresolvableMessage s = "CannotResolveRegisteredName[" ++ s ++ "]"
instance Resolvable (NodeId, String) where
resolve (nid, pname) = whereisRemote nid pname
instance Routable (NodeId, String) where
sendTo (nid, pname) msg = nsendRemote nid pname msg
unsafeSendTo = sendTo -- because serialisation *must* take place
unresolvableMessage (n, s) =
"CannotResolveRemoteRegisteredName[name: " ++ s ++ ", node: " ++ (show n) ++ "]"
instance Routable (Message -> Process ()) where
sendTo f = f . wrapMessage
unsafeSendTo f = f . unsafeWrapMessage
class (Resolvable a, Routable a) => Addressable a
instance (Resolvable a, Routable a) => Addressable a
-- TODO: this probably belongs somewhere other than in ..Types.
-- | Resolve the @Resolvable@, or die with the given message plus details of what failed to resolve.
resolveOrDie :: (Routable a, Resolvable a) => a -> String -> Process ProcessId
resolveOrDie resolvable failureMsg = do
result <- resolve resolvable
case result of
Nothing -> die $ failureMsg ++ " " ++ unresolvableMessage resolvable
Just pid -> return pid
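-- A minimal usage sketch, assuming a process has been registered under the
-- (hypothetical) name "logger"; resolution failure terminates the caller:
--
-- > notifyLogger :: Process ()
-- > notifyLogger = do
-- >   pid <- resolveOrDie "logger" "notifyLogger:"
-- >   sendTo pid Shutdown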
| qnikst/distributed-process-extras | src/Control/Distributed/Process/Extras/Internal/Types.hs | bsd-3-clause | 9,248 | 4 | 13 | 1,865 | 1,950 | 1,072 | 878 | -1 | -1 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE FlexibleContexts #-}
-- | Enno Cramer's Style.
module HIndent.Styles.Cramer (cramer) where
import Control.Monad (forM_, replicateM_, unless, when)
import Control.Monad.State.Strict (MonadState, get, gets, put)
import Data.List (intersperse, sortOn)
import Data.Maybe (catMaybes, isJust, mapMaybe)
import Language.Haskell.Exts.Annotated.Syntax
import Language.Haskell.Exts.Comments
import Language.Haskell.Exts.SrcLoc
import Language.Haskell.Exts (prettyPrint)
import HIndent.Pretty hiding (inter, spaced)
import HIndent.Types
-- | Line breaking mode for syntactical constructs.
data LineBreak
= Free -- ^ Break whenever
| Single -- ^ Force single line (if possible)
| Multi -- ^ Force multiple lines
deriving (Eq,Enum,Show)
-- | Printer state.
data State =
State {cramerLineBreak :: LineBreak -- ^ Current line breaking mode
,cramerLangPragmaLength :: Int -- ^ Padding length for pragmas
,cramerModuleImportLength :: Int -- ^ Padding length for module imports
,cramerRecordFieldLength :: Int -- ^ Padding length for record fields
}
deriving (Show)
-- | Syntax shortcut for Extenders.
type Extend f = f NodeInfo -> Printer State ()
-- | Style definition.
cramer :: Style
cramer =
Style {styleName = "cramer"
,styleAuthor = "Enno Cramer"
,styleDescription = "Enno Cramer's style"
,styleInitialState =
State {cramerLineBreak = Free
,cramerLangPragmaLength = 0
,cramerModuleImportLength = 0
,cramerRecordFieldLength = 0}
,styleExtenders =
[Extender extModule
,Extender extModulePragma
,Extender extModuleHead
,Extender extExportSpecList
,Extender extImportDecl
,Extender extDecl
,Extender extDeclHead
,Extender extConDecl
,Extender extFieldDecl
,Extender extDeriving
,Extender extRhs
,Extender extContext
,Extender extType
,Extender extPat
,Extender extExp
,Extender extStmt
,Extender extMatch
,Extender extBinds
,Extender extFieldUpdate]
,styleDefConfig =
defaultConfig {configMaxColumns = 80
,configIndentSpaces = 4
,configClearEmptyLines = True}
,styleCommentPreprocessor = return}
--------------------------------------------------------------------------------
-- Helper
-- | Return an ast node's SrcSpan.
nodeSrcSpan :: Annotated a => a NodeInfo -> SrcSpan
nodeSrcSpan = srcInfoSpan . nodeInfoSpan . ann
-- | Turn a Name into a String
nameStr :: Name a -> String
nameStr (Ident _ s) = s
nameStr (Symbol _ s) = "(" ++ s ++ ")"
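-- A small illustration: identifiers are returned as-is, symbols are wrapped
-- in parentheses:
--
-- > nameStr (Ident () "foo") == "foo"
-- > nameStr (Symbol () "<>") == "(<>)"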
-- | The maximum difference between the current column and the indent level
-- before reduceIndent forces a line break.
maxDependOverhead :: Integral a => a
maxDependOverhead = 20
-- | Extract the name as a String from a ModuleName
moduleName :: ModuleName a -> String
moduleName (ModuleName _ s) = s
-- | Extract the names of a ModulePragma
pragmaNames :: ModulePragma a -> [String]
pragmaNames (LanguagePragma _ names) = map nameStr names
pragmaNames _ = []
-- | Return whether a data type has only empty constructors.
isEnum :: Decl NodeInfo -> Bool
isEnum (DataDecl _ (DataType _) Nothing (DHead _ _) constructors _) =
all isSimple constructors
where isSimple (QualConDecl _ Nothing Nothing (ConDecl _ _ [])) = True
isSimple _ = False
isEnum _ = False
-- | Return whether a data type has only zero or one constructor.
isSingletonType :: Decl NodeInfo -> Bool
isSingletonType (DataDecl _ _ _ _ [] _) = True
isSingletonType (DataDecl _ _ _ _ [ _ ] _) = True
isSingletonType _ = False
-- | If the given String is smaller than the given length, pad on
-- right with spaces until the length matches.
padRight :: Int -> String -> String
padRight l s = take (max l (length s)) (s ++ repeat ' ')
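-- A small illustration of the padding behaviour:
--
-- > padRight 5 "ab"   == "ab   "
-- > padRight 2 "abcd" == "abcd"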
-- | Return comments with matching location.
filterComments :: Annotated a => (Maybe ComInfoLocation -> Bool) -> a NodeInfo -> [ComInfo]
filterComments f = filter (f . comInfoLocation) . nodeInfoComments . ann
-- | Return whether an AST node has matching comments.
hasComments :: Annotated a => (Maybe ComInfoLocation -> Bool) -> a NodeInfo -> Bool
hasComments f = not . null . filterComments f
-- | Copy comments marked After from one AST node to another.
copyComments :: (Annotated ast1,Annotated ast2)
=> ComInfoLocation
-> ast1 NodeInfo
-> ast2 NodeInfo
-> ast2 NodeInfo
copyComments loc from to = amap updateComments to
where updateComments info = info { nodeInfoComments = oldComments ++ newComments }
oldComments = filterComments (/= Just loc) to
newComments = filterComments (== Just loc) from
-- | Return the number of line breaks between AST nodes.
lineDelta
:: (Annotated ast1,Annotated ast2)
=> ast1 NodeInfo -> ast2 NodeInfo -> Int
lineDelta prev next = nextLine - prevLine
where prevLine = maximum (prevNodeLine : prevCommentLines)
nextLine = minimum (nextNodeLine : nextCommentLines)
prevNodeLine = srcSpanEndLine . nodeSrcSpan $ prev
nextNodeLine = srcSpanStartLine . nodeSrcSpan $ next
prevCommentLines =
map (srcSpanEndLine . commentSrcSpan) $
filterComments (== Just After) prev
nextCommentLines =
map (srcSpanStartLine . commentSrcSpan) $
filterComments (== Just Before) next
commentSrcSpan = annComment . comInfoComment
annComment (Comment _ sp _) = sp
-- | Specialized forM_ for Maybe.
maybeM_ :: Monad m
=> Maybe a -> (a -> m ()) -> m ()
maybeM_ = forM_
-- | Simplified HIndent.Pretty.inter that does not modify the indent level.
inter :: MonadState (PrintState s) m
=> m () -> [m ()] -> m ()
inter sep = sequence_ . intersperse sep
-- | Simplified HIndent.Pretty.spaced that does not modify the indent level.
spaced :: MonadState (PrintState s) m
=> [m ()] -> m ()
spaced = inter space
-- | Indent one level.
indentFull :: MonadState (PrintState s) m
=> m a -> m a
indentFull p = getIndentSpaces >>= flip indented p
-- | Indent a half level.
indentHalf :: MonadState (PrintState s) m
=> m a -> m a
indentHalf p = getIndentSpaces >>= flip indented p . (`div` 2)
-- | Set indentation level to current column.
align :: MonadState (PrintState s) m
=> m a -> m a
align p =
do st <- get
let col =
if psEolComment st
then psIndentLevel st
else max (psColumn st)
(psIndentLevel st)
column col p
-- | Update the line breaking mode and restore afterwards.
withLineBreak
:: LineBreak -> Printer State a -> Printer State a
withLineBreak lb p =
do old <- gets (cramerLineBreak . psUserState)
modifyState $ \s -> s {cramerLineBreak = lb}
result <- p
modifyState $ \s -> s {cramerLineBreak = old}
return result
-- | Use the first printer if it fits on a single line within the
-- column limit, otherwise use the second.
attemptSingleLine
:: Printer State a -> Printer State a -> Printer State a
attemptSingleLine single multi =
do prevState <- get
case cramerLineBreak . psUserState $ prevState of
Single -> single
Multi -> multi
Free ->
do result <- withLineBreak Single single
col <- getColumn
row <- getLineNum
if row == psLine prevState &&
col <= configMaxColumns (psConfig prevState)
then return result
else do put prevState
multi
-- | Same as attemptSingleLine, but execute the second printer in Multi
-- mode. Used in type signatures to force either a single line or
-- have each `->` on a line by itself.
attemptSingleLineType
:: Printer State a -> Printer State a -> Printer State a
attemptSingleLineType single multi =
attemptSingleLine single
(withLineBreak Multi multi)
-- | Format a list-like structure on a single line.
listSingleLine :: Pretty a
=> String
-> String
-> String
-> [a NodeInfo]
-> Printer State ()
listSingleLine open close _ [] =
do string open
space
string close
listSingleLine open close sep xs =
do string open
space
inter (string sep >> space) $ map pretty xs
space
string close
-- | Format a list-like structure with each element on a line by
-- itself.
listMultiLine
:: Pretty a
=> String -> String -> String -> [a NodeInfo] -> Printer State ()
listMultiLine open close _ [] =
align $
do string open
newline
string close
listMultiLine open close sep xs =
align $
do string open
space
inter (newline >> string sep >> space) $ map pretty xs
newline
string close
-- | Format a list-like structure on a single line, if possible, or
-- each element on a line by itself.
listAttemptSingleLine :: Pretty a
=> String
-> String
-> String
-> [a NodeInfo]
-> Printer State ()
listAttemptSingleLine open close sep xs =
attemptSingleLine (listSingleLine open close sep xs)
(listMultiLine open close sep xs)
-- | Format a list-like structure, automatically breaking lines when
-- the next separator and item do not fit within the column limit.
listAutoWrap
:: Pretty a
=> String -> String -> String -> [a NodeInfo] -> Printer State ()
listAutoWrap open close sep ps =
align $
do string open
unless (null ps) $
do space
pretty $ head ps
forM_ (map (\p -> string sep >> space >> pretty p)
(tail ps)) $
\p ->
do fits <- fitsColumnLimit p
unless fits newline
p
space
string close
where fitsColumnLimit p =
fmap fst . sandbox $
do _ <- p
col <- getColumn
limit <- gets (configMaxColumns . psConfig)
return $ col < limit
-- | Like `inter newline . map pretty`, but preserve empty lines
-- between elements.
preserveLineSpacing
:: (Pretty ast,Annotated ast)
=> [ast NodeInfo] -> Printer State ()
preserveLineSpacing [] = return ()
preserveLineSpacing asts@(first:rest) =
do pretty first
forM_ (zip asts rest) $
\(prev,cur) ->
do replicateM_ (max 1 $ lineDelta prev cur)
newline
pretty cur
-- | `reduceIndent short long printer` produces either `short printer`
-- or `newline >> indentFull (long printer)`, depending on whether the
-- current column is sufficiently near to the current indentation depth.
--
-- The function is used to avoid overly big dependent indentation by
-- heuristically breaking and non-dependently indenting.
reduceIndent :: (Printer State () -> Printer State ())
-> (Printer State () -> Printer State ())
-> Printer State ()
-> Printer State ()
reduceIndent short long printer =
do linebreak <- gets (cramerLineBreak . psUserState)
case linebreak of
Single -> single
Multi -> multi
Free ->
do curCol <- getColumn
curIndent <- gets psIndentLevel
indentSpaces <- gets (configIndentSpaces . psConfig)
if (curCol - curIndent - indentSpaces) < maxDependOverhead
then single
else multi
where single = short printer
multi = newline >> indentFull (long printer)
-- | Either simply precede the given printer with a space, or indent
-- the printer after a newline, depending on the available space.
spaceOrIndent :: Printer State () -> Printer State ()
spaceOrIndent = reduceIndent (\p -> space >> p) id
-- | Special casing for `do` blocks and leading comments
inlineExpr :: (Printer State () -> Printer State ()) -> Exp NodeInfo -> Printer State ()
inlineExpr _ expr
| not (null (filterComments (== (Just Before)) expr)) =
do newline
indentFull $ pretty expr
inlineExpr _ expr@Do{} =
do space
pretty expr
inlineExpr fmt expr = fmt (pretty expr)
--------------------------------------------------------------------------------
-- Printer for reused syntactical constructs
whereBinds :: Binds NodeInfo -> Printer State ()
whereBinds binds =
do newline
indentHalf $
do write "where"
newline
indentHalf $ pretty binds
rhsExpr :: Exp NodeInfo -> Printer State ()
rhsExpr expr =
do space
rhsSeparator
inlineExpr spaceOrIndent expr
guardedRhsExpr
:: GuardedRhs NodeInfo -> Printer State ()
guardedRhsExpr (GuardedRhs _ guards expr) =
depend (write "| ") $
do inter (write ", ") $ map pretty guards
rhsExpr expr
-- | Pretty print a name for being an infix operator.
prettyInfixOp :: MonadState (PrintState s) m
=> QName NodeInfo -> m ()
prettyInfixOp op =
case op of
Qual{} ->
do write "`"
pretty' op
write "`"
UnQual _ n ->
case n of
Ident _ i -> string ("`" ++ i ++ "`")
Symbol _ s -> string s
Special _ s -> pretty s
tupleExpr
:: Pretty ast
=> Boxed -> [ast NodeInfo] -> Printer State ()
tupleExpr boxed exprs = attemptSingleLine single multi
where single =
do string open
inter (write ", ") $ map pretty exprs
string close
multi = listMultiLine open close "," exprs
(open,close) =
case boxed of
Unboxed -> ("(#","#)")
Boxed -> ("(",")")
listExpr :: Pretty ast
=> [ast NodeInfo] -> Printer State ()
listExpr [] = write "[]"
listExpr xs = listAttemptSingleLine "[" "]" "," xs
recordExpr
:: (Pretty ast,Pretty ast')
=> ast NodeInfo -> [ast' NodeInfo] -> Printer State ()
recordExpr expr updates =
do pretty expr
space
listAttemptSingleLine "{" "}" "," updates
ifExpr :: (Printer State () -> Printer State ())
-> Exp NodeInfo
-> Exp NodeInfo
-> Exp NodeInfo
-> Printer State ()
ifExpr indent cond true false = attemptSingleLine single multi
where single = spaced [if',then',else']
multi =
align $
do if'
indent $
do newline
then'
newline
else'
if' = write "if " >> pretty cond
then' = write "then " >> pretty true
else' = write "else " >> pretty false
letExpr :: Binds NodeInfo -> Exp NodeInfo -> Printer State ()
letExpr binds expr =
align $
do depend (write "let ") $ pretty binds
newline
write "in"
inlineExpr (\p -> newline >> indentFull p) expr
infixExpr :: Exp NodeInfo -> Printer State ()
-- No line break before do
infixExpr (InfixApp _ arg1 op arg2@Do{}) =
spaced [pretty arg1,pretty op,pretty arg2]
-- Try to preserve existing line break before and after infix ops
infixExpr (InfixApp _ arg1 op arg2)
| deltaBefore /= 0 && deltaAfter /= 0 =
align $ inter newline [pretty arg1,pretty op,pretty arg2]
| deltaBefore /= 0 || deltaAfter /= 0 =
pretty arg1 >>
preserveLinebreak
deltaBefore
(pretty op >>
preserveLinebreak deltaAfter
(pretty arg2))
| otherwise = attemptSingleLine single multi
where single = spaced [pretty arg1,pretty op,pretty arg2]
multi =
do pretty arg1
space
pretty op
newline
indentFull $ pretty arg2
preserveLinebreak delta p =
if delta > 0
then newline >> indentFull p
else space >> p
deltaBefore = lineDelta arg1 op
deltaAfter = lineDelta op arg2
infixExpr _ = error "not an InfixApp"
applicativeExpr :: Exp NodeInfo
-> [(QOp NodeInfo,Exp NodeInfo)]
-> Printer State ()
applicativeExpr ctor args = attemptSingleLine single multi
where single = spaced (pretty ctor : map prettyArg args)
multi =
do pretty ctor
depend space $ inter newline $ map prettyArg args
prettyArg (op,arg) = pretty op >> space >> pretty arg
typeSig :: Type NodeInfo -> Printer State ()
typeSig ty =
attemptSingleLineType (write ":: " >> pretty ty)
(align $ write ":: " >> pretty ty)
typeInfixExpr
:: Type NodeInfo -> Printer State ()
-- As HIndent does not know about operator precedence, preserve
-- existing line breaks, but do not add new ones.
typeInfixExpr (TyInfix _ arg1 op arg2)
| deltaBefore /= 0 && deltaAfter /= 0 =
align $ inter newline [pretty arg1,prettyInfixOp op,pretty arg2]
| deltaBefore /= 0 || deltaAfter /= 0 =
pretty arg1 >>
preserveLinebreak
deltaBefore
(prettyInfixOp op >>
preserveLinebreak deltaAfter
(pretty arg2))
| otherwise = spaced [pretty arg1,prettyInfixOp op,pretty arg2]
where preserveLinebreak delta p =
if delta > 0
then newline >> indentFull p
else space >> p
deltaBefore = lineDelta arg1 op
deltaAfter = lineDelta op arg2
typeInfixExpr _ = error "not a TyInfix"
--------------------------------------------------------------------------------
-- Extenders
extModule :: Extend Module
extModule (Module _ mhead pragmas imports decls) =
do modifyState $ \s -> s {cramerLangPragmaLength = pragLen
,cramerModuleImportLength = modLen}
inter (newline >> newline) $
catMaybes [unless' (null pragmas) $ preserveLineSpacing pragmas
,pretty <$> mhead
,unless' (null imports) $ preserveLineSpacing imports
,unless' (null decls) $
do forM_ (init decls) $
\decl ->
do pretty decl
newline
unless (skipNewline decl) newline
pretty (last decls)]
where pragLen = maximum $ map length $ concatMap pragmaNames pragmas
modLen = maximum $ map (length . moduleName . importModule) imports
unless' cond expr =
if not cond
then Just expr
else Nothing
skipNewline TypeSig{} = True
skipNewline _ = False
extModule other = prettyNoExt other
-- Align closing braces of pragmas
extModulePragma :: Extend ModulePragma
extModulePragma (LanguagePragma _ names) =
do namelen <- gets (cramerLangPragmaLength . psUserState)
forM_ names $
\name ->
do write "{-# LANGUAGE "
string $ padRight namelen $ nameStr name
write " #-}"
-- Avoid increasing whitespace after OPTIONS string
extModulePragma (OptionsPragma _ mtool opt) =
do write "{-# OPTIONS"
maybeM_ mtool $ \tool -> do write "_"
string $ prettyPrint tool
space
string $ trim opt
write " #-}"
where trim = reverse . dropWhile (== ' ') . reverse . dropWhile (== ' ')
extModulePragma other = prettyNoExt other
-- Empty or single item export list on one line, otherwise one item
-- per line with parens and comma aligned
extModuleHead :: Extend ModuleHead
extModuleHead (ModuleHead _ name mwarn mexports) =
do mapM_ pretty mwarn
write "module "
pretty name
maybeM_ mexports $ \exports -> pretty exports
write " where"
-- Align export list, one item per line
extExportSpecList :: Extend ExportSpecList
extExportSpecList (ExportSpecList _ exports) =
case exports of
[] -> write " ( )"
[e]
| not (hasComments (const True) e) -> write " ( " >> pretty e >> write " )"
(first:rest) ->
do newline
indentFull $
do write "( "
commentCol <- getColumn
align $ prettyExportSpec "" commentCol first
forM_ rest $
\export ->
do newline
prettyExportSpec ", " commentCol export
newline
write ")"
where printCommentsSimple loc ast =
let rawComments = filterComments (== Just loc) ast
in do preprocessor <- gets psCommentPreprocessor
comments <- preprocessor $ map comInfoComment rawComments
forM_ comments $
printComment (Just $ nodeSrcSpan ast)
prettyExportSpec prefix col spec =
do column col $ printCommentsSimple Before spec
string prefix
prettyNoExt spec
printCommentsSimple After spec
-- Align import statements
extImportDecl :: Extend ImportDecl
extImportDecl ImportDecl{..} =
do if importQualified
then write "import qualified "
else write "import "
namelen <- gets (cramerModuleImportLength . psUserState)
if isJust importAs || isJust importSpecs
then string $ padRight namelen $ moduleName importModule
else string $ moduleName importModule
maybeM_ importAs $
\name ->
do write " as "
pretty name
maybeM_ importSpecs $
\(ImportSpecList _ importHiding specs) ->
do space
when importHiding $ write "hiding "
listAutoWrap "(" ")" "," $ sortOn prettyPrint specs
extDecl :: Extend Decl
-- No dependent indentation for type decls
extDecl (TypeDecl _ declhead ty) =
do write "type "
pretty declhead
write " = "
indentFull $ pretty ty
-- Fix whitespace before 'where' in class decl
extDecl (ClassDecl _ mcontext declhead fundeps mdecls) =
do depend (write "class ") $
withCtx mcontext $
depend (pretty declhead) $
depend (unless (null fundeps) $
write " | " >>
inter (write ", ")
(map pretty fundeps)) $
when (isJust mdecls) $ write " where"
maybeM_ mdecls $
\decls ->
do newline
indentFull $ preserveLineSpacing decls
-- Align data constructors
extDecl decl@(DataDecl _ dataOrNew mcontext declHead constructors mderiv) =
do mapM_ pretty mcontext
pretty dataOrNew
space
pretty declHead
write " ="
if isEnum decl || isSingletonType decl
then attemptSingleLine single multi
else multi
maybeM_ mderiv $ \deriv -> indentFull $ newline >> pretty deriv
where single =
do space
inter (write " | ") $ map pretty constructors
multi =
reduceIndent
(depend space . indented (-2))
(\p -> write " " >> p)
(inter (newline >> write "| ") $ map pretty constructors)
-- Type signature either on a single line or split at arrows, aligned with '::'
extDecl (TypeSig _ names ty) =
do inter (write ", ") $ map pretty names
space
typeSig ty
-- Preserve empty lines between function matches
extDecl (FunBind _ matches) = preserveLineSpacing matches
-- Half-indent for where clause, half-indent binds
extDecl (PatBind _ pat rhs mbinds) =
do pretty pat
withCaseContext False $ pretty rhs
maybeM_ mbinds whereBinds
extDecl other = prettyNoExt other
-- Do not modify indent level
extDeclHead :: Extend DeclHead
extDeclHead (DHApp _ dhead var) =
do pretty dhead
space
pretty var
extDeclHead other = prettyNoExt other
extConDecl :: Extend ConDecl
-- No extra space after empty constructor
extConDecl (ConDecl _ name []) = pretty name
extConDecl (ConDecl _ name tys) = attemptSingleLine single multi
where single = spaced $ pretty name : map pretty tys
multi = depend (pretty name >> space) $ lined $ map pretty tys
-- Align record fields
extConDecl (RecDecl _ name fields) =
do modifyState $ \s -> s {cramerRecordFieldLength = fieldLen}
pretty name
space
case fields of
[] -> write "{ }"
[_] -> listAttemptSingleLine "{" "}" "," fields
_ -> listMultiLine "{" "}" "," fields
where fieldLen = maximum $ map (length . nameStr) fnames
fnames =
mapMaybe (\(FieldDecl _ ns _) ->
case ns of
[n] -> Just n
_ -> Nothing)
fields
extConDecl other = prettyNoExt other
extFieldDecl :: Extend FieldDecl
extFieldDecl (FieldDecl _ [name] ty) =
do namelen <- gets (cramerRecordFieldLength . psUserState)
string $ padRight namelen $ nameStr name
space
typeSig ty
extFieldDecl other = prettyNoExt other
-- Derived instances separated by comma and space, no line breaking
extDeriving :: Extend Deriving
extDeriving (Deriving _ instHeads) =
do write "deriving "
case instHeads of
[x] -> pretty x
xs -> parens $ inter (write ", ") $ map pretty xs
extRhs :: Extend Rhs
extRhs (UnGuardedRhs _ expr) = rhsExpr expr
extRhs (GuardedRhss _ [rhs]) = space >> guardedRhsExpr rhs
extRhs (GuardedRhss _ rhss) =
forM_ rhss $
\rhs ->
do newline
indentFull $ guardedRhsExpr rhs
-- Type constraints on a single line
extContext :: Extend Context
extContext (CxTuple _ ctxs) = parens $ inter (write ", ") $ map pretty ctxs
extContext other = prettyNoExt other
extType :: Extend Type
extType (TyForall _ mforall mcontext ty) = attemptSingleLine single multi
where single =
do maybeM_ mforall $ \vars -> prettyForall vars >> space
maybeM_ mcontext $ \context -> pretty context >> write " => "
pretty ty
multi =
do maybeM_ mforall $ \vars -> prettyForall vars >> newline
maybeM_ mcontext $
\context -> pretty context >> newline >> write "=> "
pretty ty
prettyForall vars =
do write "forall "
spaced $ map pretty vars
write "."
-- Type signature should line break at each arrow if necessary
extType (TyFun _ from to) =
attemptSingleLineType (pretty from >> write " -> " >> pretty to)
(pretty from >> newline >> write "-> " >> pretty to)
-- Parentheses reset forced line breaking
extType (TyParen _ ty) = withLineBreak Free $ parens $ pretty ty
-- Tuple types on one line, with space after comma
extType (TyTuple _ boxed tys) = withLineBreak Free $ tupleExpr boxed tys
-- Infix application
extType expr@TyInfix{} = typeInfixExpr expr
extType other = prettyNoExt other
extPat :: Extend Pat
-- Infix application with space around operator
extPat (PInfixApp _ arg1 op arg2) =
do pretty arg1
space
prettyInfixOp op
space
pretty arg2
-- Tuple patterns on one line, with space after comma
extPat (PTuple _ boxed pats) = withLineBreak Single $ tupleExpr boxed pats
-- List patterns on one line, with space after comma
extPat (PList _ pats) = withLineBreak Single $ listExpr pats
extPat other = prettyNoExt other
extExp :: Extend Exp
-- Function application on a single line or align arguments
extExp expr@(App _ fun arg) = attemptSingleLine single multi
where single = pretty fun >> space >> pretty arg
multi =
pretty fun' >> space >> align (lined $ map pretty $ reverse args')
(fun',args') = collectArgs expr
collectArgs
:: Exp NodeInfo -> (Exp NodeInfo,[Exp NodeInfo])
collectArgs app@(App _ g y) =
let (f,args) = collectArgs g
in (f,copyComments After app y : args)
collectArgs nonApp = (nonApp,[])
-- Infix application on a single line or indented rhs
extExp expr@InfixApp{} =
if all (isApplicativeOp . fst) opArgs && isFmap (fst $ head opArgs)
then applicativeExpr firstArg opArgs
else infixExpr expr
where (firstArg,opArgs) = collectOpExps expr
collectOpExps
:: Exp NodeInfo -> (Exp NodeInfo,[(QOp NodeInfo,Exp NodeInfo)])
collectOpExps app@(InfixApp _ left op right) =
let (ctorLeft,argsLeft) = collectOpExps left
(ctorRight,argsRight) = collectOpExps right
in (ctorLeft,argsLeft ++ [(op,copyComments After app ctorRight)] ++ argsRight)
collectOpExps e = (e,[])
isApplicativeOp :: QOp NodeInfo -> Bool
isApplicativeOp (QVarOp _ (UnQual _ (Symbol _ s))) =
head s == '<' && last s == '>'
isApplicativeOp _ = False
isFmap :: QOp NodeInfo -> Bool
isFmap (QVarOp _ (UnQual _ (Symbol _ "<$>"))) = True
isFmap _ = False
-- No space after lambda
extExp (Lambda _ pats expr) =
do write "\\"
maybeSpace
spaced $ map pretty pats
write " ->"
inlineExpr (\p -> attemptSingleLine (space >> p) (spaceOrIndent p)) expr
where maybeSpace =
case pats of
PBangPat{}:_ -> space
PIrrPat{}:_ -> space
_ -> return ()
-- If-then-else on one line or newline and indent before then and else
extExp (If _ cond true false) = ifExpr id cond true false
-- Newline before in
extExp (Let _ binds expr) = letExpr binds expr
-- Tuples on a single line (no space inside parens but after comma) or
-- one element per line with parens and comma aligned
extExp (Tuple _ boxed exprs) = tupleExpr boxed exprs
-- List on a single line or one item per line with aligned brackets and comma
extExp (List _ exprs) = listExpr exprs
-- Record construction and update on a single line or one line per
-- field with aligned braces and comma
extExp (RecConstr _ qname updates) = recordExpr qname updates
extExp (RecUpdate _ expr updates) = recordExpr expr updates
-- Full indentation for case alts and preserve empty lines between alts
extExp (Case _ expr alts) =
do write "case "
pretty expr
write " of"
newline
withCaseContext True $ indentFull $ preserveLineSpacing alts
-- Line break and indent after do
extExp (Do _ stmts) =
do write "do"
newline
indentFull $ preserveLineSpacing stmts
extExp (ListComp _ e qstmt) =
brackets (do space
pretty e
unless (null qstmt)
(do newline
indented (-1)
(write "|")
prefixedLined ","
(map (\x -> do space
pretty x
space)
qstmt)))
-- Type signatures like toplevel decl
extExp (ExpTypeSig _ expr ty) =
do pretty expr
space
typeSig ty
extExp other = prettyNoExt other
extStmt :: Extend Stmt
extStmt (Qualifier _ (If _ cond true false)) = ifExpr indentFull cond true false
extStmt other = prettyNoExt other
extMatch :: Extend Match
-- Indent where same as for top level decl
extMatch (Match _ name pats rhs mbinds) =
do pretty name
space
spaced $ map pretty pats
withCaseContext False $ pretty rhs
maybeM_ mbinds whereBinds
extMatch other = prettyNoExt other
-- Preserve empty lines between bindings
extBinds :: Extend Binds
extBinds (BDecls _ decls) = preserveLineSpacing decls
extBinds other = prettyNoExt other
-- No line break after equal sign
extFieldUpdate :: Extend FieldUpdate
extFieldUpdate (FieldUpdate _ qname expr) =
do pretty qname
write " = "
pretty expr
extFieldUpdate other = prettyNoExt other
| lunaris/hindent | src/HIndent/Styles/Cramer.hs | bsd-3-clause | 31,341 | 0 | 20 | 9,175 | 8,780 | 4,254 | 4,526 | 732 | 8 |
--8000---
{-
A note on quality, copied from a version of README.md:
This is an old toy project I wrote as a relatively inexperienced functional
programmer. I have not since cleaned it up, and by my current standards, its
quality seems very low, sorry, although I haven't reviewed it very deeply.
-}
{-
cfipu is a primitive, minimalistic language, similar to assembly and brainfuck. It can read input from stdin, and print to stdout. When a file is interpreted, its contents are placed in memory after being processed by the preprocessor.
There are two pointers: the data pointer, and the instruction pointer, both of which initially point to the first byte. Each byte is parsed, the instruction pointer is set ahead by one byte, and then the command is executed if the byte is a command. There are only 8 primitive commands:
0: End the program.
1: Print current byte.
2: Get one character (blocking, if necessary). If EOF has been reached, the data pointer is set behind by one.
3: Set the data pointer behind by one.
4: Set the data pointer ahead by two.
5: Decrement the value pointed to by the data pointer by one.
6: If the current byte is non-zero, the data pointer is set behind by one; otherwise, the data pointer is set forward by two.
7: Sets the instruction pointer to the data pointer.
When 0x00 or 0x30 (the character '0') is read at the instruction pointer, the program is ended, and so forth for the other commands: each command byte is recognized both as the raw value 0-7 and as the corresponding ASCII digit '0'-'7'. If the byte read at the instruction pointer is not recognized, it is simply ignored, and the program continues as usual.
The memory will expand as much as your system will allow. Every byte is 0x00, by default. Every cell in memory is a byte, and can only have 256 values: 0x00-0xFF.
Additionally, a preprocessor exists so that common code can be factored out.
#: This symbol is placed before each symbol that should not be replaced or removed (it can be placed before another hash). These symbols are not treated specially *inside* comments, particularly delimited ones.
@: Delimited macro. This is simply a preprocessor shortcut, and it can also be used for readability. The length of the delimiter is the sum of one and the number of predefined symbols following the symbol. So "@0000macro07macro macro" evaluates to " 07". If a '@' symbol appears within the body of a macro definition, it is treated like any other non-command symbol. (The preprocessor removes the macro definitions and replaces instances of them with their bodies.) Comments are removed by the preprocessor before macros are parsed. Macros are parsed in one pass entirely by the preprocessor before replacement actually takes place, so the position of the macro definitions relative to the rest of the source doesn't affect the functionality of the program, although the order of the macro definitions themselves does matter, since a macro can appear in the definition of another one that is defined later in the program.
8: This begins a delimited comment. The end delimiter can be elided at the end of a file.
9: This is the other type of comment. When this is read by the preprocessor, any whitespace preceding the '9' is removed, as well as the rest of the line but not the trailing newline; the newline may be absent (a comment on the last line of a file), in which case the rest of the file is removed by the preprocessor.
Incrementing the data pointer by two might seem arbitrary, but is necessary, because otherwise it would be impossible to set the data pointer ahead of the instruction pointer.
After macros are parsed by the preprocessor, all unknown characters are stripped out by the preprocessor *until* the special character sequence @@ is encountered. The @@ is removed by the preprocessor and then it isn't treated specially anymore, and then characters that would not be recognized by the instruction pointer are no longer stripped out.
Any file can be interpreted or compiled as cfipu source; but, of course, not all cfipu programs will run as intended. They can run indefinitely, or maliciously use up many resources.
-}
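{-
A worked micro-example, following the description above: the two-byte program
"10" first executes '1', printing the byte at the data pointer (still the
leading '1', so the character '1' is written to stdout), and then executes
'0', which ends the program.
-}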
{-
- Example program
-
- Take 2 single digit numbers, and print the difference also as a single digit. This program assumes that both digits are single digit numbers and that the result is also a single digit number.
---
@-5-
@+---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
@<3<
@>43>
@0n+++++++++++++++++++++++++++++++++++++++++++++++++n+
@0n-------------------------------------------------n-
9 @0n+n+
9 @0n+n+
8*
@0w0n+w0
@0w1n++w1
@0w2n+++w2
@0w3n++++w3
@0w<w3w<
@0w4n+++++w4
@0w5n++++++w5
@0w-w5w-
@0w6n+++++++w6
@0w7n++++++++w7
@0u0n-u0
@0u1n--u1
@0u2n---u2
@0u3n----u3
@0u<u3u<
@0u4n-----u4
@0u5n----=-u5
@0u-u5u-
@0u6n-------u6
@0u7n--------u7
*
@0w0w0
@0w1+w1
@0w2++w2
@0w3+++w3
@0w<w3w<
@0w4++++w4
@0w5+++++w5
@0w-w5w-
@0w6++++++w6
@0w7+++++++w7
@0u0u0
@0u1-u1
@0u2--u2
@0u3---u3
@0u<u3u<
@0u4----u4
@0u5---=-u5
@0u-u5u-
@0u6------u6
@0u7-------u7
@z>w<>w->w6>w4>w7<<<<7z 9 Set the value in the data pointer to zero. The 5 bytes after it must be zero; they will not be zero after execution (although nothing prevents them from being reset to zero; this function doesn't need to be called because the bytes will always be set to the same commands). Execution is returned to 6 bytes after the original value.
9@s>>w3>w3>w5>w4>w3>w5>w6>w4>w7<<<<<<<<7s 9 Subtract the current byte by the byte in the cell to the right. 9 bytes after the two cells must be zero initially and will be changed. Execution is returned to 11 bytes after the current cell.
@s>>w3>w3>w5>w4>w3>w5>w6>w4>w7<<<<<<<<7s 9 Subtract the current byte by the byte in the cell to the right. 9 bytes after the two cells must be zero initially and will be changed. Execution is returned to 11 bytes after the current cell.
@0uz>u<>u4>u6>u4>u7<<<<<uz 9 Set the data pointer to what it was set to when you called the original function. After this is finished executing, The data pointer will be set to what is was immediately before this function was called.
@0us>>u3>u3>u5>u4>u2>u4>u6>u4>u7<<<<<<<<us
9 Note that we have two 5's due to the comment at the beginning. This is unavoidable if we use '-' as a macro.
9 Allocate 64 bytes of data
<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<
2>2n-<
>>>>>>>>>>>
w<>w<>w<>w<>w<>w<>w<>w<>w<>w<>w<>w1>w0><<<<<<<<<<<<<
<<<<<<<<<<<
s
80000000000
-}
--- MAIN ---
{-# LANGUAGE ScopedTypeVariables, FlexibleContexts #-}
module Main
( main
, help
, execute
, preprocess
, isPredefinedSymbol
, Format
, setWidth
, factor
, PMacro
, PMName
, PMPattern
, Pattern
, macroPatterns
, macroPatternsMaxKeyLen
, pmacrosFull
, pmacros
, findIndexPrefix
, mlookupLen
, lookupLen
, flp
, maybeOutify
, foldrUpdate
, apply
, wrap
, module Data.Memory
) where
import Control.Exception
import Control.Monad.State hiding (get)
import Data.Char (isSpace)
import Data.Default
import Data.List (genericLength, genericReplicate, genericIndex, genericTake, genericDrop, genericSplitAt, findIndex, intersperse, isPrefixOf, sortBy, sort)
import qualified Data.Map as M
import Data.Memory
import qualified Data.Set as S
import System.Environment (getArgs, getProgName)
import System.IO (openFile, hGetContents, IOMode(..), hSetBuffering, BufferMode(..), stdin, stdout)
import System.IO.Error
main :: IO ()
main = do
args <- getArgs
progName <- getProgName
let argc = length args
if null args
then do
help progName
else do
let src = args !! 0
pre = args !! 1
fact = args !! 2
wdth = args !! 3
srcHandle <- openFile src ReadMode
source <- hGetContents srcHandle
source `seq` return ()
let format = flip execState def $ do
when (argc >= 4) $ do
setWidth $ read wdth
preprocessed = preprocess source
facpreprocessed = factor format preprocessed
when (argc >= 2) $ do
writeFile pre preprocessed
when (argc >= 3) $ do
writeFile fact facpreprocessed
let mem = initialMemory . stringToCells $ preprocessed
execute mem
help :: String -> IO ()
help progName = do
putStrLn $ "Usage: " ++ progName ++ " [IN source filename] (OUT preprocessed source filename) (OUT factored preprocessed source filename) (width of line wrapping for factoring)"
putStrLn $ "All written files will be overwritten without warning."
--- EXECUTION ---
execute :: Memory -> IO ()
execute mem = do
hSetBuffering stdin NoBuffering
hSetBuffering stdout NoBuffering
execute' mem
where execute' :: Memory -> IO ()
execute' m = let m' = moveInstructionPointerRight m
in case M.lookup (getInstruction m) instructions of
(Just instruction) -> instruction m' execute'
(Nothing) -> execute' m'
instructions :: M.Map Cell (Memory -> (Memory -> IO ()) -> IO ())
instructions = M.fromList . concat . (\ ~(is:iss) -> map (\ ~(k, v) -> (k - (charToCell '0'), v)) is : iss) . replicate 2 . map (\ ~(k, v) -> (charToCell k, v)) $
[ ('0', \_ _ -> do
return ())
, ('1', \m r -> do
putChar $ cellToChar $ getData m
r m)
, ('2', \m r -> do
c <- getChar
r $ setData (charToCell c) m
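          -- Note: the explicit ';' on the next line appears to rely on the
          -- layout parse-error rule to close the do block, so that 'catch'
          -- wraps the whole read action rather than the last statement.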
; `catch` get m r)
, ('3', \m r -> do
r $ moveDataPointerLeft m)
, ('4', \m r -> do
r $ moveDataPointerRight . moveDataPointerRight $ m)
, ('5', \m r -> do
r $ modifyData dec m)
, ('6', \m r -> do
let c = getData m
c' = cellToWord8 c
case c' of
0x00 -> r $ moveDataPointerRight . moveDataPointerRight $ m
_ -> r $ moveDataPointerLeft m)
, ('7', \m r -> do
r $ moveInstructionPointerToDataPointer m)
]
where dec :: Cell -> Cell
dec c = let c' = cellToWord8 c
in case c' of
0x00 -> 0xFF
x -> pred x
get m r e
| isEOFError e = r $ moveDataPointerLeft m
--- THE PREPROCESSOR ---
preprocess :: String -> String
preprocess = removeHashes . parseRecognizedCharacters . parseMacros . parseLineComments . parseDelimitedComments
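-- A small illustration of the pipeline, assuming only the behaviour described
-- in the header comment: the '9' line comment is dropped first, then
-- characters the interpreter would not recognize (letters, spaces, newlines)
-- are stripped:
--
-- > preprocess "9 a comment\n12" == "12"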
removeHashes :: String -> String
removeHashes [] = []
removeHashes ('#':x:xs) = x : removeHashes xs
removeHashes (x:xs) = x : removeHashes xs
parseRecognizedCharacters :: String -> String
parseRecognizedCharacters [] = []
parseRecognizedCharacters ('#':x:xs) = '#':x : parseRecognizedCharacters xs
parseRecognizedCharacters ('@':'@':xs) = xs
parseRecognizedCharacters (x:xs)
| isPredefinedSymbol x = x : parseRecognizedCharacters xs
| otherwise = parseRecognizedCharacters xs
parseLineComments :: String -> String
parseLineComments = concatMap line . lines
where line :: String -> String
line xs = let xs' = replace xs
replace :: String -> String
replace [] = []
replace ('#':_:xs'') = '#':'#' : replace xs''
replace (x'':xs'') = x'' : replace xs''
in case (findIndex isLineCommentSymbol xs') of
(Just i) -> let i' = b $ pred i
b this_i
| this_i < 0 = 0
| isSpace $ xs !! this_i = b $ pred this_i
| otherwise = succ this_i
in genericTake i' xs
(Nothing) -> xs ++ "\n"
isLineCommentSymbol :: Char -> Bool
isLineCommentSymbol '9' = True
isLineCommentSymbol _ = False
parseDelimitedComments :: String -> String
parseDelimitedComments = parse
where parse :: String -> String
ignore :: String -> String -> String
parse [] = []
parse ('#':x:xs) = '#':x : parse xs
parse ('8':xs) = let lenStr = takeWhile isPredefinedSymbol xs
len = integerLength lenStr
rest = genericDrop len xs
(former, latter) = genericSplitAt (succ len) rest
in ignore former latter
parse (x:xs) = x : parse xs
ignore _ [] = []
ignore delimiter a@(_:xs)
| genericTake (integerLength delimiter) a == delimiter = parse $ genericDrop (integerLength delimiter) a
| otherwise = ignore delimiter xs
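-- A small illustration of the delimiter handling, using the "8*" ... "*"
-- convention from the example program above: the comment body and both
-- delimiters are dropped, everything after the closing delimiter is kept:
--
-- > parseDelimitedComments "8* hidden *12" == "12"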
-- macros --
-- We need an *ordered* map
type Macros = [(PMName, PMPattern)]
parseMacros :: String -> String
parseMacros = execState $ do
m <- readMacros
m' <- processMacros m
applyMacros m'
-- Read macros and remove their definitions
readMacros :: State String Macros
readMacros = state $ step False []
where step :: Bool -> Macros -> String -> (Macros, String)
step _ ms [] = (ms, [])
step noIgnr ms ('#':x:xs) = let (m', s') = step noIgnr ms xs
in (m', '#':x : s')
step False ms ('@':'@':xs) = let (m', s') = step True ms xs
in (m', '@':'@' : s')
step noIgnr ms ('@':xs) = let lenStr = takeWhile isPredefinedSymbol xs
len = integerLength lenStr
rest = genericDrop len xs
(name, body) = genericSplitAt (succ len) rest
terminatorIndex = findIndexPrefix name body :: Integer
(former, latter) = genericSplitAt terminatorIndex body
macro = (name, former)
post = genericDrop (succ len) latter
in step noIgnr (macro : ms) post
step noIgnr ms (x:xs) = let (m', s') = step noIgnr ms xs
in (m', x : s')
-- Substitute macro definitions themselves, and sort the list of macros by length of key, descending, so that the macros with longer names will be tested first.
processMacros :: Macros -> State String Macros
processMacros ms = return . fst . foldrUpdate replace 0 $ ms
where len = integerLength ms
replace x (lastMacros, n) = let n' = succ n
replaceSingle (name, pattern) = (name, execState (applyMacros [x]) pattern)
in ((map replaceSingle $ genericTake (len - n') lastMacros) ++ (genericDrop (len - n') lastMacros), n')
applyMacros :: Macros -> State String ()
applyMacros [] = return ()
applyMacros m = modify step
where m' = sortBy sort' m
len = maximum . map (integerLength . fst) $ m'
sort' a b = (integerLength . fst $ b) `compare` (integerLength . fst $ a)
step [] = []
step ('#':x:xs) = '#':x : step xs
step a@(x:xs) = case lookupLen (genericTake len a) $ m' of
(Just (pattern, len')) -> pattern ++ step (genericDrop len' a)
(Nothing) -> x : step xs
isPredefinedSymbol :: Char -> Bool
isPredefinedSymbol '0' = True
isPredefinedSymbol '1' = True
isPredefinedSymbol '2' = True
isPredefinedSymbol '3' = True
isPredefinedSymbol '4' = True
isPredefinedSymbol '5' = True
isPredefinedSymbol '6' = True
isPredefinedSymbol '7' = True
isPredefinedSymbol '8' = True
isPredefinedSymbol '9' = True
isPredefinedSymbol '@' = True
isPredefinedSymbol '#' = True
isPredefinedSymbol _ = False
--- FACTORING ---
-- May not work correctly when the input is changed after being preprocessed
factor :: Format -> String -> String
factor fmt s = let s' = prefixHash s
(ms, s'') = apply' (integerLength macroPatterns) S.empty s'
apply' :: (Integral a) => a -> S.Set PMacro -> String -> (S.Set PMacro, String)
apply' 0 this_ms xs = (this_ms, xs)
apply' n this_ms xs = let (this_ms', xs') = r' this_ms xs
in apply' (pred n) this_ms' xs'
body = s''
nms = pmacros S.\\ ms
ms' = reverse . sort $ S.toList ms
nms' = reverse . sort $ S.toList nms
header = initl ++ foldr used [] ms' ++ inter ++ foldr unused [] nms' ++ append
used :: PMacro -> String -> String
used x acc = (++) acc $ case lookup x pmacrosFull of
(Just (name, pattern)) -> "@" ++ (genericReplicate (pred . integerLength $ name) '0') ++ name ++ pattern ++ name ++ "\n"
(Nothing) -> []
unused :: PMacro -> String -> String
unused = used
initl = ""
inter
| null nms' = ""
| otherwise = "\n80--\n\n"
append
| null nms' = ""
| otherwise = "--\n\n"
text = (++) header $ flip wrap body $ f_width fmt
in text
where r' :: S.Set PMacro -> String -> (S.Set PMacro, String)
r' ms [] = (ms, [])
r' ms a@(x:xs) = case lookupLen (genericTake macroPatternsMaxKeyLen a) macroPatterns of
(Just ((Pattern {p_macro = macro, p_name = name}), len)) -> let (ms', a') = r' (S.insert macro ms) $ genericDrop (len :: Integer) a
in (ms', name ++ a')
(Nothing) -> let (ms', xs') = r' ms $ xs
in (ms', x:xs')
prefixHash :: String -> String
prefixHash [] = []
prefixHash a@(x:xs) =
case lookupLen (genericTake (macroPatternsMaxKeyLen :: Integer) a) $ map snd pmacrosFull of
(Just (_, len)) -> ('#' : intersperse '#' (genericTake (len :: Integer) a)) ++ (prefixHash $ genericDrop (len :: Integer) a)
(Nothing) -> x : prefixHash xs
data PMacro = -- predefined macro enumeration
PM_minus |
PM_plus |
PM_lessthan |
PM_greaterthan |
PM_nminus |
PM_nplus deriving (Eq, Ord)
type PMName = String
type PMPattern = String
data Pattern = Pattern { p_macro :: PMacro
, p_name :: PMName
}
macroPatterns :: [(PMPattern, Pattern)]
macroPatterns = map (\ ~(pattern, (name, macro)) -> (pattern, Pattern {p_name = name, p_macro = macro})) $
[ ("5", ("-", PM_minus))
, (replicate 255 '-', ("+", PM_plus))
, ("3", ("<", PM_lessthan))
, ("43", (">", PM_greaterthan))
, (replicate 48 '-', ("n-", PM_nminus))
, (replicate 48 '+', ("n+", PM_nplus))
]
macroPatternsMaxKeyLen :: (Integral a) => a
macroPatternsMaxKeyLen = maximum . map genericLength . map fst $ macroPatterns
pmacrosFull :: [(PMacro, (PMName, PMPattern))]
pmacrosFull = map f macroPatterns
where f (pattern, (Pattern {p_macro = macro, p_name = name})) = (macro, (name, pattern))
pmacros :: S.Set PMacro
pmacros = S.fromList . map fst $ pmacrosFull
-- formatting --
data Format = Format { f_width :: Integer
}
instance Default Format where
def = Format { f_width = 80
}
setWidth :: Integer -> State Format ()
setWidth w = modify $ (\ ~fmt -> fmt{f_width = w})
--- HELPER FUNCTIONS ---
findIndexPrefix :: forall a b. (Eq a, Eq b, Integral a) => [b] -> [b] -> a
findIndexPrefix find = r' 0
where r' :: (Eq a, Eq b, Integral a) => a -> [b] -> a
r' i [] = i
r' i a@(_:xs)
| find `isPrefixOf` a = i
| otherwise = r' (succ i) xs
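-- A small illustration: the index of the first position where the prefix
-- matches, or the length of the searched list when it never matches:
--
-- > findIndexPrefix "cd" "abcdef" == 2
-- > findIndexPrefix "xy" "ab"     == 2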
mlookupLen :: (Eq k, Ord k, Integral b) => [k] -> M.Map [k] a -> Maybe (a, b)
mlookupLen k = lookupLen k . sortBy (\ ~(k1, _v1) ~(k2, _v2) -> integerLength k2 `compare` integerLength k1) . M.toList
lookupLen :: (Eq k, Integral b) => [k] -> [([k], a)] -> Maybe (a, b)
lookupLen [] _ = Nothing
lookupLen a@(_:_ks) xs = foldr (\ ~(k, v) acc -> if k `isPrefixOf` a then Just (v, genericLength k) `mplus` acc else Nothing `mplus` acc) mzero xs --`mplus` lookupLen ks xs
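-- A small illustration: the first key in the list that is a prefix of the
-- input wins, returned together with its length (callers such as 'mlookupLen'
-- arrange the keys longest first):
--
-- > lookupLen "abc" [("ab", 'x'), ("a", 'y')] == Just ('x', 2)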
flp :: (a, b) -> (b, a)
flp (a, b) = (b, a)
maybeOutify :: ((Maybe a), b) -> Maybe (a, b)
maybeOutify ((Just a), b) = Just (a, b)
maybeOutify ((Nothing), _) = Nothing
foldrUpdate :: (a -> ([a], b) -> ([a], b)) -> b -> [a] -> ([a], b)
foldrUpdate = r' (0 :: Integer)
where r' :: (Integral c) => c -> (a -> ([a], b) -> ([a], b)) -> b -> [a] -> ([a], b)
r' n f z xs = let (xs', acc) = r' (succ n) f z xs
in case () of _
| n >= genericLength xs -> (xs, z)
| otherwise -> f (genericIndex xs' n) (xs', acc)
-- Negative integers are not checked!
apply :: (Integral a) => a -> (b -> b) -> b -> b
apply 0 _f = id
apply n f = f . apply (pred n) f
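-- A small illustration: compose a function with itself n times:
--
-- > apply 3 succ 0 == 3
-- > apply 0 succ 7 == 7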
wrap :: forall a. (Integral a) => a -> String -> String
wrap w = r' 0
where r' :: (Integral a) => a -> String -> String
r' _ [] = ""
r' _ ('\n':xs) = '\n' : r' 0 xs
r' a s@(x:xs)
| not $ isPredefinedSymbol x = '@':'@':s
| a >= w = x:'\n' : r' 0 xs
| otherwise = x : (r' (succ a) xs)
-- | 'genericLength' specialized for 'Integer's.
integerLength :: [a] -> Integer
integerLength = genericLength
| bairyn/cfipu | src/Cfipu.hs | bsd-3-clause | 22,825 | 1 | 22 | 7,472 | 5,313 | 2,804 | 2,509 | 324 | 5 |
module Main where
import Codec.Picture
import Codec.Picture.Types (promoteImage, ColorConvertible(..))
import Control.Applicative ((<$>))
import Control.Monad (when)
import Data.List (isSuffixOf)
import System.Directory (doesFileExist, getDirectoryContents)
import System.Environment (getArgs)
import System.Exit (exitFailure)
import System.FilePath.Posix (takeBaseName, (</>))
-- |Compare the content of two directories. Only .png images are compared.
main :: IO ()
main = do
args <- getArgs
when (length args /= 2) printUsage
getDirectoryContents (args !! 0)
>>= dirDiff (args !! 0) (args !! 1)
>>= flip when exitFailure
where printUsage = putStrLn "usage: DiffImg DIRECTORY_A DIRECTORY_B" >> exitFailure
-- |Compare a list of images that have one exemplar in both directories.
dirDiff :: FilePath -> FilePath -> [FilePath] -> IO Bool
dirDiff dirA dirB files = or <$> mapM diff pngs
where diff file = fileDiff (dirA </> file) (dirB </> file)
pngs = [x | x <- files, ".png" `isSuffixOf` x]
-- |Compare two image files. See 'imageDiff' below.
fileDiff :: FilePath -> FilePath -> IO Bool
fileDiff fileA fileB = do
missing <- not <$> doesFileExist fileB
if missing
then printMissing >> return True
else do
a <- readPng fileA
b <- readPng fileB
let (hasDiff, result) = diff a b
write outputFile result
when hasDiff $ putStrLn diffMsg
return hasDiff
where diff (Right a) (Right b) = dynImageDiff a b
diff (Left a) _ = error a
diff _ (Left b) = error b
diffMsg = fileA ++ " and " ++ fileB ++ " differ!"
printMissing = putStrLn (fileB ++ " is missing!")
outputFile = (takeBaseName fileA) ++ ".diff.png"
write fname (Right png) = writePng fname png
write _ (Left err) = putStrLn $ diffMsg ++ " " ++ err
-- |Compare two 'DynamicImage'. See 'imageDiff' below.
dynImageDiff :: DynamicImage -> DynamicImage -> (Bool, Either String (Image PixelRGB8))
dynImageDiff (ImageRGB8 a) (ImageRGB8 b) = imageDiff (promoteImage a) (promoteImage b)
dynImageDiff (ImageRGB8 a) (ImageRGBA8 b) = imageDiff (promoteImage a) (promoteImage b)
dynImageDiff (ImageRGBA8 a) (ImageRGB8 b) = imageDiff (promoteImage a) (promoteImage b)
dynImageDiff (ImageRGBA8 a) (ImageRGBA8 b) = imageDiff (promoteImage a) (promoteImage b)
dynImageDiff (ImageRGB16 a) (ImageRGB16 b) = imageDiff (promoteImage a) (promoteImage b)
dynImageDiff (ImageRGB16 a) (ImageRGBA16 b) = imageDiff (promoteImage a) b
dynImageDiff (ImageRGBA16 a) (ImageRGB16 b) = imageDiff a (promoteImage b)
dynImageDiff (ImageRGBA16 a) (ImageRGBA16 b) = imageDiff a b
dynImageDiff _ _ = error "Only images with 8 or 16-bit/color RGB(A) are supported"
p8to16a :: Image PixelRGB8 -> Image PixelRGBA16
p8to16a = promoteImage
-- |Compare two images. The returned 'Bool' is True when the two images differ.
-- The returned 'Image' contains the differences. Differences are shown as white
-- pixels. Equal pixels are shown as black. Ignored areas are shown as green.
-- Areas that are 100% transparent in the first image are ignored.
imageDiff :: Image PixelRGBA16 -> Image PixelRGBA16 -> (Bool, Either String (Image PixelRGB8))
imageDiff a@(Image a_width a_height _) b@(Image b_width b_height _)
| a_width /= b_width = (True, Left "Images have different width.")
| a_height /= b_height = (True, Left "Images have different height.")
| otherwise = right $ generateFoldImage pixelDiff False a_width a_height
where pixelDiff acc x y =
if ignorePixel
then ign
else if pixelAt a x y == pixelAt b x y
then ok
else nok
where ignorePixel = pixelOpacity (pixelAt a x y) == 0
ok = (acc, PixelRGB8 0 0 0)
ign = (acc, PixelRGB8 0 150 0)
nok = (True, PixelRGB8 255 255 255)
right (c,d) = (c, Right d)
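-- An illustrative sketch, not part of the original file: comparing two
-- tiny generated images with 'imageDiff'. '_diffDemo' is a hypothetical
-- helper added purely for illustration and is not used by 'main'; it
-- assumes 'generateImage' and 'PixelRGBA16' are in scope via Codec.Picture.
_diffDemo :: (Bool, Either String (Image PixelRGB8))
_diffDemo = imageDiff imgA imgB
  where
    -- a 2x2 fully opaque black image
    imgA = generateImage (\_ _ -> PixelRGBA16 0 0 0 maxBound) 2 2
    -- same size, but the red channel varies with x, so one column differs
    imgB = generateImage (\x _ -> PixelRGBA16 (fromIntegral x) 0 0 maxBound) 2 2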
|
KaiHa/GuiTest
|
src/DiffImg.hs
|
bsd-3-clause
| 4,169 | 0 | 13 | 1,130 | 1,238 | 633 | 605 | 70 | 5 |
--- Copyright © 2010 Bart Massey
-- [This program is licensed under the "3-clause ('new') BSD License"]
-- Please see the file COPYING in this distribution for license information.
--- Create a phonetic code database optionally used by
--- Text.SpellingSuggest and in particular by "thimk"
import System.Console.ParseArgs
import Text.SpellingSuggest.PCDB
import Text.SpellingSuggest.Dictionary
data ArgIndex = ArgDict | ArgDB deriving (Eq, Ord, Show)
main :: IO ()
main = do
av <- parseArgsIO ArgsComplete argd
let dictPath = getArg av ArgDict
dict <- readDictionary dictPath
let dbPath = getArg av ArgDB
db <- createDB dict dbPath
closeDB db
where
argd = [ Arg { argIndex = ArgDB,
argName = Just "pcdb",
argAbbr = Just 'p',
argData = argDataOptional "db-path" ArgtypeString,
argDesc = "Database path" },
Arg { argIndex = ArgDict,
argName = Nothing,
argAbbr = Nothing,
argData = argDataOptional "path" ArgtypeString,
argDesc = "Dictionary file to index" } ]
|
gregwebs/haskell-spell-suggest
|
thimk-makedb.hs
|
bsd-3-clause
| 1,146 | 0 | 10 | 334 | 226 | 123 | 103 | 22 | 1 |
module Seed where
import Rumpus
start :: Start
start = do
myCodeHidden ==> True
setShape Sphere
setSize 0.2
setColor (V4 0.2 0.3 0.1 1)
setBody Physical
handIDs <- getHandIDs
myCollisionBegan ==> \hitID _ -> do
let notHand = hitID `notElem` handIDs
isHeld <- isBeingHeld
when (notHand && not isHeld) $ do
removeComponent myCollisionBegan
_treeID <- spawnChildInstance "Tree"
setRotation (V3 0 1 0) 0
setShape Cube
animateSizeTo (V3 0.4 0.1 0.4) 1
return ()
|
lukexi/rumpus
|
pristine/Intro/Seed.hs
|
bsd-3-clause
| 601 | 0 | 16 | 217 | 189 | 85 | 104 | 20 | 1 |
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE PackageImports #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE UnicodeSyntax #-}
{-|
[@ISO639-1@] hu
[@ISO639-2@] hun
[@ISO639-3@] hun
[@Native name@] magyar
[@English name@] Hungarian
-}
module Text.Numeral.Language.HUN.TestData (cardinals) where
--------------------------------------------------------------------------------
-- Imports
--------------------------------------------------------------------------------
import "base" Prelude ( Integral )
import "numerals" Text.Numeral.Grammar.Reified ( defaultInflection )
import "this" Text.Numeral.Test ( TestData )
--------------------------------------------------------------------------------
-- Test data
--------------------------------------------------------------------------------
cardinals ∷ (Integral i) ⇒ TestData i
cardinals =
[ ( "default"
, defaultInflection
, [ (0, "nulla")
, (1, "egy")
, (2, "kettö")
, (3, "három")
, (4, "négy")
, (5, "öt")
, (6, "hat")
, (7, "hét")
, (8, "nyolc")
, (9, "kilenc")
, (10, "tíz")
, (11, "tízenegy")
, (12, "tízenkettö")
, (13, "tízenhárom")
, (14, "tízennégy")
, (15, "tízenöt")
, (16, "tízenhat")
, (17, "tízenhét")
, (18, "tízennyolc")
, (19, "tízenkilenc")
, (20, "húsz")
, (21, "húszonegy")
, (22, "húszonkettö")
, (23, "húszonhárom")
, (24, "húszonnégy")
, (25, "húszonöt")
, (26, "húszonhat")
, (27, "húszonhét")
, (28, "húszonnyolc")
, (29, "húszonkilenc")
, (30, "harminc")
, (31, "harmincegy")
, (32, "harminckettö")
, (33, "harminchárom")
, (34, "harmincnégy")
, (35, "harmincöt")
, (36, "harminchat")
, (37, "harminchét")
, (38, "harmincnyolc")
, (39, "harminckilenc")
, (40, "negyven")
, (41, "negyvenegy")
, (42, "negyvenkettö")
, (43, "negyvenhárom")
, (44, "negyvennégy")
, (45, "negyvenöt")
, (46, "negyvenhat")
, (47, "negyvenhét")
, (48, "negyvennyolc")
, (49, "negyvenkilenc")
, (50, "ötven")
, (51, "ötvenegy")
, (52, "ötvenkettö")
, (53, "ötvenhárom")
, (54, "ötvennégy")
, (55, "ötvenöt")
, (56, "ötvenhat")
, (57, "ötvenhét")
, (58, "ötvennyolc")
, (59, "ötvenkilenc")
, (60, "hatvan")
, (61, "hatvanegy")
, (62, "hatvankettö")
, (63, "hatvanhárom")
, (64, "hatvannégy")
, (65, "hatvanöt")
, (66, "hatvanhat")
, (67, "hatvanhét")
, (68, "hatvannyolc")
, (69, "hatvankilenc")
, (70, "hetven")
, (71, "hetvenegy")
, (72, "hetvenkettö")
, (73, "hetvenhárom")
, (74, "hetvennégy")
, (75, "hetvenöt")
, (76, "hetvenhat")
, (77, "hetvenhét")
, (78, "hetvennyolc")
, (79, "hetvenkilenc")
, (80, "nyolcvan")
, (81, "nyolcvanegy")
, (82, "nyolcvankettö")
, (83, "nyolcvanhárom")
, (84, "nyolcvannégy")
, (85, "nyolcvanöt")
, (86, "nyolcvanhat")
, (87, "nyolcvanhét")
, (88, "nyolcvannyolc")
, (89, "nyolcvankilenc")
, (90, "kilencven")
, (91, "kilencvenegy")
, (92, "kilencvenkettö")
, (93, "kilencvenhárom")
, (94, "kilencvennégy")
, (95, "kilencvenöt")
, (96, "kilencvenhat")
, (97, "kilencvenhét")
, (98, "kilencvennyolc")
, (99, "kilencvenkilenc")
, (100, "száz")
]
)
]
|
telser/numerals
|
src-test/Text/Numeral/Language/HUN/TestData.hs
|
bsd-3-clause
| 3,749 | 0 | 8 | 1,067 | 1,003 | 671 | 332 | 114 | 1 |
{-# LANGUAGE OverloadedStrings #-}
module Pt.StateMachineTest where
import Pt.StateMachine
import Test.HUnit
import Data.ByteString.Lazy.Char8 as B
tests = "StateMachine" ~: test $ (($ runFSM) <$> tests') ++ (($ runFSMC1) <$> tests') ++ [c1]
where tests' = [ basics
, escapes
]
basics f = "Basics" ~: test [ "BEL" ~: [BEL] ~=? f "\a"
, "BS" ~: [BS] ~=? f "\b"
, "HT" ~: [HT] ~=? f "\t"
, "LF" ~: [LF] ~=? f "\n"
, "VT" ~: [VT] ~=? f "\v"
, "FF" ~: [FF] ~=? f "\f"
, "CR" ~: [CR] ~=? f "\r"
]
escapes f = "ESC+" ~: test [ "D -> IND" ~: [IND] ~=? f (pack ['\x1b','D'])
, "E -> NEL" ~: [NEL] ~=? f (pack ['\x1b','E'])
, "H -> HTS" ~: [HTS] ~=? f (pack ['\x1b','H'])
, "M -> RI" ~: [RI] ~=? f (pack ['\x1b','M'])
]
c1 = "C1 (S8C1T)" ~: test [ "IND" ~: [IND] ~=? runFSMC1 "\x84"
, "NEL" ~: [NEL] ~=? runFSMC1 "\x85"
, "HTS" ~: [HTS] ~=? runFSMC1 "\x88"
, "RI" ~: [RI] ~=? runFSMC1 "\x8d"
, "SS2" ~: [SS2 'a'] ~=? runFSMC1 (pack ['\x8e','a'])
, "SS3" ~: [SS3 'a'] ~=? runFSMC1 (pack ['\x8f','a'])
, "CSI SGR" ~: [SGR [Foreground (Truecolor 1 2 3)]] ~=? runFSMC1 (B.cons '\x9b' "38;2;1;2;3m")
]
|
mrak/ptui
|
test/Pt/StateMachineTest.hs
|
bsd-3-clause
| 1,623 | 0 | 15 | 746 | 522 | 281 | 241 | 26 | 1 |
module Lib.Once
( once
) where
import Control.Monad (join)
import Data.IORef
import Prelude.Compat
once :: IO (IO a -> IO (Maybe a))
once = do
doneRef <- newIORef False
pure $ \act -> join $ atomicModifyIORef doneRef $ \x -> (True, f x act)
where
f False = fmap Just
f True = const (pure Nothing)
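-- An illustrative sketch, not part of the original module: the wrapped
-- action runs only on the first call; later calls return 'Nothing'
-- without running it. '_onceExample' is a hypothetical name added
-- purely for illustration.
_onceExample :: IO ()
_onceExample = do
  runOnce <- once
  _ <- runOnce (putStrLn "runs")     -- Just ()
  _ <- runOnce (putStrLn "skipped")  -- Nothing
  pure ()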
|
buildsome/buildsome
|
src/Lib/Once.hs
|
gpl-2.0
| 322 | 0 | 12 | 81 | 142 | 73 | 69 | 11 | 2 |
{-# LANGUAGE PatternGuards, TypeSynonymInstances, FlexibleInstances #-}
module HSE.Bracket where
import HSE.Type
import HSE.Util
import Util
class Brackets a where
remParen :: a -> Maybe a -- remove one paren, or Nothing if there is no paren
addParen :: a -> a -- write out a paren
    -- | Does this item lexically require no bracketing ever,
    -- i.e. is it totally atomic?
isAtom :: a -> Bool
    -- | Does the child need brackets in this parent position?
    -- Err on the side of caution: True = don't know.
needBracket :: Int -> a -> a -> Bool
instance Brackets Exp_ where
remParen (Paren _ x) = Just x
remParen _ = Nothing
addParen = Paren an
isAtom x = case x of
Paren{} -> True
Tuple{} -> True
List{} -> True
LeftSection{} -> True
RightSection{} -> True
TupleSection{} -> True
RecConstr{} -> True
ListComp{} -> True
EnumFrom{} -> True
EnumFromTo{} -> True
EnumFromThen{} -> True
EnumFromThenTo{} -> True
_ -> isLexeme x
-- note: i is the index in children, not in the AST
needBracket i parent child
| isAtom child = False
| InfixApp{} <- parent, App{} <- child = False
| isSection parent, App{} <- child = False
| Let{} <- parent, App{} <- child = False
| ListComp{} <- parent = False
| List{} <- parent = False
| Tuple{} <- parent = False
| If{} <- parent, isAnyApp child = False
| App{} <- parent, i == 0, App{} <- child = False
| ExpTypeSig{} <- parent, i == 0, isApp child = False
| Paren{} <- parent = False
| isDotApp parent, isDotApp child, i == 1 = False
| RecConstr{} <- parent = False
| RecUpdate{} <- parent, i /= 0 = False
| Case{} <- parent, i /= 0 || isAnyApp child = False
| Lambda{} <- parent, i == length (universeBi parent :: [Pat_]) - 1 = False -- watch out for PViewPat
| Do{} <- parent = False
| otherwise = True
instance Brackets Type_ where
remParen (TyParen _ x) = Just x
remParen _ = Nothing
addParen = TyParen an
isAtom x = case x of
TyParen{} -> True
TyTuple{} -> True
TyList{} -> True
TyVar{} -> True
TyCon{} -> True
_ -> False
needBracket i parent child
| isAtom child = False
| TyFun{} <- parent, i == 1, TyFun{} <- child = False
| TyFun{} <- parent, TyApp{} <- child = False
| TyTuple{} <- parent = False
| TyList{} <- parent = False
| TyInfix{} <- parent, TyApp{} <- child = False
| TyParen{} <- parent = False
| otherwise = True
instance Brackets Pat_ where
remParen (PParen _ x) = Just x
remParen _ = Nothing
addParen = PParen an
isAtom x = case x of
PParen{} -> True
PTuple{} -> True
PList{} -> True
PVar{} -> True
PApp _ _ [] -> True
PWildCard{} -> True
_ -> False
needBracket i parent child
| isAtom child = False
| PTuple{} <- parent = False
| PList{} <- parent = False
| PInfixApp{} <- parent, PApp{} <- child = False
| PParen{} <- parent = False
| otherwise = True
-- | Add a Paren around something if it is not atomic
paren :: Exp_ -> Exp_
paren x = if isAtom x then x else addParen x
-- | Descend, and if something changes then add/remove brackets appropriately
descendBracket :: (Exp_ -> (Bool, Exp_)) -> Exp_ -> Exp_
descendBracket op x = descendIndex g x
where
g i y = if a then f i b else b
where (a,b) = op y
f i (Paren _ y) | not $ needBracket i x y = y
f i y | needBracket i x y = addParen y
f i y = y
transformBracket :: (Exp_ -> Maybe Exp_) -> Exp_ -> Exp_
transformBracket op = snd . g
where
g = f . descendBracket g
f x = maybe (False,x) ((,) True) (op x)
-- | Add/remove brackets as suggested by needBracket at 1-level of depth
rebracket1 :: Exp_ -> Exp_
rebracket1 = descendBracket (\x -> (True,x))
-- fold a list of expressions into an application, with any necessary brackets
appsBracket :: [Exp_] -> Exp_
appsBracket = foldl1 (\x -> rebracket1 . App an x)
|
bergmark/hlint
|
src/HSE/Bracket.hs
|
bsd-3-clause
| 4,244 | 0 | 13 | 1,370 | 1,568 | 775 | 793 | 103 | 4 |
{-# LANGUAGE DeriveGeneric #-}
-----------------------------------------------------------------------------
-- |
-- Module : Distribution.Simple.Compiler
-- Copyright : Isaac Jones 2003-2004
-- License : BSD3
--
-- Maintainer : [email protected]
-- Portability : portable
--
-- This should be a much more sophisticated abstraction than it is. Currently
-- it's just a bit of data about the compiler, like its flavour and name and
-- version. The reason it's just data is because currently it has to be in
-- 'Read' and 'Show' so it can be saved along with the 'LocalBuildInfo'. The
-- only interesting bit of info it contains is a mapping between language
-- extensions and compiler command line flags. This module also defines a
-- 'PackageDB' type which is used to refer to package databases. Most compilers
-- only know about a single global package collection but GHC has a global and
-- per-user one and it lets you create arbitrary other package databases. We do
-- not yet fully support this latter feature.
module Distribution.Simple.Compiler (
-- * Haskell implementations
module Distribution.Compiler,
Compiler(..),
showCompilerId, showCompilerIdWithAbi,
compilerFlavor, compilerVersion,
compilerCompatVersion,
compilerInfo,
-- * Support for package databases
PackageDB(..),
PackageDBStack,
registrationPackageDB,
absolutePackageDBPaths,
absolutePackageDBPath,
-- * Support for optimisation levels
OptimisationLevel(..),
flagToOptimisationLevel,
-- * Support for debug info levels
DebugInfoLevel(..),
flagToDebugInfoLevel,
-- * Support for language extensions
Flag,
languageToFlags,
unsupportedLanguages,
extensionsToFlags,
unsupportedExtensions,
parmakeSupported,
reexportedModulesSupported,
renamingPackageFlagsSupported,
unifiedIPIDRequired,
packageKeySupported,
unitIdSupported,
libraryDynDirSupported,
-- * Support for profiling detail levels
ProfDetailLevel(..),
knownProfDetailLevels,
flagToProfDetailLevel,
showProfDetailLevel,
) where
import Distribution.Compiler
import Distribution.Version
import Distribution.Text
import Language.Haskell.Extension
import Distribution.Simple.Utils
import Distribution.Compat.Binary
import Control.Monad (liftM)
import Data.List (nub)
import qualified Data.Map as M (Map, lookup)
import Data.Maybe (catMaybes, isNothing, listToMaybe)
import GHC.Generics (Generic)
import System.Directory (canonicalizePath)
data Compiler = Compiler {
compilerId :: CompilerId,
-- ^ Compiler flavour and version.
compilerAbiTag :: AbiTag,
-- ^ Tag for distinguishing incompatible ABI's on the same architecture/os.
compilerCompat :: [CompilerId],
-- ^ Other implementations that this compiler claims to be compatible with.
compilerLanguages :: [(Language, Flag)],
-- ^ Supported language standards.
compilerExtensions :: [(Extension, Flag)],
-- ^ Supported extensions.
compilerProperties :: M.Map String String
-- ^ A key-value map for properties not covered by the above fields.
}
deriving (Eq, Generic, Show, Read)
instance Binary Compiler
showCompilerId :: Compiler -> String
showCompilerId = display . compilerId
showCompilerIdWithAbi :: Compiler -> String
showCompilerIdWithAbi comp =
display (compilerId comp) ++
case compilerAbiTag comp of
NoAbiTag -> []
AbiTag xs -> '-':xs
compilerFlavor :: Compiler -> CompilerFlavor
compilerFlavor = (\(CompilerId f _) -> f) . compilerId
compilerVersion :: Compiler -> Version
compilerVersion = (\(CompilerId _ v) -> v) . compilerId
compilerCompatVersion :: CompilerFlavor -> Compiler -> Maybe Version
compilerCompatVersion flavor comp
| compilerFlavor comp == flavor = Just (compilerVersion comp)
| otherwise =
listToMaybe [ v | CompilerId fl v <- compilerCompat comp, fl == flavor ]
compilerInfo :: Compiler -> CompilerInfo
compilerInfo c = CompilerInfo (compilerId c)
(compilerAbiTag c)
(Just . compilerCompat $ c)
(Just . map fst . compilerLanguages $ c)
(Just . map fst . compilerExtensions $ c)
-- ------------------------------------------------------------
-- * Package databases
-- ------------------------------------------------------------
-- |Some compilers have a notion of a database of available packages.
-- For some there is just one global db of packages, while other compilers
-- support a per-user db or an arbitrary db specified at some location in
-- the file system. This can be used to build isolated environments of
-- packages, for example to build a collection of related packages
-- without installing them globally.
--
data PackageDB = GlobalPackageDB
| UserPackageDB
| SpecificPackageDB FilePath
deriving (Eq, Generic, Ord, Show, Read)
instance Binary PackageDB
-- | We typically get packages from several databases, and stack them
-- together. This type lets us be explicit about that stacking. For example
-- typical stacks include:
--
-- > [GlobalPackageDB]
-- > [GlobalPackageDB, UserPackageDB]
-- > [GlobalPackageDB, SpecificPackageDB "package.conf.inplace"]
--
-- Note that the 'GlobalPackageDB' is invariably at the bottom since it
-- contains the rts, base and other special compiler-specific packages.
--
-- We are not restricted to using just the above combinations. In particular
-- we can use several custom package dbs and the user package db together.
--
-- When it comes to writing, the topmost (last) package db is used.
--
type PackageDBStack = [PackageDB]
-- | Return the package db that we should register into. This is the package db at
-- the top of the stack.
--
registrationPackageDB :: PackageDBStack -> PackageDB
registrationPackageDB [] = error "internal error: empty package db set"
registrationPackageDB dbs = last dbs
-- | Make package paths absolute
absolutePackageDBPaths :: PackageDBStack -> IO PackageDBStack
absolutePackageDBPaths = mapM absolutePackageDBPath
absolutePackageDBPath :: PackageDB -> IO PackageDB
absolutePackageDBPath GlobalPackageDB = return GlobalPackageDB
absolutePackageDBPath UserPackageDB = return UserPackageDB
absolutePackageDBPath (SpecificPackageDB db) =
SpecificPackageDB `liftM` canonicalizePath db
-- ------------------------------------------------------------
-- * Optimisation levels
-- ------------------------------------------------------------
-- | Some compilers support optimising. Some have different levels.
-- For compilers that do not, the level is just capped to the level
-- they do support.
--
data OptimisationLevel = NoOptimisation
| NormalOptimisation
| MaximumOptimisation
deriving (Bounded, Enum, Eq, Generic, Read, Show)
instance Binary OptimisationLevel
flagToOptimisationLevel :: Maybe String -> OptimisationLevel
flagToOptimisationLevel Nothing = NormalOptimisation
flagToOptimisationLevel (Just s) = case reads s of
[(i, "")]
| i >= fromEnum (minBound :: OptimisationLevel)
&& i <= fromEnum (maxBound :: OptimisationLevel)
-> toEnum i
| otherwise -> error $ "Bad optimisation level: " ++ show i
++ ". Valid values are 0..2"
_ -> error $ "Can't parse optimisation level " ++ s
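-- An illustrative sketch, not part of the original module, of how the
-- parser above maps common flag values. '_optLevelExamples' is a
-- hypothetical name added purely for illustration.
_optLevelExamples :: [OptimisationLevel]
_optLevelExamples =
  [ flagToOptimisationLevel Nothing      -- NormalOptimisation
  , flagToOptimisationLevel (Just "0")   -- NoOptimisation
  , flagToOptimisationLevel (Just "2")   -- MaximumOptimisation
  ]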
-- ------------------------------------------------------------
-- * Debug info levels
-- ------------------------------------------------------------
-- | Some compilers support emitting debug info. Some have different
-- levels. For compilers that do not, the level is just capped to the
-- level they do support.
--
data DebugInfoLevel = NoDebugInfo
| MinimalDebugInfo
| NormalDebugInfo
| MaximalDebugInfo
deriving (Bounded, Enum, Eq, Generic, Read, Show)
instance Binary DebugInfoLevel
flagToDebugInfoLevel :: Maybe String -> DebugInfoLevel
flagToDebugInfoLevel Nothing = NormalDebugInfo
flagToDebugInfoLevel (Just s) = case reads s of
[(i, "")]
| i >= fromEnum (minBound :: DebugInfoLevel)
&& i <= fromEnum (maxBound :: DebugInfoLevel)
-> toEnum i
| otherwise -> error $ "Bad debug info level: " ++ show i
++ ". Valid values are 0..3"
_ -> error $ "Can't parse debug info level " ++ s
-- ------------------------------------------------------------
-- * Languages and Extensions
-- ------------------------------------------------------------
unsupportedLanguages :: Compiler -> [Language] -> [Language]
unsupportedLanguages comp langs =
[ lang | lang <- langs
, isNothing (languageToFlag comp lang) ]
languageToFlags :: Compiler -> Maybe Language -> [Flag]
languageToFlags comp = filter (not . null)
. catMaybes . map (languageToFlag comp)
. maybe [Haskell98] (\x->[x])
languageToFlag :: Compiler -> Language -> Maybe Flag
languageToFlag comp ext = lookup ext (compilerLanguages comp)
-- |For the given compiler, return the extensions it does not support.
unsupportedExtensions :: Compiler -> [Extension] -> [Extension]
unsupportedExtensions comp exts =
[ ext | ext <- exts
, isNothing (extensionToFlag comp ext) ]
type Flag = String
-- |For the given compiler, return the flags for the supported extensions.
extensionsToFlags :: Compiler -> [Extension] -> [Flag]
extensionsToFlags comp = nub . filter (not . null)
. catMaybes . map (extensionToFlag comp)
extensionToFlag :: Compiler -> Extension -> Maybe Flag
extensionToFlag comp ext = lookup ext (compilerExtensions comp)
-- | Does this compiler support parallel --make mode?
parmakeSupported :: Compiler -> Bool
parmakeSupported = ghcSupported "Support parallel --make"
-- | Does this compiler support reexported-modules?
reexportedModulesSupported :: Compiler -> Bool
reexportedModulesSupported = ghcSupported "Support reexported-modules"
-- | Does this compiler support thinning/renaming on package flags?
renamingPackageFlagsSupported :: Compiler -> Bool
renamingPackageFlagsSupported = ghcSupported "Support thinning and renaming package flags"
-- | Does this compiler have unified IPIDs (so no package keys)
unifiedIPIDRequired :: Compiler -> Bool
unifiedIPIDRequired = ghcSupported "Requires unified installed package IDs"
-- | Does this compiler support package keys?
packageKeySupported :: Compiler -> Bool
packageKeySupported = ghcSupported "Uses package keys"
-- | Does this compiler support unit IDs?
unitIdSupported :: Compiler -> Bool
unitIdSupported = ghcSupported "Uses unit IDs"
-- | Does this compiler support a package database entry with:
-- "dynamic-library-dirs"?
libraryDynDirSupported :: Compiler -> Bool
libraryDynDirSupported comp = case compilerFlavor comp of
GHC -> compilerVersion comp >= Version [8,0,1,20161021] []
_ -> False
-- | Utility function for GHC only features
ghcSupported :: String -> Compiler -> Bool
ghcSupported key comp =
case compilerFlavor comp of
GHC -> checkProp
GHCJS -> checkProp
_ -> False
where checkProp =
case M.lookup key (compilerProperties comp) of
Just "YES" -> True
_ -> False
-- ------------------------------------------------------------
-- * Profiling detail level
-- ------------------------------------------------------------
-- | Some compilers (notably GHC) support profiling and can instrument
-- programs so the system can account costs to different functions. There are
-- different levels of detail that can be used for this accounting.
-- For compilers that do not support this notion or the particular detail
-- levels, this is either ignored or just capped to some similar level
-- they do support.
--
data ProfDetailLevel = ProfDetailNone
| ProfDetailDefault
| ProfDetailExportedFunctions
| ProfDetailToplevelFunctions
| ProfDetailAllFunctions
| ProfDetailOther String
deriving (Eq, Generic, Read, Show)
instance Binary ProfDetailLevel
flagToProfDetailLevel :: String -> ProfDetailLevel
flagToProfDetailLevel "" = ProfDetailDefault
flagToProfDetailLevel s =
case lookup (lowercase s)
[ (name, value)
| (primary, aliases, value) <- knownProfDetailLevels
, name <- primary : aliases ]
of Just value -> value
Nothing -> ProfDetailOther s
knownProfDetailLevels :: [(String, [String], ProfDetailLevel)]
knownProfDetailLevels =
[ ("default", [], ProfDetailDefault)
, ("none", [], ProfDetailNone)
, ("exported-functions", ["exported"], ProfDetailExportedFunctions)
, ("toplevel-functions", ["toplevel", "top"], ProfDetailToplevelFunctions)
, ("all-functions", ["all"], ProfDetailAllFunctions)
]
showProfDetailLevel :: ProfDetailLevel -> String
showProfDetailLevel dl = case dl of
ProfDetailNone -> "none"
ProfDetailDefault -> "default"
ProfDetailExportedFunctions -> "exported-functions"
ProfDetailToplevelFunctions -> "toplevel-functions"
ProfDetailAllFunctions -> "all-functions"
ProfDetailOther other -> other
|
tolysz/prepare-ghcjs
|
spec-lts8/cabal/Cabal/Distribution/Simple/Compiler.hs
|
bsd-3-clause
| 13,675 | 0 | 15 | 3,066 | 2,164 | 1,214 | 950 | 202 | 6 |
module B1.Program.Chart.GraphUtils
( colorLineStripPoint
, lineStripPoint
, getPriceRange
, getXValues
, getY
, heightPercentage
) where
import Graphics.Rendering.OpenGL
import B1.Data.Price
import B1.Data.Technicals.StockData
import B1.Graphics.Rendering.OpenGL.Box
import B1.Graphics.Rendering.OpenGL.Point
import B1.Program.Chart.Colors
colorLineStripPoint :: Box -> Color3 GLfloat -> [GLfloat] -> Int -> Int
-> [GLfloat]
colorLineStripPoint bounds color heightPercentages size index =
let point = lineStripPoint bounds heightPercentages size index
colorList = color3ToList color
in if null point
then []
else point ++ colorList
lineStripPoint :: Box -> [GLfloat] -> Int -> Int -> [GLfloat]
lineStripPoint bounds heightPercentages size index
| numSegments == 0 = []
| null heightPercentages = []
| index >= size = []
| index >= length heightPercentages = []
| otherwise = [x, y]
where
numSegments = if size > 1 then size - 1 else 0
(totalWidth, totalHeight) = boxSize bounds
segmentWidth = totalWidth / realToFrac numSegments
x = boxRight bounds - realToFrac index * segmentWidth
percentage = heightPercentages !! index
y = boxBottom bounds + realToFrac percentage * totalHeight
heightPercentage :: (Float, Float) -> Float -> Float
heightPercentage (minimum, maximum) value = percentage
where
difference = value - minimum
totalRange = maximum - minimum
percentage = difference / totalRange
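-- An illustrative sketch, not part of the original module: 15 lies
-- halfway between 10 and 20, so the percentage is 0.5.
-- '_heightPercentageExample' is a hypothetical name added purely for
-- illustration.
_heightPercentageExample :: Float
_heightPercentageExample = heightPercentage (10, 20) 15  -- == 0.5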
getXValues :: Box -> Int -> Int -> (GLfloat, GLfloat, GLfloat)
getXValues bounds numElements index = (leftX, centerX, rightX)
where
totalWidth = boxWidth bounds
barWidth = realToFrac totalWidth / realToFrac numElements
halfBarWidth = barWidth / 2
centerX = boxRight bounds - halfBarWidth - realToFrac index * barWidth
leftX = centerX - halfBarWidth
rightX = centerX + halfBarWidth
getY :: Box -> (Float, Float) -> Float -> GLfloat
getY bounds (minPrice, maxPrice) value = y
where
range = value - minPrice
totalRange = maxPrice - minPrice
totalHeight = boxHeight bounds
heightPercentage = range / totalRange
height = totalHeight * realToFrac heightPercentage
y = boxBottom bounds + height
getPriceRange :: StockPriceData -> (Float, Float)
getPriceRange priceData
| null allPrices = (0, 0)
| otherwise = (adjustedMinPrice, adjustedMaxPrice)
where
takeElements = take $ numDailyElements priceData
allPrices = concat $ map takeElements
[ map high $ prices priceData
, map low $ prices priceData
, movingAverage25 priceData
, movingAverage50 priceData
, movingAverage200 priceData
]
minPrice = minimum allPrices
maxPrice = maximum allPrices
extra = (maxPrice - minPrice) * 0.05
adjustedMinPrice = minPrice - extra
adjustedMaxPrice = maxPrice + extra
|
madjestic/b1
|
src/B1/Program/Chart/GraphUtils.hs
|
bsd-3-clause
| 2,876 | 0 | 11 | 616 | 840 | 449 | 391 | 71 | 2 |
{-# LANGUAGE CPP #-}
{-# LANGUAGE NoMonomorphismRestriction #-}
{-# LANGUAGE TypeSynonymInstances #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE TupleSections #-}
{-# LANGUAGE ScopedTypeVariables #-}
module Language.Haskell.Liquid.GhcInterface (
-- * extract all information needed for verification
getGhcInfo
) where
import IdInfo
import InstEnv
import Bag (bagToList)
import ErrUtils
import GHC hiding (Target, desugarModule)
import DriverPhases (Phase(..))
import DriverPipeline (compileFile)
import Text.PrettyPrint.HughesPJ
import HscTypes hiding (Target)
import CoreSyn
import Class
import Var
import CoreMonad (liftIO)
import DataCon
import qualified Control.Exception as Ex
import GHC.Paths (libdir)
import System.FilePath ( replaceExtension, normalise)
import DynFlags
import Control.Monad (filterM, foldM, when, forM, forM_, liftM)
import Control.Applicative hiding (empty)
import Data.Monoid hiding ((<>))
import Data.List (find, nub)
import Data.Maybe (catMaybes, maybeToList)
import qualified Data.HashSet as S
import System.Console.CmdArgs.Verbosity (whenLoud)
import System.Directory (removeFile, createDirectory, doesFileExist)
import Language.Fixpoint.Types hiding (Result, Expr)
import Language.Fixpoint.Misc
import Language.Haskell.Liquid.Types
import Language.Haskell.Liquid.Errors
import Language.Haskell.Liquid.ANFTransform
import Language.Haskell.Liquid.Bare
import Language.Haskell.Liquid.GhcMisc
import Language.Haskell.Liquid.Misc
import Language.Haskell.Liquid.PrettyPrint
import Language.Haskell.Liquid.Visitors
import Language.Haskell.Liquid.CmdLine (withCabal, withPragmas)
import Language.Haskell.Liquid.Parse
import qualified Language.Haskell.Liquid.Measure as Ms
import Language.Fixpoint.Names
import Language.Fixpoint.Files
--------------------------------------------------------------------
getGhcInfo :: Config -> FilePath -> IO (Either ErrorResult GhcInfo)
--------------------------------------------------------------------
getGhcInfo cfg target = (Right <$> getGhcInfo' cfg target)
`Ex.catch` (\(e :: SourceError) -> handle e)
`Ex.catch` (\(e :: Error) -> handle e)
`Ex.catch` (\(e :: [Error]) -> handle e)
where
handle = return . Left . result
getGhcInfo' cfg0 target
= runGhc (Just libdir) $ do
liftIO $ cleanFiles target
addTarget =<< guessTarget target Nothing
(name,tgtSpec) <- liftIO $ parseSpec target
cfg <- liftIO $ withPragmas cfg0 target $ Ms.pragmas tgtSpec
cfg <- liftIO $ withCabal cfg
let paths = idirs cfg
updateDynFlags cfg
liftIO $ whenLoud $ putStrLn ("paths = " ++ show paths)
let name' = ModName Target (getModName name)
impNames <- allDepNames <$> depanal [] False
impSpecs <- getSpecs (real cfg) (totality cfg) target paths impNames [Spec, Hs, LHs]
compileCFiles =<< liftIO (foldM (\c (f,_,s) -> withPragmas c f (Ms.pragmas s)) cfg impSpecs)
impSpecs' <- forM impSpecs $ \(f,n,s) -> do
when (not $ isSpecImport n) $
addTarget =<< guessTarget f Nothing
return (n,s)
load LoadAllTargets
modguts <- getGhcModGuts1 target
hscEnv <- getSession
coreBinds <- liftIO $ anormalize (not $ nocaseexpand cfg) hscEnv modguts
let datacons = [ dataConWorkId dc
| tc <- mgi_tcs modguts
, dc <- tyConDataCons tc
]
let impVs = importVars coreBinds ++ classCons (mgi_cls_inst modguts)
let defVs = definedVars coreBinds
let useVs = readVars coreBinds
let letVs = letVars coreBinds
let derVs = derivedVars coreBinds $ fmap (fmap is_dfun) $ mgi_cls_inst modguts
logicmap <- liftIO makeLogicMap
(spec, imps, incs) <- moduleSpec cfg coreBinds (impVs ++ defVs) letVs name' modguts tgtSpec logicmap impSpecs'
liftIO $ whenLoud $ putStrLn $ "Module Imports: " ++ show imps
hqualFiles <- moduleHquals modguts paths target imps incs
return $ GI hscEnv coreBinds derVs impVs (letVs ++ datacons) useVs hqualFiles imps incs spec
makeLogicMap
= do lg <- getCoreToLogicPath
lspec <- readFile lg
return $ parseSymbolToLogic lg lspec
classCons :: Maybe [ClsInst] -> [Id]
classCons Nothing = []
classCons (Just cs) = concatMap (dataConImplicitIds . head . tyConDataCons . classTyCon . is_cls) cs
derivedVars :: CoreProgram -> Maybe [DFunId] -> [Id]
derivedVars cbs (Just fds) = concatMap (derivedVs cbs) fds
derivedVars _ Nothing = []
derivedVs :: CoreProgram -> DFunId -> [Id]
derivedVs cbs fd = concatMap bindersOf cbf ++ deps
where cbf = filter f cbs
f (NonRec x _) = eqFd x
f (Rec xes ) = any eqFd (fst <$> xes)
eqFd x = varName x == varName fd
deps :: [Id]
deps = concatMap dep $ (unfoldingInfo . idInfo <$> concatMap bindersOf cbf)
dep (DFunUnfolding _ _ e) = concatMap grapDep e
dep (CoreUnfolding {uf_tmpl = e}) = grapDep e
dep _ = []
grapDep :: CoreExpr -> [Id]
grapDep e = freeVars S.empty e
updateDynFlags cfg
= do df <- getSessionDynFlags
let df' = df { importPaths = idirs cfg ++ importPaths df
, libraryPaths = idirs cfg ++ libraryPaths df
, includePaths = idirs cfg ++ includePaths df
, profAuto = ProfAutoCalls
, ghcLink = LinkInMemory
--FIXME: this *should* be HscNothing, but that prevents us from
-- looking up *unexported* names in another source module..
, hscTarget = HscInterpreted -- HscNothing
, ghcMode = CompManager
-- prevent GHC from printing anything
, log_action = \_ _ _ _ _ -> return ()
-- , verbosity = 3
} `xopt_set` Opt_MagicHash
-- `gopt_set` Opt_Hpc
`gopt_set` Opt_ImplicitImportQualified
`gopt_set` Opt_PIC
#if __GLASGOW_HASKELL__ >= 710
`gopt_set` Opt_Debug
#endif
(df'',_,_) <- parseDynamicFlags df' (map noLoc $ ghcOptions cfg)
setSessionDynFlags $ df'' -- {profAuto = ProfAutoAll}
compileCFiles cfg
= do df <- getSessionDynFlags
setSessionDynFlags $ df { includePaths = nub $ idirs cfg ++ includePaths df
, importPaths = nub $ idirs cfg ++ importPaths df
, libraryPaths = nub $ idirs cfg ++ libraryPaths df }
hsc <- getSession
os <- mapM (\x -> liftIO $ compileFile hsc StopLn (x,Nothing)) (nub $ cFiles cfg)
df <- getSessionDynFlags
setSessionDynFlags $ df { ldInputs = map (FileOption "") os ++ ldInputs df }
mgi_namestring = moduleNameString . moduleName . mgi_module
importVars = freeVars S.empty
definedVars = concatMap defs
where
defs (NonRec x _) = [x]
defs (Rec xes) = map fst xes
------------------------------------------------------------------
-- | Extracting CoreBindings From File ---------------------------
------------------------------------------------------------------
getGhcModGuts1 :: FilePath -> Ghc MGIModGuts
getGhcModGuts1 fn = do
modGraph <- getModuleGraph
case find ((== fn) . msHsFilePath) modGraph of
Just modSummary -> do
-- mod_guts <- modSummaryModGuts modSummary
mod_p <- parseModule modSummary
mod_guts <- coreModule <$> (desugarModule =<< typecheckModule (ignoreInline mod_p))
let deriv = getDerivedDictionaries mod_guts
return $! (miModGuts (Just deriv) mod_guts)
Nothing -> exitWithPanic "Ghc Interface: Unable to get GhcModGuts"
getDerivedDictionaries cm = instEnvElts $ mg_inst_env cm
cleanFiles :: FilePath -> IO ()
cleanFiles fn
= do forM_ bins (tryIgnore "delete binaries" . removeFileIfExists)
tryIgnore "create temp directory" $ createDirectory dir
where
bins = replaceExtension fn <$> ["hi", "o"]
dir = tempDirectory fn
removeFileIfExists f = doesFileExist f >>= (`when` removeFile f)
--------------------------------------------------------------------------------
-- | Desugaring (Taken from GHC, modified to hold onto Loc in Ticks) -----------
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
-- | Extracting Qualifiers -----------------------------------------------------
--------------------------------------------------------------------------------
moduleHquals mg paths target imps incs
= do hqs <- specIncludes Hquals paths incs
hqs' <- moduleImports [Hquals] paths (mgi_namestring mg : imps)
hqs'' <- liftIO $ filterM doesFileExist [extFileName Hquals target]
let rv = sortNub $ hqs'' ++ hqs ++ (snd <$> hqs')
liftIO $ whenLoud $ putStrLn $ "Reading Qualifiers From: " ++ show rv
return rv
--------------------------------------------------------------------------------
-- | Extracting Specifications (Measures + Assumptions) ------------------------
--------------------------------------------------------------------------------
moduleSpec cfg cbs vars defVars target mg tgtSpec logicmap impSpecs
= do addImports impSpecs
addContext $ IIModule $ moduleName $ mgi_module mg
env <- getSession
let specs = (target,tgtSpec):impSpecs
let imps = sortNub $ impNames ++ [ symbolString x
| (_,spec) <- specs
, x <- Ms.imports spec
]
ghcSpec <- liftIO $ makeGhcSpec cfg target cbs vars defVars exports env logicmap specs
return (ghcSpec, imps, Ms.includes tgtSpec)
where
exports = mgi_exports mg
impNames = map (getModString.fst) impSpecs
addImports = mapM (addContext . IIDecl . qualImportDecl . getModName . fst)
allDepNames = concatMap (map declNameString . ms_textual_imps)
declNameString = moduleNameString . unLoc . ideclName . unLoc
patErrorName = "PatErr"
realSpecName = "Real"
notRealSpecName = "NotReal"
getSpecs rflag tflag target paths names exts
= do fs' <- sortNub <$> moduleImports exts paths names
patSpec <- getPatSpec paths tflag
rlSpec <- getRealSpec paths rflag
let fs = patSpec ++ rlSpec ++ fs'
liftIO $ whenLoud $ putStrLn ("getSpecs: " ++ show fs)
transParseSpecs exts paths (S.singleton target) mempty (map snd fs)
getPatSpec paths totalitycheck
| totalitycheck
= (map (patErrorName, )) . maybeToList <$> moduleFile paths patErrorName Spec
| otherwise
= return []
getRealSpec paths freal
| freal
= (map (realSpecName, )) . maybeToList <$> moduleFile paths realSpecName Spec
| otherwise
= (map (notRealSpecName, )) . maybeToList <$> moduleFile paths notRealSpecName Spec
transParseSpecs _ _ _ specs []
= return specs
transParseSpecs exts paths seenFiles specs newFiles
= do newSpecs <- liftIO $ mapM (\f -> addFst3 f <$> parseSpec f) newFiles
impFiles <- moduleImports exts paths $ specsImports newSpecs
let seenFiles' = seenFiles `S.union` (S.fromList newFiles)
let specs' = specs ++ map (third noTerm) newSpecs
let newFiles' = [f | (_,f) <- impFiles, not (f `S.member` seenFiles')]
transParseSpecs exts paths seenFiles' specs' newFiles'
where
specsImports ss = nub $ concatMap (map symbolString . Ms.imports . thd3) ss
noTerm spec = spec { Ms.decr=mempty, Ms.lazy=mempty, Ms.termexprs=mempty }
third f (a,b,c) = (a,b,f c)
parseSpec :: FilePath -> IO (ModName, Ms.BareSpec)
parseSpec file
= do whenLoud $ putStrLn $ "parseSpec: " ++ file
either Ex.throw return . specParser file =<< readFile file
specParser file str
| isExtFile Spec file = specSpecificationP file str
| isExtFile Hs file = hsSpecificationP file str
| isExtFile LHs file = lhsSpecificationP file str
| otherwise = exitWithPanic $ "SpecParser: Cannot Parse File " ++ file
moduleImports :: GhcMonad m => [Ext] -> [FilePath] -> [String] -> m [(String, FilePath)]
moduleImports exts paths names
= liftM concat $ forM names $ \name -> do
map (name,) . catMaybes <$> mapM (moduleFile paths name) exts
moduleFile :: GhcMonad m => [FilePath] -> String -> Ext -> m (Maybe FilePath)
moduleFile paths name ext
| ext `elem` [Hs, LHs]
= do mg <- getModuleGraph
case find ((==name) . moduleNameString . ms_mod_name) mg of
Nothing -> liftIO $ getFileInDirs (extModuleName name ext) paths
Just ms -> return $ normalise <$> ml_hs_file (ms_location ms)
| otherwise
= liftIO $ getFileInDirs (extModuleName name ext) paths
specIncludes :: GhcMonad m => Ext -> [FilePath] -> [FilePath] -> m [FilePath]
specIncludes ext paths reqs
= do let libFile = extFileNameR ext $ symbolString preludeName
let incFiles = catMaybes $ reqFile ext <$> reqs
liftIO $ forM (libFile : incFiles) $ \f -> do
mfile <- getFileInDirs f paths
case mfile of
Just file -> return file
Nothing -> errorstar $ "cannot find " ++ f ++ " in " ++ show paths
reqFile ext s
| isExtFile ext s
= Just s
| otherwise
= Nothing
instance PPrint GhcSpec where
pprint spec = (text "******* Target Variables ********************")
$$ (pprint $ tgtVars spec)
$$ (text "******* Type Signatures *********************")
$$ (pprintLongList $ tySigs spec)
$$ (text "******* Assumed Type Signatures *************")
$$ (pprintLongList $ asmSigs spec)
$$ (text "******* DataCon Specifications (Measure) ****")
$$ (pprintLongList $ ctors spec)
$$ (text "******* Measure Specifications **************")
$$ (pprintLongList $ meas spec)
instance PPrint GhcInfo where
pprint info = (text "*************** Imports *********************")
$+$ (intersperse comma $ text <$> imports info)
$+$ (text "*************** Includes ********************")
$+$ (intersperse comma $ text <$> includes info)
$+$ (text "*************** Imported Variables **********")
$+$ (pprDoc $ impVars info)
$+$ (text "*************** Defined Variables ***********")
$+$ (pprDoc $ defVars info)
$+$ (text "*************** Specification ***************")
$+$ (pprint $ spec info)
$+$ (text "*************** Core Bindings ***************")
$+$ (pprint $ cbs info)
instance Show GhcInfo where
show = showpp
instance PPrint [CoreBind] where
pprint = pprDoc . tidyCBs
instance PPrint TargetVars where
pprint AllVars = text "All Variables"
pprint (Only vs) = text "Only Variables: " <+> pprint vs
------------------------------------------------------------------------
-- Dealing With Errors -------------------------------------------------
------------------------------------------------------------------------
-- | Convert a GHC error into one of ours
instance Result SourceError where
result = (`Crash` "Invalid Source")
. concatMap errMsgErrors
. bagToList
. srcErrorMessages
errMsgErrors e = [ ErrGhc (errMsgSpan e) (pprint e)]
|
Kyly/liquidhaskell
|
src/Language/Haskell/Liquid/GhcInterface.hs
|
bsd-3-clause
| 15,953 | 0 | 20 | 4,262 | 4,323 | 2,203 | 2,120 | 287 | 4 |
{-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE CPP #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE RankNTypes #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
-- | Defines the core functionality of this package. This package is
-- distinguished from Yesod.Persist in that the latter additionally exports the
-- persistent modules themselves.
module Yesod.Persist.Core
( YesodPersist (..)
, defaultRunDB
, YesodPersistRunner (..)
, defaultGetDBRunner
, DBRunner (..)
, runDBSource
, respondSourceDB
, YesodDB
, get404
, getBy404
, insert400
, insert400_
) where
import Database.Persist
import Control.Monad.Trans.Reader (ReaderT, runReaderT)
import Yesod.Core
import Data.Conduit
import Blaze.ByteString.Builder (Builder)
import Data.Pool
import Control.Monad.Trans.Resource
import Control.Exception (throwIO)
import Yesod.Core.Types (HandlerContents (HCError))
import qualified Database.Persist.Sql as SQL
unSqlPersistT :: a -> a
unSqlPersistT = id
type YesodDB site = ReaderT (YesodPersistBackend site) (HandlerFor site)
class Monad (YesodDB site) => YesodPersist site where
type YesodPersistBackend site
runDB :: YesodDB site a -> HandlerFor site a
-- | Helper for creating 'runDB'.
--
-- Since 1.2.0
defaultRunDB :: PersistConfig c
=> (site -> c)
-> (site -> PersistConfigPool c)
-> PersistConfigBackend c (HandlerFor site) a
-> HandlerFor site a
defaultRunDB getConfig getPool f = do
master <- getYesod
Database.Persist.runPool
(getConfig master)
f
(getPool master)
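-- An illustrative sketch, not part of the original module: a typical
-- 'YesodPersist' instance built with 'defaultRunDB'. @App@,
-- @appDatabaseConf@, and @appConnPool@ are hypothetical names from a
-- scaffolded site, used purely for illustration.
--
-- > instance YesodPersist App where
-- >     type YesodPersistBackend App = SqlBackend
-- >     runDB = defaultRunDB appDatabaseConf appConnPool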
-- |
--
-- Since 1.2.0
class YesodPersist site => YesodPersistRunner site where
-- | This function differs from 'runDB' in that it returns a database
-- runner function, as opposed to simply running a single action. This will
-- usually mean that a connection is taken from a pool and then reused for
-- each invocation. This can be useful for creating streaming responses;
-- see 'runDBSource'.
--
-- It additionally returns a cleanup function to free the connection. If
-- your code finishes successfully, you /must/ call this cleanup to
-- indicate changes should be committed. Otherwise, for SQL backends at
-- least, a rollback will be used instead.
--
-- Since 1.2.0
getDBRunner :: HandlerFor site (DBRunner site, HandlerFor site ())
newtype DBRunner site = DBRunner
{ runDBRunner :: forall a. YesodDB site a -> HandlerFor site a
}
-- | Helper for implementing 'getDBRunner'.
--
-- Since 1.2.0
#if MIN_VERSION_persistent(2,5,0)
defaultGetDBRunner :: (SQL.IsSqlBackend backend, YesodPersistBackend site ~ backend)
=> (site -> Pool backend)
-> HandlerFor site (DBRunner site, HandlerFor site ())
#else
defaultGetDBRunner :: YesodPersistBackend site ~ SQL.SqlBackend
=> (site -> Pool SQL.SqlBackend)
-> HandlerFor site (DBRunner site, HandlerFor site ())
#endif
defaultGetDBRunner getPool = do
pool <- fmap getPool getYesod
let withPrep conn f = f (persistBackend conn) (SQL.connPrepare $ persistBackend conn)
(relKey, (conn, local)) <- allocate
(do
(conn, local) <- takeResource pool
withPrep conn SQL.connBegin
return (conn, local)
)
(\(conn, local) -> do
withPrep conn SQL.connRollback
destroyResource pool local conn)
let cleanup = liftIO $ do
withPrep conn SQL.connCommit
putResource local conn
_ <- unprotect relKey
return ()
return (DBRunner $ \x -> runReaderT (unSqlPersistT x) conn, cleanup)
-- | Like 'runDB', but transforms a @Source@. See 'respondSourceDB' for an
-- example, practical use case.
--
-- Since 1.2.0
runDBSource :: YesodPersistRunner site
=> ConduitT () a (YesodDB site) ()
-> ConduitT () a (HandlerFor site) ()
runDBSource src = do
(dbrunner, cleanup) <- lift getDBRunner
transPipe (runDBRunner dbrunner) src
lift cleanup
-- | Extends 'respondSource' to create a streaming database response body.
respondSourceDB :: YesodPersistRunner site
=> ContentType
-> ConduitT () (Flush Builder) (YesodDB site) ()
-> HandlerFor site TypedContent
respondSourceDB ctype = respondSource ctype . runDBSource
-- | Get the given entity by ID, or return a 404 not found if it doesn't exist.
#if MIN_VERSION_persistent(2,5,0)
get404 :: (MonadIO m, PersistStoreRead backend, PersistRecordBackend val backend)
=> Key val
-> ReaderT backend m val
#else
get404 :: (MonadIO m, PersistStore (PersistEntityBackend val), PersistEntity val)
=> Key val
-> ReaderT (PersistEntityBackend val) m val
#endif
get404 key = do
mres <- get key
case mres of
Nothing -> notFound'
Just res -> return res
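-- An illustrative sketch, not part of the original module: using 'get404'
-- inside 'runDB' from a handler. @PersonId@, @personName@, and
-- @getPersonR@ are hypothetical names used purely for illustration.
--
-- > getPersonR :: PersonId -> HandlerFor App Html
-- > getPersonR personId = do
-- >     person <- runDB $ get404 personId
-- >     defaultLayout [whamlet|<h1>#{personName person}|]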
-- | Get the given entity by unique key, or return a 404 not found if it doesn't
-- exist.
#if MIN_VERSION_persistent(2,5,0)
getBy404 :: (PersistUniqueRead backend, PersistRecordBackend val backend, MonadIO m)
=> Unique val
-> ReaderT backend m (Entity val)
#else
getBy404 :: (PersistUnique (PersistEntityBackend val), PersistEntity val, MonadIO m)
=> Unique val
-> ReaderT (PersistEntityBackend val) m (Entity val)
#endif
getBy404 key = do
mres <- getBy key
case mres of
Nothing -> notFound'
Just res -> return res
-- | Create a new record in the database, returning an automatically
-- created key, or raise a 400 bad request if a uniqueness constraint
-- is violated.
--
-- @since 1.4.1
#if MIN_VERSION_persistent(2,5,0)
insert400 :: (MonadIO m, PersistUniqueWrite backend, PersistRecordBackend val backend)
=> val
-> ReaderT backend m (Key val)
#else
insert400 :: (MonadIO m, PersistUnique (PersistEntityBackend val), PersistEntity val)
=> val
-> ReaderT (PersistEntityBackend val) m (Key val)
#endif
insert400 datum = do
conflict <- checkUnique datum
case conflict of
Just unique ->
badRequest' $ map (unHaskellName . fst) $ persistUniqueToFieldNames unique
Nothing -> insert datum
-- | Same as 'insert400', but doesn’t return a key.
--
-- @since 1.4.1
#if MIN_VERSION_persistent(2,5,0)
insert400_ :: (MonadIO m, PersistUniqueWrite backend, PersistRecordBackend val backend)
=> val
-> ReaderT backend m ()
#else
insert400_ :: (MonadIO m, PersistUnique (PersistEntityBackend val), PersistEntity val)
=> val
-> ReaderT (PersistEntityBackend val) m ()
#endif
insert400_ datum = insert400 datum >> return ()
-- | Should be equivalent to @lift . notFound@, but there's an apparent bug in
-- GHC 7.4.2 that leads to segfaults. This is a workaround.
notFound' :: MonadIO m => m a
notFound' = liftIO $ throwIO $ HCError NotFound
-- | Constructed like 'notFound'', and for the same reasons.
badRequest' :: MonadIO m => Texts -> m a
badRequest' = liftIO . throwIO . HCError . InvalidArgs
|
s9gf4ult/yesod
|
yesod-persistent/Yesod/Persist/Core.hs
|
mit
| 7,149 | 0 | 14 | 1,716 | 1,344 | 710 | 634 | 115 | 2 |
{-| Cluster rebalancer.
-}
{-
Copyright (C) 2009, 2010, 2011, 2012, 2013 Google Inc.
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
1. Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-}
module Ganeti.HTools.Program.Hbal
( main
, options
, arguments
, iterateDepth
) where
import Control.Arrow ((&&&))
import Control.Lens (over)
import Control.Monad
import Data.List
import Data.Maybe (isNothing, fromMaybe)
import System.Exit
import System.IO
import Text.Printf (printf)
import Ganeti.HTools.AlgorithmParams (AlgorithmOptions(..), fromCLIOptions)
import qualified Ganeti.HTools.Container as Container
import qualified Ganeti.HTools.Cluster as Cluster
import qualified Ganeti.HTools.Cluster.Metrics as Metrics
import qualified Ganeti.HTools.Cluster.Utils as ClusterUtils
import qualified Ganeti.HTools.Group as Group
import qualified Ganeti.HTools.Node as Node
import qualified Ganeti.HTools.Instance as Instance
import Ganeti.BasicTypes
import Ganeti.Common
import Ganeti.HTools.CLI
import Ganeti.HTools.ExtLoader
import Ganeti.HTools.Types
import Ganeti.HTools.Loader
import Ganeti.OpCodes (wrapOpCode, setOpComment, setOpPriority)
import Ganeti.OpCodes.Lens (metaParamsL, opReasonL)
import Ganeti.JQueue (currentTimestamp, reasonTrailTimestamp)
import Ganeti.JQueue.Objects (Timestamp)
import Ganeti.Jobs as Jobs
import Ganeti.Utils
import Ganeti.Version (version)
-- | Options list and functions.
options :: IO [OptType]
options = do
luxi <- oLuxiSocket
return
[ oPrintNodes
, oPrintInsts
, oPrintCommands
, oDataFile
, oEvacMode
, oRestrictedMigrate
, oRapiMaster
, luxi
, oIAllocSrc
, oExecJobs
, oFirstJobGroup
, oReason
, oGroup
, oMaxSolLength
, oVerbose
, oQuiet
, oOfflineNode
, oStaticKvmNodeMemory
, oMinScore
, oMaxCpu
, oMinDisk
, oMinGain
, oMinGainLim
, oDiskMoves
, oSelInst
, oInstMoves
, oIgnoreSoftErrors
, oDynuFile
, oIgnoreDyn
, oMonD
, oMonDDataFile
, oMonDExitMissing
, oMonDXen
, oExTags
, oExInst
, oSaveCluster
, oPriority
]
-- | The list of arguments supported by the program.
arguments :: [ArgCompletion]
arguments = []
-- | Wraps an 'OpCode' in a 'MetaOpCode' while also adding a comment
-- about what generated the opcode.
annotateOpCode :: Maybe String -> Timestamp -> Jobs.Annotator
annotateOpCode reason ts =
over (metaParamsL . opReasonL)
(++ [( "hbal", fromMaybe ("hbal " ++ version ++ " called") reason
, reasonTrailTimestamp ts)])
. setOpComment ("rebalancing via hbal " ++ version)
. wrapOpCode
{- | Start computing the solution at the given depth and recurse until
we find a valid solution or we exceed the maximum depth.
-}
iterateDepth :: Bool -- ^ Whether to print moves
-> AlgorithmOptions -- ^ Algorithmic options to apply
-> Cluster.Table -- ^ The starting table
-> Int -- ^ Remaining length
-> Int -- ^ Max node name len
-> Int -- ^ Max instance name len
-> [MoveJob] -- ^ Current command list
-> Score -- ^ Score at which to stop
-> IO (Cluster.Table, [MoveJob]) -- ^ The resulting table
-- and commands
iterateDepth printmove algOpts ini_tbl max_rounds nmlen imlen cmd_strs
min_score =
let Cluster.Table ini_nl ini_il _ _ = ini_tbl
allowed_next = Cluster.doNextBalance ini_tbl max_rounds min_score
m_fin_tbl = if allowed_next
then Cluster.tryBalance algOpts ini_tbl
else Nothing
in case m_fin_tbl of
Just fin_tbl ->
do
let (Cluster.Table _ _ _ fin_plc) = fin_tbl
cur_plc@(idx, _, _, move, _) <-
exitIfEmpty "Empty placement list returned for solution?!" fin_plc
let fin_plc_len = length fin_plc
(sol_line, cmds) = Cluster.printSolutionLine ini_nl ini_il
nmlen imlen cur_plc fin_plc_len
afn = Cluster.involvedNodes ini_il cur_plc
upd_cmd_strs = (afn, idx, move, cmds):cmd_strs
when printmove $ do
putStrLn sol_line
hFlush stdout
iterateDepth printmove algOpts fin_tbl max_rounds
nmlen imlen upd_cmd_strs min_score
Nothing -> return (ini_tbl, cmd_strs)
-- | Displays the cluster stats.
printStats :: Node.List -> Node.List -> IO ()
printStats ini_nl fin_nl = do
let ini_cs = Cluster.totalResources ini_nl
fin_cs = Cluster.totalResources fin_nl
printf "Original: mem=%d disk=%d\n"
(Cluster.csFmem ini_cs) (Cluster.csFdsk ini_cs) :: IO ()
printf "Final: mem=%d disk=%d\n"
(Cluster.csFmem fin_cs) (Cluster.csFdsk fin_cs)
-- | Executes the jobs, if possible and desired.
maybeExecJobs :: Options
-> [a]
-> Node.List
-> Instance.List
-> [JobSet]
-> IO (Result ())
maybeExecJobs opts ord_plc fin_nl il cmd_jobs =
if optExecJobs opts && not (null ord_plc)
then (case optLuxi opts of
Nothing ->
return $ Bad "Execution of commands possible only on LUXI"
Just master -> do
ts <- currentTimestamp
let annotator = maybe id setOpPriority (optPriority opts) .
annotateOpCode (optReason opts) ts
execWithCancel annotator master $
zip (map toOpcodes cmd_jobs) (map toDescr cmd_jobs))
else return $ Ok ()
where toOpcodes = map (\(_, idx, move, _) ->
Cluster.iMoveToJob fin_nl il idx move)
toDescr job = "Executing jobset for instances " ++ commaJoin
(map (\(_, idx, _, _) -> Container.nameOf il idx) job)
-- | Select the target node group.
selectGroup :: Options -> Group.List -> Node.List -> Instance.List
-> IO (String, (Node.List, Instance.List))
selectGroup opts gl nlf ilf = do
let ngroups = ClusterUtils.splitCluster nlf ilf
when (length ngroups > 1 && isNothing (optGroup opts)) $ do
hPutStrLn stderr "Found multiple node groups:"
mapM_ (hPutStrLn stderr . (" " ++) . Group.name .
flip Container.find gl . fst) ngroups
exitErr "Aborting."
case optGroup opts of
Nothing -> do
(gidx, cdata) <- exitIfEmpty "No groups found by splitCluster?!" ngroups
let grp = Container.find gidx gl
return (Group.name grp, cdata)
Just g -> case Container.findByName gl g of
Nothing -> do
hPutStrLn stderr $ "Node group " ++ g ++
" not found. Node group list is:"
mapM_ (hPutStrLn stderr . (" " ++) . Group.name ) (Container.elems gl)
exitErr "Aborting."
Just grp ->
case lookup (Group.idx grp) ngroups of
Nothing ->
-- This will only happen if there are no nodes assigned
-- to this group
return (Group.name grp, (Container.empty, Container.empty))
Just cdata -> return (Group.name grp, cdata)
-- | Do a few checks on the cluster data.
checkCluster :: Int -> Node.List -> Instance.List -> IO ()
checkCluster verbose nl il = do
-- nothing to do on an empty cluster
when (Container.null il) $ do
printf "Cluster is empty, exiting.\n"::IO ()
exitSuccess
-- hbal doesn't currently handle split clusters
let split_insts = Cluster.findSplitInstances nl il
unless (null split_insts || verbose <= 1) $ do
hPutStrLn stderr "Found instances belonging to multiple node groups:"
mapM_ (\i -> hPutStrLn stderr $ " " ++ Instance.name i) split_insts
hPutStrLn stderr "These instances will not be moved."
printf "Loaded %d nodes, %d instances\n"
(Container.size nl)
(Container.size il)::IO ()
let csf = commonSuffix nl il
when (not (null csf) && verbose > 1) $
printf "Note: Stripping common suffix of '%s' from names\n" csf
-- | Do a few checks on the selected group data.
checkGroup :: Bool -> Int -> String -> Node.List -> Instance.List -> IO ()
checkGroup force verbose gname nl il = do
printf "Group size %d nodes, %d instances\n"
(Container.size nl)
(Container.size il)::IO ()
putStrLn $ "Selected node group: " ++ gname
let (bad_nodes, bad_instances) = Cluster.computeBadItems nl il
unless (verbose < 1) $ printf
"Initial check done: %d bad nodes, %d bad instances.\n"
(length bad_nodes) (length bad_instances)
let other_nodes = filter (not . (`elem` bad_nodes)) $ Container.elems nl
node_status = map (Node.name &&& Node.getPolicyHealth) other_nodes
policy_bad = filter (isBad . snd) node_status
when (verbose > 4) $ do
printf "Bad nodes: %s\n" . show $ map Node.name bad_nodes :: IO ()
printf "N+1 happy nodes: %s\n" . show $ map Node.name other_nodes :: IO ()
printf "Node policy status: %s\n" $ show node_status :: IO ()
unless (null bad_nodes) $
putStrLn "Cluster is not N+1 happy, continuing but no guarantee \
\that the cluster will end N+1 happy."
unless (null policy_bad) $ do
printf "The cluster contains %d policy-violating nodes.\n"
$ length policy_bad :: IO ()
putStrLn $ if force
then "Continuing, ignoring soft errors."
else "Continuing, but the set of moves might be too restricted;\
\ consider using the --ignore-soft-errors option."
-- | Check that we actually need to rebalance.
checkNeedRebalance :: Options -> Score -> Score -> IO ()
checkNeedRebalance opts ini_cv opt_cv = do
let min_cv = optMinScore opts
when (ini_cv - opt_cv < min_cv) $ do
printf "Cluster is already well balanced (initial score %.6g,\n\
\optimum score due to N+1 reservations %.6g,\n\
\minimum score %.6g).\nNothing to do, exiting\n"
ini_cv opt_cv min_cv:: IO ()
exitSuccess
-- | Main function.
main :: Options -> [String] -> IO ()
main opts args = do
unless (null args) $ exitErr "This program doesn't take any arguments."
let verbose = optVerbose opts
shownodes = optShowNodes opts
showinsts = optShowInsts opts
force = optIgnoreSoftErrors opts
ini_cdata@(ClusterData gl fixed_nl ilf ctags ipol) <- loadExternalData opts
when (verbose > 1) $ do
putStrLn $ "Loaded cluster tags: " ++ intercalate "," ctags
putStrLn $ "Loaded cluster ipolicy: " ++ show ipol
nlf <- setNodeStatus opts fixed_nl
checkCluster verbose nlf ilf
maybeSaveData (optSaveCluster opts) "original" "before balancing" ini_cdata
(gname, (nl, il)) <- selectGroup opts gl nlf ilf
checkGroup force verbose gname nl il
maybePrintInsts showinsts "Initial" (Cluster.printInsts nl il)
maybePrintNodes shownodes "Initial cluster" (Cluster.printNodes nl)
let ini_cv = Metrics.compCV nl
opt_cv = Metrics.optimalCVScore nl
ini_tbl = Cluster.Table nl il ini_cv []
min_cv = optMinScore opts
if verbose > 2
then printf "Initial coefficients: overall %.8f\n%s"
ini_cv (Metrics.printStats " " nl)::IO ()
else printf "Initial score: %.8f\n" ini_cv
checkNeedRebalance opts ini_cv opt_cv
putStrLn "Trying to minimize the CV..."
let imlen = maximum . map (length . Instance.alias) $ Container.elems il
nmlen = maximum . map (length . Node.alias) $ Container.elems nl
(fin_tbl, cmd_strs) <- iterateDepth True (fromCLIOptions opts) ini_tbl
(optMaxLength opts)
nmlen imlen [] (opt_cv + min_cv)
let (Cluster.Table fin_nl fin_il fin_cv fin_plc) = fin_tbl
ord_plc = reverse fin_plc
sol_msg = case () of
_ | null fin_plc -> printf "No solution found\n"
| verbose > 2 ->
printf "Final coefficients: overall %.8f\n%s"
fin_cv (Metrics.printStats " " fin_nl)
| otherwise ->
printf "Cluster score improved from %.8f to %.8f\n"
ini_cv fin_cv ::String
putStr sol_msg
unless (verbose < 1) $
printf "Solution length=%d\n" (length ord_plc)
let cmd_jobs = (if optFirstJobGroup opts then take 1 else id)
$ Cluster.splitJobs cmd_strs
maybeSaveCommands (if optFirstJobGroup opts
then "First set of jobs:"
else "Commands to run to reach the above solution:")
opts
$ Cluster.formatCmds cmd_jobs
maybeSaveData (optSaveCluster opts) "balanced" "after balancing"
ini_cdata { cdNodes = fin_nl, cdInstances = fin_il }
maybePrintInsts showinsts "Final" (Cluster.printInsts fin_nl fin_il)
maybePrintNodes shownodes "Final cluster" (Cluster.printNodes fin_nl)
when (verbose > 3) $ printStats nl fin_nl
exitIfBad "hbal" =<< maybeExecJobs opts ord_plc fin_nl il cmd_jobs
|
ganeti/ganeti
|
src/Ganeti/HTools/Program/Hbal.hs
|
bsd-2-clause
| 14,278 | 0 | 19 | 3,841 | 3,252 | 1,651 | 1,601 | 279 | 4 |
module Settings.Builders.Configure (configureBuilderArgs) where
import Packages
import Rules.Gmp
import Settings.Builders.Common
configureBuilderArgs :: Args
configureBuilderArgs = do
gmpPath <- expr gmpBuildPath
libffiPath <- expr libffiBuildPath
mconcat [ builder (Configure gmpPath) ? do
hostPlatform <- getSetting HostPlatform
buildPlatform <- getSetting BuildPlatform
pure [ "--enable-shared=no"
, "--host=" ++ hostPlatform
, "--build=" ++ buildPlatform ]
, builder (Configure libffiPath) ? do
top <- expr topDirectory
targetPlatform <- getSetting TargetPlatform
pure [ "--prefix=" ++ top -/- libffiPath -/- "inst"
, "--libdir=" ++ top -/- libffiPath -/- "inst/lib"
, "--enable-static=yes"
, "--enable-shared=no" -- TODO: add support for yes
, "--host=" ++ targetPlatform ] ]
|
snowleopard/shaking-up-ghc
|
src/Settings/Builders/Configure.hs
|
bsd-3-clause
| 1,045 | 0 | 16 | 362 | 207 | 104 | 103 | 22 | 1 |
{-
(c) The University of Glasgow 2006
(c) The GRASP/AQUA Project, Glasgow University, 1992-1998
\section[InstEnv]{Utilities for typechecking instance declarations}
The bits common to TcInstDcls and TcDeriv.
-}
{-# LANGUAGE CPP, DeriveDataTypeable #-}
module InstEnv (
DFunId, InstMatch, ClsInstLookupResult,
OverlapFlag(..), OverlapMode(..), setOverlapModeMaybe,
ClsInst(..), DFunInstType, pprInstance, pprInstanceHdr, pprInstances,
instanceHead, instanceSig, mkLocalInstance, mkImportedInstance,
instanceDFunId, tidyClsInstDFun, instanceRoughTcs,
fuzzyClsInstCmp, orphNamesOfClsInst,
InstEnvs(..), VisibleOrphanModules, InstEnv,
emptyInstEnv, extendInstEnv, deleteFromInstEnv, identicalClsInstHead,
extendInstEnvList, lookupUniqueInstEnv, lookupInstEnv, instEnvElts,
memberInstEnv, instIsVisible,
classInstances, instanceBindFun,
instanceCantMatch, roughMatchTcs,
isOverlappable, isOverlapping, isIncoherent
) where
#include "HsVersions.h"
import GhcPrelude
import TcType -- InstEnv is really part of the type checker,
-- and depends on TcType in many ways
import CoreSyn ( IsOrphan(..), isOrphan, chooseOrphanAnchor )
import Module
import Class
import Var
import VarSet
import Name
import NameSet
import Unify
import Outputable
import ErrUtils
import BasicTypes
import UniqDFM
import Util
import Id
import Data.Data ( Data )
import Data.Maybe ( isJust, isNothing )
{-
************************************************************************
* *
ClsInst: the data type for type-class instances
* *
************************************************************************
-}
-- | A type-class instance. Note that there is some tricky laziness at work
-- here. See Note [ClsInst laziness and the rough-match fields] for more
-- details.
data ClsInst
= ClsInst { -- Used for "rough matching"; see
-- Note [ClsInst laziness and the rough-match fields]
-- INVARIANT: is_tcs = roughMatchTcs is_tys
is_cls_nm :: Name -- ^ Class name
, is_tcs :: [Maybe Name] -- ^ Top of type args
-- | @is_dfun_name = idName . is_dfun@.
--
-- We use 'is_dfun_name' for the visibility check,
-- 'instIsVisible', which needs to know the 'Module' which the
-- dictionary is defined in. However, we cannot use the 'Module'
-- attached to 'is_dfun' since doing so would mean we would
-- potentially pull in an entire interface file unnecessarily.
-- This was the cause of #12367.
, is_dfun_name :: Name
-- Used for "proper matching"; see Note [Proper-match fields]
, is_tvs :: [TyVar] -- Fresh template tyvars for full match
-- See Note [Template tyvars are fresh]
, is_cls :: Class -- The real class
, is_tys :: [Type] -- Full arg types (mentioning is_tvs)
-- INVARIANT: is_dfun Id has type
-- forall is_tvs. (...) => is_cls is_tys
-- (modulo alpha conversion)
, is_dfun :: DFunId -- See Note [Haddock assumptions]
, is_flag :: OverlapFlag -- See detailed comments with
-- the decl of BasicTypes.OverlapFlag
, is_orphan :: IsOrphan
}
deriving Data
-- | A fuzzy comparison function for class instances, intended for sorting
-- instances before displaying them to the user.
fuzzyClsInstCmp :: ClsInst -> ClsInst -> Ordering
fuzzyClsInstCmp x y =
stableNameCmp (is_cls_nm x) (is_cls_nm y) `mappend`
mconcat (map cmp (zip (is_tcs x) (is_tcs y)))
where
cmp (Nothing, Nothing) = EQ
cmp (Nothing, Just _) = LT
cmp (Just _, Nothing) = GT
cmp (Just x, Just y) = stableNameCmp x y
isOverlappable, isOverlapping, isIncoherent :: ClsInst -> Bool
isOverlappable i = hasOverlappableFlag (overlapMode (is_flag i))
isOverlapping i = hasOverlappingFlag (overlapMode (is_flag i))
isIncoherent i = hasIncoherentFlag (overlapMode (is_flag i))
{-
Note [ClsInst laziness and the rough-match fields]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Suppose we load 'instance A.C B.T' from A.hi, but suppose that the type B.T is
otherwise unused in the program. Then it's stupid to load B.hi, the data type
declaration for B.T -- and perhaps further instance declarations!
We avoid this as follows:
* is_cls_nm, is_tcs, is_dfun_name are all Names. We can poke them to our heart's
content.
* Proper-match fields. is_dfun, and its related fields is_tvs, is_cls, is_tys
contain TyVars, Class, Type etc, and so are all lazy thunks. When we
poke any of these fields we'll typecheck the DFunId declaration, and hence
pull in interfaces that it refers to. See Note [Proper-match fields].
* Rough-match fields. During instance lookup, we use the is_cls_nm :: Name and
is_tcs :: [Maybe Name] fields to perform a "rough match", *without* poking
inside the DFunId. The rough-match fields allow us to say "definitely does not
match", based only on Names.
This laziness is very important; see Trac #12367. Try hard to avoid pulling on
the structured fields unless you really need the instance.
* Another place to watch is InstEnv.instIsVisible, which needs the module to
which the ClsInst belongs. We can get this from is_dfun_name.
* In is_tcs,
Nothing means that this type arg is a type variable
(Just n) means that this type arg is a
TyConApp with a type constructor of n.
This is always a real tycon, never a synonym!
(Two different synonyms might match, but two
different real tycons can't.)
NB: newtypes are not transparent, though!
-}
{-
Note [Template tyvars are fresh]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
The is_tvs field of a ClsInst has *completely fresh* tyvars.
That is, they are
* distinct from any other ClsInst
* distinct from any tyvars free in predicates that may
be looked up in the class instance environment
Reason for freshness: we use unification when checking for overlap
etc, and that requires the tyvars to be distinct.
The invariant is checked by the ASSERT in lookupInstEnv'.
Note [Proper-match fields]
~~~~~~~~~~~~~~~~~~~~~~~~~
The is_tvs, is_cls, is_tys fields are simply cached values, pulled
out (lazily) from the dfun id. They are cached here simply so
that we don't need to decompose the DFunId each time we want
to match it. The hope is that the rough-match fields mean
that we often never poke the proper-match fields.
However, note that:
* is_tvs must be a superset of the free vars of is_tys
* is_tvs, is_tys may be alpha-renamed compared to the ones in
the dfun Id
Note [Haddock assumptions]
~~~~~~~~~~~~~~~~~~~~~~~~~~
For normal user-written instances, Haddock relies on
* the SrcSpan of
* the Name of
* the is_dfun of
* an Instance
being equal to
* the SrcSpan of
* the instance head type of
* the InstDecl used to construct the Instance.
-}
instanceDFunId :: ClsInst -> DFunId
instanceDFunId = is_dfun
tidyClsInstDFun :: (DFunId -> DFunId) -> ClsInst -> ClsInst
tidyClsInstDFun tidy_dfun ispec
= ispec { is_dfun = tidy_dfun (is_dfun ispec) }
instanceRoughTcs :: ClsInst -> [Maybe Name]
instanceRoughTcs = is_tcs
instance NamedThing ClsInst where
getName ispec = getName (is_dfun ispec)
instance Outputable ClsInst where
ppr = pprInstance
pprInstance :: ClsInst -> SDoc
-- Prints the ClsInst as an instance declaration
pprInstance ispec
= hang (pprInstanceHdr ispec)
2 (vcat [ text "--" <+> pprDefinedAt (getName ispec)
, whenPprDebug (ppr (is_dfun ispec)) ])
-- * pprInstanceHdr is used in VStudio to populate the ClassView tree
pprInstanceHdr :: ClsInst -> SDoc
-- Prints the ClsInst as an instance declaration
pprInstanceHdr (ClsInst { is_flag = flag, is_dfun = dfun })
= text "instance" <+> ppr flag <+> pprSigmaType (idType dfun)
pprInstances :: [ClsInst] -> SDoc
pprInstances ispecs = vcat (map pprInstance ispecs)
instanceHead :: ClsInst -> ([TyVar], Class, [Type])
-- Returns the head, using the fresh tyvars from the ClsInst
instanceHead (ClsInst { is_tvs = tvs, is_tys = tys, is_dfun = dfun })
= (tvs, cls, tys)
where
(_, _, cls, _) = tcSplitDFunTy (idType dfun)
-- | Collects the names of concrete types and type constructors that make
-- up the head of a class instance. For instance, given `class Foo a b`:
--
-- `instance Foo (Either (Maybe Int) a) Bool` would yield
-- [Either, Maybe, Int, Bool]
--
-- Used in the implementation of ":info" in GHCi.
--
-- The 'tcSplitSigmaTy' is because of
-- instance Foo a => Baz T where ...
-- The decl is an orphan if Baz and T are both not locally defined,
-- even if Foo *is* locally defined
orphNamesOfClsInst :: ClsInst -> NameSet
orphNamesOfClsInst (ClsInst { is_cls_nm = cls_nm, is_tys = tys })
= orphNamesOfTypes tys `unionNameSet` unitNameSet cls_nm
instanceSig :: ClsInst -> ([TyVar], [Type], Class, [Type])
-- Decomposes the DFunId
instanceSig ispec = tcSplitDFunTy (idType (is_dfun ispec))
mkLocalInstance :: DFunId -> OverlapFlag
-> [TyVar] -> Class -> [Type]
-> ClsInst
-- Used for local instances, where we can safely pull on the DFunId.
-- Consider using newClsInst instead; this will also warn if
-- the instance is an orphan.
mkLocalInstance dfun oflag tvs cls tys
= ClsInst { is_flag = oflag, is_dfun = dfun
, is_tvs = tvs
, is_dfun_name = dfun_name
, is_cls = cls, is_cls_nm = cls_name
, is_tys = tys, is_tcs = roughMatchTcs tys
, is_orphan = orph
}
where
cls_name = className cls
dfun_name = idName dfun
this_mod = ASSERT( isExternalName dfun_name ) nameModule dfun_name
is_local name = nameIsLocalOrFrom this_mod name
-- Compute orphanhood. See Note [Orphans] in InstEnv
(cls_tvs, fds) = classTvsFds cls
arg_names = [filterNameSet is_local (orphNamesOfType ty) | ty <- tys]
-- See Note [When exactly is an instance decl an orphan?]
orph | is_local cls_name = NotOrphan (nameOccName cls_name)
| all notOrphan mb_ns = ASSERT( not (null mb_ns) ) head mb_ns
| otherwise = IsOrphan
notOrphan NotOrphan{} = True
notOrphan _ = False
mb_ns :: [IsOrphan] -- One for each fundep; a locally-defined name
-- that is not in the "determined" arguments
mb_ns | null fds = [choose_one arg_names]
| otherwise = map do_one fds
do_one (_ltvs, rtvs) = choose_one [ns | (tv,ns) <- cls_tvs `zip` arg_names
, not (tv `elem` rtvs)]
choose_one nss = chooseOrphanAnchor (unionNameSets nss)
mkImportedInstance :: Name -- ^ the name of the class
-> [Maybe Name] -- ^ the types which the class was applied to
-> Name -- ^ the 'Name' of the dictionary binding
-> DFunId -- ^ the 'Id' of the dictionary.
-> OverlapFlag -- ^ may this instance overlap?
-> IsOrphan -- ^ is this instance an orphan?
-> ClsInst
-- Used for imported instances, where we get the rough-match stuff
-- from the interface file
-- The bound tyvars of the dfun are guaranteed fresh, because
-- the dfun has been typechecked out of the same interface file
mkImportedInstance cls_nm mb_tcs dfun_name dfun oflag orphan
= ClsInst { is_flag = oflag, is_dfun = dfun
, is_tvs = tvs, is_tys = tys
, is_dfun_name = dfun_name
, is_cls_nm = cls_nm, is_cls = cls, is_tcs = mb_tcs
, is_orphan = orphan }
where
(tvs, _, cls, tys) = tcSplitDFunTy (idType dfun)
{-
Note [When exactly is an instance decl an orphan?]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
(see MkIface.instanceToIfaceInst, which implements this)
Roughly speaking, an instance is an orphan if its head (after the =>)
mentions nothing defined in this module.
Functional dependencies complicate the situation though. Consider
module M where { class C a b | a -> b }
and suppose we are compiling module X:
module X where
import M
data T = ...
instance C Int T where ...
This instance is an orphan, because when compiling a third module Y we
might get a constraint (C Int v), and we'd want to improve v to T. So
we must make sure X's instances are loaded, even if we do not directly
use anything from X.
More precisely, an instance is an orphan iff
If there are no fundeps, then at least one of the names in
the instance head is locally defined.
If there are fundeps, then for every fundep, at least one of the
names free in a *non-determined* part of the instance head is
defined in this module.
(Note that these conditions hold trivially if the class is locally
defined.)
************************************************************************
* *
InstEnv, ClsInstEnv
* *
************************************************************************
A @ClsInstEnv@ holds all the instances of that class. The @Id@ inside a
ClsInstEnv mapping is the dfun for that instance.
If class C maps to a list containing the item ([a,b], [t1,t2,t3], dfun), then
forall a b, C t1 t2 t3 can be constructed by dfun
or, to put it another way, we have
instance (...) => C t1 t2 t3, witnessed by dfun
-}
---------------------------------------------------
{-
Note [InstEnv determinism]
~~~~~~~~~~~~~~~~~~~~~~~~~~
We turn InstEnvs into a list in some places that don't directly affect
the ABI. That happens when we create output for `:info`.
Unfortunately that nondeterminism is nonlocal and it's hard to tell what it
affects without following a chain of functions. It's also easy to accidentally
make that nondeterminism affect the ABI. Furthermore the envs should be
relatively small, so it should be free to use deterministic maps here.
Testing with nofib and validate detected no difference between UniqFM and
UniqDFM. See also Note [Deterministic UniqFM]
-}
type InstEnv = UniqDFM ClsInstEnv -- Maps Class to instances for that class
-- See Note [InstEnv determinism]
-- | 'InstEnvs' represents the combination of the global type class instance
-- environment, the local type class instance environment, and the set of
-- transitively reachable orphan modules (according to what modules have been
-- directly imported) used to test orphan instance visibility.
data InstEnvs = InstEnvs {
ie_global :: InstEnv, -- External-package instances
ie_local :: InstEnv, -- Home-package instances
ie_visible :: VisibleOrphanModules -- Set of all orphan modules transitively
-- reachable from the module being compiled
-- See Note [Instance lookup and orphan instances]
}
-- | Set of visible orphan modules, according to what modules have been directly
-- imported. This is based off of the dep_orphs field, which records
-- transitively reachable orphan modules (modules that define orphan instances).
type VisibleOrphanModules = ModuleSet
newtype ClsInstEnv
= ClsIE [ClsInst] -- The instances for a particular class, in any order
instance Outputable ClsInstEnv where
ppr (ClsIE is) = pprInstances is
-- INVARIANTS:
-- * The is_tvs are distinct in each ClsInst
-- of a ClsInstEnv (so we can safely unify them)
-- Thus, the @ClassInstEnv@ for @Eq@ might contain the following entry:
-- [a] ===> dfun_Eq_List :: forall a. Eq a => Eq [a]
-- The "a" in the pattern must be one of the forall'd variables in
-- the dfun type.
emptyInstEnv :: InstEnv
emptyInstEnv = emptyUDFM
instEnvElts :: InstEnv -> [ClsInst]
instEnvElts ie = [elt | ClsIE elts <- eltsUDFM ie, elt <- elts]
-- See Note [InstEnv determinism]
-- | Test if an instance is visible, by checking that its origin module
-- is in 'VisibleOrphanModules'.
-- See Note [Instance lookup and orphan instances]
instIsVisible :: VisibleOrphanModules -> ClsInst -> Bool
instIsVisible vis_mods ispec
-- NB: Instances from the interactive package always are visible. We can't
-- add interactive modules to the set since we keep creating new ones
-- as a GHCi session progresses.
| isInteractiveModule mod = True
| IsOrphan <- is_orphan ispec = mod `elemModuleSet` vis_mods
| otherwise = True
where
mod = nameModule $ is_dfun_name ispec
classInstances :: InstEnvs -> Class -> [ClsInst]
classInstances (InstEnvs { ie_global = pkg_ie, ie_local = home_ie, ie_visible = vis_mods }) cls
= get home_ie ++ get pkg_ie
where
get env = case lookupUDFM env cls of
Just (ClsIE insts) -> filter (instIsVisible vis_mods) insts
Nothing -> []
-- | Checks for an exact match of ClsInst in the instance environment.
-- We use this when we do signature checking in TcRnDriver
memberInstEnv :: InstEnv -> ClsInst -> Bool
memberInstEnv inst_env ins_item@(ClsInst { is_cls_nm = cls_nm } ) =
maybe False (\(ClsIE items) -> any (identicalDFunType ins_item) items)
(lookupUDFM inst_env cls_nm)
where
identicalDFunType cls1 cls2 =
eqType (varType (is_dfun cls1)) (varType (is_dfun cls2))
extendInstEnvList :: InstEnv -> [ClsInst] -> InstEnv
extendInstEnvList inst_env ispecs = foldl extendInstEnv inst_env ispecs
extendInstEnv :: InstEnv -> ClsInst -> InstEnv
extendInstEnv inst_env ins_item@(ClsInst { is_cls_nm = cls_nm })
= addToUDFM_C add inst_env cls_nm (ClsIE [ins_item])
where
add (ClsIE cur_insts) _ = ClsIE (ins_item : cur_insts)
deleteFromInstEnv :: InstEnv -> ClsInst -> InstEnv
deleteFromInstEnv inst_env ins_item@(ClsInst { is_cls_nm = cls_nm })
= adjustUDFM adjust inst_env cls_nm
where
adjust (ClsIE items) = ClsIE (filterOut (identicalClsInstHead ins_item) items)
identicalClsInstHead :: ClsInst -> ClsInst -> Bool
-- ^ True when the instance heads are the same
-- e.g. both are Eq [(a,b)]
-- Used for overriding in GHCi
-- Obviously should be insensitive to alpha-renaming
identicalClsInstHead (ClsInst { is_cls_nm = cls_nm1, is_tcs = rough1, is_tys = tys1 })
(ClsInst { is_cls_nm = cls_nm2, is_tcs = rough2, is_tys = tys2 })
= cls_nm1 == cls_nm2
&& not (instanceCantMatch rough1 rough2) -- Fast check for no match, uses the "rough match" fields
&& isJust (tcMatchTys tys1 tys2)
&& isJust (tcMatchTys tys2 tys1)
{-
************************************************************************
* *
Looking up an instance
* *
************************************************************************
@lookupInstEnv@ looks up in an @InstEnv@, using a one-way match. Since
the env is kept ordered, the first match must be the only one. The
thing we are looking up can have an arbitrary "flexi" part.
Note [Instance lookup and orphan instances]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Suppose we are compiling a module M, and we have a zillion packages
loaded, and we are looking up an instance for C (T W). If we find a
match in module 'X' from package 'p', should it be "in scope"; that is,
is p:X in the transitive closure of modules imported from M?
The difficulty is that the "zillion packages" might include ones loaded
through earlier invocations of the GHC API, or earlier module loads in GHCi.
They might not be in the dependencies of M itself; and if not, the instances
in them should not be visible. Trac #2182, #8427.
There are two cases:
* If the instance is *not an orphan*, then module X defines C, T, or W.
And in order for those types to be involved in typechecking M, it
must be that X is in the transitive closure of M's imports. So we
can use the instance.
* If the instance *is an orphan*, the above reasoning does not apply.
So we keep track of the set of orphan modules transitively below M;
this is the ie_visible field of InstEnvs, of type VisibleOrphanModules.
If module p:X is in this set, then we can use the instance, otherwise
we can't.
Note [Rules for instance lookup]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
These functions implement the carefully-written rules in the user
manual section on "overlapping instances". At risk of duplication,
here are the rules. If the rules change, change this text and the
user manual simultaneously. The link may be this:
http://www.haskell.org/ghc/docs/latest/html/users_guide/glasgow_exts.html#instance-overlap
The willingness to be overlapped or incoherent is a property of the
instance declaration itself, controlled as follows:
* An instance is "incoherent"
if it has an INCOHERENT pragma, or
if it appears in a module compiled with -XIncoherentInstances.
* An instance is "overlappable"
if it has an OVERLAPPABLE or OVERLAPS pragma, or
if it appears in a module compiled with -XOverlappingInstances, or
if the instance is incoherent.
* An instance is "overlapping"
if it has an OVERLAPPING or OVERLAPS pragma, or
if it appears in a module compiled with -XOverlappingInstances, or
if the instance is incoherent.
Now suppose that, in some client module, we are searching for an instance
of the target constraint (C ty1 .. tyn). The search works like this.
* Find all instances I that match the target constraint; that is, the
target constraint is a substitution instance of I. These instance
declarations are the candidates.
* Find all non-candidate instances that unify with the target
constraint. Such non-candidate instances might match when the
target constraint is further instantiated. If all of them are
incoherent, proceed; if not, the search fails.
* Eliminate any candidate IX for which both of the following hold:
* There is another candidate IY that is strictly more specific;
that is, IY is a substitution instance of IX but not vice versa.
* Either IX is overlappable or IY is overlapping.
* If only one candidate remains, pick it. Otherwise if all remaining
candidates are incoherent, pick an arbitrary candidate. Otherwise fail.
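As a small illustration (a sketch, not taken from the user manual; the class
and instances below are hypothetical):
    class C a where ...
    instance C a     where ...   -- IX, marked OVERLAPPABLE
    instance C [Int] where ...   -- IY, marked OVERLAPPING
Searching for (C [Int]) finds both IX and IY as candidates; IY is strictly
more specific, and IX is overlappable (IY is also overlapping), so IX is
eliminated and IY is picked.  Searching for (C [b]) with 'b' unconstrained
matches only IX, but IY unifies with the target and is not incoherent, so
the search fails rather than committing to IX prematurely.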
Note [Overlapping instances] (NB: these notes are quite old)
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Overlap is permitted, but only in such a way that one can make
a unique choice when looking up. That is, overlap is only permitted if
one template matches the other, or vice versa. So this is ok:
[a] [Int]
but this is not
(Int,a) (b,Int)
If overlap is permitted, the list is kept most specific first, so that
the first lookup is the right choice.
For now we just use association lists.
\subsection{Avoiding a problem with overlapping}
Consider this little program:
\begin{pseudocode}
class C a where c :: a
class C a => D a where d :: a
instance C Int where c = 17
instance D Int where d = 13
instance C a => C [a] where c = [c]
instance ({- C [a], -} D a) => D [a] where d = c
instance C [Int] where c = [37]
main = print (d :: [Int])
\end{pseudocode}
What do you think `main' prints (assuming we have overlapping instances, and
all that turned on)? Well, the instance for `D' at type `[a]' is defined to
be `c' at the same type, and we've got an instance of `C' at `[Int]', so the
answer is `[37]', right? (the generic `C [a]' instance shouldn't apply because
the `C [Int]' instance is more specific).
Ghc-4.04 gives `[37]', while ghc-4.06 gives `[17]', so 4.06 is wrong. That
was easy ;-) Let's just consult hugs for good measure. Wait - if I use old
hugs (pre-September99), I get `[17]', and stranger yet, if I use hugs98, it
doesn't even compile! What's going on!?
What hugs complains about is the `D [a]' instance decl.
\begin{pseudocode}
ERROR "mj.hs" (line 10): Cannot build superclass instance
*** Instance : D [a]
*** Context supplied : D a
*** Required superclass : C [a]
\end{pseudocode}
You might wonder what hugs is complaining about. It's saying that you
need to add `C [a]' to the context of the `D [a]' instance (as appears
in comments). But there's that `C [a]' instance decl one line above
that says that I can reduce the need for a `C [a]' instance to the
need for a `C a' instance, and in this case, I already have the
necessary `C a' instance (since we have `D a' explicitly in the
context, and `C' is a superclass of `D').
Unfortunately, the above reasoning indicates a premature commitment to the
generic `C [a]' instance. I.e., it prematurely rules out the more specific
instance `C [Int]'. This is the mistake that ghc-4.06 makes. The fix is to
add the context that hugs suggests (uncomment the `C [a]'), effectively
deferring the decision about which instance to use.
Now, interestingly enough, 4.04 has this same bug, but it's covered up
in this case by a little known `optimization' that was disabled in
4.06. Ghc-4.04 silently inserts any missing superclass context into
an instance declaration. In this case, it silently inserts the `C
[a]', and everything happens to work out.
(See `basicTypes/MkId:mkDictFunId' for the code in question. Search for
`Mark Jones', although Mark claims no credit for the `optimization' in
question, and would rather it stopped being called the `Mark Jones
optimization' ;-)
So, what's the fix? I think hugs has it right. Here's why. Let's try
something else out with ghc-4.04. Let's add the following line:
d' :: D a => [a]
d' = c
Everyone raise their hand who thinks that `d :: [Int]' should give a
different answer from `d' :: [Int]'. Well, in ghc-4.04, it does. The
`optimization' only applies to instance decls, not to regular
bindings, giving inconsistent behavior.
Old hugs had this same bug. Here's how we fixed it: like GHC, the
list of instances for a given class is ordered, so that more specific
instances come before more generic ones. For example, the instance
list for C might contain:
..., C Int, ..., C a, ...
When we go to look for a `C Int' instance we'll get that one first.
But what if we go looking for a `C b' (`b' is unconstrained)? We'll
pass the `C Int' instance, and keep going. But if `b' is
unconstrained, then we don't know yet if the more specific instance
will eventually apply. GHC keeps going, and matches on the generic `C
a'. The fix is to, at each step, check to see if there's a reverse
match, and if so, abort the search. This prevents hugs from
prematurely choosing a generic instance when a more specific one
exists.
--Jeff
BUT NOTE [Nov 2001]: we must actually *unify* not reverse-match in
this test. Suppose the instance envt had
..., forall a b. C a a b, ..., forall a b c. C a b c, ...
(still most specific first)
Now suppose we are looking for (C x y Int), where x and y are unconstrained.
C x y Int doesn't match the template {a,b} C a a b
but neither does
C a a b match the template {x,y} C x y Int
But still x and y might subsequently be unified so they *do* match.
Simple story: unify, don't match.
-}
type DFunInstType = Maybe Type
-- Just ty => Instantiate with this type
-- Nothing => Instantiate with any type of this tyvar's kind
-- See Note [DFunInstType: instantiating types]
type InstMatch = (ClsInst, [DFunInstType])
type ClsInstLookupResult
= ( [InstMatch] -- Successful matches
, [ClsInst] -- These don't match but do unify
, [InstMatch] ) -- Unsafe overlapped instances under Safe Haskell
-- (see Note [Safe Haskell Overlapping Instances] in
-- TcSimplify).
{-
Note [DFunInstType: instantiating types]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
A successful match is a ClsInst, together with the types at which
the dfun_id in the ClsInst should be instantiated
The instantiating types are (Either TyVar Type)s because the dfun
might have some tyvars that *only* appear in arguments
dfun :: forall a b. C a b, Ord b => D [a]
When we match this against D [ty], we return the instantiating types
[Just ty, Nothing]
where the 'Nothing' indicates that 'b' can be freely instantiated.
(The caller instantiates it to a flexi type variable, which will
presumably later become fixed via functional dependencies.)
-}
-- |Look up an instance in the given instance environment. The given class application must match exactly
-- one instance and the match may not contain any flexi type variables. If the lookup is unsuccessful,
-- yield 'Left errorMessage'.
lookupUniqueInstEnv :: InstEnvs
-> Class -> [Type]
-> Either MsgDoc (ClsInst, [Type])
lookupUniqueInstEnv instEnv cls tys
= case lookupInstEnv False instEnv cls tys of
([(inst, inst_tys)], _, _)
| noFlexiVar -> Right (inst, inst_tys')
| otherwise -> Left $ text "flexible type variable:" <+>
(ppr $ mkTyConApp (classTyCon cls) tys)
where
inst_tys' = [ty | Just ty <- inst_tys]
noFlexiVar = all isJust inst_tys
_other -> Left $ text "instance not found" <+>
(ppr $ mkTyConApp (classTyCon cls) tys)
lookupInstEnv' :: InstEnv -- InstEnv to look in
-> VisibleOrphanModules -- But filter against this
-> Class -> [Type] -- What we are looking for
-> ([InstMatch], -- Successful matches
[ClsInst]) -- These don't match but do unify
-- The second component of the result pair happens when we look up
-- Foo [a]
-- in an InstEnv that has entries for
-- Foo [Int]
-- Foo [b]
-- Then which we choose would depend on the way in which 'a'
-- is instantiated. So we report that Foo [b] is a match (mapping b->a)
-- but Foo [Int] is a unifier. This gives the caller a better chance of
-- giving a suitable error message
lookupInstEnv' ie vis_mods cls tys
= lookup ie
where
rough_tcs = roughMatchTcs tys
all_tvs = all isNothing rough_tcs
--------------
lookup env = case lookupUDFM env cls of
Nothing -> ([],[]) -- No instances for this class
Just (ClsIE insts) -> find [] [] insts
--------------
find ms us [] = (ms, us)
find ms us (item@(ClsInst { is_tcs = mb_tcs, is_tvs = tpl_tvs
, is_tys = tpl_tys }) : rest)
| not (instIsVisible vis_mods item)
= find ms us rest -- See Note [Instance lookup and orphan instances]
-- Fast check for no match, uses the "rough match" fields
| instanceCantMatch rough_tcs mb_tcs
= find ms us rest
| Just subst <- tcMatchTys tpl_tys tys
= find ((item, map (lookupTyVar subst) tpl_tvs) : ms) us rest
-- Does not match, so next check whether the things unify
-- See Note [Overlapping instances] and Note [Incoherent instances]
| isIncoherent item
= find ms us rest
| otherwise
= ASSERT2( tyCoVarsOfTypes tys `disjointVarSet` tpl_tv_set,
(ppr cls <+> ppr tys <+> ppr all_tvs) $$
(ppr tpl_tvs <+> ppr tpl_tys)
)
-- Unification will break badly if the variables overlap
-- They shouldn't because we allocate separate uniques for them
-- See Note [Template tyvars are fresh]
case tcUnifyTys instanceBindFun tpl_tys tys of
Just _ -> find ms (item:us) rest
Nothing -> find ms us rest
where
tpl_tv_set = mkVarSet tpl_tvs
---------------
-- This is the common way to call this function.
lookupInstEnv :: Bool -- Check Safe Haskell overlap restrictions
-> InstEnvs -- External and home package inst-env
-> Class -> [Type] -- What we are looking for
-> ClsInstLookupResult
-- ^ See Note [Rules for instance lookup]
-- ^ See Note [Safe Haskell Overlapping Instances] in TcSimplify
-- ^ See Note [Safe Haskell Overlapping Instances Implementation] in TcSimplify
lookupInstEnv check_overlap_safe
(InstEnvs { ie_global = pkg_ie
, ie_local = home_ie
, ie_visible = vis_mods })
cls
tys
= -- pprTrace "lookupInstEnv" (ppr cls <+> ppr tys $$ ppr home_ie) $
(final_matches, final_unifs, unsafe_overlapped)
where
(home_matches, home_unifs) = lookupInstEnv' home_ie vis_mods cls tys
(pkg_matches, pkg_unifs) = lookupInstEnv' pkg_ie vis_mods cls tys
all_matches = home_matches ++ pkg_matches
all_unifs = home_unifs ++ pkg_unifs
final_matches = foldr insert_overlapping [] all_matches
-- Even if the unifs is non-empty (an error situation)
-- we still prune the matches, so that the error message isn't
-- misleading (complaining of multiple matches when some should be
-- overlapped away)
unsafe_overlapped
= case final_matches of
[match] -> check_safe match
_ -> []
-- If the selected match is incoherent, discard all unifiers
final_unifs = case final_matches of
(m:_) | isIncoherent (fst m) -> []
_ -> all_unifs
-- NOTE [Safe Haskell isSafeOverlap]
-- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-- We restrict code compiled in 'Safe' mode from overriding code
-- compiled in any other mode. The rationale is that code compiled
-- in 'Safe' mode is code that is untrusted by the ghc user. So
-- we shouldn't let that code change the behaviour of code the
-- user didn't compile in 'Safe' mode since that's the code they
-- trust. So 'Safe' instances can only overlap instances from the
-- same module. A same instance origin policy for safe compiled
-- instances.
check_safe (inst,_)
= case check_overlap_safe && unsafeTopInstance inst of
-- make sure it only overlaps instances from the same module
True -> go [] all_matches
-- most specific is from a trusted location.
False -> []
where
go bad [] = bad
go bad (i@(x,_):unchecked) =
if inSameMod x || isOverlappable x
then go bad unchecked
else go (i:bad) unchecked
inSameMod b =
let na = getName $ getName inst
la = isInternalName na
nb = getName $ getName b
lb = isInternalName nb
in (la && lb) || (nameModule na == nameModule nb)
-- We consider the most specific instance unsafe when it both:
-- (1) Comes from a module compiled as `Safe`
-- (2) Is an orphan instance, OR, an instance for a MPTC
unsafeTopInstance inst = isSafeOverlap (is_flag inst) &&
(isOrphan (is_orphan inst) || classArity (is_cls inst) > 1)
---------------
insert_overlapping :: InstMatch -> [InstMatch] -> [InstMatch]
-- ^ Add a new solution, knocking out strictly less specific ones
-- See Note [Rules for instance lookup]
insert_overlapping new_item [] = [new_item]
insert_overlapping new_item@(new_inst,_) (old_item@(old_inst,_) : old_items)
| new_beats_old -- New strictly overrides old
, not old_beats_new
, new_inst `can_override` old_inst
= insert_overlapping new_item old_items
| old_beats_new -- Old strictly overrides new
, not new_beats_old
, old_inst `can_override` new_inst
= old_item : old_items
-- Discard incoherent instances; see Note [Incoherent instances]
| isIncoherent old_inst -- Old is incoherent; discard it
= insert_overlapping new_item old_items
| isIncoherent new_inst -- New is incoherent; discard it
= old_item : old_items
-- Equal or incomparable, and neither is incoherent; keep both
| otherwise
= old_item : insert_overlapping new_item old_items
where
new_beats_old = new_inst `more_specific_than` old_inst
old_beats_new = old_inst `more_specific_than` new_inst
-- `instB` can be instantiated to match `instA`
-- or the two are equal
instA `more_specific_than` instB
= isJust (tcMatchTys (is_tys instB) (is_tys instA))
instA `can_override` instB
= isOverlapping instA || isOverlappable instB
-- Overlap permitted if either the more specific instance
-- is marked as overlapping, or the more general one is
-- marked as overlappable.
-- Latest change described in: Trac #9242.
-- Previous change: Trac #3877, Dec 10.
{-
Note [Incoherent instances]
~~~~~~~~~~~~~~~~~~~~~~~~~~~
For some classes, the choice of a particular instance does not matter, any one
is good. E.g. consider
class D a b where { opD :: a -> b -> String }
instance D Int b where ...
instance D a Int where ...
g (x::Int) = opD x x -- Wanted: D Int Int
For such classes this should work (without having to add an "instance D Int
Int", and using -XOverlappingInstances, which would then work). This is what
-XIncoherentInstances is for: Telling GHC "I don't care which instance you use;
if you can use one, use it."
Should this logic only work when *all* candidates have the incoherent flag, or
even when all but one have it? The right choice is the latter, which can be
justified by comparing the behaviour with how -XIncoherentInstances worked when
it was only about the unify-check (note [Overlapping instances]):
Example:
class C a b c where foo :: (a,b,c)
instance C [a] b Int
instance [incoherent] C [Int] b c
instance [incoherent] C a Int c
Thanks to the incoherent flags,
[Wanted] C [a] b Int
works: only the first instance matches; the others just unify, but are marked
incoherent.
So I can write
(foo :: ([a],b,Int)) :: ([Int], Int, Int).
but if that works then I really want to be able to write
foo :: ([Int], Int, Int)
as well. Now all three instances from above match. None is more specific than
another, so none is ruled out by the normal overlapping rules. One of them is
not incoherent, but we still want this to compile. Hence the
"all-but-one-logic".
The implementation is in insert_overlapping, where we remove matching
incoherent instances as long as there are others.
************************************************************************
* *
Binding decisions
* *
************************************************************************
-}
instanceBindFun :: TyCoVar -> BindFlag
instanceBindFun tv | isOverlappableTyVar tv = Skolem
| otherwise = BindMe
-- Note [Binding when looking up instances]
{-
Note [Binding when looking up instances]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
When looking up in the instance environment, or family-instance environment,
we are careful about multiple matches, as described above in
Note [Overlapping instances]
The key_tys can contain skolem constants, and we can guarantee that those
are never going to be instantiated to anything, so we should not involve
them in the unification test. Example:
class Foo a where { op :: a -> Int }
instance Foo a => Foo [a] -- NB overlap
instance Foo [Int] -- NB overlap
data T = forall a. Foo a => MkT a
f :: T -> Int
f (MkT x) = op [x,x]
The op [x,x] means we need (Foo [a]). Without the filterVarSet we'd
complain, saying that the choice of instance depended on the instantiation
of 'a'; but of course it isn't *going* to be instantiated.
We do this only for isOverlappableTyVar skolems. For example we reject
g :: forall a. [a] -> Int
g x = op x
on the grounds that the correct instance depends on the instantiation of 'a'
-}
|
ezyang/ghc
|
compiler/types/InstEnv.hs
|
bsd-3-clause
| 40,701 | 0 | 15 | 10,373 | 3,955 | 2,202 | 1,753 | -1 | -1 |
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE GADTSyntax #-}
module T17379a where
import Language.Haskell.TH
$(let typ = mkName "T" in pure [ DataD [] typ [] Nothing [GadtC [] [] (ConT typ)] [] ])
|
sdiehl/ghc
|
testsuite/tests/th/T17379a.hs
|
bsd-3-clause
| 200 | 0 | 15 | 36 | 78 | 40 | 38 | 5 | 0 |
{-# LANGUAGE CPP #-}
-----------------------------------------------------------------------------
-- |
-- Module : Distribution.Simple.Command
-- Copyright : Duncan Coutts 2007
-- License : BSD3
--
-- Maintainer : [email protected]
-- Portability : portable
--
-- This module deals with command line handling. The Cabal command line is
-- organised into a number of named sub-commands (much like darcs). The
-- 'CommandUI' abstraction represents one of these sub-commands, with a name,
-- description, a set of flags. Commands can be associated with actions and
-- run. It handles some common stuff automatically, like the @--help@ and
-- command line completion flags. It is designed to allow other tools to make
-- derived commands. This feature is used heavily in @cabal-install@.
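--
-- As a minimal sketch (illustrative only; @MyFlags@ and its field are
-- hypothetical and not part of this module), a derived command might be
-- assembled like this:
--
-- > data MyFlags = MyFlags { myTargets :: [String] }
-- >
-- > myCommandUI :: CommandUI MyFlags
-- > myCommandUI = mkCommandUI "frob" "Frobnicate the targets."
-- >     ["[FLAGS]"] (MyFlags [])
-- >     (\_ -> [ option "t" ["target"] "Add a target"
-- >                myTargets (\ts fs -> fs { myTargets = ts })
-- >                (reqArg' "TARGET" (:[]) id) ])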
module Distribution.Simple.Command (
-- * Command interface
CommandUI(..),
commandShowOptions,
CommandParse(..),
commandParseArgs,
getNormalCommandDescriptions,
helpCommandUI,
-- ** Constructing commands
ShowOrParseArgs(..),
usageDefault,
usageAlternatives,
mkCommandUI,
hiddenCommand,
-- ** Associating actions with commands
Command,
commandAddAction,
noExtraFlags,
-- ** Running commands
commandsRun,
-- * Option Fields
OptionField(..), Name,
-- ** Constructing Option Fields
option, multiOption,
-- ** Liftings & Projections
liftOption, viewAsFieldDescr,
-- * Option Descriptions
OptDescr(..), Description, SFlags, LFlags, OptFlags, ArgPlaceHolder,
-- ** OptDescr 'smart' constructors
MkOptDescr,
reqArg, reqArg', optArg, optArg', noArg,
boolOpt, boolOpt', choiceOpt, choiceOptFromEnum
) where
import Control.Monad
import Data.Char (isAlpha, toLower)
import Data.List (sortBy)
import Data.Maybe
#if __GLASGOW_HASKELL__ < 710
import Data.Monoid
#endif
import qualified Distribution.GetOpt as GetOpt
import Distribution.Text
( Text(disp, parse) )
import Distribution.ParseUtils
import Distribution.ReadE
import Distribution.Simple.Utils (die, intercalate)
import Text.PrettyPrint ( punctuate, cat, comma, text )
import Text.PrettyPrint as PP ( empty )
data CommandUI flags = CommandUI {
-- | The name of the command as it would be entered on the command line.
-- For example @\"build\"@.
commandName :: String,
-- | A short, one line description of the command to use in help texts.
commandSynopsis :: String,
-- | A function that maps a program name to a usage summary for this
-- command.
commandUsage :: String -> String,
-- | Additional explanation of the command to use in help texts.
commandDescription :: Maybe (String -> String),
-- | Post-Usage notes and examples in help texts
commandNotes :: Maybe (String -> String),
-- | Initial \/ empty flags
commandDefaultFlags :: flags,
-- | All the Option fields for this command
commandOptions :: ShowOrParseArgs -> [OptionField flags]
}
data ShowOrParseArgs = ShowArgs | ParseArgs
type Name = String
type Description = String
-- | We usually have a data type for storing configuration values, where
-- every field stores a configuration option, and the user sets
-- the value either via command line flags or a configuration file.
-- An individual OptionField models such a field, and we usually
-- build a list of options associated to a configuration data type.
data OptionField a = OptionField {
optionName :: Name,
optionDescr :: [OptDescr a] }
-- | An OptionField takes one or more OptDescrs, describing the command line
-- interface for the field.
data OptDescr a = ReqArg Description OptFlags ArgPlaceHolder
(ReadE (a->a)) (a -> [String])
| OptArg Description OptFlags ArgPlaceHolder
(ReadE (a->a)) (a->a) (a -> [Maybe String])
| ChoiceOpt [(Description, OptFlags, a->a, a -> Bool)]
| BoolOpt Description OptFlags{-True-} OptFlags{-False-}
(Bool -> a -> a) (a-> Maybe Bool)
-- | Short command line option strings
type SFlags = [Char]
-- | Long command line option strings
type LFlags = [String]
type OptFlags = (SFlags,LFlags)
type ArgPlaceHolder = String
-- | Create an option taking a single OptDescr.
-- No explicit Name is given for the Option, the name is the first LFlag given.
option :: SFlags -> LFlags -> Description -> get -> set -> MkOptDescr get set a
-> OptionField a
option sf lf@(n:_) d get set arg = OptionField n [arg sf lf d get set]
option _ _ _ _ _ _ = error $ "Distribution.command.option: "
++ "An OptionField must have at least one LFlag"
-- | Create an option taking several OptDescrs.
-- You will have to give the flags and description individually to the
-- OptDescr constructor.
multiOption :: Name -> get -> set
-> [get -> set -> OptDescr a] -- ^MkOptDescr constructors partially
-- applied to flags and description.
-> OptionField a
multiOption n get set args = OptionField n [arg get set | arg <- args]
type MkOptDescr get set a = SFlags -> LFlags -> Description -> get -> set
-> OptDescr a
-- | Create a string-valued command line interface.
reqArg :: Monoid b => ArgPlaceHolder -> ReadE b -> (b -> [String])
-> MkOptDescr (a -> b) (b -> a -> a) a
reqArg ad mkflag showflag sf lf d get set =
ReqArg d (sf,lf) ad (fmap (\a b -> set (get b `mappend` a) b) mkflag)
(showflag . get)
-- | Create a string-valued command line interface with a default value.
optArg :: Monoid b => ArgPlaceHolder -> ReadE b -> b -> (b -> [Maybe String])
-> MkOptDescr (a -> b) (b -> a -> a) a
optArg ad mkflag def showflag sf lf d get set =
OptArg d (sf,lf) ad (fmap (\a b -> set (get b `mappend` a) b) mkflag)
(\b -> set (get b `mappend` def) b)
(showflag . get)
-- | (String -> a) variant of "reqArg"
reqArg' :: Monoid b => ArgPlaceHolder -> (String -> b) -> (b -> [String])
-> MkOptDescr (a -> b) (b -> a -> a) a
reqArg' ad mkflag showflag =
reqArg ad (succeedReadE mkflag) showflag
-- | (String -> a) variant of "optArg"
optArg' :: Monoid b => ArgPlaceHolder -> (Maybe String -> b)
-> (b -> [Maybe String])
-> MkOptDescr (a -> b) (b -> a -> a) a
optArg' ad mkflag showflag =
optArg ad (succeedReadE (mkflag . Just)) def showflag
where def = mkflag Nothing
noArg :: (Eq b) => b -> MkOptDescr (a -> b) (b -> a -> a) a
noArg flag sf lf d = choiceOpt [(flag, (sf,lf), d)] sf lf d
boolOpt :: (b -> Maybe Bool) -> (Bool -> b) -> SFlags -> SFlags
-> MkOptDescr (a -> b) (b -> a -> a) a
boolOpt g s sfT sfF _sf _lf@(n:_) d get set =
BoolOpt d (sfT, ["enable-"++n]) (sfF, ["disable-"++n]) (set.s) (g.get)
boolOpt _ _ _ _ _ _ _ _ _ = error
"Distribution.Simple.Setup.boolOpt: unreachable"
boolOpt' :: (b -> Maybe Bool) -> (Bool -> b) -> OptFlags -> OptFlags
-> MkOptDescr (a -> b) (b -> a -> a) a
boolOpt' g s ffT ffF _sf _lf d get set = BoolOpt d ffT ffF (set.s) (g . get)
-- | create a Choice option
choiceOpt :: Eq b => [(b,OptFlags,Description)]
-> MkOptDescr (a -> b) (b -> a -> a) a
choiceOpt aa_ff _sf _lf _d get set = ChoiceOpt alts
where alts = [(d,flags, set alt, (==alt) . get) | (alt,flags,d) <- aa_ff]
-- | create a Choice option out of an enumeration type.
-- As long flags, the Show output is used. As short flags, the first character
-- which does not conflict with a previous one is used.
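-- For example (an illustrative sketch; 'Verbosity' is hypothetical):
--
-- > data Verbosity = Quiet | Normal | Loud
-- >   deriving (Bounded, Enum, Show, Eq)
--
-- would yield the long flags @--quiet@, @--normal@ and @--loud@, and the
-- short flags @-q@, @-n@ and @-l@.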
choiceOptFromEnum :: (Bounded b, Enum b, Show b, Eq b) =>
MkOptDescr (a -> b) (b -> a -> a) a
choiceOptFromEnum _sf _lf d get =
choiceOpt [ (x, (sf, [map toLower $ show x]), d')
| (x, sf) <- sflags'
, let d' = d ++ show x]
_sf _lf d get
where sflags' = foldl f [] [firstOne..]
f prev x = let prevflags = concatMap snd prev in
prev ++ take 1 [(x, [toLower sf])
| sf <- show x, isAlpha sf
, toLower sf `notElem` prevflags]
firstOne = minBound `asTypeOf` get undefined
commandGetOpts :: ShowOrParseArgs -> CommandUI flags
-> [GetOpt.OptDescr (flags -> flags)]
commandGetOpts showOrParse command =
concatMap viewAsGetOpt (commandOptions command showOrParse)
viewAsGetOpt :: OptionField a -> [GetOpt.OptDescr (a->a)]
viewAsGetOpt (OptionField _n aa) = concatMap optDescrToGetOpt aa
where
optDescrToGetOpt (ReqArg d (cs,ss) arg_desc set _) =
[GetOpt.Option cs ss (GetOpt.ReqArg set' arg_desc) d]
where set' = readEOrFail set
optDescrToGetOpt (OptArg d (cs,ss) arg_desc set def _) =
[GetOpt.Option cs ss (GetOpt.OptArg set' arg_desc) d]
where set' Nothing = def
set' (Just txt) = readEOrFail set txt
optDescrToGetOpt (ChoiceOpt alts) =
[GetOpt.Option sf lf (GetOpt.NoArg set) d | (d,(sf,lf),set,_) <- alts ]
optDescrToGetOpt (BoolOpt d (sfT, lfT) ([], []) set _) =
[ GetOpt.Option sfT lfT (GetOpt.NoArg (set True)) d ]
optDescrToGetOpt (BoolOpt d ([], []) (sfF, lfF) set _) =
[ GetOpt.Option sfF lfF (GetOpt.NoArg (set False)) d ]
optDescrToGetOpt (BoolOpt d (sfT,lfT) (sfF, lfF) set _) =
[ GetOpt.Option sfT lfT (GetOpt.NoArg (set True)) ("Enable " ++ d)
, GetOpt.Option sfF lfF (GetOpt.NoArg (set False)) ("Disable " ++ d) ]
-- | to view as a FieldDescr, we sort the list of interfaces (Req > Bool >
-- Choice > Opt) and consider only the first one.
viewAsFieldDescr :: OptionField a -> FieldDescr a
viewAsFieldDescr (OptionField _n []) =
error "Distribution.command.viewAsFieldDescr: unexpected"
viewAsFieldDescr (OptionField n dd) = FieldDescr n get set
where
optDescr = head $ sortBy cmp dd
cmp :: OptDescr a -> OptDescr a -> Ordering
ReqArg{} `cmp` ReqArg{} = EQ
ReqArg{} `cmp` _ = GT
BoolOpt{} `cmp` ReqArg{} = LT
BoolOpt{} `cmp` BoolOpt{} = EQ
BoolOpt{} `cmp` _ = GT
ChoiceOpt{} `cmp` ReqArg{} = LT
ChoiceOpt{} `cmp` BoolOpt{} = LT
ChoiceOpt{} `cmp` ChoiceOpt{} = EQ
ChoiceOpt{} `cmp` _ = GT
OptArg{} `cmp` OptArg{} = EQ
OptArg{} `cmp` _ = LT
-- get :: a -> Doc
get t = case optDescr of
ReqArg _ _ _ _ ppr ->
(cat . punctuate comma . map text . ppr) t
OptArg _ _ _ _ _ ppr ->
case ppr t of [] -> PP.empty
(Nothing : _) -> text "True"
(Just a : _) -> text a
ChoiceOpt alts ->
fromMaybe PP.empty $ listToMaybe
[ text lf | (_,(_,lf:_), _,enabled) <- alts, enabled t]
BoolOpt _ _ _ _ enabled -> (maybe PP.empty disp . enabled) t
-- set :: LineNo -> String -> a -> ParseResult a
set line val a =
case optDescr of
ReqArg _ _ _ readE _ -> ($ a) `liftM` runE line n readE val
-- We parse for a single value instead of a
-- list, as one can't really implement
-- parseList :: ReadE a -> ReadE [a] with
-- the current ReadE definition
ChoiceOpt{} ->
case getChoiceByLongFlag optDescr val of
Just f -> return (f a)
_ -> syntaxError line val
BoolOpt _ _ _ setV _ -> (`setV` a) `liftM` runP line n parse val
OptArg _ _ _ readE _ _ -> ($ a) `liftM` runE line n readE val
-- Optional arguments are parsed just like
-- required arguments here; we don't
-- provide a method to set an OptArg field
-- to the default value.
getChoiceByLongFlag :: OptDescr b -> String -> Maybe (b->b)
getChoiceByLongFlag (ChoiceOpt alts) val = listToMaybe
[ set | (_,(_sf,lf:_), set, _) <- alts
, lf == val]
getChoiceByLongFlag _ _ =
error "Distribution.command.getChoiceByLongFlag: expected a choice option"
getCurrentChoice :: OptDescr a -> a -> [String]
getCurrentChoice (ChoiceOpt alts) a =
[ lf | (_,(_sf,lf:_), _, currentChoice) <- alts, currentChoice a]
getCurrentChoice _ _ = error "Command.getChoice: expected a Choice OptDescr"
liftOption :: (b -> a) -> (a -> (b -> b)) -> OptionField a -> OptionField b
liftOption get' set' opt =
opt { optionDescr = liftOptDescr get' set' `map` optionDescr opt}
liftOptDescr :: (b -> a) -> (a -> (b -> b)) -> OptDescr a -> OptDescr b
liftOptDescr get' set' (ChoiceOpt opts) =
ChoiceOpt [ (d, ff, liftSet get' set' set , (get . get'))
| (d, ff, set, get) <- opts]
liftOptDescr get' set' (OptArg d ff ad set def get) =
OptArg d ff ad (liftSet get' set' `fmap` set)
(liftSet get' set' def) (get . get')
liftOptDescr get' set' (ReqArg d ff ad set get) =
ReqArg d ff ad (liftSet get' set' `fmap` set) (get . get')
liftOptDescr get' set' (BoolOpt d ffT ffF set get) =
BoolOpt d ffT ffF (liftSet get' set' . set) (get . get')
liftSet :: (b -> a) -> (a -> (b -> b)) -> (a -> a) -> b -> b
liftSet get' set' set x = set' (set $ get' x) x
-- | Show flags in the standard long option command line format
commandShowOptions :: CommandUI flags -> flags -> [String]
commandShowOptions command v = concat
[ showOptDescr v od | o <- commandOptions command ParseArgs
, od <- optionDescr o]
where
maybePrefix [] = []
maybePrefix (lOpt:_) = ["--" ++ lOpt]
showOptDescr :: a -> OptDescr a -> [String]
showOptDescr x (BoolOpt _ (_,lfTs) (_,lfFs) _ enabled)
= case enabled x of
Nothing -> []
Just True -> maybePrefix lfTs
Just False -> maybePrefix lfFs
showOptDescr x c@ChoiceOpt{}
= ["--" ++ val | val <- getCurrentChoice c x]
showOptDescr x (ReqArg _ (_ssff,lf:_) _ _ showflag)
= [ "--"++lf++"="++flag
| flag <- showflag x ]
showOptDescr x (OptArg _ (_ssff,lf:_) _ _ _ showflag)
= [ case flag of
Just s -> "--"++lf++"="++s
Nothing -> "--"++lf
| flag <- showflag x ]
showOptDescr _ _
= error "Distribution.Simple.Command.showOptDescr: unreachable"
commandListOptions :: CommandUI flags -> [String]
commandListOptions command =
concatMap listOption $
addCommonFlags ShowArgs $ -- This is a slight hack, we don't want
-- "--list-options" showing up in the
-- list options output, so use ShowArgs
commandGetOpts ShowArgs command
where
listOption (GetOpt.Option shortNames longNames _ _) =
[ "-" ++ [name] | name <- shortNames ]
++ [ "--" ++ name | name <- longNames ]
-- | The help text for this command with descriptions of all the options.
commandHelp :: CommandUI flags -> String -> String
commandHelp command pname =
commandSynopsis command
++ "\n\n"
++ commandUsage command pname
++ ( case commandDescription command of
Nothing -> ""
Just desc -> '\n': desc pname)
++ "\n"
++ ( if cname == ""
then "Global flags:"
else "Flags for " ++ cname ++ ":" )
++ ( GetOpt.usageInfo ""
. addCommonFlags ShowArgs
$ commandGetOpts ShowArgs command )
++ ( case commandNotes command of
Nothing -> ""
Just notes -> '\n': notes pname)
where cname = commandName command
-- | Default "usage" documentation text for commands.
usageDefault :: String -> String -> String
usageDefault name pname =
"Usage: " ++ pname ++ " " ++ name ++ " [FLAGS]\n\n"
++ "Flags for " ++ name ++ ":"
-- | Create "usage" documentation from a list of parameter
-- configurations.
usageAlternatives :: String -> [String] -> String -> String
usageAlternatives name strs pname = unlines
[ start ++ pname ++ " " ++ name ++ " " ++ s
| let starts = "Usage: " : repeat " or: "
, (start, s) <- zip starts strs
]
-- | Make a Command from standard 'GetOpt' options.
mkCommandUI :: String -- ^ name
-> String -- ^ synopsis
-> [String] -- ^ usage alternatives
-> flags -- ^ initial\/empty flags
-> (ShowOrParseArgs -> [OptionField flags]) -- ^ options
-> CommandUI flags
mkCommandUI name synopsis usages flags options = CommandUI
{ commandName = name
, commandSynopsis = synopsis
, commandDescription = Nothing
, commandNotes = Nothing
, commandUsage = usageAlternatives name usages
, commandDefaultFlags = flags
, commandOptions = options
}
-- | Common flags that apply to every command
data CommonFlag = HelpFlag | ListOptionsFlag
commonFlags :: ShowOrParseArgs -> [GetOpt.OptDescr CommonFlag]
commonFlags showOrParseArgs = case showOrParseArgs of
ShowArgs -> [help]
ParseArgs -> [help, list]
where
help = GetOpt.Option helpShortFlags ["help"] (GetOpt.NoArg HelpFlag)
"Show this help text"
helpShortFlags = case showOrParseArgs of
ShowArgs -> ['h']
ParseArgs -> ['h', '?']
list = GetOpt.Option [] ["list-options"] (GetOpt.NoArg ListOptionsFlag)
"Print a list of command line flags"
addCommonFlags :: ShowOrParseArgs
-> [GetOpt.OptDescr a]
-> [GetOpt.OptDescr (Either CommonFlag a)]
addCommonFlags showOrParseArgs options =
map (fmapOptDesc Left) (commonFlags showOrParseArgs)
++ map (fmapOptDesc Right) options
where fmapOptDesc f (GetOpt.Option s l d m) =
GetOpt.Option s l (fmapArgDesc f d) m
fmapArgDesc f (GetOpt.NoArg a) = GetOpt.NoArg (f a)
fmapArgDesc f (GetOpt.ReqArg s d) = GetOpt.ReqArg (f . s) d
fmapArgDesc f (GetOpt.OptArg s d) = GetOpt.OptArg (f . s) d
-- | Parse a bunch of command line arguments
--
commandParseArgs :: CommandUI flags
-> Bool -- ^ Is the command a global or subcommand?
-> [String]
-> CommandParse (flags -> flags, [String])
commandParseArgs command global args =
let options = addCommonFlags ParseArgs
$ commandGetOpts ParseArgs command
order | global = GetOpt.RequireOrder
| otherwise = GetOpt.Permute
in case GetOpt.getOpt' order options args of
(flags, _, _, _)
| any listFlag flags -> CommandList (commandListOptions command)
| any helpFlag flags -> CommandHelp (commandHelp command)
where listFlag (Left ListOptionsFlag) = True; listFlag _ = False
helpFlag (Left HelpFlag) = True; helpFlag _ = False
(flags, opts, opts', [])
| global || null opts' -> CommandReadyToGo (accum flags, mix opts opts')
| otherwise -> CommandErrors (unrecognised opts')
(_, _, _, errs) -> CommandErrors errs
where -- Note: It is crucial to use reverse function composition here or to
-- reverse the flags here as we want to process the flags left to right
-- but data flow in function composition is right to left.
accum flags = foldr (flip (.)) id [ f | Right f <- flags ]
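-- (Illustrative reduction: accum [Right f1, Right f2] evaluates to
-- f2 . f1, so f1, the flag given first on the command line, is applied
-- to the flags record first.)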
unrecognised opts = [ "unrecognized "
++ "'" ++ (commandName command) ++ "'"
++ " option `" ++ opt ++ "'\n"
| opt <- opts ]
-- For unrecognised global flags we put them in the position just after
-- the command, if there is one. This gives us a chance to parse them
-- as sub-command rather than global flags.
mix [] ys = ys
mix (x:xs) ys = x:ys++xs
data CommandParse flags = CommandHelp (String -> String)
| CommandList [String]
| CommandErrors [String]
| CommandReadyToGo flags
instance Functor CommandParse where
fmap _ (CommandHelp help) = CommandHelp help
fmap _ (CommandList opts) = CommandList opts
fmap _ (CommandErrors errs) = CommandErrors errs
fmap f (CommandReadyToGo flags) = CommandReadyToGo (f flags)
data CommandType = NormalCommand | HiddenCommand
data Command action =
Command String String ([String] -> CommandParse action) CommandType
-- | Mark command as hidden. Hidden commands don't show up in the 'progname
-- help' or 'progname --help' output.
hiddenCommand :: Command action -> Command action
hiddenCommand (Command name synopsis f _cmdType) =
  Command name synopsis f HiddenCommand
commandAddAction :: CommandUI flags
-> (flags -> [String] -> action)
-> Command action
commandAddAction command action =
Command (commandName command)
(commandSynopsis command)
(fmap (uncurry applyDefaultArgs) . commandParseArgs command False)
NormalCommand
where applyDefaultArgs mkflags args =
let flags = mkflags (commandDefaultFlags command)
in action flags args
commandsRun :: CommandUI a
-> [Command action]
-> [String]
-> CommandParse (a, CommandParse action)
commandsRun globalCommand commands args =
case commandParseArgs globalCommand True args of
CommandHelp help -> CommandHelp help
CommandList opts -> CommandList (opts ++ commandNames)
CommandErrors errs -> CommandErrors errs
CommandReadyToGo (mkflags, args') -> case args' of
("help":cmdArgs) -> handleHelpCommand cmdArgs
(name:cmdArgs) -> case lookupCommand name of
[Command _ _ action _]
-> CommandReadyToGo (flags, action cmdArgs)
_ -> CommandReadyToGo (flags, badCommand name)
[] -> CommandReadyToGo (flags, noCommand)
where flags = mkflags (commandDefaultFlags globalCommand)
where
lookupCommand cname = [ cmd | cmd@(Command cname' _ _ _) <- commands'
, cname' == cname ]
noCommand = CommandErrors ["no command given (try --help)\n"]
badCommand cname = CommandErrors ["unrecognised command: " ++ cname
++ " (try --help)\n"]
commands' = commands ++ [commandAddAction helpCommandUI undefined]
commandNames = [ name | (Command name _ _ NormalCommand) <- commands' ]
-- A bit of a hack: support "prog help" as a synonym of "prog --help"
-- furthermore, support "prog help command" as "prog command --help"
handleHelpCommand cmdArgs =
case commandParseArgs helpCommandUI True cmdArgs of
CommandHelp help -> CommandHelp help
CommandList list -> CommandList (list ++ commandNames)
CommandErrors _ -> CommandHelp globalHelp
CommandReadyToGo (_,[]) -> CommandHelp globalHelp
CommandReadyToGo (_,(name:cmdArgs')) ->
case lookupCommand name of
[Command _ _ action _] ->
case action ("--help":cmdArgs') of
CommandHelp help -> CommandHelp help
CommandList _ -> CommandList []
_ -> CommandHelp globalHelp
_ -> badCommand name
where globalHelp = commandHelp globalCommand
-- | Utility function, many commands do not accept additional flags. This
-- action fails with a helpful error message if the user supplies any extra.
--
noExtraFlags :: [String] -> IO ()
noExtraFlags [] = return ()
noExtraFlags extraFlags =
die $ "Unrecognised flags: " ++ intercalate ", " extraFlags
--TODO: eliminate this function and turn it into a variant on commandAddAction
-- instead like commandAddActionNoArgs that doesn't supply the [String]
-- | Helper function for creating globalCommand description
getNormalCommandDescriptions :: [Command action] -> [(String, String)]
getNormalCommandDescriptions cmds =
[ (name, description)
| Command name description _ NormalCommand <- cmds ]
helpCommandUI :: CommandUI ()
helpCommandUI = mkCommandUI
"help"
"Help about commands."
["[FLAGS]", "COMMAND [FLAGS]"]
()
(const [])
|
Helkafen/cabal
|
Cabal/Distribution/Simple/Command.hs
|
bsd-3-clause
| 24,442 | 0 | 17 | 7,044 | 6,859 | 3,634 | 3,225 | 410 | 20 |
{-# LANGUAGE ScopedTypeVariables, TemplateHaskell, GADTs #-}
module Main where
--------------------------------------------------------------------------
-- imports
import Test.QuickCheck
import Test.QuickCheck.Text
import Test.QuickCheck.All
import Test.QuickCheck.Poly
import Test.QuickCheck.Property
import Data.List
( sort
, nub
, (\\)
)
import Data.Maybe
( fromJust
)
import Control.Monad
( liftM
, liftM2
)
--------------------------------------------------------------------------
-- skew heaps
data Heap a
= Node a (Heap a) (Heap a)
| Nil
deriving ( Eq, Ord, Show )
empty :: Heap a
empty = Nil
isEmpty :: Heap a -> Bool
isEmpty Nil = True
isEmpty _ = False
unit :: a -> Heap a
unit x = Node x empty empty
size :: Heap a -> Int
size Nil = 0
size (Node _ h1 h2) = 1 + size h1 + size h2
insert :: Ord a => a -> Heap a -> Heap a
insert x h = unit x `merge` h
removeMin :: Ord a => Heap a -> Maybe (a, Heap a)
removeMin Nil = Nothing
removeMin (Node x h1 h2) = Just (x, h1 `merge` h2)
merge :: Ord a => Heap a -> Heap a -> Heap a
h1 `merge` Nil = h1
Nil `merge` h2 = h2
h1@(Node x h11 h12) `merge` h2@(Node y h21 h22)
| x <= y = Node x (h12 `merge` h2) h11
| otherwise = Node y (h22 `merge` h1) h21
fromList :: Ord a => [a] -> Heap a
fromList xs = merging [ unit x | x <- xs ] []
where
merging [] [] = empty
merging [p] [] = p
merging (p:q:ps) qs = merging ps ((p`merge`q):qs)
merging ps qs = merging (ps ++ reverse qs) []
toList :: Heap a -> [a]
toList h = toList' [h]
where
toList' [] = []
toList' (Nil : hs) = toList' hs
toList' (Node x h1 h2 : hs) = x : toList' (h1:h2:hs)
toSortedList :: Ord a => Heap a -> [a]
toSortedList Nil = []
toSortedList (Node x h1 h2) = x : toSortedList (h1 `merge` h2)
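-- A few hypothetical GHCi examples of the operations above:
--
-- >>> toSortedList (fromList [3,1,2])
-- [1,2,3]
-- >>> removeMin (insert 5 (unit 7))
-- Just (5,Node 7 Nil Nil)
-- >>> size (unit 'a' `merge` unit 'b')
-- 2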
--------------------------------------------------------------------------
-- heap programs
data HeapP a
= Empty
| Unit a
| Insert a (HeapP a)
| SafeRemoveMin (HeapP a)
| Merge (HeapP a) (HeapP a)
| FromList [a]
deriving (Show)
safeRemoveMin :: Ord a => Heap a -> Heap a
safeRemoveMin h = case removeMin h of
Nothing -> empty -- arbitrary choice
Just (_,h) -> h
heap :: Ord a => HeapP a -> Heap a
heap Empty = empty
heap (Unit x) = unit x
heap (Insert x p) = insert x (heap p)
heap (SafeRemoveMin p) = safeRemoveMin (heap p)
heap (Merge p q) = heap p `merge` heap q
heap (FromList xs) = fromList xs
instance (Ord a, Arbitrary a) => Arbitrary (HeapP a) where
arbitrary = sized arbHeapP
where
arbHeapP s =
frequency
[ (1, do return Empty)
, (1, do x <- arbitrary
return (Unit x))
, (s, do x <- arbitrary
p <- arbHeapP s1
return (Insert x p))
, (s, do p <- arbHeapP s1
return (SafeRemoveMin p))
, (s, do p <- arbHeapP s2
q <- arbHeapP s2
return (Merge p q))
, (1, do xs <- arbitrary
return (FromList xs))
]
where
s1 = s-1
s2 = s`div`2
shrink Empty = []
shrink (Unit x) = [ Unit x' | x' <- shrink x ]
shrink (FromList xs) = [ Unit x | x <- xs ]
++ [ FromList xs' | xs' <- shrink xs ]
shrink p =
[ FromList (toList (heap p)) ] ++
case p of
Insert x p -> [ p ]
++ [ Insert x p' | p' <- shrink p ]
++ [ Insert x' p | x' <- shrink x ]
SafeRemoveMin p -> [ p ]
++ [ SafeRemoveMin p' | p' <- shrink p ]
Merge p q -> [ p, q ]
++ [ Merge p' q | p' <- shrink p ]
++ [ Merge p q' | q' <- shrink q ]
data HeapPP a = HeapPP (HeapP a) (Heap a)
deriving (Show)
instance (Ord a, Arbitrary a) => Arbitrary (HeapPP a) where
arbitrary =
do p <- arbitrary
return (HeapPP p (heap p))
shrink (HeapPP p _) =
[ HeapPP p' (heap p') | p' <- shrink p ]
--------------------------------------------------------------------------
-- properties
data Context a where
Context :: Eq b => (Heap a -> b) -> Context a
instance (Ord a, Arbitrary a) => Arbitrary (Context a) where
arbitrary =
do f <- sized arbContext
let vec h = (size h, toSortedList h, isEmpty h)
return (Context (vec . f))
where
arbContext s =
frequency
[ (1, do return id)
, (s, do x <- arbitrary
f <- arbContext (s-1)
return (insert x . f))
, (s, do f <- arbContext (s-1)
return (safeRemoveMin . f))
, (s, do HeapPP _ h <- arbitrary
f <- arbContext (s`div`2)
elements [ (h `merge`) . f, (`merge` h) . f ])
]
instance Show (Context a) where
show _ = "*"
(=~) :: Heap Char -> Heap Char -> Property
--h1 =~ h2 = sort (toList h1) == sort (toList h2)
--h1 =~ h2 = property (nub (sort (toList h1)) == nub (sort (toList h2))) -- bug!
h1 =~ h2 = property (\(Context c) -> c h1 == c h2)
{-
The normal form is:
insert x1 (insert x2 (... empty)...)
where x1 <= x2 <= ...
-}
-- heap creating operations
prop_Unit x =
unit x =~ insert x empty
prop_RemoveMin_Empty =
removeMin (empty :: Heap OrdA) == Nothing
prop_RemoveMin_Insert1 x =
removeMin (insert x empty :: Heap OrdA) == Just (x, empty)
prop_RemoveMin_Insert2 x y (HeapPP _ h) =
removeMin (insert x (insert y h)) ==~
(insert (max x y) `maph` removeMin (insert (min x y) h))
where
f `maph` Just (x,h) = Just (x, f h)
f `maph` Nothing = Nothing
Nothing ==~ Nothing = property True
Just (x,h1) ==~ Just (y,h2) = x==y .&&. h1 =~ h2
prop_InsertSwap x y (HeapPP _ h) =
insert x (insert y h) =~ insert y (insert x h)
prop_MergeInsertLeft x (HeapPP _ h1) (HeapPP _ h2) =
(insert x h1 `merge` h2) =~ insert x (h1 `merge` h2)
prop_MergeInsertRight x (HeapPP _ h1) (HeapPP _ h2) =
(h1 `merge` insert x h2) =~ insert x (h1 `merge` h2)
-- heap observing operations
prop_Size_Empty =
size empty == 0
prop_Size_Insert x (HeapPP _ (h :: Heap OrdA)) =
size (insert x h) == 1 + size h
prop_ToList_Empty =
toList empty == ([] :: [OrdA])
prop_ToList_Insert x (HeapPP _ (h :: Heap OrdA)) =
sort (toList (insert x h)) == sort (x : toList h)
prop_ToSortedList (HeapPP _ (h :: Heap OrdA)) =
toSortedList h == sort (toList h)
--------------------------------------------------------------------------
-- main
return []
main = $(quickCheckAll)
--------------------------------------------------------------------------
-- the end.
|
srhb/quickcheck
|
examples/Heap_ProgramAlgebraic.hs
|
bsd-3-clause
| 6,651 | 0 | 16 | 1,993 | 2,815 | 1,437 | 1,378 | 169 | 4 |
{-
This is a very high-level test of the hackage server. It forks a fresh server
instance, and then uses HTTP to run various requests on that server.
System requirements:
1. Port `testPort` (currently 8392) must be available on localhost
2. You must allow for outgoing HTTP traffic, as we POST to html5.validator.nu
for HTML validation.
-}
module Main (main) where
import qualified Codec.Archive.Tar as Tar
import qualified Codec.Compression.GZip as GZip
import Control.Exception
import Control.Monad
import qualified Data.ByteString.Lazy.Char8 as LBS
import Data.List (isInfixOf)
import Data.String ()
import System.Directory
import System.Exit (ExitCode(..))
import System.FilePath
import System.IO
import Package
import Util
import HttpUtils ( isOk
, isNoContent
, isForbidden
, Authorization(..)
)
import HackageClientUtils
main :: IO ()
main = do hSetBuffering stdout LineBuffering
info "Initialising"
root <- getCurrentDirectory
info "Setting up test directory"
exists <- doesDirectoryExist (testDir root)
when exists $ removeDirectoryRecursive (testDir root)
createDirectory (testDir root)
(setCurrentDirectory (testDir root) >> doit root)
`finally` removeDirectoryRecursive (testDir root)
testName :: FilePath
testName = "HighLevelTestTemp"
testDir :: FilePath -> FilePath
testDir root = root </> "tests" </> testName
doit :: FilePath -> IO ()
doit root
= do info "initialising hackage database"
runServerChecked root ["init"]
withServerRunning root $ do void $ validate NoAuth "/"
void $ validate NoAuth "/accounts"
void $ validate (Auth "admin" "admin") "/admin"
void $ validate NoAuth "/upload"
runUserTests
runPackageUploadTests
runPackageTests
withServerRunning root $ runPackageTests
info "Making database backup"
tarGz1 <- createBackup testName root "1"
info "Removing old state"
removeDirectoryRecursive "state"
info "Checking server doesn't work"
mec <- runServer root serverRunningArgs
case mec of
Just (ExitFailure 1) -> return ()
Just (ExitFailure _) -> die "Server failed with wrong exit code"
Just ExitSuccess -> die "Server worked unexpectedly"
Nothing -> die "Got a signal?"
info $ "Restoring database from " ++ tarGz1
runServerChecked root ["restore", tarGz1]
info "Making another database backup"
tarGz2 <- createBackup testName root "2"
info "Checking databases match"
db1 <- LBS.readFile tarGz1
db2 <- LBS.readFile tarGz2
unless (db1 == db2) $ die "Databases don't match"
info "Checking server still works, and data is intact"
withServerRunning root $ runPackageTests
runUserTests :: IO ()
runUserTests = do
do info "Getting user list"
xs <- fmap (map userName) getUsers
unless (xs == ["admin"]) $
die ("Bad user list: " ++ show xs)
do info "Getting admin user list"
xs <- getAdmins
unless (map userName (groupMembers xs) == ["admin"]) $
die ("Bad admin user list: " ++ show xs)
do -- For this test we just create the users directly using the admin
-- interface, there's a separate test that tests the self-signup.
createUserDirect (Auth "admin" "admin") "HackageTestUser1" "testpass1"
createUserDirect (Auth "admin" "admin") "HackageTestUser2" "testpass2"
do info "Checking new users are now in user list"
xs <- fmap (map userName) getUsers
unless (xs == ["admin","HackageTestUser1","HackageTestUser2"]) $
die ("Bad user list: " ++ show xs)
do info "Checking new users are not in admin list"
xs <- getAdmins
unless (map userName (groupMembers xs) == ["admin"]) $
die ("Bad admin user list: " ++ show xs)
do info "Getting password change page for HackageTestUser1"
void $ validate (Auth "HackageTestUser1" "testpass1") "/user/HackageTestUser1/password"
do info "Getting password change page for HackageTestUser1 as an admin"
void $ validate (Auth "admin" "admin") "/user/HackageTestUser1/password"
do info "Getting password change page for HackageTestUser1 as another user"
checkIsForbidden (Auth "HackageTestUser2" "testpass2") "/user/HackageTestUser1/password"
do info "Getting password change page for HackageTestUser1 with bad password"
checkIsUnauthorized (Auth "HackageTestUser1" "badpass") "/user/HackageTestUser1/password"
do info "Getting password change page for HackageTestUser1 with bad username"
checkIsUnauthorized (Auth "baduser" "testpass1") "/user/HackageTestUser1/password"
do info "Changing password for HackageTestUser2"
post (Auth "HackageTestUser2" "testpass2") "/user/HackageTestUser2/password" [
("password", "newtestpass2")
, ("repeat-password", "newtestpass2")
, ("_method", "PUT")
]
do info "Checking password has changed"
void $ validate (Auth "HackageTestUser2" "newtestpass2") "/user/HackageTestUser2/password"
checkIsUnauthorized (Auth "HackageTestUser2" "testpass2") "/user/HackageTestUser2/password"
do info "Trying to delete HackageTestUser2 as HackageTestUser2"
delete isForbidden (Auth "HackageTestUser2" "newtestpass2") "/user/HackageTestUser2"
xs <- fmap (map userName) getUsers
unless (xs == ["admin","HackageTestUser1","HackageTestUser2"]) $
die ("Bad user list: " ++ show xs)
do info "Deleting HackageTestUser2 as admin"
delete isNoContent (Auth "admin" "admin") "/user/HackageTestUser2"
xs <- fmap (map userName) getUsers
unless (xs == ["admin","HackageTestUser1"]) $
die ("Bad user list: " ++ show xs)
do info "Getting user info for HackageTestUser1"
xs <- validate NoAuth "/user/HackageTestUser1"
--TODO: set the user's real name, and then look for that here
unless ("HackageTestUser1" `isInfixOf` xs) $
die ("Bad user info: " ++ show xs)
runPackageUploadTests :: IO ()
runPackageUploadTests = do
do info "Getting package list"
xs <- map packageName `liftM` getPackages
unless (xs == []) $
die ("Bad package list: " ++ show xs)
do info "Trying to upload testpackage"
postFile isForbidden
(Auth "HackageTestUser1" "testpass1")
"/packages/" "package"
(testpackageTarFilename, testpackageTarFileContent)
do info "Adding HackageTestUser1 to uploaders"
post (Auth "admin" "admin") "/packages/uploaders/" [
("user", "HackageTestUser1")
]
do info "Uploading testpackage"
postFile isOk
(Auth "HackageTestUser1" "testpass1")
"/packages/" "package"
(testpackageTarFilename, testpackageTarFileContent)
where
(testpackageTarFilename, testpackageTarFileContent, _, _, _, _) =
testpackage
runPackageTests :: IO ()
runPackageTests = do
do info "Getting package list"
xs <- map packageName `liftM` getPackages
unless (xs == ["testpackage"]) $
die ("Bad package list: " ++ show xs)
do info "Getting package index"
targz <- getUrl NoAuth "/packages/index.tar.gz"
let tar = GZip.decompress $ LBS.pack targz
entries = Tar.foldEntries (:) [] (error . show) $ Tar.read tar
entryFilenames = map Tar.entryPath entries
entryContents = map Tar.entryContent entries
unless (entryFilenames == [testpackageCabalIndexFilename]) $
die ("Bad index filenames: " ++ show entryFilenames)
case entryContents of
[Tar.NormalFile bs _]
| LBS.unpack bs == testpackageCabalFile ->
return ()
_ ->
die "Bad index contents"
do info "Getting package index with etag"
validateETagHandling "/packages/index.tar.gz"
do info "Getting testpackage info"
xs <- validate NoAuth "/package/testpackage"
unless ("The testpackage package" `isInfixOf` xs) $
die ("Bad package info: " ++ show xs)
do info "Getting testpackage-1.0.0.0 info"
xs <- validate NoAuth "/package/testpackage-1.0.0.0"
unless ("The testpackage package" `isInfixOf` xs) $
die ("Bad package info: " ++ show xs)
do info "Getting testpackage Cabal file"
cabalFile <- getUrl NoAuth "/package/testpackage-1.0.0.0/testpackage.cabal"
unless (cabalFile == testpackageCabalFile) $
die "Bad Cabal file"
do info "Getting testpackage tar file"
tarFile <- getUrl NoAuth "/package/testpackage/testpackage-1.0.0.0.tar.gz"
unless (tarFile == testpackageTarFileContent) $
die "Bad tar file"
do info "Getting testpackage source"
hsFile <- getUrl NoAuth ("/package/testpackage/src" </> testpackageHaskellFilename)
unless (hsFile == testpackageHaskellFileContent) $
die "Bad Haskell file"
do info "Getting testpackage source with etag"
validateETagHandling ("/package/testpackage/src" </> testpackageHaskellFilename)
do info "Getting testpackage maintainer info"
xs <- getGroup "/package/testpackage/maintainers/.json"
unless (map userName (groupMembers xs) == ["HackageTestUser1"]) $
die "Bad maintainers list"
where
(_, testpackageTarFileContent,
testpackageCabalIndexFilename, testpackageCabalFile,
testpackageHaskellFilename, testpackageHaskellFileContent)
= testpackage
testpackage :: (FilePath, String, FilePath, String, FilePath, String)
testpackage = mkPackage "testpackage"
|
chrisdotcode/hackage-server
|
tests/HighLevelTest.hs
|
bsd-3-clause
| 10,106 | 0 | 17 | 2,730 | 2,238 | 1,046 | 1,192 | 194 | 4 |
{-# LANGUAGE CPP #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
module UnitTests.Distribution.Client.Glob (tests) where
#if !MIN_VERSION_base(4,8,0)
import Control.Applicative
#endif
import Data.Char
import Data.List
import Distribution.Text (display, parse, simpleParse)
import Distribution.Compat.ReadP
import Distribution.Client.Glob
import UnitTests.Distribution.Client.ArbitraryInstances
import Test.Tasty
import Test.Tasty.QuickCheck
import Test.Tasty.HUnit
import Control.Exception
tests :: [TestTree]
tests =
[ testProperty "print/parse roundtrip" prop_roundtrip_printparse
, testCase "parse examples" testParseCases
]
--TODO: [nice to have] tests for trivial globs, tests for matching,
-- tests for windows style file paths
prop_roundtrip_printparse :: FilePathGlob -> Bool
prop_roundtrip_printparse pathglob =
-- can't use simpleParse because it mis-handles trailing spaces
case [ x | (x, []) <- readP_to_S parse (display pathglob) ] of
xs@(_:_) -> last xs == pathglob
_ -> False
-- first run, where we don't even call updateMonitor
testParseCases :: Assertion
testParseCases = do
FilePathGlob (FilePathRoot "/") GlobDirTrailing <- testparse "/"
FilePathGlob FilePathHomeDir GlobDirTrailing <- testparse "~/"
FilePathGlob (FilePathRoot "A:\\") GlobDirTrailing <- testparse "A:/"
FilePathGlob (FilePathRoot "Z:\\") GlobDirTrailing <- testparse "z:/"
FilePathGlob (FilePathRoot "C:\\") GlobDirTrailing <- testparse "C:\\"
FilePathGlob FilePathRelative (GlobFile [Literal "_:"]) <- testparse "_:"
FilePathGlob FilePathRelative
(GlobFile [Literal "."]) <- testparse "."
FilePathGlob FilePathRelative
(GlobFile [Literal "~"]) <- testparse "~"
FilePathGlob FilePathRelative
(GlobDir [Literal "."] GlobDirTrailing) <- testparse "./"
FilePathGlob FilePathRelative
(GlobFile [Literal "foo"]) <- testparse "foo"
FilePathGlob FilePathRelative
(GlobDir [Literal "foo"]
(GlobFile [Literal "bar"])) <- testparse "foo/bar"
FilePathGlob FilePathRelative
(GlobDir [Literal "foo"]
(GlobDir [Literal "bar"] GlobDirTrailing)) <- testparse "foo/bar/"
FilePathGlob (FilePathRoot "/")
(GlobDir [Literal "foo"]
(GlobDir [Literal "bar"] GlobDirTrailing)) <- testparse "/foo/bar/"
FilePathGlob FilePathRelative
(GlobFile [WildCard]) <- testparse "*"
FilePathGlob FilePathRelative
(GlobFile [WildCard,WildCard]) <- testparse "**" -- not helpful but valid
FilePathGlob FilePathRelative
(GlobFile [WildCard, Literal "foo", WildCard]) <- testparse "*foo*"
FilePathGlob FilePathRelative
(GlobFile [Literal "foo", WildCard, Literal "bar"]) <- testparse "foo*bar"
FilePathGlob FilePathRelative
(GlobFile [Union [[WildCard], [Literal "foo"]]]) <- testparse "{*,foo}"
parseFail "{"
parseFail "}"
parseFail ","
parseFail "{"
parseFail "{{}"
parseFail "{}"
parseFail "{,}"
parseFail "{foo,}"
parseFail "{,foo}"
return ()
testparse :: String -> IO FilePathGlob
testparse s =
case simpleParse s of
Just p -> return p
Nothing -> throwIO $ HUnitFailure ("expected parse of: " ++ s)
parseFail :: String -> Assertion
parseFail s =
case simpleParse s :: Maybe FilePathGlob of
Just _ -> throwIO $ HUnitFailure ("expected no parse of: " ++ s)
Nothing -> return ()
instance Arbitrary FilePathGlob where
arbitrary = (FilePathGlob <$> arbitrary <*> arbitrary)
`suchThat` validFilePathGlob
shrink (FilePathGlob root pathglob) =
[ FilePathGlob root' pathglob'
| (root', pathglob') <- shrink (root, pathglob)
, validFilePathGlob (FilePathGlob root' pathglob') ]
validFilePathGlob :: FilePathGlob -> Bool
validFilePathGlob (FilePathGlob FilePathRelative pathglob) =
case pathglob of
GlobDirTrailing -> False
GlobDir [Literal "~"] _ -> False
GlobDir [Literal (d:":")] _
| isLetter d -> False
_ -> True
validFilePathGlob _ = True
instance Arbitrary FilePathRoot where
arbitrary =
frequency
[ (3, pure FilePathRelative)
, (1, pure (FilePathRoot unixroot))
, (1, FilePathRoot <$> windrive)
, (1, pure FilePathHomeDir)
]
where
unixroot = "/"
windrive = do d <- choose ('A', 'Z'); return (d : ":\\")
shrink FilePathRelative = []
shrink (FilePathRoot _) = [FilePathRelative]
shrink FilePathHomeDir = [FilePathRelative]
instance Arbitrary FilePathGlobRel where
arbitrary = sized $ \sz ->
oneof $ take (max 1 sz)
[ pure GlobDirTrailing
, GlobFile <$> (getGlobPieces <$> arbitrary)
, GlobDir <$> (getGlobPieces <$> arbitrary)
<*> resize (sz `div` 2) arbitrary
]
shrink GlobDirTrailing = []
shrink (GlobFile glob) =
GlobDirTrailing
: [ GlobFile (getGlobPieces glob') | glob' <- shrink (GlobPieces glob) ]
shrink (GlobDir glob pathglob) =
pathglob
: GlobFile glob
: [ GlobDir (getGlobPieces glob') pathglob'
| (glob', pathglob') <- shrink (GlobPieces glob, pathglob) ]
newtype GlobPieces = GlobPieces { getGlobPieces :: [GlobPiece] }
deriving Eq
instance Arbitrary GlobPieces where
arbitrary = GlobPieces . mergeLiterals <$> shortListOf1 5 arbitrary
shrink (GlobPieces glob) =
[ GlobPieces (mergeLiterals (getNonEmpty glob'))
| glob' <- shrink (NonEmpty glob) ]
mergeLiterals :: [GlobPiece] -> [GlobPiece]
mergeLiterals (Literal a : Literal b : ps) = mergeLiterals (Literal (a++b) : ps)
mergeLiterals (Union as : ps) = Union (map mergeLiterals as) : mergeLiterals ps
mergeLiterals (p:ps) = p : mergeLiterals ps
mergeLiterals [] = []
instance Arbitrary GlobPiece where
arbitrary = sized $ \sz ->
frequency
[ (3, Literal <$> shortListOf1 10 (elements globLiteralChars))
, (1, pure WildCard)
, (1, Union <$> resize (sz `div` 2) (shortListOf1 5 (shortListOf1 5 arbitrary)))
]
shrink (Literal str) = [ Literal str'
| str' <- shrink str
, not (null str')
, all (`elem` globLiteralChars) str' ]
shrink WildCard = []
shrink (Union as) = [ Union (map getGlobPieces (getNonEmpty as'))
| as' <- shrink (NonEmpty (map GlobPieces as)) ]
globLiteralChars :: [Char]
globLiteralChars = ['\0'..'\128'] \\ "*{},/\\"
|
mydaum/cabal
|
cabal-install/tests/UnitTests/Distribution/Client/Glob.hs
|
bsd-3-clause
| 6,410 | 0 | 16 | 1,426 | 1,962 | 989 | 973 | 149 | 4 |
{-# OPTIONS_GHC -fno-warn-redundant-constraints #-}
{-# LANGUAGE MultiParamTypeClasses, FunctionalDependencies,
FlexibleInstances, UndecidableInstances #-}
-- UndecidableInstances because (L a b) is no smaller than (C a b)
-- This one shows up another rather subtle functional-dependency
-- case. The error is:
--
-- Could not deduce (C a b') from the context (C a b)
-- arising from the superclasses of an instance declaration at Foo.hs:8:0
-- Probable fix: add (C a b') to the instance declaration superclass context
-- In the instance declaration for `C (Maybe a) a'
--
-- Since L is a superclass of the (sought) constraint (C a b'), you might
-- think that we'd generate the superclasses (L a b') and (L a b), and now
-- the fundep will force b=b'. But GHC is very cautious about generating
-- superclasses when doing context reduction for instance declarations,
-- because of the danger of superclass loops.
--
-- So, today, this program fails. It's trivial to fix by adding a fundep for C
-- class (G a, L a b) => C a b | a -> b
-- Note: Sept 08: when fixing Trac #1470, tc138 started working!
-- This test is a very strange one (fundeps, undecidable instances),
-- so I'm just marking it as "should-succeed". It's not very clear to
-- me what the "right" answer should be; when we have the type equality
-- story more worked out we might want to think about that.
module ShouldFail where
class G a
class L a b | a -> b
class (G a, L a b) => C a b
instance C a b' => G (Maybe a)
instance C a b => C (Maybe a) a
instance L (Maybe a) a
|
urbanslug/ghc
|
testsuite/tests/typecheck/should_fail/tcfail138.hs
|
bsd-3-clause
| 1,580 | 0 | 7 | 328 | 137 | 79 | 58 | -1 | -1 |
{-# LANGUAGE PartialTypeSignatures #-}
{-# LANGUAGE NoMonomorphismRestriction #-}
module ExtraNumAMROff where
foo :: _ => a
foo = 3
|
urbanslug/ghc
|
testsuite/tests/partial-sigs/should_compile/ExtraNumAMROff.hs
|
bsd-3-clause
| 133 | 0 | 6 | 20 | 21 | 13 | 8 | 5 | 1 |
{-# LANGUAGE TypeFamilies #-}
module T8227a where
type family V a :: *
type instance V Double = Double
type instance V (a -> b) = V b
|
urbanslug/ghc
|
testsuite/tests/indexed-types/should_fail/T8227a.hs
|
bsd-3-clause
| 140 | 0 | 6 | 34 | 44 | 27 | 17 | 5 | 0 |
-- | Should fail compilation because safe imports aren't enabled
-- not because of trying to import an unsafe module
module Mixed01 where
import safe System.IO.Unsafe
f :: Int
f = 1
|
urbanslug/ghc
|
testsuite/tests/safeHaskell/safeInfered/Mixed01.hs
|
bsd-3-clause
| 185 | 1 | 4 | 35 | 23 | 16 | 7 | -1 | -1 |
{-# LANGUAGE TemplateHaskell #-}
module T5968 where
data Bar a = Bar $( [t| a |] )
|
wxwxwwxxx/ghc
|
testsuite/tests/th/T5968.hs
|
bsd-3-clause
| 86 | 0 | 8 | 20 | 23 | 16 | 7 | 3 | 0 |
{-# LANGUAGE TemplateHaskell #-}
module Station.Types.VersionContext where
import Import
import qualified Data.Hashable as HA
import Lens.Micro.TH
import Station.Types.Card
import Station.Types.Version
-- | @Nothing@ means a local version. @Just Text@ means a version found at
-- that URL.
newtype VersionLocation = VersionLocation
{ _unVersionLocation :: Maybe Text }
deriving (Eq, Ord, Show, Generic, FromJSON, ToJSON)
instance HA.Hashable VersionLocation
type VersionInfo = VersionContext (CardHash, CardBytes)
-- | Things we want to know about each version while the app is running.
data VersionContext a = VersionContext
{ _vcHash :: VersionHash
, _vcVersion :: Version a
, _vcLocation :: NonEmpty VersionLocation
} deriving (Eq, Show, Functor)
instance FromJSON a => FromJSON (VersionContext a) where
parseJSON = withObject "VersionContext a" $ \o -> VersionContext
<$> o .: "hash"
<*> o .: "version"
<*> o .: "locations"
instance ToJSON a => ToJSON (VersionContext a) where
toJSON a = object
[ "hash" .= _vcHash a
, "version" .= _vcVersion a
, "locations" .= _vcLocation a
]
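-- As a sketch, a 'VersionContext' value therefore round-trips through a JSON
-- object of this shape (the field values here are placeholders):
--
-- > { "hash": ..., "version": ..., "locations": [ ... ] }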
linkFromVersionContext :: VersionContext a -> Link VersionHash
linkFromVersionContext vc = Link
{ _linkId = _versionId (_vcVersion vc)
, _linkHash = _vcHash vc
}
-- * Lenses
makeLenses ''VersionLocation
makeLenses ''VersionContext
|
seagreen/station
|
src/Station/Types/VersionContext.hs
|
mit
| 1,488 | 0 | 13 | 364 | 346 | 187 | 159 | 33 | 1 |
{-# LANGUAGE GADTs #-}
module AMx.TypeCheck where
import Prelude hiding (LT,GT)
import Data.Char(toUpper)
import Data.Map.Strict(Map)
import qualified Data.Map.Strict as Map
import AMx.Language(Argument(..), InstructionSpecification(..), Type(..))
import AMx.ParserMonad(ParserError(..), ParserMonad, Reason(..), getSpecifications, throwParserError)
getSpecification :: String -> ParserMonad i (InstructionSpecification i)
getSpecification name = do
specs <- getSpecifications
case Map.lookup upper_name specs of
Nothing -> throwParserError $ UnknownInstruction upper_name
Just spec -> return spec
where upper_name = map toUpper name
checkNullaryInstruction :: (InstructionSpecification a) -> ParserMonad i a
checkNullaryInstruction (Nullary _name f ) = return f
checkNullaryInstruction (Unary name _ _ ) = throwParserError $ WrongOperandCount name 1 0
checkNullaryInstruction (Binary name _ _ _) = throwParserError $ WrongOperandCount name 2 0
checkUnaryInstruction :: Argument -> (InstructionSpecification a) -> ParserMonad i a
checkUnaryInstruction _ (Nullary name _ ) = throwParserError $ WrongOperandCount name 0 1
checkUnaryInstruction arg (Unary _name f t ) = checkType arg t >>= \v -> return $ f v
checkUnaryInstruction _ (Binary name _ _ _) = throwParserError $ WrongOperandCount name 2 1
checkBinaryInstruction :: Argument -> Argument -> (InstructionSpecification a) -> ParserMonad i a
checkBinaryInstruction _ _ (Nullary name _ ) = throwParserError $ WrongOperandCount name 0 2
checkBinaryInstruction _ _ (Unary name _ _ ) = throwParserError $ WrongOperandCount name 1 2
checkBinaryInstruction arg1 arg2 (Binary _name f type1 type2) = do
value1 <- checkType arg1 type1
value2 <- checkType arg2 type2
return $ f value1 value2
checkType :: Argument -> Type a -> ParserMonad i a
checkType (IntArgument v) IntType = return v
checkType (StringArgument v) StringType = return v
checkType _ _ = throwParserError $ OtherError "Wrong type."
|
sebschrader/programmierung-ss2015
|
AMx/TypeCheck.hs
|
mit
| 2,097 | 0 | 11 | 417 | 647 | 330 | 317 | 34 | 2 |
module Present where
import Text.ParserCombinators.Parsec
import Control.Applicative hiding (many, (<|>))
data Present = Present Integer Integer Integer
deriving Show
integer = rd <$> many1 digit
where rd = read :: String -> Integer
eol = (char '\n' <|> (char '\r' >> option '\n' (char '\n'))) >> return ()
presents = many present <* eof
present :: GenParser Char st Present
present =
Present <$> integer
<* char 'x'
<*> integer
<* char 'x'
<*> integer
<* eol
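-- A quick hypothetical check of the parser above:
--
-- >>> parse presents "" "2x3x4\n1x1x10\n"
-- Right [Present 2 3 4,Present 1 1 10]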
|
corajr/adventofcode2015
|
2/Present.hs
|
mit
| 535 | 0 | 12 | 152 | 172 | 92 | 80 | 17 | 1 |
{-# LANGUAGE GeneralizedNewtypeDeriving, FlexibleInstances #-}
module Homework.Week07.Sized where
import Data.Monoid
newtype Size = Size Int
deriving (Eq, Ord, Show, Num)
getSize :: Size -> Int
getSize (Size i) = i
class Sized a where
size :: a -> Size
instance Sized Size where
size = id
-- This instance means that things like
-- (Foo, Size)
-- (Foo, (Bar, Size))
-- ...
-- are all instances of Sized.
instance Sized b => Sized (a,b) where
size = size . snd
instance Monoid Size where
mempty = Size 0
mappend = (+)
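-- A couple of hypothetical examples of how these instances combine:
--
-- >>> getSize (size ('x', Size 5))
-- 5
-- >>> getSize (Size 2 `mappend` Size 3)
-- 5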
|
laser/cis-194-spring-2017
|
src/Homework/Week07/Sized.hs
|
mit
| 544 | 0 | 7 | 118 | 152 | 86 | 66 | 16 | 1 |
module Model where
import Prelude
import Yesod
import Data.Text (Text)
import Database.Persist.Quasi
import Database.Persist.MongoDB hiding (master)
import Language.Haskell.TH.Syntax
import Yesod.Markdown (Markdown)
import Yesod.Auth.HashDB (HashDBUser(..))
-- You can define all of your database entities in the entities file.
-- You can find more information on persistent and how to declare entities
-- at:
-- http://www.yesodweb.com/book/persistent/
-- note that the UniqueUsername constraint on User will not work with Mongo,
-- to stop duplicate usernames from being entered you should create a
-- unique index on the user collection like so:
-- db.user.ensureIndex({ username : 1}, { unique : true })
let mongoSettings = (mkPersistSettings (ConT ''MongoBackend))
{ mpsGeneric = True
}
in share [mkPersist mongoSettings]
$(persistFileWith lowerCaseSettings "config/models")
instance HashDBUser (UserGeneric backend) where
userPasswordHash = Just . userPassword
userPasswordSalt = Just . userSalt
setSaltAndPasswordHash s h u = u { userSalt = s
, userPassword = h
}
|
ShaneKilkelly/YesodExample
|
Model.hs
|
mit
| 1,213 | 0 | 14 | 288 | 192 | 113 | 79 | -1 | -1 |
{-# LANGUAGE OverloadedStrings, NoImplicitPrelude #-}
module AppGlobalState where
import Data.Int (Int)
import Prelude (String, Bool)
import qualified Data.Text as T
import qualified DB
import qualified LocalAuth
import qualified Cache
import qualified GoogleAuth
import qualified Database.SQLite.Simple as SQL
data ArchiveSettings = ArchiveSettings {
asAllowedHosts :: [String],
asSearchPageSize :: Int
}
data AppGlobalState = AppGlobalState {
sqlConnection :: DB.Db,
sqlConnection2 :: SQL.Connection,
gauth :: GoogleAuth.GoogleAuthState T.Text,
lauth :: LocalAuth.LocalAuthState,
messageCache :: Cache.Cache Int T.Text,
host :: String,
appSettings :: ArchiveSettings,
googleAPIKey :: T.Text,
autoPoll :: Bool,
listenAddress :: T.Text,
listenPort :: Int
}
|
itsuart/fdc_archivist
|
src/AppGlobalState.hs
|
mit
| 791 | 0 | 10 | 125 | 184 | 118 | 66 | 25 | 0 |
module System.GratteExternalCommands
( execTesseract
, execConvert
, execConvertAppend
, execPDFToText
) where
import System.Process
import System.Exit
execTesseract :: FilePath -> FilePath -> IO (ExitCode, String)
execTesseract file tempFile = do
(exitCode, _, err) <-
readProcessWithExitCode
"tesseract"
[file, tempFile]
""
return (exitCode, err)
execConvert :: FilePath -> FilePath -> IO (ExitCode, String)
execConvert file tempDir = do
(exitCode, _, err) <-
readProcessWithExitCode
"convert"
[file, tempDir ++ "/temp-png.png"]
""
return (exitCode, err)
execConvertAppend :: [FilePath] -> FilePath -> IO (ExitCode, String)
execConvertAppend images singleImage = do
(exitCode, _, err) <-
readProcessWithExitCode
"convert"
(images ++ ["-append", singleImage])
""
return (exitCode, err)
execPDFToText :: FilePath -> FilePath -> IO (ExitCode, String)
execPDFToText file tempFile = do
(exitCode, _, err) <-
readProcessWithExitCode
"pdftotext"
[file, tempFile]
""
return (exitCode, err)
|
ostapneko/gratte-papier
|
src/System/GratteExternalCommands.hs
|
mit
| 1,103 | 0 | 11 | 243 | 336 | 183 | 153 | 39 | 1 |
module Correctify (correctify) where
import DNBFormat
import YNABFormat
correctify :: String -> String
correctify text =
unlines (dummyLine : result)
where
dummyLine = "Date,Payee,Category,Memo,Outflow,Inflow"
result = map convert textlines
textlines = tail $ lines text
convert = show . ynabFromDnb . readDnb
|
lstor/traxform
|
src/Correctify.hs
|
mit
| 361 | 0 | 8 | 93 | 84 | 46 | 38 | 10 | 1 |
{-# LANGUAGE OverloadedStrings #-}
import Control.Monad
import Data.HashMap.Strict (HashMap)
import qualified Data.HashMap.Strict as HM
import Data.List
import Data.Maybe
import Data.Text (Text)
import qualified Data.Tuple.Strict as S
import qualified Data.Text as T
import qualified Data.Text.IO as T
import System.Environment
import Lang.Zh.Anki
type ZiMap = HashMap (S.Pair Char Text) Text
noteToMap :: ZNote -> ZiMap
noteToMap (ZNote word _ pronDefs parts _ _) = HM.fromList $ zip
(zipWith (S.:!:) (T.unpack word)
(map T.toLower $ concatMap pSylls pronDefs))
parts
bestCharGloss g1 g2 = if T.length g1 > T.length g2 then g1 else g2
improveParts :: ZiMap -> ZNote -> ZNote
improveParts ziMap z@(ZNote word _ pronDefs _ _ _) = z {zParts = newParts}
where
zis = T.unpack word
PronDef sylls _ = head pronDefs
newParts = zipWith
(\zi syll -> fromJust $ HM.lookup (zi S.:!: T.toLower syll) ziMap)
zis sylls
tryImprove :: Bool -> ZiMap -> ZNote -> IO ()
tryImprove saveChanges ziMap note = do
let note2 = improveParts ziMap note
text = noteToText note
text2 = noteToText note2
noteDisp = T.intercalate "\n" . zParts
when (note /= note2 && text /= text2) $ do
T.putStrLn $ noteDisp note
T.putStrLn " ---> "
T.putStrLn $ noteDisp note2
T.putStrLn ""
when saveChanges $ updateNote note2
main :: IO ()
main = do
args <- getArgs
saveChanges <- case args of
[] -> return False
["--dry-run"] -> return False
["--save-changes"] -> return True
_ -> fail "Usage"
notes <- loadZhAnkiNotes
let ziMap = foldl' (HM.unionWith bestCharGloss) HM.empty $
map noteToMap notes
mapM_ (tryImprove saveChanges ziMap) notes
|
dancor/melang
|
src/Main/anki-hsk-share-best-parts.hs
|
mit
| 1,788 | 0 | 15 | 444 | 632 | 322 | 310 | 50 | 4 |
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE NoMonomorphismRestriction #-}
{-# LANGUAGE TemplateHaskell #-}
module Control.Eff.Logic.Test where
import Test.HUnit hiding (State)
import Control.Eff.Logic.Core
import Control.Monad
-- the inferred signature of testCut is insightful
testCut runChoice =
let cases = [tcut1, tcut2, tcut3, tcut4, tcut5, tcut6, tcut7, tcut8
, tcut9]
runCall = runChoice . call
in
forM_ cases $ \(test, result) ->
assertEqual "Cut: tcut" result (runCall test)
where
-- signature is inferred
-- tcut1 :: (Member Choose r, Member (Exc CutFalse) r) => Eff r Int
tc1 = (return (1::Int) `mplus` return 2) `mplus`
((cutfalse `mplus` return 4) `mplus`
return 5)
rc1 = [1,2]
tcut1 = (tc1, rc1)
-- Here we see nested call. It poses no problems...
tc2 = return (1::Int) `mplus`
call (return 2 `mplus` (cutfalse `mplus` return 3) `mplus`
return 4)
`mplus` return 5
rc2 = [1,2,5]
tcut2 = (tc2, rc2)
tcut3 = ((call tc1 `mplus` call (tc2 `mplus` cutfalse))
, rc1 ++ rc2)
tcut4 = ((call tc1 `mplus` (tc2 `mplus` cutfalse))
, rc1 ++ rc2)
tcut5 = ((call tc1 `mplus` (cutfalse `mplus` tc2))
, rc1)
tcut6 = ((call tc1 `mplus` call (cutfalse `mplus` tc2))
, rc1)
tcut7 = ((call tc1 `mplus` (cutfalse `mplus` tc2) `mplus` tc2)
, rc1)
tcut8 = ((call tc1 `mplus` call (cutfalse `mplus` tc2) `mplus` tc2)
, rc1 ++ rc2)
incrOrDecr = \x -> (return $! x + 1)
`mplus` cutfalse
`mplus` (return $! x - 1)
tc9 = tc1 >>= incrOrDecr
rc9 = [2]
tcut9 = (tc9, rc9)
-- tcut10 = ((return rc1 >>= incrOrDecr)
-- , rc9)
|
suhailshergill/extensible-effects
|
test/Control/Eff/Logic/Test.hs
|
mit
| 1,833 | 0 | 15 | 580 | 593 | 359 | 234 | 42 | 1 |
{-|
Module : Data.RDF.Encode.NQuads
Description : Representation and Incremental Processing of RDF Data
Copyright : Travis Whitaker 2016
License : MIT
Maintainer : [email protected]
Stability : Provisional
Portability : Portable
An encoder for
<https://www.w3.org/TR/2014/REC-n-quads-20140225/ RDF 1.1 N-Quads>.
'B.Builder's are used to support efficient incremental output.
-}
{-# LANGUAGE OverloadedStrings #-}
module Data.RDF.Encoder.NQuads (
-- * Graph Encoding
encodeRDFGraph
, encodeRDFGraphs
, encodeTriple
, encodeQuad
) where
import qualified Data.ByteString.Builder as B
import Data.RDF.Types
import Data.RDF.Encoder.Common
-- | Encodes a 'Triple' as a single line, i.e. with no graph label. Includes the
-- terminating period and newline.
encodeTriple :: Triple -> B.Builder
encodeTriple (Triple s p o) = encodeSubject s
<> B.byteString " "
<> encodePredicate p
<> B.byteString " "
<> encodeObject o
<> B.byteString " .\n"
-- | Encodes a 'Quad' as a single line. Includes the terminating period and
-- newline.
encodeQuad :: Quad -> B.Builder
encodeQuad (Quad t Nothing) = encodeTriple t
encodeQuad (Quad (Triple s p o) (Just g)) = encodeSubject s
<> B.byteString " "
<> encodePredicate p
<> B.byteString " "
<> encodeObject o
<> B.byteString " "
<> encodeEscapedIRI g
<> B.byteString " .\n"
-- | Encode a single 'RDFGraph' as a 'B.Builder'.
encodeRDFGraph :: RDFGraph -> B.Builder
encodeRDFGraph (RDFGraph Nothing ts) = mconcat $ map encodeTriple ts
encodeRDFGraph (RDFGraph (Just g) ts) = let qs = map (\t -> Quad t (Just g)) ts
in mconcat $ map encodeQuad qs
-- | Encode multiple 'RDFGraph's as a 'B.Builder'.
encodeRDFGraphs :: Foldable f => f RDFGraph -> B.Builder
encodeRDFGraphs = foldMap encodeRDFGraph
|
TravisWhitaker/rdf
|
src/Data/RDF/Encoder/NQuads.hs
|
mit
| 2,248 | 0 | 14 | 779 | 379 | 195 | 184 | 32 | 1 |
-- The sequence of triangle numbers is generated by adding the natural numbers
-- So the 7th triangle number would be 1 + 2 + 3 + 4 + 5 + 6 + 7 = 28
-- The first ten terms would be:
-- 1, 3, 6, 10, 15, 21, 28, 36, 45, 55, ...
-- Let us list the factors of the first seven triangle numbers:
-- 1: 1
-- 3: 1,3
-- 6: 1,2,3,6
-- 10: 1,2,5,10
-- 15: 1,3,5,15
-- 21: 1,3,7,21
-- 28: 1,2,4,7,14,28
-- We can see that 28 is the first triangle number to have over five divisors
-- What is the value of the first triangle number
-- to have over five hundred divisors?
-- http://stackoverflow.com/a/32172277
triangulars = scanl (+) 1 [2 ..]
isqrt :: Int -> Int
isqrt = floor . sqrt . fromIntegral
numDivisors :: Int -> Int
numDivisors num = 2 * length divs - (if r * r == num then 1 else 0)
  where r    = isqrt num
        divs = [x | x <- [1 .. r], num `mod` x == 0]
firstTriangleWithOverNDivisors n = head [t | t <- triangulars, numDivisors t > n]
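-- Worked example: 28 has the divisors 1,2,4,7,14,28, so numDivisors 28 == 6
-- and, as the problem statement notes, 28 is the first triangle number with
-- more than five divisors: firstTriangleWithOverNDivisors 5 == 28.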
|
sravan-s/euler
|
euler-0012/divisibleTriangleNum.hs
|
mit
| 876 | 0 | 12 | 185 | 143 | 85 | 58 | 6 | 1 |
{-# LANGUAGE DoRec, TypeSynonymInstances, FlexibleInstances #-}
module Fenfire.Cache where
-- Copyright (c) 2007, Benja Fallenstein, Tuukka Hastrup
-- This file is part of Fenfire.
--
-- Fenfire is free software; you can redistribute it and/or modify it under
-- the terms of the GNU General Public License as published by
-- the Free Software Foundation; either version 2 of the License, or
-- (at your option) any later version.
--
-- Fenfire is distributed in the hope that it will be useful, but WITHOUT
-- ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
-- or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
-- Public License for more details.
--
-- You should have received a copy of the GNU General
-- Public License along with Fenfire; if not, write to the Free
-- Software Foundation, Inc., 59 Temple Place, Suite 330, Boston,
-- MA 02111-1307 USA
import Fenfire.Utils
import Data.Bits
import Data.HashTable (HashTable)
import qualified Data.HashTable as HashTable
import Data.Int
import Data.IORef
import Data.Map (Map)
import qualified Data.Map as Map
import Data.Maybe (isJust, fromJust)
import Data.Unique
import Control.Monad (when)
import System.IO.Unsafe
import System.Mem.StableName
class Hashable a where
hash :: a -> Int32
instance Hashable String where
hash s = HashTable.hashString s
instance Hashable Int where
hash i = HashTable.hashInt i
instance Hashable Unique where
hash u = hash (hashUnique u)
instance Hashable (StableName a) where
hash n = hash (hashStableName n)
instance (Hashable a, Hashable b) => Hashable (a,b) where
hash (x,y) = hash x `xor` HashTable.hashInt (fromIntegral $ hash y)
type LinkedList a = IORef (LinkedNode a)
data LinkedNode a =
LinkedNode { lnPrev :: LinkedList a, lnValue :: IORef a,
lnNext :: LinkedList a }
| End { lnPrev :: LinkedList a, lnNext :: LinkedList a }
isEnd (LinkedNode _ _ _) = False
isEnd (End _ _) = True
newList :: IO (LinkedList a)
newList = do
rec let end = End p n
p <- newIORef end
n <- newIORef end
list <- newIORef end
return list
newNode :: a -> IO (LinkedNode a)
newNode x = do let err = error "Cache: access to not-yet-linked node"
p <- newIORef err; val <- newIORef x; n <- newIORef err
return (LinkedNode p val n)
appendNode :: LinkedNode a -> LinkedList a -> IO ()
appendNode node list = do n <- readIORef list; p <- readIORef (lnPrev n)
writeIORef (lnNext p) node; writeIORef (lnPrev n) node
writeIORef (lnPrev node) p; writeIORef (lnNext node) n
removeFirst :: LinkedList a -> IO a
removeFirst list = do l <- readIORef list; node <- readIORef (lnNext l)
removeNode node
readIORef (lnValue node)
removeNode :: LinkedNode a -> IO ()
removeNode node = do when (isEnd node) $ error "Cache: remove from empty list"
p <- readIORef (lnPrev node); n <- readIORef (lnNext node)
let err = error "Cache: access to unlinked node"
writeIORef (lnPrev node) err; writeIORef (lnNext node) err
writeIORef (lnNext p) n; writeIORef (lnPrev n) p
access :: LinkedList a -> LinkedNode a -> IO ()
access list node = do removeNode node; appendNode node list
add :: a -> LinkedList a -> IO (LinkedNode a)
add x list = do node <- newNode x; appendNode node list; return node
byAddress :: a -> StableName a
byAddress = unsafePerformIO . makeStableName
type Cache key value =
(IORef Int, Int, HashTable key (value, LinkedNode key), LinkedList key)
newCache :: (Eq key, Hashable key) => Int -> Cache key value
newCache maxsize = unsafePerformIO $ do ht <- HashTable.new (==) hash
lru <- newList; size <- newIORef 0
return (size, maxsize, ht, lru)
cached :: (Eq k, Hashable k) => k -> Cache k v -> v -> v
cached key (sizeRef, maxsize, cache, lru) val = unsafePerformIO $ do
mval' <- HashTable.lookup cache key
if isJust mval' then do
let (val', node) = fromJust mval'
access lru node
--putStrLn "Cache access"
return val'
else do
size <- readIORef sizeRef
--putStrLn ("Cache add, former size " ++ show size)
if size < maxsize then writeIORef sizeRef (size+1)
else do dropped <- removeFirst lru
HashTable.delete cache dropped
node <- add key lru
HashTable.insert cache key (val, node)
return val
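-- A hypothetical usage sketch (sqCache and memoSquare are illustrative names,
-- not part of the original module):
--
-- > sqCache :: Cache Int Integer
-- > sqCache = newCache 1024
-- >
-- > memoSquare :: Int -> Integer
-- > memoSquare n = cached n sqCache (fromIntegral n ^ (2 :: Int))
--
-- Repeated calls with the same key reuse the stored value and move it to the
-- back of the LRU list; once the cache already holds 1024 keys, inserting a
-- new one evicts the least recently used entry. In practice the top-level
-- cache would also carry a {-# NOINLINE #-} pragma so the table is shared.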
|
timthelion/fenfire
|
Fenfire/Cache.hs
|
gpl-2.0
| 4,699 | 0 | 15 | 1,315 | 1,390 | 692 | 698 | 86 | 3 |
--
-- Copyright (c) 2012 Citrix Systems, Inc.
--
-- This program is free software; you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation; either version 2 of the License, or
-- (at your option) any later version.
--
-- This program is distributed in the hope that it will be useful,
-- but WITHOUT ANY WARRANTY; without even the implied warranty of
-- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
--
-- You should have received a copy of the GNU General Public License
-- along with this program; if not, write to the Free Software
-- Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
--
{-# LANGUAGE NoMonomorphismRestriction, ScopedTypeVariables #-}
module XSWifi where
import System
import System.IO
import System.Posix.Syslog
import Rpc
import Rpc.Core
import App
import Error
import Tools.Log
import Tools.XenStore
import Tools.Process
import Tools.Misc
import Data.Int
import Data.Word
import Data.String
import Data.Bits
import Data.Maybe
import qualified Data.ByteString.Char8 as BC
import qualified Data.ByteString as B
import qualified Data.IntSet as IntSet
import qualified Data.Map as M
import qualified Data.Text.Lazy as TL
import Text.Printf
import Text.Regex.Posix
import Control.Applicative
import Control.Monad.Trans
import Control.Monad.Error
import qualified Control.Exception as E
import Control.Concurrent
import Control.Monad
import Control.Monad.Reader
import Rpc.Autogen.NetworkClient as NC
import NwTypes
import NetworkSlaveMethods
data WifiAuth = Wep | NoAuth
deriving (Show)
data WifiAp = WifiAp {
apAuth :: WifiAuth
, apSsid :: [Word8]
, apEssid :: String
, apMac :: String
, apFreq :: Int
, apMaxBitrate :: Int
, apQuality :: Int
} deriving (Show)
-- security capabilities
nm_802_11_AP_SEC_NONE = 0x0
nm_802_11_AP_SEC_PAIR_WEP40 = 0x1
nm_802_11_AP_SEC_PAIR_WEP104 = 0x2
nm_802_11_AP_SEC_PAIR_TKIP = 0x4
nm_802_11_AP_SEC_PAIR_CCMP = 0x8
nm_802_11_AP_SEC_GROUP_WEP40 = 0x10
nm_802_11_AP_SEC_GROUP_WEP104 = 0x20
nm_802_11_AP_SEC_GROUP_TKIP = 0x40
nm_802_11_AP_SEC_GROUP_CCMP = 0x80
nm_802_11_AP_SEC_KEY_MGMT_PSK = 0x100
nm_802_11_AP_SEC_KEY_MGMT_802_1X = 0x200
-- Update guest nodes instead of stubdom nodes
-- Note: this serves as a workaround for guest nodes not getting updated
deStubdom :: String -> IO (DomainId)
deStubdom domid = do
target <- xsRead ("/local/domain/" ++ domid ++ "/target")
case target of
Just x -> return (read x)
Nothing -> return (read domid)
-- Query first, then export
wifiXsQueryAndExport' :: DomainId -> String -> DomainId -> Rpc ()
wifiXsQueryAndExport' slaveDomid slaveObj guestDomid = do -- info "exporting wifi information to xenstore"
wifiQueryNm slaveDomid slaveObj >>= f
where
f Nothing = liftIO $ do
debug $ printf "Remove xenstore wlan nodes for domain %d" guestDomid
xsRm (activeApPath guestDomid)
xsRm (fakeInfoApPath guestDomid)
f (Just info) = do
debug $ printf "Export AP information %s " (activeApPath guestDomid)
wifiXsExportAp (activeApPath guestDomid) info
wifiXsExportFakeInfoAp guestDomid
activeApPath domid = "/local/domain/" ++ show domid ++ "/wlan/0"
fakeInfoApPath domid = "/local/domain/" ++ show domid ++ "/wlan/1"
wifiXsQueryAndExport :: DomainId -> String -> DomainId -> Rpc ()
wifiXsQueryAndExport slaveDomid slaveObj guestOrStubdomDomid = do
guestDomid <- liftIO $ deStubdom (show guestOrStubdomDomid)
wifiXsQueryAndExport' slaveDomid slaveObj guestDomid
wifiQueryNm domid slaveNw = do
apInfo <- withNetworkSlave domid (NC.comCitrixXenclientNetworkConfigGetExtraInfo slaveService slaveNw)
if (M.null apInfo)
then return Nothing
else do
let wifiAp = WifiAp {apAuth = authMode apInfo
, apSsid = B.unpack $ BC.pack $ M.findWithDefault "" eACTIVE_AP_SSID apInfo
, apEssid = "OpenXT Wireless"
, apMac = M.findWithDefault "" eACTIVE_AP_HWADDRESS apInfo
, apFreq = (read (M.findWithDefault "246200" eACTIVE_AP_FREQUENCY apInfo) :: Int) * 1000
, apMaxBitrate = read (M.findWithDefault "0" eACTIVE_AP_MAXBITRATE apInfo) :: Int
, apQuality = read (M.findWithDefault "100" eACTIVE_AP_STRENGTH apInfo) :: Int }
return $ Just wifiAp
where
authMode apInfo
| (wpa .&. wepMask) /= 0 = Wep
| (rsn .&. wepMask) /= 0 = Wep
| otherwise = NoAuth
where wpa = read (M.findWithDefault "0" eACTIVE_AP_WPAFLAGS apInfo) :: Word32
rsn = read (M.findWithDefault "0" eACTIVE_AP_RSNFLAGS apInfo) :: Word32
wepMask =
nm_802_11_AP_SEC_PAIR_WEP40 .|.
nm_802_11_AP_SEC_PAIR_WEP104 .|.
nm_802_11_AP_SEC_GROUP_WEP40 .|.
nm_802_11_AP_SEC_GROUP_WEP104
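          -- For reference, the mask above works out to 0x1 .|. 0x2 .|. 0x10
          -- .|. 0x20 = 0x33, so any WEP pairwise or group cipher capability
          -- reported by the AP selects 'Wep'.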
-- Write ap information to XenStore
wifiXsExportAp :: String -> WifiAp -> Rpc ()
wifiXsExportAp path ap = do
liftIO $ do
-- Write SSID in hex format
xsWrite (path ++ "/" ++ "ssid") hexSsid
-- Write network name
xsWrite (path ++ "/" ++ "essid") (apEssid ap)
-- Mac, frequency, bitrate, quality, security
xsWrite (path ++ "/" ++ "mac") (apMac ap)
xsWrite (path ++ "/" ++ "frequency") (show $ apFreq ap)
xsWrite (path ++ "/" ++ "quality") (show $ apQuality ap)
xsWrite (path ++ "/" ++ "auth") (strAuth $ apAuth ap)
where
hexSsid = concat $ map hex (apSsid ap)
hex b = printf "%02X" (fromIntegral b :: Int)
strAuth NoAuth = "none"
strAuth Wep = "wep"
wifiXsExportFakeGsmAp :: DomainId -> Rpc ()
wifiXsExportFakeGsmAp domid =
wifiXsExportAp (activeApPath domid) ap
where
activeApPath domid = "/local/domain/" ++ (show domid) ++ "/wlan/0"
ap = WifiAp { apAuth = NoAuth
, apSsid = [0x6A,0x65,0x64,0x74,0x65,0x73,0x74,0x36]
, apEssid = "OpenXT Wireless"
, apMac = "02:DE:1A:AD:BE:EF"
, apFreq = 2462000
, apMaxBitrate = 0
, apQuality = 100 }
wifiXsExportFakeInfoAp :: DomainId -> Rpc ()
wifiXsExportFakeInfoAp domid =
wifiXsExportAp (activeApPath domid) ap
where
activeApPath domid = "/local/domain/" ++ (show domid) ++ "/wlan/1"
ap = WifiAp { apAuth = NoAuth
, apSsid = [0x55, 0x73, 0x65, 0x20, 0x55, 0x49, 0x20, 0x56]
, apEssid = essid
, apMac = "02:DE:1B:AD:BE:EF"
, apFreq = 2462000
, apMaxBitrate = 0
, apQuality = 100 }
essid = "Hit Ctrl+0 to change network"
|
OpenXT/network
|
nwd/XSWifi.hs
|
gpl-2.0
| 7,123 | 0 | 20 | 1,959 | 1,527 | 835 | 692 | 140 | 2 |
module Ocram.Analysis.Types where
import Language.C.Data.Node (NodeInfo)
import Ocram.Symbols (Symbol)
import qualified Data.Graph.Inductive.Graph as G
import qualified Data.Graph.Inductive.PatriciaTree as G
import qualified Data.Map as Map
data Attribute =
Blocking
| Start
| Critical
deriving (Eq, Show)
data Label = Label {
lblName :: Symbol
, lblAttr :: [Attribute]
} deriving Show
type Node = (G.Node, Label)
type Edge = (G.Node, G.Node, NodeInfo)
type GraphData = G.Gr Label NodeInfo
type GraphIndex = Map.Map Symbol G.Node
data CallGraph = CallGraph {
grData :: GraphData
, grIndex :: GraphIndex
}
|
copton/ocram
|
ocram/src/Ocram/Analysis/Types.hs
|
gpl-2.0
| 635 | 0 | 9 | 116 | 190 | 121 | 69 | 22 | 0 |
{-# LANGUAGE TemplateHaskell, DeriveDataTypeable, UndecidableInstances, FlexibleContexts, FlexibleInstances #-}
module Code.LZ.Data where
import Code.Type ( BitSize (..), bits )
import Autolib.Reader
import Autolib.ToDoc
import Autolib.Size
import Autolib.Set
import Autolib.FiniteMap
import Data.Typeable
data Lempel_Ziv_Welch = Lempel_Ziv_Welch deriving ( Eq, Ord, Typeable )
data Lempel_Ziv_77 = Lempel_Ziv_77 deriving ( Eq, Ord, Typeable )
$(derives [makeReader, makeToDoc] [''Lempel_Ziv_Welch])
$(derives [makeReader, makeToDoc] [''Lempel_Ziv_77])
data Code_Letter a = Letter a
| Entry Int -- ^ num in dict
| Block { width :: Int, dist :: Int }
-- ^ relative position in stream
deriving ( Eq, Ord, Typeable )
$(derives [makeReader, makeToDoc] [''Code_Letter])
instance Size ( Code_Letter a ) where
size _ = 1 -- not used
instance Ord a => BitSize [ Code_Letter a ] where
bitSize xs =
let alpha = mkSet $ do Letter x <- xs ; return x
weight ( Letter _ ) = 1 + bits ( cardinality alpha )
weight ( Entry i ) = 1 + bits i
weight ( Block { dist = d, width = w })
= 1 + bits d + bits w
in sum $ map weight xs
data ( ToDoc [a], Ord a, Reader [a] )
=> Book a = Book
{ short :: Set a
, long :: FiniteMap [a] Int
}
deriving ( Eq, Ord, Typeable )
$(derives [makeReader, makeToDoc] [''Book])
leer :: ( ToDoc [a], Reader [a], Ord a ) => Book a
leer = Book { short = emptySet , long = emptyFM }
data ( ToDoc [a], ToDoc [b], Ord a, Reader [a], Reader [b] )
=> Cache a b = Cache
{ book :: Book a
, output :: [ b ]
}
$(derives [makeReader, makeToDoc] [''Cache])
blank :: ( ToDoc [a], ToDoc [b], Ord a, Reader [a], Reader [b] )
=> Cache a b
blank = Cache { book = leer , output = [] }
-- Local variables:
-- mode: haskell
-- End:
|
Erdwolf/autotool-bonn
|
src/Code/LZ/Data.hs
|
gpl-2.0
| 1,986 | 0 | 14 | 589 | 726 | 399 | 327 | 44 | 1 |
{-|
Module : SQLite
Description : used for database-manipulation and such stuff
Copyright : (c) Stefan Naumann, 2016
License : GPL-3
Maintainer : [email protected]
Stability : experimental
This module includes functions, mostly for retrieving games,
deleting games and managing everything around that, like transferring
the SqlValues into Game objects
-}
module SQLite where
import Game
import Control.Monad
import Database.HDBC
import Database.HDBC.Sqlite3
import Data.Bool
-- | executes packGame on a list of database-rows
unpackList :: [[SqlValue]] -- ^ list of SQL-rows
-> [Game] -- ^ list of resulting games
unpackList [] = []
unpackList (x:xs) = (packGame x) ++ unpackList xs
-- | creates a Game-object from a list of SQLvalues, ie a SQL-row
packGame :: [SqlValue] -- ^ sql-row
-> [Game] -- ^ the resulting game
packGame [] = []
packGame (i:x:y:z:xs) = [(Game (fromSql i) (fromSql x) (fromSql y) (fromSql z))]
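-- For example (hypothetical row),
-- packGame [toSql (1 :: Integer), toSql "Doom", toSql "id Software", toSql "GT Interactive"]
-- yields [Game 1 "Doom" "id Software" "GT Interactive"].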
-- | prints the rows of a database query to stdout
printRows :: [[SqlValue]] -- ^ SQL-rows
-> IO()
printRows [] = return ()
printRows (x:xs) = do printRow x
printRows xs
-- | prints a row of a game-query
printRow :: [SqlValue] -- ^ a sql-row
-> IO()
printRow [] = return ()
printRow (i:x:y:z:xs) = putStrLn ( "ID: ["++ fromSql(i) ++"] "++ fromSql(x) ++ " -- " ++ fromSql(y) ++ " -- " ++ fromSql(z) )
-- | initialises the database with two tables
initDatabase :: Connection -- ^ Database-Connection
-> IO(Bool) -- ^ returns True
initDatabase conn =
     do quickQuery' conn "CREATE TABLE IF NOT EXISTS game (id INT, title TEXT, developer TEXT, publisher TEXT)" []
        quickQuery' conn "CREATE TABLE IF NOT EXISTS game_price (gameId INT, ebayTitle TEXT, ebayURL TEXT, ebayPrice FLOAT, ebayGallery TEXT)" []
        return True;
-- | calls the connect-function
connectSQLite :: String -- ^ filepath to the sqlite-file
-> IO(Connection) -- ^ returns a database-connection
connectSQLite filepath = connectSqlite3 filepath
-- connectMySQL defaultMySQLConnectInfo { mysqlHost = srv, mysqlUser = usr, mysqlPassword = pass, mysqlDatabase=dbname }
-- | closes the database-connection again
closeSQLite :: Connection -- ^ Database-connection to shutdown
-> IO()
closeSQLite conn = disconnect conn
-- | queries the list of games from the database
queryGameList :: Connection -- ^ Database-connection
-> IO([[SqlValue]]) -- ^ returns the list of resulting sql-rows
queryGameList conn = quickQuery' conn "SELECT id, title, developer, publisher FROM game ORDER BY id ASC" []
-- | queries a game from the database, by ID
queryGame :: Connection -- ^ Database-connection
-> Integer -- ^ GameID
-> IO(Game) -- ^ returns the resulting game-object
queryGame conn id = do rows <- quickQuery' conn ("SELECT id, title, developer, publisher FROM game WHERE id=" ++ show id) []
(g:gs) <- (\ (x:xs) -> return (packGame x)) rows
return g
-- | prints the stored games as "table"
printGameTable :: Connection -- ^ Database-connection
-> IO()
printGameTable conn = do rows <- queryGameList conn
printRows rows
-- | retrieves a list of games from the database and packs it as game-objects
recvGameList :: Connection -- ^ Database-connection
-> IO([Game]) -- ^ returns the list of games
recvGameList conn = do rows <- queryGameList conn
return (unpackList rows)
-- | inserts a game into the database
insertGame :: Connection -- ^ Database-connection
-> Game -- ^ Game-object to be saved into the database
-> IO(Game) -- ^ returns the game-object again
insertGame conn (Game i t d p)
= do x <- run conn ("INSERT INTO game (title, developer, publisher) VALUES ('"++t++"', '"++d++"','"++p++"');") []
commit conn
putStrLn (show x ++ " Rows modified")
return (Game i t d p)
-- | edits a game and updates the database
editGame :: Connection -- ^ Database-connection
-> Game -- ^ the game-object with the old data
-> String -- ^ new title
-> String -- ^ new developer
-> String -- ^ new publisher
         -> IO(Bool) -- ^ returns True if updated, False otherwise
editGame conn (Game id t d p) title developer publisher =
if (i <= 0)
then return (False)
else
do x <- run conn queryString []
commit conn
putStrLn (show x ++ "Rows modified")
if (x <= 0)
then return ( False )
else return ( True )
  where queryString = "UPDATE game SET " ++ x ++ " WHERE id=" ++ show id
        (x, i) = complQString "title" (complQString "publisher" (complQString "developer" ("", 0) d developer) p publisher) t title
-- | completes the query string only if the entry has really changed
complQString :: String -- ^ the database-attribute to be changed
             -> (String, Integer) -- ^ the resulting string with the number of changed attributes
-> String -- ^ the original string
-> String -- ^ the changed string
-> (String, Integer) -- ^ returns the resulting string with the number of changed attributes
complQString mode (result, count) orig new
| orig == new = (result, count)
| count == 0 = (mode ++ " = '" ++ new ++ "'", count +1)
| otherwise = (result ++ ", " ++ mode ++ " = '" ++ new ++ "'", count +1)
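-- Worked example (illustrative, not from the original module): building the
-- SET clause for a changed title followed by an unchanged developer:
--
-- > complQString "developer" (complQString "title" ("", 0) "Old" "New") "DevCo" "DevCo"
-- >   == ("title = 'New'", 1)
--
-- The unchanged attribute leaves the accumulated pair untouched, so only the
-- changed one ends up in the UPDATE statement built by 'editGame'.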
-- | deletes a game (found via id) from the database
-- does not delete the price-information about the game!
deleteGame :: Connection -- ^ database-connection
-> Integer -- ^ the GameID
-> IO(Bool) -- ^ returns True if the game existed and is now removed
deleteGame conn id = do x <- run conn ("DELETE FROM game WHERE id=" ++ show id) []
commit conn
putStrLn ( show x ++ " Rows modified")
if (x <= 0) then return ( False )
else return ( True )
|
naums/GameCollection
|
haskell/SQLite.hs
|
gpl-3.0
| 6,791 | 0 | 15 | 2,348 | 1,338 | 700 | 638 | 94 | 3 |
module Chat.Cmd ( Command (..)
, cmdToMstring
, commandHelp
, parseCmd
)
where
import Text.Parsec
import Data.Functor.Identity
data Command = Quit
| Nick String
| Who
| Help
| CmdError String
| Message String
| PM String String
deriving (Show)
type Parser a = ParsecT String a Data.Functor.Identity.Identity Command
parseOnly :: String -> Command -> Parser a
parseOnly str x =
string str >> eof >> return x
parseQuit :: Parser a
parseQuit =
parseOnly "quit" Quit
parseNick :: Parser a
parseNick =
string "nick " >>
many1 letter >>= \name ->
eof >>
return (Nick name)
parseWho :: Parser a
parseWho =
parseOnly "who" Who
parseHelp :: Parser a
parseHelp =
parseOnly "help" Help
parsePM :: Parser a
parsePM =
string "pm" >>
many1 space >>
many1 letter >>= \target ->
many1 space >>
many1 anyChar >>= \msg ->
eof >>
return (PM target msg)
parseCommand :: Parser a
parseCommand =
(many.char) ' ' >>
char '/' >>
(try parseQuit <|>
try parseNick <|>
try parseWho <|>
try parseHelp <|>
try parsePM <|>
(many anyChar >> eof >> return (CmdError "Command not recognized") )
)
parseMessage :: Parser a
parseMessage = many anyChar >>= \msg ->
eof >>
return (Message msg)
cmdLine :: Parser a
cmdLine = try parseCommand <|> parseMessage
parseCmd :: String -> Command
parseCmd line =
either (\_ -> error "parse error")
id
(parse cmdLine "(unknown)" line)
cmdToMstring :: Command -> Maybe String
cmdToMstring (Message s) = Just s
cmdToMstring _ = Nothing
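-- Illustrative behaviour of the parsers above (not part of the original
-- module):
--
-- > parseCmd "/nick Alice"       -- Nick "Alice"
-- > parseCmd "hello there"       -- Message "hello there"
-- > cmdToMstring (Message "hi")  -- Just "hi"
-- > cmdToMstring Quit            -- Nothing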
commandHelp :: String
commandHelp =
unlines
[ "Commands:"
, " /quit - quit the chat"
, " /help - get this help message"
, " /nick STRING - change nickname to STRING"
, " /who - check who is logged in"
    , "  /pm NAME MESSAGE - send private MESSAGE to NAME"
]
|
seppeljordan/simplechat
|
Chat/Cmd.hs
|
gpl-3.0
| 2,072 | 0 | 12 | 680 | 573 | 296 | 277 | 75 | 1 |
-- Brainfuck compiler
import Parse
import System.Environment (getArgs)
import qualified System.Directory
import System.Console.Docopt (optionsWithUsageFile, getArg, isPresent, command, argument, shortOption)
import Control.Monad (when, unless)
import System.IO (hGetContents, stdin)
import FileUtilities (splitAtExt)
import System.Exit (exitSuccess, exitFailure)
import Link
import Assemble
bfcLibraryPath = "./Linux64Lib.o"
version = "0.1.0"
defaultOverhead = [ "[section .text]"
, "extern puts"
, "extern gets"
, "extern exit"
, "extern init"
, "global main"
, "main:"
, "call init"
]
oops :: String -> IO ()
oops string = putStrLn $ "[OOPS]: " ++ string
parseAssembleAndLink :: String -> String -> IO ()
parseAssembleAndLink code output = do
putStrLn "Compiling..."
writeFile asmfile nasmcode
putStrLn "Assembling..."
assemble asmfile objectfile
putStrLn "Linking..."
link [objectfile, bfcLibraryPath] output
putStrLn "Done!"
where asmfile = output ++ ".asm"
objectfile = output ++ ".o"
nasmcode = (unlines.(generateNASM defaultOverhead).parse) code
main :: IO ()
main = do
args <- optionsWithUsageFile "USAGE.txt"
print args
when (args `isPresent` (argument "<file>")) $ do
path <- args `getArg` (argument "<file>")
exists <- System.Directory.doesFileExist path
unless (exists) $ do
oops $ "The file " ++ show path ++ " does not exist in the current directory"
exitFailure
outputfile <- if args `isPresent` (shortOption 'o')
then args `getArg` (shortOption 'o')
else return $ (fst.splitAtExt) path
contents <- readFile path
parseAssembleAndLink contents outputfile
exitSuccess
contents <- hGetContents stdin
(putStr.unlines.(generateNASM defaultOverhead).parse) contents
exitSuccess
|
UndeadMastodon/HsBFC
|
Main.hs
|
gpl-3.0
| 2,047 | 2 | 15 | 577 | 524 | 271 | 253 | 53 | 2 |
-- | This module provides statistics needed for substitution matrices. It is a
-- very modest attempt to replicate some of the Blast statistics.
module Biobase.SubstMatrix.Statistics where
import Data.Vector.Unboxed (Unbox)
import Data.Vector.Fusion.Util
import Data.Vector.Fusion.Stream.Monadic as SM
import Debug.Trace
import Data.PrimitiveArray as PA
import Biobase.Primary.Letter
import Biobase.SubstMatrix.Types
-- | estimate Blast lambda.
--
-- TODO use ExceptT
estimateLambda
:: (Unbox s, Num s, Real s)
=> AASubstMat t s a b -> Double
{-# Inlinable estimateLambda #-}
estimateLambda (AASubstMat mat _) = go 1000 1 2 0 where
go count lambda high low
| count <= 0 = error "no convergence?!"
| (high-low) <= 0.001 = lambda
| s > 1 = go (count-1) ((lambda+low)/2) lambda low
| s <= 1 = go (count-1) ((lambda+high)/2) high lambda
where
-- get the rows and columns, needed to get the probs for each row/column right.
(ZZ:..r':..c') = PA.upperBound mat
r = fromRational $ toRational $ size r'
c = fromRational $ toRational $ size c'
-- sum of all scores
s = unId . SM.foldl' (+) 0 $ SM.map eachElem $ PA.assocsS mat
eachElem (Z:.i:.j, z) = (1/r) * (1/c) * exp (lambda * (fromRational $ toRational z))
|
choener/BiobaseBlast
|
Biobase/SubstMatrix/Statistics.hs
|
gpl-3.0
| 1,320 | 0 | 15 | 312 | 422 | 228 | 194 | 22 | 1 |
{-# Language CPP #-}
-- | Settings are centralized, as much as possible, into this file. This
-- includes database connection settings, static file locations, etc.
-- In addition, you can configure a number of different aspects of Yesod
-- by overriding methods in the Yesod typeclass. That instance is
-- declared in the Foundation.hs file.
module Settings where
import ClassyPrelude.Yesod
import qualified Control.Exception as Exception
import Data.Aeson (Result (..), fromJSON, withObject, (.!=),
(.:?))
import Data.FileEmbed (embedFile)
import Data.Yaml (decodeEither')
import Database.Persist.Sqlite (SqliteConf)
import Language.Haskell.TH.Syntax (Exp, Name, Q)
import Network.Wai.Handler.Warp (HostPreference)
import Yesod.Default.Config2 (applyEnvValue, configSettingsYml)
import Yesod.Default.Util (WidgetFileSettings, widgetFileNoReload,
widgetFileReload)
-- | Runtime settings to configure this application. These settings can be
-- loaded from various sources: defaults, environment variables, config files,
-- theoretically even a database.
data AppSettings = AppSettings
{ appStaticDir :: String
-- ^ Directory from which to serve static files.
, appDatabaseConf :: SqliteConf
-- ^ Configuration settings for accessing the database.
, appRoot :: Maybe Text
-- ^ Base for all generated URLs. If @Nothing@, determined
-- from the request headers.
, appHost :: HostPreference
-- ^ Host/interface the server should bind to.
, appPort :: Int
-- ^ Port to listen on
, appIpFromHeader :: Bool
-- ^ Get the IP address from the header when logging. Useful when sitting
-- behind a reverse proxy.
, appDetailedRequestLogging :: Bool
-- ^ Use detailed request logging system
, appShouldLogAll :: Bool
-- ^ Should all log messages be displayed?
, appReloadTemplates :: Bool
-- ^ Use the reload version of templates
, appMutableStatic :: Bool
-- ^ Assume that files in the static dir may change after compilation
, appSkipCombining :: Bool
-- ^ Perform no stylesheet/script combining
-- Example app-specific configuration values.
, appCopyright :: Text
-- ^ Copyright text to appear in the footer of the page
, appAnalytics :: Maybe Text
-- ^ Google Analytics code
, appAuthDummyLogin :: Bool
-- ^ Indicate if auth dummy login should be enabled.
}
data OAuthKeys = OAuthKeys
{ oauthKeysClientId :: Text
, oauthKeysClientSecret :: Text
}
instance FromJSON AppSettings where
parseJSON = withObject "AppSettings" $ \o -> do
let defaultDev =
#if DEVELOPMENT
True
#else
False
#endif
appStaticDir <- o .: "static-dir"
appDatabaseConf <- o .: "database"
appRoot <- o .:? "approot"
appHost <- fromString <$> o .: "host"
appPort <- o .: "port"
appIpFromHeader <- o .: "ip-from-header"
appDetailedRequestLogging <- o .:? "detailed-logging" .!= defaultDev
appShouldLogAll <- o .:? "should-log-all" .!= defaultDev
appReloadTemplates <- o .:? "reload-templates" .!= defaultDev
appMutableStatic <- o .:? "mutable-static" .!= defaultDev
appSkipCombining <- o .:? "skip-combining" .!= defaultDev
appCopyright <- o .: "copyright"
appAnalytics <- o .:? "analytics"
appAuthDummyLogin <- o .:? "auth-dummy-login" .!= defaultDev
return AppSettings {..}
-- | Settings for 'widgetFile', such as which template languages to support and
-- default Hamlet settings.
--
-- For more information on modifying behavior, see:
--
-- https://github.com/yesodweb/yesod/wiki/Overriding-widgetFile
widgetFileSettings :: WidgetFileSettings
widgetFileSettings = def
-- | How static files should be combined.
combineSettings :: CombineSettings
combineSettings = def
-- The rest of this file contains settings which rarely need changing by a
-- user.
widgetFile :: String -> Q Exp
widgetFile = (if appReloadTemplates compileTimeAppSettings
then widgetFileReload
else widgetFileNoReload)
widgetFileSettings
-- | Raw bytes at compile time of @config/settings.yml@
configSettingsYmlBS :: ByteString
configSettingsYmlBS = $(embedFile configSettingsYml)
-- | @config/settings.yml@, parsed to a @Value@.
configSettingsYmlValue :: Value
configSettingsYmlValue = either Exception.throw id
$ decodeEither' configSettingsYmlBS
-- | A version of @AppSettings@ parsed at compile time from @config/settings.yml@.
compileTimeAppSettings :: AppSettings
compileTimeAppSettings =
case fromJSON $ applyEnvValue False mempty configSettingsYmlValue of
Error e -> error e
Success settings -> settings
-- The following two functions can be used to combine multiple CSS or JS files
-- at compile time to decrease the number of http requests.
-- Sample usage (inside a Widget):
--
-- > $(combineStylesheets 'StaticR [style1_css, style2_css])
combineStylesheets :: Name -> [Route Static] -> Q Exp
combineStylesheets = combineStylesheets'
(appSkipCombining compileTimeAppSettings)
combineSettings
combineScripts :: Name -> [Route Static] -> Q Exp
combineScripts = combineScripts'
(appSkipCombining compileTimeAppSettings)
combineSettings
|
ackao/APRICoT
|
web/address-manager/Settings.hs
|
gpl-3.0
| 5,769 | 0 | 12 | 1,565 | 749 | 431 | 318 | -1 | -1 |
{-# LANGUAGE OverloadedStrings #-}
module Adapter.Tree where
import Prelude hiding (FilePath)
import Types.Base
import Types.ETree
import Types.EState
import Types.AppConfig(Config(..))
import qualified Adapter.Entry as Entry
import qualified Logic.ETree as ETree
textTreeToETree :: Tree Text -> ETree
textTreeToETree = ETree.reveal . fmap Entry.fromText
textTreeToEState :: Config -> Tree Text -> EState
textTreeToEState conf = ETree.toState conf . textTreeToETree
|
diegospd/pol
|
src/Adapter/Tree.hs
|
gpl-3.0
| 528 | 0 | 7 | 118 | 122 | 71 | 51 | 13 | 1 |
module Filter.TruthTables (makeTruthTables) where
import Text.Pandoc
import Filter.Util (splitIt, intoChunks,formatChunk, unlines', exerciseWrapper)
import Data.Map (fromList, toList, unions)
import Prelude
makeTruthTables :: Block -> Block
makeTruthTables cb@(CodeBlock (_,classes,extra) contents)
| "TruthTable" `elem` classes = Div ("",[],[]) $ map (activate classes extra) $ intoChunks contents
| otherwise = cb
makeTruthTables x = x
activate cls extra chunk
| "Simple" `elem` cls = template (opts [("tabletype","simple")])
| "Validity" `elem` cls = template (opts [("tabletype","validity")])
| "Partial" `elem` cls = template (opts [("tabletype","partial")])
| otherwise = RawBlock "html" "<div>No Matching Truth Table Type</div>"
where numof x = takeWhile (/= ' ') x
contentOf x = dropWhile (== ' ') . dropWhile (/= ' ') $ x
(h:t) = formatChunk chunk
opts adhoc = unions [fromList extra, fromList fixed, fromList adhoc]
fixed = [ ("type","truthtable")
, ("goal", contentOf h)
, ("submission", "saveAs:" ++ numof h)
]
template opts = exerciseWrapper (toList opts) (numof h) $ Div
("",[],map (\(x,y) -> ("data-carnap-" ++ x,y)) $ toList opts)
[Plain [Str (unlines' t)]]
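-- Illustrative note (not part of the original filter): 'numof' and 'contentOf'
-- split the first formatted line of a chunk at the first space, e.g.
--
-- > numof "2.1 P -> Q"     == "2.1"
-- > contentOf "2.1 P -> Q" == "P -> Q"
--
-- so the exercise number feeds the submission key and the remainder becomes
-- the goal attribute.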
|
gleachkr/Carnap
|
Carnap-Server/Filter/TruthTables.hs
|
gpl-3.0
| 1,378 | 0 | 15 | 368 | 516 | 278 | 238 | 25 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- Module : Network.AWS.Route53Domains.CheckDomainAvailability
-- Copyright : (c) 2013-2014 Brendan Hay <[email protected]>
-- License : This Source Code Form is subject to the terms of
-- the Mozilla Public License, v. 2.0.
-- A copy of the MPL can be found in the LICENSE file or
-- you can obtain it at http://mozilla.org/MPL/2.0/.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : experimental
-- Portability : non-portable (GHC extensions)
--
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- | This operation checks the availability of one domain name. You can access
-- this API without authenticating. Note that if the availability status of a
-- domain is pending, you must submit another request to determine the
-- availability of the domain name.
--
-- <http://docs.aws.amazon.com/Route53/latest/APIReference/api-CheckDomainAvailability.html>
module Network.AWS.Route53Domains.CheckDomainAvailability
(
-- * Request
CheckDomainAvailability
-- ** Request constructor
, checkDomainAvailability
-- ** Request lenses
, cdaDomainName
, cdaIdnLangCode
-- * Response
, CheckDomainAvailabilityResponse
-- ** Response constructor
, checkDomainAvailabilityResponse
-- ** Response lenses
, cdarAvailability
) where
import Network.AWS.Data (Object)
import Network.AWS.Prelude
import Network.AWS.Request.JSON
import Network.AWS.Route53Domains.Types
import qualified GHC.Exts
data CheckDomainAvailability = CheckDomainAvailability
{ _cdaDomainName :: Text
, _cdaIdnLangCode :: Maybe Text
} deriving (Eq, Ord, Read, Show)
-- | 'CheckDomainAvailability' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'cdaDomainName' @::@ 'Text'
--
-- * 'cdaIdnLangCode' @::@ 'Maybe' 'Text'
--
checkDomainAvailability :: Text -- ^ 'cdaDomainName'
-> CheckDomainAvailability
checkDomainAvailability p1 = CheckDomainAvailability
{ _cdaDomainName = p1
, _cdaIdnLangCode = Nothing
}
-- | The name of a domain.
--
-- Type: String
--
-- Default: None
--
-- Constraints: The domain name can contain only the letters a through z, the
-- numbers 0 through 9, and hyphen (-). Internationalized Domain Names are not
-- supported.
--
-- Required: Yes
cdaDomainName :: Lens' CheckDomainAvailability Text
cdaDomainName = lens _cdaDomainName (\s a -> s { _cdaDomainName = a })
-- | Reserved for future use.
cdaIdnLangCode :: Lens' CheckDomainAvailability (Maybe Text)
cdaIdnLangCode = lens _cdaIdnLangCode (\s a -> s { _cdaIdnLangCode = a })
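-- Illustrative request construction (not part of the original module). It
-- assumes the lens combinators '&' and '?~' are in scope, e.g. from
-- Control.Lens:
--
-- > checkDomainAvailability "example.com" & cdaIdnLangCode ?~ "en"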
newtype CheckDomainAvailabilityResponse = CheckDomainAvailabilityResponse
{ _cdarAvailability :: DomainAvailability
} deriving (Eq, Read, Show)
-- | 'CheckDomainAvailabilityResponse' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'cdarAvailability' @::@ 'DomainAvailability'
--
checkDomainAvailabilityResponse :: DomainAvailability -- ^ 'cdarAvailability'
-> CheckDomainAvailabilityResponse
checkDomainAvailabilityResponse p1 = CheckDomainAvailabilityResponse
{ _cdarAvailability = p1
}
-- | Whether the domain name is available for registering.
--
-- You can only register domains designated as 'AVAILABLE'.
--
-- Type: String
--
-- Valid values:
--
-- 'AVAILABLE' – The domain name is available. 'AVAILABLE_RESERVED' – The domain
-- name is reserved under specific conditions. 'AVAILABLE_PREORDER' – The domain
-- name is available and can be preordered. 'UNAVAILABLE' – The domain name is
-- not available. 'UNAVAILABLE_PREMIUM' – The domain name is not available. 'UNAVAILABLE_RESTRICTED' – The domain name is forbidden. 'RESERVED' – The domain name has been
-- reserved for another person or organization. 'DONT_KNOW' – The TLD registry
-- didn't reply with a definitive answer about whether the domain name is
-- available. Amazon Route 53 can return this response for a variety of reasons,
-- for example, the registry is performing maintenance. Try again later.
cdarAvailability :: Lens' CheckDomainAvailabilityResponse DomainAvailability
cdarAvailability = lens _cdarAvailability (\s a -> s { _cdarAvailability = a })
instance ToPath CheckDomainAvailability where
toPath = const "/"
instance ToQuery CheckDomainAvailability where
toQuery = const mempty
instance ToHeaders CheckDomainAvailability
instance ToJSON CheckDomainAvailability where
toJSON CheckDomainAvailability{..} = object
[ "DomainName" .= _cdaDomainName
, "IdnLangCode" .= _cdaIdnLangCode
]
instance AWSRequest CheckDomainAvailability where
type Sv CheckDomainAvailability = Route53Domains
type Rs CheckDomainAvailability = CheckDomainAvailabilityResponse
request = post "CheckDomainAvailability"
response = jsonResponse
instance FromJSON CheckDomainAvailabilityResponse where
parseJSON = withObject "CheckDomainAvailabilityResponse" $ \o -> CheckDomainAvailabilityResponse
<$> o .: "Availability"
|
romanb/amazonka
|
amazonka-route53-domains/gen/Network/AWS/Route53Domains/CheckDomainAvailability.hs
|
mpl-2.0
| 5,572 | 0 | 9 | 1,077 | 542 | 339 | 203 | 63 | 1 |
{-
This file is part of Tractor.
Tractor is free software: you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
Tractor is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.
You should have received a copy of the GNU Affero General Public License
along with Tractor. If not, see <http://www.gnu.org/licenses/>.
-}
{- |
Module : BackOffice.Agency
Description :
Copyright : (c) 2016 Akihiro Yamamoto
License : AGPLv3
Maintainer : https://github.com/ak1211
Stability : unstable
Portability : POSIX
-}
{-# LANGUAGE StrictData #-}
module BackOffice.Agency
where
import BackOffice.StockQuotesCrawler
import qualified Conf
-- |
-- Update the information
updateSomeRates :: Conf.Info -> IO ()
updateSomeRates conf =
webCrawling
where
-- |
    -- Web crawling
webCrawling = runWebCrawlingPortfolios conf
-- |
    -- Aggregation
-- aggregate = Aggregate.runAggregateOfPortfolios conf
|
ak1211/tractor
|
src/BackOffice/Agency.hs
|
agpl-3.0
| 1,311 | 0 | 7 | 292 | 59 | 37 | 22 | 8 | 1 |
module Git.Command.NameRev (run) where
run :: [String] -> IO ()
run args = return ()
|
wereHamster/yag
|
Git/Command/NameRev.hs
|
unlicense
| 85 | 0 | 7 | 15 | 42 | 23 | 19 | 3 | 1 |
{-# LANGUAGE ImplicitParams #-}
module ProgramCounting (
main,
countTag,
expandTag,
Context,
newContext,
newContextFromStrings,
defaultContext, -- ^ all operations allowed
defaultAllowedOp1,
defaultAllowedOp2,
EvalContext,
evalCtx,
eval,
expectedComplexity,
denumeralize,
buildCaches,
getCached,
getInputIdx,
getNumCachedProgs,
allFunctionsSpace,
findAllCachedMatches,
tag2expr
) where
import System.Environment (getArgs)
import Data.Array
import qualified Data.Array.Unboxed as AU
import Data.Array.IArray (amap)
import Data.List (sort, nub, findIndex)
import Control.Monad
import Text.Printf
import Data.Bits
import Data.Word
import Data.Map (Map)
import qualified Data.Map as M
import qualified Types as T
size = 43
allFunctionsSpace = [ AF n | n <- [1..size] ]
--inFoldExprCount, topLevelNoFoldCount, topLevelCount, tfoldCount :: Array Int Integer
type D = Int -- depth
data Tag = UF D | AFS D | AF D | TF D
| C0 | C1 | X | Y | Z
| Not Tag | Shl1 Tag | Shr1 Tag | Shr4 Tag | Shr16 Tag
| And Tag Tag | Or Tag Tag | Xor Tag Tag | Plus Tag Tag
| If0 Tag Tag Tag | Fold Tag Tag Tag
deriving (Show, Eq, Ord)
tag2expr :: Tag -> T.ExpC
tag2expr C0 = T.zero
tag2expr C1 = T.one
tag2expr X = T.mainArg
tag2expr Y = T.fold1Arg
tag2expr Z = T.fold2Arg
tag2expr (Not x) = T.not_ (tag2expr x)
tag2expr (Shl1 x) = T.shl1 (tag2expr x)
-- T.shr1, T.or_, T.xor_ and T.plus_ are assumed to exist in Types, by
-- analogy with T.shl1 and T.and_.
tag2expr (Shr1 x) = T.shr1 (tag2expr x)
tag2expr (Shr4 x) = T.shr4 (tag2expr x)
tag2expr (Shr16 x) = T.shr16 (tag2expr x)
tag2expr (And a b) = T.and_ (tag2expr a) (tag2expr b)
tag2expr (Or a b) = T.or_ (tag2expr a) (tag2expr b)
tag2expr (Xor a b) = T.xor_ (tag2expr a) (tag2expr b)
tag2expr (Plus a b) = T.plus_ (tag2expr a) (tag2expr b)
tag2expr (If0 a b c) = T.if0 (tag2expr a) (tag2expr b) (tag2expr c)
tag2expr (Fold a b c) = T.fold_ (tag2expr a) (tag2expr b) (tag2expr c)
isLeaf :: Tag -> Bool
isLeaf tag = tag `elem` [C0,C1,X,Y,Z]
-- Use newContext to construct this!
data Context = Ctx { allowedOp1 :: [AllowedOp1]
, allowedOp2 :: [AllowedOp2]
, isIfAllowed :: !Bool
, isFoldAllowed :: !Bool
, inFoldExprExpands :: Array Int [Tag]
, topLevelNoFoldExpands :: Array Int [Tag]
, topLevelExpands :: Array Int [Tag]
, inTFoldExprExpands :: Array Int [Tag]
, inFoldExprCounts :: Array Int Integer
, topLevelNoFoldCounts :: Array Int Integer
, topLevelCounts :: Array Int Integer
, inTFoldExprCounts :: Array Int Integer
}
type AllowedOp1 = Tag -> Tag
type AllowedOp2 = Tag -> Tag -> Tag
defaultAllowedOp1 = [Not, Shl1, Shr1, Shr4, Shr16]
defaultAllowedOp2 = [And, Or, Xor, Plus]
defaultContext :: Context
defaultContext = newContext defaultAllowedOp1 defaultAllowedOp2 True True
newContext :: [AllowedOp1] -> [AllowedOp2] -> Bool -> Bool -> Context
newContext allowedOp1 allowedOp2 isIfAllowed isFoldAllowed =
let ctx = (let ?ctx = ctx in Ctx
{ allowedOp1 = allowedOp1
, allowedOp2 = allowedOp2
, isIfAllowed = isIfAllowed
, isFoldAllowed = isFoldAllowed
, inFoldExprExpands = listArray (1,size) [ expandTag (UF n) | n <- [1..size] ]
, topLevelNoFoldExpands = listArray (1,size) [ expandTag (AFS n) | n <- [1..size] ]
, topLevelExpands = listArray (1,size) [ expandTag (AF n) | n <- [1..size] ]
, inTFoldExprExpands = listArray (1,size) [ expandTag (TF n) | n <- [1..size] ]
, inFoldExprCounts = amap (\expands -> sum $ map countTag expands) (inFoldExprExpands ctx)
, topLevelNoFoldCounts = amap (\expands -> sum $ map countTag expands) (topLevelNoFoldExpands ctx)
, topLevelCounts = amap (\expands -> sum $ map countTag expands) (topLevelExpands ctx)
, inTFoldExprCounts = amap (\expands -> sum $ map countTag expands) (inTFoldExprExpands ctx)
}) in ctx
newContextFromStrings :: [String] -> Context
newContextFromStrings operations = newContext op1 op2 ifOk foldOk
where
addOp :: String -> a -> [a] -> [a]
addOp op v vs | op `elem` operations = v:vs
| otherwise = vs
op1 = addOp "not" Not $
addOp "shl1" Shl1 $
addOp "shr1" Shr1 $
addOp "shr4" Shr4 $
addOp "shr16" Shr16 $
[]
op2 = addOp "and" And $
addOp "or" Or $
addOp "xor" Xor $
addOp "plus" Plus $
[]
ifOk = "if0" `elem` operations
foldOk = "fold" `elem` operations
expandTag :: (?ctx :: Context) => Tag -> [Tag]
expandTag (UF 1) = [C0, C1, X, Y, Z]
expandTag (UF n) = concat
[ [ op1 (UF $ n-1) | op1 <- allowedOp1 ?ctx ]
, [ op2 (UF i) (UF j) | op2 <- allowedOp2 ?ctx, i <- [1..n-2], let j = n-1-i, j <= i ]
, [ If0 (UF i) (UF j) (UF $ n-1-i-j) | isIfAllowed ?ctx, i <- [1..n-3], j <- [1..n-2-i] ]
]
expandTag (AFS 1) = [C0, C1, X]
expandTag (AFS n) = concat
[ [ op1 (AFS $ n-1) | op1 <- allowedOp1 ?ctx ]
, [ op2 (AFS i) (AFS j) | op2 <- allowedOp2 ?ctx, i <- [1..n-2], let j=n-1-i, j <= i ]
, [ If0 (AFS i) (AFS j) (AFS $ n-1-i-j) | isIfAllowed ?ctx, i <- [1..n-3], j <- [1..n-2-i] ]
]
expandTag af@(AF n) | isFoldAllowed ?ctx = expandTag' af
| otherwise = expandTag (AFS n)
where
expandTag' :: (?ctx :: Context) => Tag -> [Tag]
expandTag' (AF 1) = []
expandTag' (AF 2) = []
expandTag' (AF 3) = []
expandTag' (AF 4) = []
expandTag' (AF n) = concat
[ [ op1 (AF $ n-1) | op1 <- allowedOp1 ?ctx ]
, [ op2 (AF i) (AFS j) | op2 <- allowedOp2 ?ctx, i <- [1..n-2], let j=n-1-i, i >= 5 ]
, [ If0 (AF i) (AFS j) (AFS k) | isIfAllowed ?ctx, i <- [1..n-3], j <- [1..n-2-i], let k=n-1-i-j, i >= 5 ]
, [ If0 (AFS i) (AF j) (AFS k) | isIfAllowed ?ctx, i <- [1..n-3], j <- [1..n-2-i], let k=n-1-i-j, j >= 5 ]
, [ If0 (AFS i) (AFS j) (AF k) | isIfAllowed ?ctx, i <- [1..n-3], j <- [1..n-2-i], let k=n-1-i-j, k >= 5 ]
, [ Fold (AFS i) (AFS j) (UF k) | i <- [1..n-4], j <- [1..n-3-i], let k=n-2-i-j ]
]
expandTag (TF 1) = [C0, C1, Y, Z] -- can't use X!
expandTag (TF n) = concat
[ [ op1 (TF $ n-1) | op1 <- allowedOp1 ?ctx ]
, [ op2 (TF i) (TF j) | op2 <- allowedOp2 ?ctx, i <- [1..n-2], let j=n-1-i, j <= i ]
, [ If0 (TF i) (TF j) (TF $ n-1-i-j) | isIfAllowed ?ctx, i <- [1..n-3], j <- [1..n-2-i] ]
]
expandTag C0 = [C0]
expandTag C1 = [C1]
expandTag X = [X]
expandTag Y = [Y]
expandTag Z = [Z]
expandTag (Not tag) = map Not (expandTag tag)
expandTag (Shl1 tag) = map Shl1 (expandTag tag)
expandTag (Shr1 tag) = map Shr1 (expandTag tag)
expandTag (Shr4 tag) = map Shr4 (expandTag tag)
expandTag (Shr16 tag) = map Shr16 (expandTag tag)
expandTag (And tag1 tag2) = [ And a b | a <- expandTag tag1, b <- expandTag tag2 ]
expandTag (Or tag1 tag2) = [ Or a b | a <- expandTag tag1, b <- expandTag tag2 ]
expandTag (Xor tag1 tag2) = [ Xor a b | a <- expandTag tag1, b <- expandTag tag2 ]
expandTag (Plus tag1 tag2) = [ Plus a b | a <- expandTag tag1, b <- expandTag tag2 ]
expandTag (If0 tag1 tag2 tag3) = [ If0 a b c | a <- expandTag tag1, b <- expandTag tag2, c <- expandTag tag3 ]
expandTag (Fold tag1 tag2 tag3) = [ Fold a b c | a <- expandTag tag1, b <- expandTag tag2, c <- expandTag tag3 ]
countTag :: (?ctx :: Context) => Tag -> Integer
countTag C0 = 1
countTag C1 = 1
countTag X = 1
countTag Y = 1
countTag Z = 1
countTag (Not tag) = countTag tag
countTag (Shl1 tag) = countTag tag
countTag (Shr1 tag) = countTag tag
countTag (Shr4 tag) = countTag tag
countTag (Shr16 tag) = countTag tag
countTag (And tag1 tag2) = countTag tag1 * countTag tag2
countTag (Or tag1 tag2) = countTag tag1 * countTag tag2
countTag (Xor tag1 tag2) = countTag tag1 * countTag tag2
countTag (Plus tag1 tag2) = countTag tag1 * countTag tag2
countTag (If0 tag1 tag2 tag3) = countTag tag1 * countTag tag2 * countTag tag3
countTag (Fold tag1 tag2 tag3) = countTag tag1 * countTag tag2 * countTag tag3
countTag (UF n) = (inFoldExprCounts ?ctx) ! n
countTag (AFS n) = (topLevelNoFoldCounts ?ctx) ! n
countTag (AF n) = (topLevelCounts ?ctx) ! n
countTag (TF n) = (inTFoldExprCounts ?ctx) ! n
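-- A small sanity check (illustrative, not part of the original module): with
-- 'defaultContext' the size-1 in-fold expressions are exactly [C0,C1,X,Y,Z],
-- so
--
-- > let ?ctx = defaultContext in countTag (UF 1) -- 5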
data EvalContext = ECtx { x :: !Word64, y :: !Word64, z :: !Word64 }
eval :: (?ectx :: EvalContext) => Tag -> Word64
eval C0 = 0
eval C1 = 1
eval X = x ?ectx
eval Y = y ?ectx
eval Z = z ?ectx
eval (Not tag) = complement (eval tag)
eval (Shl1 tag) = shiftL (eval tag) 1
eval (Shr1 tag) = shiftR (eval tag) 1
eval (Shr4 tag) = shiftR (eval tag) 4
eval (Shr16 tag) = shiftR (eval tag) 16
eval (And tag1 tag2) = (eval tag1) .&. (eval tag2)
eval (Or tag1 tag2) = (eval tag1) .|. (eval tag2)
eval (Xor tag1 tag2) = (eval tag1) `xor` (eval tag2)
eval (Plus tag1 tag2) = (eval tag1) + (eval tag2)
eval (If0 tag1 tag2 tag3) = if eval tag1 == 0 then eval tag2 else eval tag3
eval (Fold tag1 tag2 tag3) = foldr foldOp seed [b1, b2, b3, b4, b5, b6, b7, b8]
where
foldOp :: (?ectx :: EvalContext) => Word64 -> Word64 -> Word64
foldOp y z = let ?ectx = ?ectx { y = y, z = z } in eval (tag3)
val = eval tag1
seed = eval tag2
b8 = val .&. 0xff
b7 = (val `shiftR` 8) .&. 0xff
b6 = (val `shiftR` 16) .&. 0xff
b5 = (val `shiftR` 24) .&. 0xff
b4 = (val `shiftR` 32) .&. 0xff
b3 = (val `shiftR` 40) .&. 0xff
b2 = (val `shiftR` 48) .&. 0xff
b1 = (val `shiftR` 56) .&. 0xff
evalCtx :: Word64 -> Word64 -> Word64 -> EvalContext
evalCtx x y z = ECtx { x = x, y = y, z = z }
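-- Illustrative evaluations (not part of the original module), using a context
-- where x = 5 and y, z are unused:
--
-- > let ?ectx = evalCtx 5 0 0 in eval (Plus X C1) -- 6
-- > let ?ectx = evalCtx 5 0 0 in eval (Shl1 C1)   -- 2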
-- | Give the expected complexity score (how many possible functions exist, given the restrictions).
-- Example:
-- > expectedComplexity (words "not shl1 shr1 shr4 shr16 and or xor plus if0 fold tfold") 16
expectedComplexity :: [String] -> Int -> Integer
expectedComplexity operations size_ =
if size_ > size then error "increase size in ProgramCounting.hs"
else let ?ctx = newContextFromStrings operations in
sum (map countTag tags)
where
isTFold = "tfold" `elem` operations
tags | isTFold = [ TF i | i <- [1..size_-4] ]
| otherwise = [ AF i | i <- [1..size_] ]
isBasicExpr :: Tag -> Bool
isBasicExpr UF{} = False
isBasicExpr AFS{} = False
isBasicExpr AF{} = False
isBasicExpr TF{} = False
isBasicExpr C0 = True
isBasicExpr C1 = True
isBasicExpr X = True
isBasicExpr Y = True
isBasicExpr Z = True
isBasicExpr (Not tag) = isBasicExpr tag
isBasicExpr (Shl1 tag) = isBasicExpr tag
isBasicExpr (Shr1 tag) = isBasicExpr tag
isBasicExpr (Shr4 tag) = isBasicExpr tag
isBasicExpr (Shr16 tag) = isBasicExpr tag
-- tag2 usually has smaller size
isBasicExpr (And tag1 tag2) = isBasicExpr tag2 && isBasicExpr tag1
isBasicExpr (Or tag1 tag2) = isBasicExpr tag2 && isBasicExpr tag1
isBasicExpr (Xor tag1 tag2) = isBasicExpr tag2 && isBasicExpr tag1
isBasicExpr (Plus tag1 tag2) = isBasicExpr tag2 && isBasicExpr tag1
isBasicExpr (If0 tag1 tag2 tag3) = isBasicExpr tag1 && isBasicExpr tag2 && isBasicExpr tag3
isBasicExpr (Fold tag1 tag2 tag3) = isBasicExpr tag1 && isBasicExpr tag2 && isBasicExpr tag3
isConst :: Tag -> Bool
isConst UF{} = False
isConst AFS{} = False
isConst AF{} = False
isConst TF{} = False
isConst C0 = True
isConst C1 = True
isConst (Not tag) = isConst tag
isConst (Shl1 tag) = isConst tag
isConst (Shr1 tag) = isConst tag
isConst (Shr4 tag) = isConst tag
isConst (Shr16 tag) = isConst tag
-- tag2 usually has smaller size
isConst (And tag1 tag2) = isConst tag2 && isConst tag1 || tag1 == C0 || tag2 == C0
isConst (Or tag1 tag2) = isConst tag2 && isConst tag1
isConst (Xor tag1 tag2) = isConst tag2 && isConst tag1
isConst (Plus tag1 tag2) = isConst tag2 && isConst tag1
isConst (If0 tag1 tag2 tag3) = isConst tag1 && isConst tag2 && isConst tag3
isConst (Fold tag1 tag2 tag3) = isConst tag1 && isConst tag2 && isConst tag3
isConst _ = False
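-- Illustrative examples (not part of the original module):
--
-- > isConst (Plus C1 C1) -- True
-- > isConst (Plus X C1)  -- False, X is the program input
-- > isConst (And X C0)   -- True, anything ANDed with the constant 0 is 0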
denumeralize :: (?ctx :: Context) => Integer -> [Tag] -> Tag
denumeralize x ts | length ts == 1 && isBasicExpr firstElem = firstElem
| otherwise = denumeralize (x-base) (expandTag xType)
where
firstElem = head ts
counts = map countTag ts
accCounts = scanl (+) 0 counts
precedingCounts = takeWhile (<=x) accCounts
xType = ts !! (length precedingCounts - 1)
base = last precedingCounts
data InterleavedCache = ICache !(AU.UArray Int Word64) !Int !Int [Word64] deriving Show
getInputIdx :: InterleavedCache -> Word64 -> Maybe Int
getInputIdx (ICache _ _ _ inputs) v = findIndex (==v) inputs
getNumCachedProgs :: InterleavedCache -> Int
getNumCachedProgs (ICache _ n _ _) = n
getCached :: InterleavedCache -> Int -> Int -> Word64
getCached (ICache vs cSize nInputs inputs) progId inputIdx = vs AU.! (nInputs*progId + inputIdx)
findAllCachedMatches :: InterleavedCache -> Int -> Word64 -> [Int]
findAllCachedMatches (ICache vs cSize nInputs inputs) outputIdx output =
[ progId | progId <- [0..cSize-1], let v = vs AU.! (progId*nInputs + outputIdx), v == output ]
buildCaches :: (?ctx :: Context) => [Word64] -> Int -> InterleavedCache
buildCaches inputs cacheSize = ICache (AU.listArray (0,iCacheSize-1) computedValues) cacheSize nInputs inputs
where
nInputs = length inputs
iCacheSize = (cacheSize*nInputs)
computedValues = [ evalProg prog x
| n <- [0..cacheSize-1]
, let prog = denumeralize (fromIntegral n) allFunctionsSpace
, x <- inputs
]
evalProg program x = eval program
where
?ectx = evalCtx x 0 0
main = do
[sizeStr] <- getArgs
let ts = [AF n | n <- [1..42]]
size = read sizeStr
maxN = sum $ map countTag [AF n | n <- [1..size]]
inputV = [0x1122334455667788, 0xFEDCBA9876543210, 1]
ICache cache _ _ _ = buildCaches inputV (fromIntegral maxN)
print maxN
print $ map (\i -> cache AU.! (fromIntegral $ (i+1)*maxN - 1)) [0,1,2]
where
?ctx = newContext defaultAllowedOp1 defaultAllowedOp2 True False
main' = do
putStrLn "size,inFoldExprCount,topLevelNoFoldCount,topLevelCount,tfoldCount"
forM_ [1..size] $ \s ->
printf "%d,%d,%d,%d,%d\n" s (countTag (UF s)) (countTag (AFS s)) (countTag (AF s)) (countTag (TF s))
where
?ctx = defaultContext
|
atemerev/icfpc2013
|
src/solver-lib/ProgramCounting.hs
|
apache-2.0
| 14,273 | 0 | 18 | 3,527 | 6,351 | 3,284 | 3,067 | 321 | 5 |
-- | Settings are centralized, as much as possible, into this file. This
-- includes database connection settings, static file locations, etc.
-- In addition, you can configure a number of different aspects of Yesod
-- by overriding methods in the Yesod typeclass. That instance is
-- declared in the Foundation.hs file.
module Settings where
import ClassyPrelude.Yesod
import Control.Exception (throw)
import Data.Aeson (Result (..), fromJSON, withObject, (.!=),
(.:?))
import Data.FileEmbed (embedFile)
import Data.Yaml (decodeEither')
import Database.Persist.Sqlite (SqliteConf)
import Language.Haskell.TH.Syntax (Exp, Name, Q)
import Network.Wai.Handler.Warp (HostPreference)
import Yesod.Default.Config2 (applyEnvValue, configSettingsYml)
import Yesod.Default.Util (WidgetFileSettings, widgetFileNoReload,
widgetFileReload)
-- | Runtime settings to configure this application. These settings can be
-- loaded from various sources: defaults, environment variables, config files,
-- theoretically even a database.
data AppSettings = AppSettings
{ appStaticDir :: String
-- ^ Directory from which to serve static files.
, appDatabaseConf :: SqliteConf
-- ^ Configuration settings for accessing the database.
, appRoot :: Text
-- ^ Base for all generated URLs.
, appHost :: HostPreference
-- ^ Host/interface the server should bind to.
, appPort :: Int
-- ^ Port to listen on
, appIpFromHeader :: Bool
-- ^ Get the IP address from the header when logging. Useful when sitting
-- behind a reverse proxy.
, appDetailedRequestLogging :: Bool
-- ^ Use detailed request logging system
, appShouldLogAll :: Bool
-- ^ Should all log messages be displayed?
, appReloadTemplates :: Bool
-- ^ Use the reload version of templates
, appMutableStatic :: Bool
-- ^ Assume that files in the static dir may change after compilation
, appSkipCombining :: Bool
-- ^ Perform no stylesheet/script combining
-- Example app-specific configuration values.
, appCopyright :: Text
-- ^ Copyright text to appear in the footer of the page
, appAnalytics :: Maybe Text
-- ^ Google Analytics code
, staticRoot :: Text
-- ^ Base for generated URLs for static content
, authentication :: Text
-- ^ The authentication method to use. Can only be 'dummy'.
}
instance FromJSON AppSettings where
parseJSON = withObject "AppSettings" $ \o -> do
let defaultDev =
#if DEVELOPMENT
True
#else
False
#endif
appStaticDir <- o .: "static-dir"
appDatabaseConf <- o .: "database"
appRoot <- o .: "approot"
appHost <- fromString <$> o .: "host"
appPort <- o .: "port"
appIpFromHeader <- o .: "ip-from-header"
appDetailedRequestLogging <- o .:? "detailed-logging" .!= defaultDev
appShouldLogAll <- o .:? "should-log-all" .!= defaultDev
appReloadTemplates <- o .:? "reload-templates" .!= defaultDev
appMutableStatic <- o .:? "mutable-static" .!= defaultDev
appSkipCombining <- o .:? "skip-combining" .!= defaultDev
appCopyright <- o .: "copyright"
appAnalytics <- o .:? "analytics"
staticRoot <- o .: "static-root"
authentication <- o .: "authentication"
return AppSettings {..}
-- | Settings for 'widgetFile', such as which template languages to support and
-- default Hamlet settings.
--
-- For more information on modifying behavior, see:
--
-- https://github.com/yesodweb/yesod/wiki/Overriding-widgetFile
widgetFileSettings :: WidgetFileSettings
widgetFileSettings = def
-- | How static files should be combined.
combineSettings :: CombineSettings
combineSettings = def
-- The rest of this file contains settings which rarely need changing by a
-- user.
widgetFile :: String -> Q Exp
widgetFile = (if appReloadTemplates compileTimeAppSettings
then widgetFileReload
else widgetFileNoReload)
widgetFileSettings
-- | Raw bytes at compile time of @config/settings.yml@
configSettingsYmlBS :: ByteString
configSettingsYmlBS = $(embedFile configSettingsYml)
-- | @config/settings.yml@, parsed to a @Value@.
configSettingsYmlValue :: Value
configSettingsYmlValue = either throw id $ decodeEither' configSettingsYmlBS
-- | A version of @AppSettings@ parsed at compile time from @config/settings.yml@.
compileTimeAppSettings :: AppSettings
compileTimeAppSettings =
case fromJSON $ applyEnvValue False mempty configSettingsYmlValue of
Error e -> error e
Success settings -> settings
-- The following two functions can be used to combine multiple CSS or JS files
-- at compile time to decrease the number of http requests.
-- Sample usage (inside a Widget):
--
-- > $(combineStylesheets 'StaticR [style1_css, style2_css])
combineStylesheets :: Name -> [Route Static] -> Q Exp
combineStylesheets = combineStylesheets'
(appSkipCombining compileTimeAppSettings)
combineSettings
combineScripts :: Name -> [Route Static] -> Q Exp
combineScripts = combineScripts'
(appSkipCombining compileTimeAppSettings)
combineSettings
|
jml/haverer-api
|
Settings.hs
|
apache-2.0
| 5,665 | 0 | 12 | 1,561 | 737 | 422 | 315 | -1 | -1 |
{-# LANGUAGE TemplateHaskell #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
module Controller
( withAutofocus
) where
import Autofocus
import Settings
import Yesod.Helpers.Static
import Yesod.Helpers.Auth
import Database.Persist.GenericSql
-- Import all relevant handler modules here.
import Handler.Handlers
-- This line actually creates our YesodSite instance. It is the second half
-- of the call to mkYesodData which occurs in Autofocus.hs. Please see
-- the comments there for more details.
mkYesodDispatch "Autofocus" resourcesAutofocus
-- Some default handlers that ship with the Yesod site template. You will
-- very rarely need to modify this.
getFaviconR :: Handler ()
getFaviconR = sendFile "image/x-icon" "favicon.ico"
getRobotsR :: Handler RepPlain
getRobotsR = return $ RepPlain $ toContent "User-agent: *"
-- This function allocates resources (such as a database connection pool),
-- performs initialization and creates a WAI application. This is also the
-- place to put your migrate statements to have automatic database
-- migrations handled by Yesod.
withAutofocus :: (Application -> IO a) -> IO a
withAutofocus f = Settings.withConnectionPool $ \p -> do
runConnectionPool (runMigration migrateAll) p
let h = Autofocus s p
toWaiApp h >>= f
where
s = fileLookupDir Settings.staticdir typeByExt
|
shepheb/autofocus
|
Controller.hs
|
bsd-2-clause
| 1,339 | 0 | 12 | 218 | 195 | 106 | 89 | 21 | 1 |
module Handler.Root where
import Import
import WebToInk.Converter.Logger (logi)
-- This is a handler function for the GET request method on the RootR
-- resource pattern. All of your resource patterns are defined in
-- config/routes
--
-- The majority of the code you will write in Yesod lives in these handler
-- functions. You can spread them across multiple files if you are so
-- inclined, or create a single monolithic file.
getRootR :: Handler RepHtml
getRootR = defaultLayout $ do
    liftIO $ logi "GET Root"
    aDomId <- lift newIdent
    setTitle "WebToInk homepage"
    $(widgetFile "homepage")
    addStylesheet $ StaticR css_homepage_css
    addScript $ StaticR js_spin_min_js
|
thlorenz/WebToInk
|
webtoink/Handler/Root.hs
|
bsd-2-clause
| 693 | 0 | 10 | 129 | 100 | 51 | 49 | 11 | 1 |
{-| Module describing an instance.
The instance data type holds very few fields, the algorithm
intelligence is in the "Node" and "Cluster" modules.
-}
{-
Copyright (C) 2009, 2010, 2011, 2012, 2013 Google Inc.
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
1. Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-}
module Ganeti.HTools.Instance
( Instance(..)
, Disk(..)
, AssocList
, List
, create
, isRunning
, isOffline
, notOffline
, instanceDown
, usesSecMem
, applyIfOnline
, setIdx
, setName
, setAlias
, setPri
, setSec
, setBoth
, setMovable
, specOf
, getTotalSpindles
, instBelowISpec
, instAboveISpec
, instMatchesPolicy
, shrinkByType
, localStorageTemplates
, hasSecondary
, requiredNodes
, allNodes
, usesLocalStorage
, mirrorType
, usesMemory
) where
import Control.Monad (liftM2)
import Ganeti.BasicTypes
import qualified Ganeti.HTools.Types as T
import qualified Ganeti.HTools.Container as Container
import Ganeti.HTools.Nic (Nic)
import Ganeti.Utils
-- * Type declarations
data Disk = Disk
{ dskSize :: Int -- ^ Size in bytes
, dskSpindles :: Maybe Int -- ^ Number of spindles
} deriving (Show, Eq)
-- | The instance type.
data Instance = Instance
{ name :: String -- ^ The instance name
, alias :: String -- ^ The shortened name
, mem :: Int -- ^ Memory of the instance
, dsk :: Int -- ^ Total disk usage of the instance
, disks :: [Disk] -- ^ Sizes of the individual disks
, vcpus :: Int -- ^ Number of VCPUs
, runSt :: T.InstanceStatus -- ^ Original run status
, pNode :: T.Ndx -- ^ Original primary node
, sNode :: T.Ndx -- ^ Original secondary node
, idx :: T.Idx -- ^ Internal index
, util :: T.DynUtil -- ^ Dynamic resource usage
, movable :: Bool -- ^ Can and should the instance be moved?
, autoBalance :: Bool -- ^ Is the instance auto-balanced?
, diskTemplate :: T.DiskTemplate -- ^ The disk template of the instance
, spindleUse :: Int -- ^ The numbers of used spindles
, allTags :: [String] -- ^ List of all instance tags
, exclTags :: [String] -- ^ List of instance exclusion tags
, arPolicy :: T.AutoRepairPolicy -- ^ Instance's auto-repair policy
, nics :: [Nic] -- ^ NICs of the instance
  , forthcoming  :: Bool              -- ^ Is the instance forthcoming?
} deriving (Show, Eq)
instance T.Element Instance where
nameOf = name
idxOf = idx
setAlias = setAlias
setIdx = setIdx
allNames n = [name n, alias n]
-- | Check if instance is running.
isRunning :: Instance -> Bool
isRunning (Instance {runSt = T.Running}) = True
isRunning (Instance {runSt = T.ErrorUp}) = True
isRunning _ = False
-- | Check if instance is offline.
isOffline :: Instance -> Bool
isOffline (Instance {runSt = T.StatusOffline}) = True
isOffline _ = False
-- | Helper to check if the instance is not offline.
notOffline :: Instance -> Bool
notOffline = not . isOffline
-- | Check if instance is down.
instanceDown :: Instance -> Bool
instanceDown inst | isRunning inst = False
instanceDown inst | isOffline inst = False
instanceDown _ = True
-- | Apply the function if the instance is online. Otherwise use
-- the initial value
applyIfOnline :: Instance -> (a -> a) -> a -> a
applyIfOnline = applyIf . notOffline
-- | Helper for determining whether an instance's memory needs to be
-- taken into account for secondary memory reservation.
usesSecMem :: Instance -> Bool
usesSecMem inst = notOffline inst && autoBalance inst
-- | Constant holding the local storage templates.
--
-- /Note:/ Currently Ganeti only exports node total/free disk space
-- for LVM-based storage; file-based storage is ignored in this model,
-- so even though file-based storage uses in reality disk space on the
-- node, in our model it won't affect it and we can't compute whether
-- there is enough disk space for a file-based instance. Therefore we
-- will treat this template as \'foreign\' storage.
localStorageTemplates :: [T.DiskTemplate]
localStorageTemplates = [ T.DTDrbd8, T.DTPlain ]
-- | Constant holding the movable disk templates.
--
-- This only determines the initial 'movable' state of the
-- instance. Further the movable state can be restricted more due to
-- user choices, etc.
movableDiskTemplates :: [T.DiskTemplate]
movableDiskTemplates =
[ T.DTDrbd8
, T.DTBlock
, T.DTSharedFile
, T.DTGluster
, T.DTRbd
, T.DTExt
]
-- | A simple name for the int, instance association list.
type AssocList = [(T.Idx, Instance)]
-- | A simple name for an instance map.
type List = Container.Container Instance
-- * Initialization
-- | Create an instance.
--
-- Some parameters are not initialized by this function, and must be set
-- later (via 'setIdx' for example).
create :: String -> Int -> Int -> [Disk] -> Int -> T.InstanceStatus
-> [String] -> Bool -> T.Ndx -> T.Ndx -> T.DiskTemplate -> Int
-> [Nic] -> Bool -> Instance
create name_init mem_init dsk_init disks_init vcpus_init run_init tags_init
auto_balance_init pn sn dt su nics_init forthcoming_init =
Instance { name = name_init
, alias = name_init
, mem = mem_init
, dsk = dsk_init
, disks = disks_init
, vcpus = vcpus_init
, runSt = run_init
, pNode = pn
, sNode = sn
, idx = -1
, util = T.baseUtil
, movable = supportsMoves dt
, autoBalance = auto_balance_init
, diskTemplate = dt
, spindleUse = su
, allTags = tags_init
, exclTags = []
, arPolicy = T.ArNotEnabled
, nics = nics_init
, forthcoming = forthcoming_init
}
-- | Changes the index.
--
-- This is used only during the building of the data structures.
setIdx :: Instance -- ^ The original instance
-> T.Idx -- ^ New index
-> Instance -- ^ The modified instance
setIdx t i = t { idx = i }
-- | Changes the name.
--
-- This is used only during the building of the data structures.
setName :: Instance -- ^ The original instance
-> String -- ^ New name
-> Instance -- ^ The modified instance
setName t s = t { name = s, alias = s }
-- | Changes the alias.
--
-- This is used only during the building of the data structures.
setAlias :: Instance -- ^ The original instance
-> String -- ^ New alias
-> Instance -- ^ The modified instance
setAlias t s = t { alias = s }
-- * Update functions
-- | Changes the primary node of the instance.
setPri :: Instance -- ^ the original instance
-> T.Ndx -- ^ the new primary node
-> Instance -- ^ the modified instance
setPri t p = t { pNode = p }
-- | Changes the secondary node of the instance.
setSec :: Instance -- ^ the original instance
-> T.Ndx -- ^ the new secondary node
-> Instance -- ^ the modified instance
setSec t s = t { sNode = s }
-- | Changes both nodes of the instance.
setBoth :: Instance -- ^ the original instance
-> T.Ndx -- ^ new primary node index
-> T.Ndx -- ^ new secondary node index
-> Instance -- ^ the modified instance
setBoth t p s = t { pNode = p, sNode = s }
-- | Sets the movable flag on an instance.
setMovable :: Instance -- ^ The original instance
-> Bool -- ^ New movable flag
-> Instance -- ^ The modified instance
setMovable t m = t { movable = m }
-- | Try to shrink the instance based on the reason why we can't
-- allocate it.
shrinkByType :: Instance -> T.FailMode -> Result Instance
shrinkByType inst T.FailMem = let v = mem inst - T.unitMem
in if v < T.unitMem
then Bad "out of memory"
else Ok inst { mem = v }
shrinkByType inst T.FailDisk =
let newdisks = [d {dskSize = dskSize d - T.unitDsk}| d <- disks inst]
v = dsk inst - (length . disks $ inst) * T.unitDsk
in if any (< T.unitDsk) $ map dskSize newdisks
then Bad "out of disk"
else Ok inst { dsk = v, disks = newdisks }
shrinkByType inst T.FailCPU = let v = vcpus inst - T.unitCpu
in if v < T.unitCpu
then Bad "out of vcpus"
else Ok inst { vcpus = v }
shrinkByType inst T.FailSpindles =
case disks inst of
[Disk ds sp] -> case sp of
Nothing -> Bad "No spindles, shouldn't have happened"
Just sp' -> let v = sp' - T.unitSpindle
in if v < T.unitSpindle
then Bad "out of spindles"
else Ok inst { disks = [Disk ds (Just v)] }
d -> Bad $ "Expected one disk, but found " ++ show d
shrinkByType _ f = Bad $ "Unhandled failure mode " ++ show f
-- | Get the number of disk spindles
getTotalSpindles :: Instance -> Maybe Int
getTotalSpindles inst =
foldr (liftM2 (+) . dskSpindles ) (Just 0) (disks inst)
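-- Illustrative note (not from the original source): because the fold combines
-- the 'Maybe' spindle counts with @liftM2 (+)@, a single disk without spindle
-- information makes the whole total unknown. For some instance @inst@:
--
-- > getTotalSpindles inst { disks = [Disk 1024 (Just 2), Disk 2048 (Just 3)] } -- Just 5
-- > getTotalSpindles inst { disks = [Disk 1024 (Just 2), Disk 2048 Nothing] }  -- Nothing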
-- | Return the spec of an instance.
specOf :: Instance -> T.RSpec
specOf Instance { mem = m, dsk = d, vcpus = c, disks = dl } =
let sp = case dl of
[Disk _ (Just sp')] -> sp'
_ -> 0
in T.RSpec { T.rspecCpu = c, T.rspecMem = m,
T.rspecDsk = d, T.rspecSpn = sp }
-- | Checks if an instance is smaller/bigger than a given spec. Returns
-- OpGood for a correct spec, otherwise Bad one of the possible
-- failure modes.
instCompareISpec :: Ordering -> Instance-> T.ISpec -> Bool -> T.OpResult ()
instCompareISpec which inst ispec exclstor
| which == mem inst `compare` T.iSpecMemorySize ispec = Bad T.FailMem
| which `elem` map ((`compare` T.iSpecDiskSize ispec) . dskSize)
(disks inst) = Bad T.FailDisk
| which == vcpus inst `compare` T.iSpecCpuCount ispec = Bad T.FailCPU
| exclstor &&
case getTotalSpindles inst of
Nothing -> True
Just sp_sum -> which == sp_sum `compare` T.iSpecSpindleUse ispec
= Bad T.FailSpindles
| not exclstor && which == spindleUse inst `compare` T.iSpecSpindleUse ispec
= Bad T.FailSpindles
| diskTemplate inst /= T.DTDiskless &&
which == length (disks inst) `compare` T.iSpecDiskCount ispec
= Bad T.FailDiskCount
| otherwise = Ok ()
-- | Checks if an instance is smaller than a given spec.
instBelowISpec :: Instance -> T.ISpec -> Bool -> T.OpResult ()
instBelowISpec = instCompareISpec GT
-- | Checks if an instance is bigger than a given spec.
instAboveISpec :: Instance -> T.ISpec -> Bool -> T.OpResult ()
instAboveISpec = instCompareISpec LT
-- | Checks if an instance matches a min/max specs pair
instMatchesMinMaxSpecs :: Instance -> T.MinMaxISpecs -> Bool -> T.OpResult ()
instMatchesMinMaxSpecs inst minmax exclstor = do
instAboveISpec inst (T.minMaxISpecsMinSpec minmax) exclstor
instBelowISpec inst (T.minMaxISpecsMaxSpec minmax) exclstor
-- | Checks if an instance matches any specs of a policy
instMatchesSpecs :: Instance -> [T.MinMaxISpecs] -> Bool -> T.OpResult ()
-- Return Ok for no constraints, though this should never happen
instMatchesSpecs _ [] _ = Ok ()
instMatchesSpecs inst minmaxes exclstor =
-- The initial "Bad" should be always replaced by a real result
foldr eithermatch (Bad T.FailInternal) minmaxes
where eithermatch mm (Bad _) = instMatchesMinMaxSpecs inst mm exclstor
eithermatch _ y@(Ok ()) = y
-- | Checks if an instance matches a policy.
instMatchesPolicy :: Instance -> T.IPolicy -> Bool -> T.OpResult ()
instMatchesPolicy inst ipol exclstor = do
instMatchesSpecs inst (T.iPolicyMinMaxISpecs ipol) exclstor
if diskTemplate inst `elem` T.iPolicyDiskTemplates ipol
then Ok ()
else Bad T.FailDisk
-- | Checks whether the instance uses a secondary node.
--
-- /Note:/ This should be reconciled with @'sNode' ==
-- 'Node.noSecondary'@.
hasSecondary :: Instance -> Bool
hasSecondary = (== T.DTDrbd8) . diskTemplate
-- | Computes the number of nodes for a given disk template.
requiredNodes :: T.DiskTemplate -> Int
requiredNodes T.DTDrbd8 = 2
requiredNodes _ = 1
-- | Computes all nodes of an instance.
allNodes :: Instance -> [T.Ndx]
allNodes inst = case diskTemplate inst of
T.DTDrbd8 -> [pNode inst, sNode inst]
_ -> [pNode inst]
-- | Checks whether a given disk template uses local storage.
usesLocalStorage :: Instance -> Bool
usesLocalStorage = (`elem` localStorageTemplates) . diskTemplate
-- | Checks whether a given disk template supports moves.
supportsMoves :: T.DiskTemplate -> Bool
supportsMoves = (`elem` movableDiskTemplates)
-- | A simple wrapper over 'T.templateMirrorType'.
mirrorType :: Instance -> T.MirrorType
mirrorType = T.templateMirrorType . diskTemplate
-- | Whether the instance uses memory on its host node.
-- Depends on the `InstanceStatus` and on whether the instance is forthcoming;
-- instances that aren't running or existent don't use memory.
usesMemory :: Instance -> Bool
usesMemory inst
| forthcoming inst = False
| otherwise = case runSt inst of
T.StatusDown -> False
T.StatusOffline -> False
T.ErrorDown -> False
T.ErrorUp -> True
T.NodeDown -> True -- value has little meaning when node is down
T.NodeOffline -> True -- value has little meaning when node is offline
T.Running -> True
T.UserDown -> False
T.WrongNode -> True
|
apyrgio/ganeti
|
src/Ganeti/HTools/Instance.hs
|
bsd-2-clause
| 14,818 | 0 | 20 | 3,816 | 2,901 | 1,618 | 1,283 | 253 | 9 |
{-
(c) The University of Glasgow 2006
(c) The GRASP/AQUA Project, Glasgow University, 1992-1998
TcMatches: Typecheck some @Matches@
-}
{-# LANGUAGE CPP #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE MultiWayIf #-}
{-# LANGUAGE TupleSections #-}
{-# LANGUAGE FlexibleContexts #-}
module TcMatches ( tcMatchesFun, tcGRHS, tcGRHSsPat, tcMatchesCase, tcMatchLambda,
TcMatchCtxt(..), TcStmtChecker, TcExprStmtChecker, TcCmdStmtChecker,
tcStmts, tcStmtsAndThen, tcDoStmts, tcBody,
tcDoStmt, tcGuardStmt
) where
import {-# SOURCE #-} TcExpr( tcSyntaxOp, tcInferSigmaNC, tcInferSigma
, tcCheckId, tcMonoExpr, tcMonoExprNC, tcPolyExpr )
import HsSyn
import TcRnMonad
import TcEnv
import TcPat
import TcMType
import TcType
import TcBinds
import TcUnify
import Name
import TysWiredIn
import Id
import TyCon
import TysPrim
import TcEvidence
import Outputable
import Util
import SrcLoc
import DynFlags
import PrelNames (monadFailClassName)
import qualified GHC.LanguageExtensions as LangExt
-- Create chunkified tuple types for monad comprehensions
import MkCore
import Control.Monad
import Control.Arrow ( second )
#include "HsVersions.h"
{-
************************************************************************
* *
\subsection{tcMatchesFun, tcMatchesCase}
* *
************************************************************************
@tcMatchesFun@ typechecks a @[Match]@ list which occurs in a
@FunMonoBind@. The second argument is the name of the function, which
is used in error messages. It checks that all the equations have the
same number of arguments before using @tcMatches@ to do the work.
Note [Polymorphic expected type for tcMatchesFun]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
tcMatchesFun may be given a *sigma* (polymorphic) type
so it must be prepared to use tcSkolemise to skolemise it.
See Note [sig_tau may be polymorphic] in TcPat.
-}
tcMatchesFun :: Located Name
-> MatchGroup Name (LHsExpr Name)
-> ExpRhoType -- Expected type of function
-> TcM (HsWrapper, MatchGroup TcId (LHsExpr TcId))
-- Returns type of body
tcMatchesFun fn@(L _ fun_name) matches exp_ty
= do { -- Check that they all have the same no of arguments
-- Location is in the monad, set the caller so that
-- any inter-equation error messages get some vaguely
-- sensible location. Note: we have to do this odd
-- ann-grabbing, because we don't always have annotations in
-- hand when we call tcMatchesFun...
traceTc "tcMatchesFun" (ppr fun_name $$ ppr exp_ty)
; checkArgs fun_name matches
; (wrap_gen, (wrap_fun, group))
<- tcSkolemiseET (FunSigCtxt fun_name True) exp_ty $ \ exp_rho ->
-- Note [Polymorphic expected type for tcMatchesFun]
do { (matches', wrap_fun)
<- matchExpectedFunTys herald arity exp_rho $
\ pat_tys rhs_ty ->
tcMatches match_ctxt pat_tys rhs_ty matches
; return (wrap_fun, matches') }
; return (wrap_gen <.> wrap_fun, group) }
where
arity = matchGroupArity matches
herald = text "The equation(s) for"
<+> quotes (ppr fun_name) <+> text "have"
match_ctxt = MC { mc_what = FunRhs fn Prefix, mc_body = tcBody }
{-
@tcMatchesCase@ doesn't do the argument-count check because the
parser guarantees that each equation has exactly one argument.
-}
tcMatchesCase :: (Outputable (body Name)) =>
TcMatchCtxt body -- Case context
-> TcSigmaType -- Type of scrutinee
-> MatchGroup Name (Located (body Name)) -- The case alternatives
-> ExpRhoType -- Type of whole case expressions
-> TcM (MatchGroup TcId (Located (body TcId)))
-- Translated alternatives
-- wrapper goes from MatchGroup's ty to expected ty
tcMatchesCase ctxt scrut_ty matches res_ty
= tcMatches ctxt [mkCheckExpType scrut_ty] res_ty matches
tcMatchLambda :: SDoc -- see Note [Herald for matchExpectedFunTys] in TcUnify
-> TcMatchCtxt HsExpr
-> MatchGroup Name (LHsExpr Name)
-> ExpRhoType -- deeply skolemised
-> TcM (MatchGroup TcId (LHsExpr TcId), HsWrapper)
tcMatchLambda herald match_ctxt match res_ty
= matchExpectedFunTys herald n_pats res_ty $ \ pat_tys rhs_ty ->
tcMatches match_ctxt pat_tys rhs_ty match
where
n_pats | isEmptyMatchGroup match = 1 -- must be lambda-case
| otherwise = matchGroupArity match
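-- A hedged illustration of the empty-match case above (user code, not part of
-- this module; the name 'absurdish' is made up): with LambdaCase and EmptyCase,
--
--   {-# LANGUAGE LambdaCase, EmptyCase #-}
--   import Data.Void (Void)
--
--   absurdish :: Void -> a
--   absurdish = \case {}
--
-- the match group is empty, yet the lambda still scrutinises one argument,
-- which is why n_pats defaults to 1 for an empty lambda-case.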
-- @tcGRHSsPat@ typechecks @[GRHSs]@ that occur in a @PatMonoBind@.
tcGRHSsPat :: GRHSs Name (LHsExpr Name) -> TcRhoType
-> TcM (GRHSs TcId (LHsExpr TcId))
-- Used for pattern bindings
tcGRHSsPat grhss res_ty = tcGRHSs match_ctxt grhss (mkCheckExpType res_ty)
where
match_ctxt = MC { mc_what = PatBindRhs,
mc_body = tcBody }
{-
************************************************************************
* *
\subsection{tcMatch}
* *
************************************************************************
Note [Case branches must never infer a non-tau type]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider
case ... of
... -> \(x :: forall a. a -> a) -> x
... -> \y -> y
Should that type-check? The problem is that, if we check the second branch
first, then we'll get a type (b -> b) for the branches, which won't unify
with the polytype in the first branch. If we check the first branch first,
then everything is OK. This order-dependency is terrible. So we want only
proper tau-types in branches (unless a sigma-type is pushed down).
This is what expTypeToType ensures: it replaces an Infer with a fresh
tau-type.
An even trickier case looks like
f x True = x undefined
f x False = x ()
Here, we see that the arguments must also be non-Infer. Thus, we must
use expTypeToType on the output of matchExpectedFunTys, not the input.
But we make a special case for a one-branch case. This is so that
f = \(x :: forall a. a -> a) -> x
still gets assigned a polytype.
-}
-- | When the MatchGroup has multiple RHSs, convert an Infer ExpType in the
-- expected type into TauTvs.
-- See Note [Case branches must never infer a non-tau type]
tauifyMultipleMatches :: [LMatch id body]
-> [ExpType] -> TcM [ExpType]
tauifyMultipleMatches group exp_tys
| isSingletonMatchGroup group = return exp_tys
| otherwise = mapM tauifyExpType exp_tys
-- NB: In the empty-match case, this ensures we fill in the ExpType
-- | Type-check a MatchGroup.
tcMatches :: (Outputable (body Name)) => TcMatchCtxt body
-> [ExpSigmaType] -- Expected pattern types
-> ExpRhoType -- Expected result-type of the Match.
-> MatchGroup Name (Located (body Name))
-> TcM (MatchGroup TcId (Located (body TcId)))
data TcMatchCtxt body -- c.f. TcStmtCtxt, also in this module
= MC { mc_what :: HsMatchContext Name, -- What kind of thing this is
mc_body :: Located (body Name) -- Type checker for a body of
-- an alternative
-> ExpRhoType
-> TcM (Located (body TcId)) }
tcMatches ctxt pat_tys rhs_ty (MG { mg_alts = L l matches
, mg_origin = origin })
= do { rhs_ty:pat_tys <- tauifyMultipleMatches matches (rhs_ty:pat_tys)
-- See Note [Case branches must never infer a non-tau type]
; matches' <- mapM (tcMatch ctxt pat_tys rhs_ty) matches
; pat_tys <- mapM readExpType pat_tys
; rhs_ty <- readExpType rhs_ty
; return (MG { mg_alts = L l matches'
, mg_arg_tys = pat_tys
, mg_res_ty = rhs_ty
, mg_origin = origin }) }
-------------
tcMatch :: (Outputable (body Name)) => TcMatchCtxt body
-> [ExpSigmaType] -- Expected pattern types
-> ExpRhoType -- Expected result-type of the Match.
-> LMatch Name (Located (body Name))
-> TcM (LMatch TcId (Located (body TcId)))
tcMatch ctxt pat_tys rhs_ty match
= wrapLocM (tc_match ctxt pat_tys rhs_ty) match
where
tc_match ctxt pat_tys rhs_ty match@(Match _ pats maybe_rhs_sig grhss)
= add_match_ctxt match $
do { (pats', grhss') <- tcPats (mc_what ctxt) pats pat_tys $
tc_grhss ctxt maybe_rhs_sig grhss rhs_ty
; return (Match (mc_what ctxt) pats' Nothing grhss') }
tc_grhss ctxt Nothing grhss rhs_ty
= tcGRHSs ctxt grhss rhs_ty -- No result signature
-- Result type sigs are no longer supported
tc_grhss _ (Just {}) _ _
= panic "tc_ghrss" -- Rejected by renamer
-- For (\x -> e), tcExpr has already said "In the expression \x->e"
-- so we don't want to add "In the lambda abstraction \x->e"
add_match_ctxt match thing_inside
= case mc_what ctxt of
LambdaExpr -> thing_inside
_ -> addErrCtxt (pprMatchInCtxt match) thing_inside
-------------
tcGRHSs :: TcMatchCtxt body -> GRHSs Name (Located (body Name)) -> ExpRhoType
-> TcM (GRHSs TcId (Located (body TcId)))
-- Notice that we pass in the full res_ty, so that we get
-- good inference from simple things like
-- f = \(x::forall a.a->a) -> <stuff>
-- We used to force it to be a monotype when there was more than one guard
-- but we don't need to do that any more
tcGRHSs ctxt (GRHSs grhss (L l binds)) res_ty
= do { (binds', grhss')
<- tcLocalBinds binds $
mapM (wrapLocM (tcGRHS ctxt res_ty)) grhss
; return (GRHSs grhss' (L l binds')) }
-------------
tcGRHS :: TcMatchCtxt body -> ExpRhoType -> GRHS Name (Located (body Name))
-> TcM (GRHS TcId (Located (body TcId)))
tcGRHS ctxt res_ty (GRHS guards rhs)
= do { (guards', rhs')
<- tcStmtsAndThen stmt_ctxt tcGuardStmt guards res_ty $
mc_body ctxt rhs
; return (GRHS guards' rhs') }
where
stmt_ctxt = PatGuard (mc_what ctxt)
{-
************************************************************************
* *
\subsection{@tcDoStmts@ typechecks a {\em list} of do statements}
* *
************************************************************************
-}
tcDoStmts :: HsStmtContext Name
-> Located [LStmt Name (LHsExpr Name)]
-> ExpRhoType
-> TcM (HsExpr TcId) -- Returns a HsDo
tcDoStmts ListComp (L l stmts) res_ty
= do { res_ty <- expTypeToType res_ty
; (co, elt_ty) <- matchExpectedListTy res_ty
; let list_ty = mkListTy elt_ty
; stmts' <- tcStmts ListComp (tcLcStmt listTyCon) stmts
(mkCheckExpType elt_ty)
; return $ mkHsWrapCo co (HsDo ListComp (L l stmts') list_ty) }
tcDoStmts PArrComp (L l stmts) res_ty
= do { res_ty <- expTypeToType res_ty
; (co, elt_ty) <- matchExpectedPArrTy res_ty
; let parr_ty = mkPArrTy elt_ty
; stmts' <- tcStmts PArrComp (tcLcStmt parrTyCon) stmts
(mkCheckExpType elt_ty)
; return $ mkHsWrapCo co (HsDo PArrComp (L l stmts') parr_ty) }
tcDoStmts DoExpr (L l stmts) res_ty
= do { stmts' <- tcStmts DoExpr tcDoStmt stmts res_ty
; res_ty <- readExpType res_ty
; return (HsDo DoExpr (L l stmts') res_ty) }
tcDoStmts MDoExpr (L l stmts) res_ty
= do { stmts' <- tcStmts MDoExpr tcDoStmt stmts res_ty
; res_ty <- readExpType res_ty
; return (HsDo MDoExpr (L l stmts') res_ty) }
tcDoStmts MonadComp (L l stmts) res_ty
= do { stmts' <- tcStmts MonadComp tcMcStmt stmts res_ty
; res_ty <- readExpType res_ty
; return (HsDo MonadComp (L l stmts') res_ty) }
tcDoStmts ctxt _ _ = pprPanic "tcDoStmts" (pprStmtContext ctxt)
tcBody :: LHsExpr Name -> ExpRhoType -> TcM (LHsExpr TcId)
tcBody body res_ty
= do { traceTc "tcBody" (ppr res_ty)
; tcMonoExpr body res_ty
}
{-
************************************************************************
* *
\subsection{tcStmts}
* *
************************************************************************
-}
type TcExprStmtChecker = TcStmtChecker HsExpr ExpRhoType
type TcCmdStmtChecker = TcStmtChecker HsCmd TcRhoType
type TcStmtChecker body rho_type
= forall thing. HsStmtContext Name
-> Stmt Name (Located (body Name))
-> rho_type -- Result type for comprehension
-> (rho_type -> TcM thing) -- Checker for what follows the stmt
-> TcM (Stmt TcId (Located (body TcId)), thing)
tcStmts :: (Outputable (body Name)) => HsStmtContext Name
-> TcStmtChecker body rho_type -- NB: higher-rank type
-> [LStmt Name (Located (body Name))]
-> rho_type
-> TcM [LStmt TcId (Located (body TcId))]
tcStmts ctxt stmt_chk stmts res_ty
= do { (stmts', _) <- tcStmtsAndThen ctxt stmt_chk stmts res_ty $
const (return ())
; return stmts' }
tcStmtsAndThen :: (Outputable (body Name)) => HsStmtContext Name
-> TcStmtChecker body rho_type -- NB: higher-rank type
-> [LStmt Name (Located (body Name))]
-> rho_type
-> (rho_type -> TcM thing)
-> TcM ([LStmt TcId (Located (body TcId))], thing)
-- Note the higher-rank type. stmt_chk is applied at different
-- types in the equations for tcStmts
tcStmtsAndThen _ _ [] res_ty thing_inside
= do { thing <- thing_inside res_ty
; return ([], thing) }
-- LetStmts are handled uniformly, regardless of context
tcStmtsAndThen ctxt stmt_chk (L loc (LetStmt (L l binds)) : stmts)
res_ty thing_inside
= do { (binds', (stmts',thing)) <- tcLocalBinds binds $
tcStmtsAndThen ctxt stmt_chk stmts res_ty thing_inside
; return (L loc (LetStmt (L l binds')) : stmts', thing) }
-- Don't set the error context for an ApplicativeStmt. It ought to be
-- possible to do this with a popErrCtxt in the tcStmt case for
-- ApplicativeStmt, but it did something strange and broke a test (ado002).
tcStmtsAndThen ctxt stmt_chk (L loc stmt : stmts) res_ty thing_inside
| ApplicativeStmt{} <- stmt
= do { (stmt', (stmts', thing)) <-
stmt_chk ctxt stmt res_ty $ \ res_ty' ->
tcStmtsAndThen ctxt stmt_chk stmts res_ty' $
thing_inside
; return (L loc stmt' : stmts', thing) }
-- For the vanilla case, handle the location-setting part
| otherwise
= do { (stmt', (stmts', thing)) <-
setSrcSpan loc $
addErrCtxt (pprStmtInCtxt ctxt stmt) $
stmt_chk ctxt stmt res_ty $ \ res_ty' ->
popErrCtxt $
tcStmtsAndThen ctxt stmt_chk stmts res_ty' $
thing_inside
; return (L loc stmt' : stmts', thing) }
---------------------------------------------------
-- Pattern guards
---------------------------------------------------
tcGuardStmt :: TcExprStmtChecker
tcGuardStmt _ (BodyStmt guard _ _ _) res_ty thing_inside
= do { guard' <- tcMonoExpr guard (mkCheckExpType boolTy)
; thing <- thing_inside res_ty
; return (BodyStmt guard' noSyntaxExpr noSyntaxExpr boolTy, thing) }
tcGuardStmt ctxt (BindStmt pat rhs _ _ _) res_ty thing_inside
= do { (rhs', rhs_ty) <- tcInferSigmaNC rhs
-- Stmt has a context already
; (pat', thing) <- tcPat_O (StmtCtxt ctxt) (exprCtOrigin (unLoc rhs))
pat (mkCheckExpType rhs_ty) $
thing_inside res_ty
; return (mkTcBindStmt pat' rhs', thing) }
tcGuardStmt _ stmt _ _
= pprPanic "tcGuardStmt: unexpected Stmt" (ppr stmt)
---------------------------------------------------
-- List comprehensions and PArrays
-- (no rebindable syntax)
---------------------------------------------------
-- Dealt with separately, rather than by tcMcStmt, because
-- a) PArr isn't (yet) an instance of Monad, so the generality seems overkill
-- b) We have special desugaring rules for list comprehensions,
-- which avoid creating intermediate lists. They in turn
-- assume that the bind/return operations are the regular
-- polymorphic ones, and in particular don't have any
-- coercion matching stuff in them. It's hard to avoid the
-- potential for non-trivial coercions in tcMcStmt
tcLcStmt :: TyCon -- The list/Parray type constructor ([] or PArray)
-> TcExprStmtChecker
tcLcStmt _ _ (LastStmt body noret _) elt_ty thing_inside
= do { body' <- tcMonoExprNC body elt_ty
; thing <- thing_inside (panic "tcLcStmt: thing_inside")
; return (LastStmt body' noret noSyntaxExpr, thing) }
-- A generator, pat <- rhs
tcLcStmt m_tc ctxt (BindStmt pat rhs _ _ _) elt_ty thing_inside
= do { pat_ty <- newFlexiTyVarTy liftedTypeKind
; rhs' <- tcMonoExpr rhs (mkCheckExpType $ mkTyConApp m_tc [pat_ty])
; (pat', thing) <- tcPat (StmtCtxt ctxt) pat (mkCheckExpType pat_ty) $
thing_inside elt_ty
; return (mkTcBindStmt pat' rhs', thing) }
-- A boolean guard
tcLcStmt _ _ (BodyStmt rhs _ _ _) elt_ty thing_inside
= do { rhs' <- tcMonoExpr rhs (mkCheckExpType boolTy)
; thing <- thing_inside elt_ty
; return (BodyStmt rhs' noSyntaxExpr noSyntaxExpr boolTy, thing) }
-- ParStmt: See notes with tcMcStmt
tcLcStmt m_tc ctxt (ParStmt bndr_stmts_s _ _ _) elt_ty thing_inside
= do { (pairs', thing) <- loop bndr_stmts_s
; return (ParStmt pairs' noExpr noSyntaxExpr unitTy, thing) }
where
-- loop :: [([LStmt Name], [Name])] -> TcM ([([LStmt TcId], [TcId])], thing)
loop [] = do { thing <- thing_inside elt_ty
; return ([], thing) } -- matching in the branches
loop (ParStmtBlock stmts names _ : pairs)
= do { (stmts', (ids, pairs', thing))
<- tcStmtsAndThen ctxt (tcLcStmt m_tc) stmts elt_ty $ \ _elt_ty' ->
do { ids <- tcLookupLocalIds names
; (pairs', thing) <- loop pairs
; return (ids, pairs', thing) }
; return ( ParStmtBlock stmts' ids noSyntaxExpr : pairs', thing ) }
tcLcStmt m_tc ctxt (TransStmt { trS_form = form, trS_stmts = stmts
, trS_bndrs = bindersMap
, trS_by = by, trS_using = using }) elt_ty thing_inside
= do { let (bndr_names, n_bndr_names) = unzip bindersMap
unused_ty = pprPanic "tcLcStmt: inner ty" (ppr bindersMap)
-- The inner 'stmts' lack a LastStmt, so the element type
-- passed in to tcStmtsAndThen is never looked at
; (stmts', (bndr_ids, by'))
<- tcStmtsAndThen (TransStmtCtxt ctxt) (tcLcStmt m_tc) stmts unused_ty $ \_ -> do
{ by' <- traverse tcInferSigma by
; bndr_ids <- tcLookupLocalIds bndr_names
; return (bndr_ids, by') }
; let m_app ty = mkTyConApp m_tc [ty]
--------------- Typecheck the 'using' function -------------
-- using :: ((a,b,c)->t) -> m (a,b,c) -> m (a,b,c)     (ThenForm)
--       :: ((a,b,c)->t) -> m (a,b,c) -> m (m (a,b,c)) (GroupForm)
-- n_app :: Type -> Type -- Wraps a 'ty' into '[ty]' for GroupForm
; let n_app = case form of
ThenForm -> (\ty -> ty)
_ -> m_app
by_arrow :: Type -> Type -- Wraps 'ty' to '(a->t) -> ty' if the By is present
by_arrow = case by' of
Nothing -> \ty -> ty
Just (_,e_ty) -> \ty -> (alphaTy `mkFunTy` e_ty) `mkFunTy` ty
tup_ty = mkBigCoreVarTupTy bndr_ids
poly_arg_ty = m_app alphaTy
poly_res_ty = m_app (n_app alphaTy)
using_poly_ty = mkInvForAllTy alphaTyVar $
by_arrow $
poly_arg_ty `mkFunTy` poly_res_ty
; using' <- tcPolyExpr using using_poly_ty
; let final_using = fmap (HsWrap (WpTyApp tup_ty)) using'
-- 'stmts' returns a result of type (m1_ty tuple_ty),
-- typically something like [(Int,Bool,Int)]
-- We don't know what tuple_ty is yet, so we use a variable
; let mk_n_bndr :: Name -> TcId -> TcId
mk_n_bndr n_bndr_name bndr_id = mkLocalIdOrCoVar n_bndr_name (n_app (idType bndr_id))
-- Ensure that every old binder of type `b` is linked up with its
-- new binder which should have type `n b`
-- See Note [GroupStmt binder map] in HsExpr
n_bndr_ids = zipWith mk_n_bndr n_bndr_names bndr_ids
bindersMap' = bndr_ids `zip` n_bndr_ids
-- Type check the thing in the environment with
-- these new binders and return the result
; thing <- tcExtendIdEnv n_bndr_ids (thing_inside elt_ty)
; return (TransStmt { trS_stmts = stmts', trS_bndrs = bindersMap'
, trS_by = fmap fst by', trS_using = final_using
, trS_ret = noSyntaxExpr
, trS_bind = noSyntaxExpr
, trS_fmap = noExpr
, trS_bind_arg_ty = unitTy
, trS_form = form }, thing) }
tcLcStmt _ _ stmt _ _
= pprPanic "tcLcStmt: unexpected Stmt" (ppr stmt)
---------------------------------------------------
-- Monad comprehensions
-- (supports rebindable syntax)
---------------------------------------------------
tcMcStmt :: TcExprStmtChecker
tcMcStmt _ (LastStmt body noret return_op) res_ty thing_inside
= do { (body', return_op')
<- tcSyntaxOp MCompOrigin return_op [SynRho] res_ty $
\ [a_ty] ->
tcMonoExprNC body (mkCheckExpType a_ty)
; thing <- thing_inside (panic "tcMcStmt: thing_inside")
; return (LastStmt body' noret return_op', thing) }
-- Generators for monad comprehensions ( pat <- rhs )
--
-- [ body | q <- gen ] -> gen :: m a
-- q :: a
--
tcMcStmt ctxt (BindStmt pat rhs bind_op fail_op _) res_ty thing_inside
-- (>>=) :: rhs_ty -> (pat_ty -> new_res_ty) -> res_ty
= do { ((rhs', pat', thing, new_res_ty), bind_op')
<- tcSyntaxOp MCompOrigin bind_op
[SynRho, SynFun SynAny SynRho] res_ty $
\ [rhs_ty, pat_ty, new_res_ty] ->
do { rhs' <- tcMonoExprNC rhs (mkCheckExpType rhs_ty)
; (pat', thing) <- tcPat (StmtCtxt ctxt) pat
(mkCheckExpType pat_ty) $
thing_inside (mkCheckExpType new_res_ty)
; return (rhs', pat', thing, new_res_ty) }
-- If (but only if) the pattern can fail, typecheck the 'fail' operator
; fail_op' <- tcMonadFailOp (MCompPatOrigin pat) pat' fail_op new_res_ty
; return (BindStmt pat' rhs' bind_op' fail_op' new_res_ty, thing) }
-- Boolean expressions.
--
-- [ body | stmts, expr ] -> expr :: m Bool
--
tcMcStmt _ (BodyStmt rhs then_op guard_op _) res_ty thing_inside
= do { -- Deal with rebindable syntax:
-- guard_op :: test_ty -> rhs_ty
-- then_op :: rhs_ty -> new_res_ty -> res_ty
-- Where test_ty is, for example, Bool
; ((thing, rhs', rhs_ty, guard_op'), then_op')
<- tcSyntaxOp MCompOrigin then_op [SynRho, SynRho] res_ty $
\ [rhs_ty, new_res_ty] ->
do { (rhs', guard_op')
<- tcSyntaxOp MCompOrigin guard_op [SynAny]
(mkCheckExpType rhs_ty) $
\ [test_ty] ->
tcMonoExpr rhs (mkCheckExpType test_ty)
; thing <- thing_inside (mkCheckExpType new_res_ty)
; return (thing, rhs', rhs_ty, guard_op') }
; return (BodyStmt rhs' then_op' guard_op' rhs_ty, thing) }
-- Grouping statements
--
-- [ body | stmts, then group by e using f ]
-- -> e :: t
-- f :: forall a. (a -> t) -> m a -> m (m a)
-- [ body | stmts, then group using f ]
-- -> f :: forall a. m a -> m (m a)
-- We type [ body | (stmts, group by e using f), ... ]
-- f <optional by> [ (a,b,c) | stmts ] >>= \(a,b,c) -> ...body....
--
-- We type the functions as follows:
-- f <optional by> :: m1 (a,b,c) -> m2 (a,b,c) (ThenForm)
-- :: m1 (a,b,c) -> m2 (n (a,b,c)) (GroupForm)
-- (>>=) :: m2 (a,b,c) -> ((a,b,c) -> res) -> res (ThenForm)
-- :: m2 (n (a,b,c)) -> (n (a,b,c) -> res) -> res (GroupForm)
--
tcMcStmt ctxt (TransStmt { trS_stmts = stmts, trS_bndrs = bindersMap
, trS_by = by, trS_using = using, trS_form = form
, trS_ret = return_op, trS_bind = bind_op
, trS_fmap = fmap_op }) res_ty thing_inside
= do { let star_star_kind = liftedTypeKind `mkFunTy` liftedTypeKind
; m1_ty <- newFlexiTyVarTy star_star_kind
; m2_ty <- newFlexiTyVarTy star_star_kind
; tup_ty <- newFlexiTyVarTy liftedTypeKind
; by_e_ty <- newFlexiTyVarTy liftedTypeKind -- The type of the 'by' expression (if any)
-- n_app :: Type -> Type -- Wraps a 'ty' into '(n ty)' for GroupForm
; n_app <- case form of
ThenForm -> return (\ty -> ty)
_ -> do { n_ty <- newFlexiTyVarTy star_star_kind
; return (n_ty `mkAppTy`) }
; let by_arrow :: Type -> Type
-- (by_arrow res) produces ((alpha->e_ty) -> res) ('by' present)
-- or res ('by' absent)
by_arrow = case by of
Nothing -> \res -> res
Just {} -> \res -> (alphaTy `mkFunTy` by_e_ty) `mkFunTy` res
poly_arg_ty = m1_ty `mkAppTy` alphaTy
using_arg_ty = m1_ty `mkAppTy` tup_ty
poly_res_ty = m2_ty `mkAppTy` n_app alphaTy
using_res_ty = m2_ty `mkAppTy` n_app tup_ty
using_poly_ty = mkInvForAllTy alphaTyVar $
by_arrow $
poly_arg_ty `mkFunTy` poly_res_ty
-- 'stmts' returns a result of type (m1_ty tuple_ty),
-- typically something like [(Int,Bool,Int)]
-- We don't know what tuple_ty is yet, so we use a variable
; let (bndr_names, n_bndr_names) = unzip bindersMap
; (stmts', (bndr_ids, by', return_op')) <-
tcStmtsAndThen (TransStmtCtxt ctxt) tcMcStmt stmts
(mkCheckExpType using_arg_ty) $ \res_ty' -> do
{ by' <- case by of
Nothing -> return Nothing
Just e -> do { e' <- tcMonoExpr e
(mkCheckExpType by_e_ty)
; return (Just e') }
-- Find the Ids (and hence types) of all old binders
; bndr_ids <- tcLookupLocalIds bndr_names
-- 'return' is only used for the binders, so we know its type.
-- return :: (a,b,c,..) -> m (a,b,c,..)
; (_, return_op') <- tcSyntaxOp MCompOrigin return_op
[synKnownType (mkBigCoreVarTupTy bndr_ids)]
res_ty' $ \ _ -> return ()
; return (bndr_ids, by', return_op') }
--------------- Typecheck the 'bind' function -------------
-- (>>=) :: m2 (n (a,b,c)) -> ( n (a,b,c) -> new_res_ty ) -> res_ty
; new_res_ty <- newFlexiTyVarTy liftedTypeKind
; (_, bind_op') <- tcSyntaxOp MCompOrigin bind_op
[ synKnownType using_res_ty
, synKnownType (n_app tup_ty `mkFunTy` new_res_ty) ]
res_ty $ \ _ -> return ()
--------------- Typecheck the 'fmap' function -------------
; fmap_op' <- case form of
ThenForm -> return noExpr
_ -> fmap unLoc . tcPolyExpr (noLoc fmap_op) $
mkInvForAllTy alphaTyVar $
mkInvForAllTy betaTyVar $
(alphaTy `mkFunTy` betaTy)
`mkFunTy` (n_app alphaTy)
`mkFunTy` (n_app betaTy)
--------------- Typecheck the 'using' function -------------
-- using :: ((a,b,c)->t) -> m1 (a,b,c) -> m2 (n (a,b,c))
; using' <- tcPolyExpr using using_poly_ty
; let final_using = fmap (HsWrap (WpTyApp tup_ty)) using'
--------------- Building the bindersMap ----------------
; let mk_n_bndr :: Name -> TcId -> TcId
mk_n_bndr n_bndr_name bndr_id = mkLocalIdOrCoVar n_bndr_name (n_app (idType bndr_id))
-- Ensure that every old binder of type `b` is linked up with its
-- new binder which should have type `n b`
-- See Note [GroupStmt binder map] in HsExpr
n_bndr_ids = zipWith mk_n_bndr n_bndr_names bndr_ids
bindersMap' = bndr_ids `zip` n_bndr_ids
-- Type check the thing in the environment with
-- these new binders and return the result
; thing <- tcExtendIdEnv n_bndr_ids $
thing_inside (mkCheckExpType new_res_ty)
; return (TransStmt { trS_stmts = stmts', trS_bndrs = bindersMap'
, trS_by = by', trS_using = final_using
, trS_ret = return_op', trS_bind = bind_op'
, trS_bind_arg_ty = n_app tup_ty
, trS_fmap = fmap_op', trS_form = form }, thing) }
-- A parallel set of comprehensions
-- [ (g x, h x) | ... ; let g v = ...
-- | ... ; let h v = ... ]
--
-- It's possible that g,h are overloaded, so we need to feed the LIE from the
-- (g x, h x) up through both lots of bindings (so we get the bindLocalMethods).
-- Similarly if we had an existential pattern match:
--
-- data T = forall a. Show a => C a
--
-- [ (show x, show y) | ... ; C x <- ...
-- | ... ; C y <- ... ]
--
-- Then we need the LIE from (show x, show y) to be simplified against
-- the bindings for x and y.
--
-- It's difficult to do this in parallel, so we rely on the renamer to
-- ensure that g,h and x,y don't duplicate, and simply grow the environment.
-- So the binders of the first parallel group will be in scope in the second
-- group. But that's fine; there's no shadowing to worry about.
--
-- Note: The `mzip` function will get typechecked via:
--
-- ParStmt [st1::t1, st2::t2, st3::t3]
--
-- mzip :: m st1
-- -> (m st2 -> m st3 -> m (st2, st3)) -- recursive call
-- -> m (st1, (st2, st3))
--
tcMcStmt ctxt (ParStmt bndr_stmts_s mzip_op bind_op _) res_ty thing_inside
= do { let star_star_kind = liftedTypeKind `mkFunTy` liftedTypeKind
; m_ty <- newFlexiTyVarTy star_star_kind
; let mzip_ty = mkInvForAllTys [alphaTyVar, betaTyVar] $
(m_ty `mkAppTy` alphaTy)
`mkFunTy`
(m_ty `mkAppTy` betaTy)
`mkFunTy`
(m_ty `mkAppTy` mkBoxedTupleTy [alphaTy, betaTy])
; mzip_op' <- unLoc `fmap` tcPolyExpr (noLoc mzip_op) mzip_ty
-- type dummies since we don't know all binder types yet
; id_tys_s <- (mapM . mapM) (const (newFlexiTyVarTy liftedTypeKind))
[ names | ParStmtBlock _ names _ <- bndr_stmts_s ]
-- Typecheck bind:
; let tup_tys = [ mkBigCoreTupTy id_tys | id_tys <- id_tys_s ]
tuple_ty = mk_tuple_ty tup_tys
; (((blocks', thing), inner_res_ty), bind_op')
<- tcSyntaxOp MCompOrigin bind_op
[ synKnownType (m_ty `mkAppTy` tuple_ty)
, SynFun (synKnownType tuple_ty) SynRho ] res_ty $
\ [inner_res_ty] ->
do { stuff <- loop m_ty (mkCheckExpType inner_res_ty)
tup_tys bndr_stmts_s
; return (stuff, inner_res_ty) }
; return (ParStmt blocks' mzip_op' bind_op' inner_res_ty, thing) }
where
mk_tuple_ty tys = foldr1 (\tn tm -> mkBoxedTupleTy [tn, tm]) tys
-- loop :: Type -- m_ty
-- -> ExpRhoType -- inner_res_ty
-- -> [TcType] -- tup_tys
-- -> [ParStmtBlock Name]
-- -> TcM ([([LStmt TcId], [TcId])], thing)
loop _ inner_res_ty [] [] = do { thing <- thing_inside inner_res_ty
; return ([], thing) }
-- matching in the branches
loop m_ty inner_res_ty (tup_ty_in : tup_tys_in)
(ParStmtBlock stmts names return_op : pairs)
= do { let m_tup_ty = m_ty `mkAppTy` tup_ty_in
; (stmts', (ids, return_op', pairs', thing))
<- tcStmtsAndThen ctxt tcMcStmt stmts (mkCheckExpType m_tup_ty) $
\m_tup_ty' ->
do { ids <- tcLookupLocalIds names
; let tup_ty = mkBigCoreVarTupTy ids
; (_, return_op') <-
tcSyntaxOp MCompOrigin return_op
[synKnownType tup_ty] m_tup_ty' $
\ _ -> return ()
; (pairs', thing) <- loop m_ty inner_res_ty tup_tys_in pairs
; return (ids, return_op', pairs', thing) }
; return (ParStmtBlock stmts' ids return_op' : pairs', thing) }
loop _ _ _ _ = panic "tcMcStmt.loop"
tcMcStmt _ stmt _ _
= pprPanic "tcMcStmt: unexpected Stmt" (ppr stmt)
---------------------------------------------------
-- Do-notation
-- (supports rebindable syntax)
---------------------------------------------------
tcDoStmt :: TcExprStmtChecker
tcDoStmt _ (LastStmt body noret _) res_ty thing_inside
= do { body' <- tcMonoExprNC body res_ty
; thing <- thing_inside (panic "tcDoStmt: thing_inside")
; return (LastStmt body' noret noSyntaxExpr, thing) }
tcDoStmt ctxt (BindStmt pat rhs bind_op fail_op _) res_ty thing_inside
= do { -- Deal with rebindable syntax:
-- (>>=) :: rhs_ty -> (pat_ty -> new_res_ty) -> res_ty
-- This level of generality is needed for using do-notation
-- in full generality; see Trac #1537
((rhs', pat', new_res_ty, thing), bind_op')
<- tcSyntaxOp DoOrigin bind_op [SynRho, SynFun SynAny SynRho] res_ty $
\ [rhs_ty, pat_ty, new_res_ty] ->
do { rhs' <- tcMonoExprNC rhs (mkCheckExpType rhs_ty)
; (pat', thing) <- tcPat (StmtCtxt ctxt) pat
(mkCheckExpType pat_ty) $
thing_inside (mkCheckExpType new_res_ty)
; return (rhs', pat', new_res_ty, thing) }
-- If (but only if) the pattern can fail, typecheck the 'fail' operator
; fail_op' <- tcMonadFailOp (DoPatOrigin pat) pat' fail_op new_res_ty
; return (BindStmt pat' rhs' bind_op' fail_op' new_res_ty, thing) }
tcDoStmt ctxt (ApplicativeStmt pairs mb_join _) res_ty thing_inside
= do { let tc_app_stmts ty = tcApplicativeStmts ctxt pairs ty $
thing_inside . mkCheckExpType
; ((pairs', body_ty, thing), mb_join') <- case mb_join of
Nothing -> (, Nothing) <$> tc_app_stmts res_ty
Just join_op ->
second Just <$>
(tcSyntaxOp DoOrigin join_op [SynRho] res_ty $
\ [rhs_ty] -> tc_app_stmts (mkCheckExpType rhs_ty))
; return (ApplicativeStmt pairs' mb_join' body_ty, thing) }
tcDoStmt _ (BodyStmt rhs then_op _ _) res_ty thing_inside
= do { -- Deal with rebindable syntax;
-- (>>) :: rhs_ty -> new_res_ty -> res_ty
; ((rhs', rhs_ty, thing), then_op')
<- tcSyntaxOp DoOrigin then_op [SynRho, SynRho] res_ty $
\ [rhs_ty, new_res_ty] ->
do { rhs' <- tcMonoExprNC rhs (mkCheckExpType rhs_ty)
; thing <- thing_inside (mkCheckExpType new_res_ty)
; return (rhs', rhs_ty, thing) }
; return (BodyStmt rhs' then_op' noSyntaxExpr rhs_ty, thing) }
tcDoStmt ctxt (RecStmt { recS_stmts = stmts, recS_later_ids = later_names
, recS_rec_ids = rec_names, recS_ret_fn = ret_op
, recS_mfix_fn = mfix_op, recS_bind_fn = bind_op })
res_ty thing_inside
= do { let tup_names = rec_names ++ filterOut (`elem` rec_names) later_names
; tup_elt_tys <- newFlexiTyVarTys (length tup_names) liftedTypeKind
; let tup_ids = zipWith mkLocalId tup_names tup_elt_tys
tup_ty = mkBigCoreTupTy tup_elt_tys
; tcExtendIdEnv tup_ids $ do
{ ((stmts', (ret_op', tup_rets)), stmts_ty)
<- tcInferInst $ \ exp_ty ->
tcStmtsAndThen ctxt tcDoStmt stmts exp_ty $ \ inner_res_ty ->
do { tup_rets <- zipWithM tcCheckId tup_names
(map mkCheckExpType tup_elt_tys)
-- Unify the types of the "final" Ids (which may
-- be polymorphic) with those of "knot-tied" Ids
; (_, ret_op')
<- tcSyntaxOp DoOrigin ret_op [synKnownType tup_ty]
inner_res_ty $ \_ -> return ()
; return (ret_op', tup_rets) }
; ((_, mfix_op'), mfix_res_ty)
<- tcInferInst $ \ exp_ty ->
tcSyntaxOp DoOrigin mfix_op
[synKnownType (mkFunTy tup_ty stmts_ty)] exp_ty $
\ _ -> return ()
; ((thing, new_res_ty), bind_op')
<- tcSyntaxOp DoOrigin bind_op
[ synKnownType mfix_res_ty
, synKnownType tup_ty `SynFun` SynRho ]
res_ty $
\ [new_res_ty] ->
do { thing <- thing_inside (mkCheckExpType new_res_ty)
; return (thing, new_res_ty) }
; let rec_ids = takeList rec_names tup_ids
; later_ids <- tcLookupLocalIds later_names
; traceTc "tcdo" $ vcat [ppr rec_ids <+> ppr (map idType rec_ids),
ppr later_ids <+> ppr (map idType later_ids)]
; return (RecStmt { recS_stmts = stmts', recS_later_ids = later_ids
, recS_rec_ids = rec_ids, recS_ret_fn = ret_op'
, recS_mfix_fn = mfix_op', recS_bind_fn = bind_op'
, recS_bind_ty = new_res_ty
, recS_later_rets = [], recS_rec_rets = tup_rets
, recS_ret_ty = stmts_ty }, thing)
}}
tcDoStmt _ stmt _ _
= pprPanic "tcDoStmt: unexpected Stmt" (ppr stmt)
---------------------------------------------------
-- MonadFail Proposal warnings
---------------------------------------------------
-- The idea behind issuing MonadFail warnings is that we add them whenever a
-- failable pattern is encountered. However, instead of throwing a type error
-- when the constraint cannot be satisfied, we only issue a warning in
-- TcErrors.hs.
tcMonadFailOp :: CtOrigin
-> LPat TcId
-> SyntaxExpr Name -- The fail op
-> TcType -- Type of the whole do-expression
-> TcRn (SyntaxExpr TcId) -- Typechecked fail op
-- Get a 'fail' operator expression, to use if the pattern
-- match fails. If the pattern is irrefutable, just return
-- noSyntaxExpr; it won't be used
tcMonadFailOp orig pat fail_op res_ty
| isIrrefutableHsPat pat
= return noSyntaxExpr
| otherwise
= do { -- Issue MonadFail warnings
rebindableSyntax <- xoptM LangExt.RebindableSyntax
; desugarFlag <- xoptM LangExt.MonadFailDesugaring
; missingWarning <- woptM Opt_WarnMissingMonadFailInstances
; if | rebindableSyntax && (desugarFlag || missingWarning)
-> warnRebindableClash pat
| not desugarFlag && missingWarning
-> emitMonadFailConstraint pat res_ty
| otherwise
-> return ()
-- Get the fail op itself
; snd <$> (tcSyntaxOp orig fail_op [synKnownType stringTy]
(mkCheckExpType res_ty) $ \_ -> return ()) }
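-- A hedged example of when tcMonadFailOp matters (user code, not part of this
-- module; 'justs' is made up):
--
--   justs :: [Maybe Int] -> [Int]
--   justs ms = do { Just x <- ms; return x }
--
-- The pattern "Just x" is failable, so the do-expression's 'fail' operator is
-- typechecked here (and, depending on the flags tested above, a MonadFail
-- constraint or warning may be emitted); a plain variable or other irrefutable
-- pattern takes the isIrrefutableHsPat early return instead.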
emitMonadFailConstraint :: LPat TcId -> TcType -> TcRn ()
emitMonadFailConstraint pat res_ty
= do { -- We expect res_ty to be of form (monad_ty arg_ty)
(_co, (monad_ty, _arg_ty)) <- matchExpectedAppTy res_ty
-- Emit (MonadFail m), but ignore the evidence; it's
-- just there to generate a warning
; monadFailClass <- tcLookupClass monadFailClassName
; _ <- emitWanted (FailablePattern pat)
(mkClassPred monadFailClass [monad_ty])
; return () }
warnRebindableClash :: LPat TcId -> TcRn ()
warnRebindableClash pattern = addWarnAt
(Reason Opt_WarnMissingMonadFailInstances)
(getLoc pattern)
(text "The failable pattern" <+> quotes (ppr pattern)
$$
nest 2 (text "is used together with -XRebindableSyntax."
<+> text "If this is intentional,"
$$
text "compile with -Wno-missing-monadfail-instances."))
{-
Note [Treat rebindable syntax first]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
When typechecking
do { bar; ... } :: IO ()
we want to typecheck 'bar' in the knowledge that it should be an IO thing,
pushing info from the context into the RHS. To do this, we check the
rebindable syntax first, and push that information into (tcMonoExprNC rhs).
Otherwise the error shows up when checking the rebindable syntax, and
the expected/inferred stuff is back to front (see Trac #3613).
Note [typechecking ApplicativeStmt]
join ((\pat1 ... patn -> body) <$> e1 <*> ... <*> en)
fresh type variables:
pat_ty_1..pat_ty_n
exp_ty_1..exp_ty_n
t_1..t_(n-1)
body :: body_ty
(\pat1 ... patn -> body) :: pat_ty_1 -> ... -> pat_ty_n -> body_ty
pat_i :: pat_ty_i
e_i :: exp_ty_i
<$> :: (pat_ty_1 -> ... -> pat_ty_n -> body_ty) -> exp_ty_1 -> t_1
<*>_i :: t_(i-1) -> exp_ty_i -> t_i
join :: tn -> res_ty
-}
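-- A hedged user-level illustration of the shape described above (a sketch, not
-- part of this module; 'pairUp' is made up): with ApplicativeDo,
--
--   {-# LANGUAGE ApplicativeDo #-}
--   pairUp :: Applicative f => f Int -> f Bool -> f (Int, Bool)
--   pairUp fx fy = do { x <- fx; y <- fy; return (x, y) }
--
-- is rearranged (by the renamer) into roughly
--   (\x y -> (x, y)) <$> fx <*> fy
-- and it is that ApplicativeStmt form which tcApplicativeStmts below checks,
-- operator by operator (goOps) and argument by argument (goArgs).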
tcApplicativeStmts
:: HsStmtContext Name
-> [(SyntaxExpr Name, ApplicativeArg Name Name)]
-> ExpRhoType -- rhs_ty
-> (TcRhoType -> TcM t) -- thing_inside
-> TcM ([(SyntaxExpr TcId, ApplicativeArg TcId TcId)], Type, t)
tcApplicativeStmts ctxt pairs rhs_ty thing_inside
= do { body_ty <- newFlexiTyVarTy liftedTypeKind
; let arity = length pairs
; ts <- replicateM (arity-1) $ newInferExpTypeInst
; exp_tys <- replicateM arity $ newFlexiTyVarTy liftedTypeKind
; pat_tys <- replicateM arity $ newFlexiTyVarTy liftedTypeKind
; let fun_ty = mkFunTys pat_tys body_ty
-- NB. do the <$>,<*> operators first, we don't want type errors here
-- i.e. goOps before goArgs
-- See Note [Treat rebindable syntax first]
; let (ops, args) = unzip pairs
; ops' <- goOps fun_ty (zip3 ops (ts ++ [rhs_ty]) exp_tys)
; (args', thing) <- goArgs (zip3 args pat_tys exp_tys) $
thing_inside body_ty
; return (zip ops' args', body_ty, thing) }
where
goOps _ [] = return []
goOps t_left ((op,t_i,exp_ty) : ops)
= do { (_, op')
<- tcSyntaxOp DoOrigin op
[synKnownType t_left, synKnownType exp_ty] t_i $
\ _ -> return ()
; t_i <- readExpType t_i
; ops' <- goOps t_i ops
; return (op' : ops') }
goArgs
:: [(ApplicativeArg Name Name, Type, Type)]
-> TcM t
-> TcM ([ApplicativeArg TcId TcId], t)
goArgs [] thing_inside
= do { thing <- thing_inside
; return ([],thing)
}
goArgs ((ApplicativeArgOne pat rhs, pat_ty, exp_ty) : rest) thing_inside
= do { let stmt :: ExprStmt Name
stmt = mkBindStmt pat rhs
; setSrcSpan (combineSrcSpans (getLoc pat) (getLoc rhs)) $
addErrCtxt (pprStmtInCtxt ctxt stmt) $
do { rhs' <- tcMonoExprNC rhs (mkCheckExpType exp_ty)
; (pat',(pairs, thing)) <-
tcPat (StmtCtxt ctxt) pat (mkCheckExpType pat_ty) $
popErrCtxt $
goArgs rest thing_inside
; return (ApplicativeArgOne pat' rhs' : pairs, thing) } }
goArgs ((ApplicativeArgMany stmts ret pat, pat_ty, exp_ty) : rest)
thing_inside
= do { (stmts', (ret',pat',rest',thing)) <-
tcStmtsAndThen ctxt tcDoStmt stmts (mkCheckExpType exp_ty) $
\res_ty -> do
{ L _ ret' <- tcMonoExprNC (noLoc ret) res_ty
; (pat',(rest', thing)) <-
tcPat (StmtCtxt ctxt) pat (mkCheckExpType pat_ty) $
goArgs rest thing_inside
; return (ret', pat', rest', thing)
}
; return (ApplicativeArgMany stmts' ret' pat' : rest', thing) }
{-
************************************************************************
* *
\subsection{Errors and contexts}
* *
************************************************************************
@checkArgs@ takes a @MatchGroup@ and decides whether the same
number of args is used in each equation.
-}
checkArgs :: Name -> MatchGroup Name body -> TcM ()
checkArgs _ (MG { mg_alts = L _ [] })
= return ()
checkArgs fun (MG { mg_alts = L _ (match1:matches) })
| null bad_matches
= return ()
| otherwise
= failWithTc (vcat [ text "Equations for" <+> quotes (ppr fun) <+>
text "have different numbers of arguments"
, nest 2 (ppr (getLoc match1))
, nest 2 (ppr (getLoc (head bad_matches)))])
where
n_args1 = args_in_match match1
bad_matches = [m | m <- matches, args_in_match m /= n_args1]
args_in_match :: LMatch Name body -> Int
args_in_match (L _ (Match _ pats _ _)) = length pats
|
mettekou/ghc
|
compiler/typecheck/TcMatches.hs
|
bsd-3-clause
| 48,078 | 4 | 21 | 15,994 | 9,656 | 5,114 | 4,542 | 630 | 7 |
{-
(c) The University of Glasgow 2006
(c) The GRASP/AQUA Project, Glasgow University, 1992-1998
Desugaring arrow commands
-}
{-# LANGUAGE CPP #-}
module DsArrows ( dsProcExpr ) where
#include "HsVersions.h"
import Match
import DsUtils
import DsMonad
import HsSyn hiding (collectPatBinders, collectPatsBinders, collectLStmtsBinders, collectLStmtBinders, collectStmtBinders )
import TcHsSyn
-- NB: The desugarer, which straddles the source and Core worlds, sometimes
-- needs to see source types (newtypes etc), and sometimes not
-- So WATCH OUT; check each use of split*Ty functions.
-- Sigh. This is a pain.
import {-# SOURCE #-} DsExpr ( dsExpr, dsLExpr, dsLocalBinds )
import TcType
import TcEvidence
import CoreSyn
import CoreFVs
import CoreUtils
import MkCore
import DsBinds (dsHsWrapper)
import Name
import Var
import Id
import DataCon
import TysWiredIn
import BasicTypes
import PrelNames
import Outputable
import Bag
import VarSet
import SrcLoc
import ListSetOps( assocDefault )
import FastString
import Data.List
data DsCmdEnv = DsCmdEnv {
arr_id, compose_id, first_id, app_id, choice_id, loop_id :: CoreExpr
}
mkCmdEnv :: CmdSyntaxTable Id -> DsM ([CoreBind], DsCmdEnv)
-- See Note [CmdSyntaxTable] in HsExpr
mkCmdEnv tc_meths
= do { (meth_binds, prs) <- mapAndUnzipM mk_bind tc_meths
; return (meth_binds, DsCmdEnv {
arr_id = Var (find_meth prs arrAName),
compose_id = Var (find_meth prs composeAName),
first_id = Var (find_meth prs firstAName),
app_id = Var (find_meth prs appAName),
choice_id = Var (find_meth prs choiceAName),
loop_id = Var (find_meth prs loopAName)
}) }
where
mk_bind (std_name, expr)
= do { rhs <- dsExpr expr
; id <- newSysLocalDs (exprType rhs)
; return (NonRec id rhs, (std_name, id)) }
find_meth prs std_name
= assocDefault (mk_panic std_name) prs std_name
mk_panic std_name = pprPanic "mkCmdEnv" (ptext (sLit "Not found:") <+> ppr std_name)
-- arr :: forall b c. (b -> c) -> a b c
do_arr :: DsCmdEnv -> Type -> Type -> CoreExpr -> CoreExpr
do_arr ids b_ty c_ty f = mkApps (arr_id ids) [Type b_ty, Type c_ty, f]
-- (>>>) :: forall b c d. a b c -> a c d -> a b d
do_compose :: DsCmdEnv -> Type -> Type -> Type ->
CoreExpr -> CoreExpr -> CoreExpr
do_compose ids b_ty c_ty d_ty f g
= mkApps (compose_id ids) [Type b_ty, Type c_ty, Type d_ty, f, g]
-- first :: forall b c d. a b c -> a (b,d) (c,d)
do_first :: DsCmdEnv -> Type -> Type -> Type -> CoreExpr -> CoreExpr
do_first ids b_ty c_ty d_ty f
= mkApps (first_id ids) [Type b_ty, Type c_ty, Type d_ty, f]
-- app :: forall b c. a (a b c, b) c
do_app :: DsCmdEnv -> Type -> Type -> CoreExpr
do_app ids b_ty c_ty = mkApps (app_id ids) [Type b_ty, Type c_ty]
-- (|||) :: forall b d c. a b d -> a c d -> a (Either b c) d
-- note the swapping of d and c
do_choice :: DsCmdEnv -> Type -> Type -> Type ->
CoreExpr -> CoreExpr -> CoreExpr
do_choice ids b_ty c_ty d_ty f g
= mkApps (choice_id ids) [Type b_ty, Type d_ty, Type c_ty, f, g]
-- loop :: forall b d c. a (b,d) (c,d) -> a b c
-- note the swapping of d and c
do_loop :: DsCmdEnv -> Type -> Type -> Type -> CoreExpr -> CoreExpr
do_loop ids b_ty c_ty d_ty f
= mkApps (loop_id ids) [Type b_ty, Type d_ty, Type c_ty, f]
-- premap :: forall b c d. (b -> c) -> a c d -> a b d
-- premap f g = arr f >>> g
do_premap :: DsCmdEnv -> Type -> Type -> Type ->
CoreExpr -> CoreExpr -> CoreExpr
do_premap ids b_ty c_ty d_ty f g
= do_compose ids b_ty c_ty d_ty (do_arr ids b_ty c_ty f) g
mkFailExpr :: HsMatchContext Id -> Type -> DsM CoreExpr
mkFailExpr ctxt ty
= mkErrorAppDs pAT_ERROR_ID ty (matchContextErrString ctxt)
-- construct CoreExpr for \ (a :: a_ty, b :: b_ty) -> a
mkFstExpr :: Type -> Type -> DsM CoreExpr
mkFstExpr a_ty b_ty = do
a_var <- newSysLocalDs a_ty
b_var <- newSysLocalDs b_ty
pair_var <- newSysLocalDs (mkCorePairTy a_ty b_ty)
return (Lam pair_var
(coreCasePair pair_var a_var b_var (Var a_var)))
-- construct CoreExpr for \ (a :: a_ty, b :: b_ty) -> b
mkSndExpr :: Type -> Type -> DsM CoreExpr
mkSndExpr a_ty b_ty = do
a_var <- newSysLocalDs a_ty
b_var <- newSysLocalDs b_ty
pair_var <- newSysLocalDs (mkCorePairTy a_ty b_ty)
return (Lam pair_var
(coreCasePair pair_var a_var b_var (Var b_var)))
{-
Build case analysis of a tuple. This cannot be done in the DsM monad,
because the list of variables is typically not yet defined.
-}
-- coreCaseTuple [u1..] v [x1..xn] body
-- = case v of v { (x1, .., xn) -> body }
-- But the matching may be nested if the tuple is very big
coreCaseTuple :: UniqSupply -> Id -> [Id] -> CoreExpr -> CoreExpr
coreCaseTuple uniqs scrut_var vars body
= mkTupleCase uniqs vars body scrut_var (Var scrut_var)
coreCasePair :: Id -> Id -> Id -> CoreExpr -> CoreExpr
coreCasePair scrut_var var1 var2 body
= Case (Var scrut_var) scrut_var (exprType body)
[(DataAlt (tupleCon BoxedTuple 2), [var1, var2], body)]
mkCorePairTy :: Type -> Type -> Type
mkCorePairTy t1 t2 = mkBoxedTupleTy [t1, t2]
mkCorePairExpr :: CoreExpr -> CoreExpr -> CoreExpr
mkCorePairExpr e1 e2 = mkCoreTup [e1, e2]
mkCoreUnitExpr :: CoreExpr
mkCoreUnitExpr = mkCoreTup []
{-
The input is divided into a local environment, which is a flat tuple
(unless it's too big), and a stack, which is a right-nested pair.
In general, the input has the form
((x1,...,xn), (s1,...(sk,())...))
where xi are the environment values, and si the ones on the stack,
with s1 being the "top", the first one to be matched with a lambda.
-}
envStackType :: [Id] -> Type -> Type
envStackType ids stack_ty = mkCorePairTy (mkBigCoreVarTupTy ids) stack_ty
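-- A small worked instance of the shape above (a hedged sketch): with
-- environment ids [x, y] and a one-element stack,
--   envStackType [x, y] (mkCorePairTy s_ty unitTy)
-- is the Core type ((x_ty, y_ty), (s_ty, ())), i.e. the flat environment tuple
-- paired with the right-nested stack.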
-- splitTypeAt n (t1,... (tn,t)...) = ([t1, ..., tn], t)
splitTypeAt :: Int -> Type -> ([Type], Type)
splitTypeAt n ty
| n == 0 = ([], ty)
| otherwise = case tcTyConAppArgs ty of
[t, ty'] -> let (ts, ty_r) = splitTypeAt (n-1) ty' in (t:ts, ty_r)
_ -> pprPanic "splitTypeAt" (ppr ty)
----------------------------------------------
-- buildEnvStack
--
-- ((x1,...,xn),stk)
buildEnvStack :: [Id] -> Id -> CoreExpr
buildEnvStack env_ids stack_id
= mkCorePairExpr (mkBigCoreVarTup env_ids) (Var stack_id)
----------------------------------------------
-- matchEnvStack
--
-- \ ((x1,...,xn),stk) -> body
-- =>
-- \ pair ->
-- case pair of (tup,stk) ->
-- case tup of (x1,...,xn) ->
-- body
matchEnvStack :: [Id] -- x1..xn
-> Id -- stk
-> CoreExpr -- e
-> DsM CoreExpr
matchEnvStack env_ids stack_id body = do
uniqs <- newUniqueSupply
tup_var <- newSysLocalDs (mkBigCoreVarTupTy env_ids)
let match_env = coreCaseTuple uniqs tup_var env_ids body
pair_id <- newSysLocalDs (mkCorePairTy (idType tup_var) (idType stack_id))
return (Lam pair_id (coreCasePair pair_id tup_var stack_id match_env))
----------------------------------------------
-- matchEnv
--
-- \ (x1,...,xn) -> body
-- =>
-- \ tup ->
-- case tup of (x1,...,xn) ->
-- body
matchEnv :: [Id] -- x1..xn
-> CoreExpr -- e
-> DsM CoreExpr
matchEnv env_ids body = do
uniqs <- newUniqueSupply
tup_id <- newSysLocalDs (mkBigCoreVarTupTy env_ids)
return (Lam tup_id (coreCaseTuple uniqs tup_id env_ids body))
----------------------------------------------
-- matchVarStack
--
-- case (x1, ...(xn, s)...) -> e
-- =>
-- case z0 of (x1,z1) ->
-- case zn-1 of (xn,s) ->
-- e
matchVarStack :: [Id] -> Id -> CoreExpr -> DsM (Id, CoreExpr)
matchVarStack [] stack_id body = return (stack_id, body)
matchVarStack (param_id:param_ids) stack_id body = do
(tail_id, tail_code) <- matchVarStack param_ids stack_id body
pair_id <- newSysLocalDs (mkCorePairTy (idType param_id) (idType tail_id))
return (pair_id, coreCasePair pair_id param_id tail_id tail_code)
mkHsEnvStackExpr :: [Id] -> Id -> LHsExpr Id
mkHsEnvStackExpr env_ids stack_id
= mkLHsTupleExpr [mkLHsVarTuple env_ids, nlHsVar stack_id]
-- Translation of arrow abstraction
-- D; xs |-a c : () --> t' ---> c'
-- --------------------------
-- D |- proc p -> c :: a t t' ---> premap (\ p -> ((xs),())) c'
--
-- where (xs) is the tuple of variables bound by p
dsProcExpr
:: LPat Id
-> LHsCmdTop Id
-> DsM CoreExpr
dsProcExpr pat (L _ (HsCmdTop cmd _unitTy cmd_ty ids)) = do
(meth_binds, meth_ids) <- mkCmdEnv ids
let locals = mkVarSet (collectPatBinders pat)
(core_cmd, _free_vars, env_ids) <- dsfixCmd meth_ids locals unitTy cmd_ty cmd
let env_ty = mkBigCoreVarTupTy env_ids
let env_stk_ty = mkCorePairTy env_ty unitTy
let env_stk_expr = mkCorePairExpr (mkBigCoreVarTup env_ids) mkCoreUnitExpr
fail_expr <- mkFailExpr ProcExpr env_stk_ty
var <- selectSimpleMatchVarL pat
match_code <- matchSimply (Var var) ProcExpr pat env_stk_expr fail_expr
let pat_ty = hsLPatType pat
proc_code = do_premap meth_ids pat_ty env_stk_ty cmd_ty
(Lam var match_code)
core_cmd
return (mkLets meth_binds proc_code)
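-- A hedged user-level example of the translation above (a sketch, not part of
-- this module; 'addA' follows the usual arrow-notation example): with Arrows,
--
--   {-# LANGUAGE Arrows #-}
--   import Control.Arrow
--
--   addA :: Arrow a => a b Int -> a b Int -> a b Int
--   addA f g = proc x -> do
--                y <- f -< x
--                z <- g -< x
--                returnA -< y + z
--
-- dsProcExpr matches the proc pattern (here just 'x') against the environment
-- tuple and produces, roughly, premap (\ x -> ((x), ())) <translated command>,
-- which is the shape shown in the translation scheme above.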
{-
Translation of a command judgement of the form
D; xs |-a c : stk --> t
to an expression e such that
D |- e :: a (xs, stk) t
-}
dsLCmd :: DsCmdEnv -> IdSet -> Type -> Type -> LHsCmd Id -> [Id]
-> DsM (CoreExpr, IdSet)
dsLCmd ids local_vars stk_ty res_ty cmd env_ids
= dsCmd ids local_vars stk_ty res_ty (unLoc cmd) env_ids
dsCmd :: DsCmdEnv -- arrow combinators
-> IdSet -- set of local vars available to this command
-> Type -- type of the stack (right-nested tuple)
-> Type -- return type of the command
-> HsCmd Id -- command to desugar
-> [Id] -- list of vars in the input to this command
-- This is typically fed back,
-- so don't pull on it too early
-> DsM (CoreExpr, -- desugared expression
IdSet) -- subset of local vars that occur free
-- D |- fun :: a t1 t2
-- D, xs |- arg :: t1
-- -----------------------------
-- D; xs |-a fun -< arg : stk --> t2
--
-- ---> premap (\ ((xs), _stk) -> arg) fun
dsCmd ids local_vars stack_ty res_ty
(HsCmdArrApp arrow arg arrow_ty HsFirstOrderApp _)
env_ids = do
let
(a_arg_ty, _res_ty') = tcSplitAppTy arrow_ty
(_a_ty, arg_ty) = tcSplitAppTy a_arg_ty
core_arrow <- dsLExpr arrow
core_arg <- dsLExpr arg
stack_id <- newSysLocalDs stack_ty
core_make_arg <- matchEnvStack env_ids stack_id core_arg
return (do_premap ids
(envStackType env_ids stack_ty)
arg_ty
res_ty
core_make_arg
core_arrow,
exprFreeIds core_arg `intersectVarSet` local_vars)
-- D, xs |- fun :: a t1 t2
-- D, xs |- arg :: t1
-- ------------------------------
-- D; xs |-a fun -<< arg : stk --> t2
--
-- ---> premap (\ ((xs), _stk) -> (fun, arg)) app
dsCmd ids local_vars stack_ty res_ty
(HsCmdArrApp arrow arg arrow_ty HsHigherOrderApp _)
env_ids = do
let
(a_arg_ty, _res_ty') = tcSplitAppTy arrow_ty
(_a_ty, arg_ty) = tcSplitAppTy a_arg_ty
core_arrow <- dsLExpr arrow
core_arg <- dsLExpr arg
stack_id <- newSysLocalDs stack_ty
core_make_pair <- matchEnvStack env_ids stack_id
(mkCorePairExpr core_arrow core_arg)
return (do_premap ids
(envStackType env_ids stack_ty)
(mkCorePairTy arrow_ty arg_ty)
res_ty
core_make_pair
(do_app ids arg_ty res_ty),
(exprFreeIds core_arrow `unionVarSet` exprFreeIds core_arg)
`intersectVarSet` local_vars)
-- D; ys |-a cmd : (t,stk) --> t'
-- D, xs |- exp :: t
-- ------------------------
-- D; xs |-a cmd exp : stk --> t'
--
-- ---> premap (\ ((xs),stk) -> ((ys),(e,stk))) cmd
dsCmd ids local_vars stack_ty res_ty (HsCmdApp cmd arg) env_ids = do
core_arg <- dsLExpr arg
let
arg_ty = exprType core_arg
stack_ty' = mkCorePairTy arg_ty stack_ty
(core_cmd, free_vars, env_ids')
<- dsfixCmd ids local_vars stack_ty' res_ty cmd
stack_id <- newSysLocalDs stack_ty
arg_id <- newSysLocalDs arg_ty
-- push the argument expression onto the stack
let
stack' = mkCorePairExpr (Var arg_id) (Var stack_id)
core_body = bindNonRec arg_id core_arg
(mkCorePairExpr (mkBigCoreVarTup env_ids') stack')
-- match the environment and stack against the input
core_map <- matchEnvStack env_ids stack_id core_body
return (do_premap ids
(envStackType env_ids stack_ty)
(envStackType env_ids' stack_ty')
res_ty
core_map
core_cmd,
free_vars `unionVarSet`
(exprFreeIds core_arg `intersectVarSet` local_vars))
-- D; ys |-a cmd : stk --> t'
-- -----------------------------------------------
-- D; xs |-a \ p1 ... pk -> cmd : (t1,...(tk,stk)...) --> t'
--
-- ---> premap (\ ((xs), (p1, ... (pk,stk)...)) -> ((ys),stk)) cmd
dsCmd ids local_vars stack_ty res_ty
(HsCmdLam (MG { mg_alts = [L _ (Match _ pats _
(GRHSs [L _ (GRHS [] body)] _ ))] }))
env_ids = do
let
pat_vars = mkVarSet (collectPatsBinders pats)
local_vars' = pat_vars `unionVarSet` local_vars
(pat_tys, stack_ty') = splitTypeAt (length pats) stack_ty
(core_body, free_vars, env_ids') <- dsfixCmd ids local_vars' stack_ty' res_ty body
param_ids <- mapM newSysLocalDs pat_tys
stack_id' <- newSysLocalDs stack_ty'
-- the expression is built from the inside out, so the actions
-- are presented in reverse order
let
-- build a new environment, plus what's left of the stack
core_expr = buildEnvStack env_ids' stack_id'
in_ty = envStackType env_ids stack_ty
in_ty' = envStackType env_ids' stack_ty'
fail_expr <- mkFailExpr LambdaExpr in_ty'
-- match the patterns against the parameters
match_code <- matchSimplys (map Var param_ids) LambdaExpr pats core_expr fail_expr
-- match the parameters against the top of the old stack
(stack_id, param_code) <- matchVarStack param_ids stack_id' match_code
-- match the old environment and stack against the input
select_code <- matchEnvStack env_ids stack_id param_code
return (do_premap ids in_ty in_ty' res_ty select_code core_body,
free_vars `minusVarSet` pat_vars)
dsCmd ids local_vars stack_ty res_ty (HsCmdPar cmd) env_ids
= dsLCmd ids local_vars stack_ty res_ty cmd env_ids
-- D, xs |- e :: Bool
-- D; xs1 |-a c1 : stk --> t
-- D; xs2 |-a c2 : stk --> t
-- ----------------------------------------
-- D; xs |-a if e then c1 else c2 : stk --> t
--
-- ---> premap (\ ((xs),stk) ->
-- if e then Left ((xs1),stk) else Right ((xs2),stk))
-- (c1 ||| c2)
dsCmd ids local_vars stack_ty res_ty (HsCmdIf mb_fun cond then_cmd else_cmd)
env_ids = do
core_cond <- dsLExpr cond
(core_then, fvs_then, then_ids) <- dsfixCmd ids local_vars stack_ty res_ty then_cmd
(core_else, fvs_else, else_ids) <- dsfixCmd ids local_vars stack_ty res_ty else_cmd
stack_id <- newSysLocalDs stack_ty
either_con <- dsLookupTyCon eitherTyConName
left_con <- dsLookupDataCon leftDataConName
right_con <- dsLookupDataCon rightDataConName
let mk_left_expr ty1 ty2 e = mkCoreConApps left_con [Type ty1, Type ty2, e]
mk_right_expr ty1 ty2 e = mkCoreConApps right_con [Type ty1, Type ty2, e]
in_ty = envStackType env_ids stack_ty
then_ty = envStackType then_ids stack_ty
else_ty = envStackType else_ids stack_ty
sum_ty = mkTyConApp either_con [then_ty, else_ty]
fvs_cond = exprFreeIds core_cond `intersectVarSet` local_vars
core_left = mk_left_expr then_ty else_ty (buildEnvStack then_ids stack_id)
core_right = mk_right_expr then_ty else_ty (buildEnvStack else_ids stack_id)
core_if <- case mb_fun of
Just fun -> do { core_fun <- dsExpr fun
; matchEnvStack env_ids stack_id $
mkCoreApps core_fun [core_cond, core_left, core_right] }
Nothing -> matchEnvStack env_ids stack_id $
mkIfThenElse core_cond core_left core_right
return (do_premap ids in_ty sum_ty res_ty
core_if
(do_choice ids then_ty else_ty res_ty core_then core_else),
fvs_cond `unionVarSet` fvs_then `unionVarSet` fvs_else)
{-
Case commands are treated in much the same way as if commands
(see above) except that there are more alternatives. For example
case e of { p1 -> c1; p2 -> c2; p3 -> c3 }
is translated to
premap (\ ((xs)*ts) -> case e of
p1 -> (Left (Left (xs1)*ts))
p2 -> Left ((Right (xs2)*ts))
p3 -> Right ((xs3)*ts))
((c1 ||| c2) ||| c3)
The idea is to extract the commands from the case, build a balanced tree
of choices, and replace the commands with expressions that build tagged
tuples, obtaining a case expression that can be desugared normally.
To build all this, we use triples describing segments of the list of
case bodies, containing the following fields:
* a list of expressions of the form (Left|Right)* ((xs)*ts), to be put
into the case replacing the commands
* a sum type that is the common type of these expressions, and also the
input type of the arrow
* a CoreExpr for an arrow built by combining the translated command
bodies with |||.
-}
dsCmd ids local_vars stack_ty res_ty
(HsCmdCase exp (MG { mg_alts = matches, mg_arg_tys = arg_tys, mg_origin = origin }))
env_ids = do
stack_id <- newSysLocalDs stack_ty
-- Extract and desugar the leaf commands in the case, building tuple
-- expressions that will (after tagging) replace these leaves
let
leaves = concatMap leavesMatch matches
make_branch (leaf, bound_vars) = do
(core_leaf, _fvs, leaf_ids) <-
dsfixCmd ids (bound_vars `unionVarSet` local_vars) stack_ty res_ty leaf
return ([mkHsEnvStackExpr leaf_ids stack_id],
envStackType leaf_ids stack_ty,
core_leaf)
branches <- mapM make_branch leaves
either_con <- dsLookupTyCon eitherTyConName
left_con <- dsLookupDataCon leftDataConName
right_con <- dsLookupDataCon rightDataConName
let
left_id = HsVar (dataConWrapId left_con)
right_id = HsVar (dataConWrapId right_con)
left_expr ty1 ty2 e = noLoc $ HsApp (noLoc $ HsWrap (mkWpTyApps [ty1, ty2]) left_id ) e
right_expr ty1 ty2 e = noLoc $ HsApp (noLoc $ HsWrap (mkWpTyApps [ty1, ty2]) right_id) e
-- Prefix each tuple with a distinct series of Left's and Right's,
-- in a balanced way, keeping track of the types.
merge_branches (builds1, in_ty1, core_exp1)
(builds2, in_ty2, core_exp2)
= (map (left_expr in_ty1 in_ty2) builds1 ++
map (right_expr in_ty1 in_ty2) builds2,
mkTyConApp either_con [in_ty1, in_ty2],
do_choice ids in_ty1 in_ty2 res_ty core_exp1 core_exp2)
(leaves', sum_ty, core_choices) = foldb merge_branches branches
-- Replace the commands in the case with these tagged tuples,
-- yielding a HsExpr Id we can feed to dsExpr.
(_, matches') = mapAccumL (replaceLeavesMatch res_ty) leaves' matches
in_ty = envStackType env_ids stack_ty
core_body <- dsExpr (HsCase exp (MG { mg_alts = matches', mg_arg_tys = arg_tys
, mg_res_ty = sum_ty, mg_origin = origin }))
-- Note that we replace the HsCase result type by sum_ty,
-- which is the type of matches'
core_matches <- matchEnvStack env_ids stack_id core_body
return (do_premap ids in_ty sum_ty res_ty core_matches core_choices,
exprFreeIds core_body `intersectVarSet` local_vars)
-- D; ys |-a cmd : stk --> t
-- ----------------------------------
-- D; xs |-a let binds in cmd : stk --> t
--
-- ---> premap (\ ((xs),stk) -> let binds in ((ys),stk)) c
dsCmd ids local_vars stack_ty res_ty (HsCmdLet binds body) env_ids = do
let
defined_vars = mkVarSet (collectLocalBinders binds)
local_vars' = defined_vars `unionVarSet` local_vars
(core_body, _free_vars, env_ids') <- dsfixCmd ids local_vars' stack_ty res_ty body
stack_id <- newSysLocalDs stack_ty
-- build a new environment, plus the stack, using the let bindings
core_binds <- dsLocalBinds binds (buildEnvStack env_ids' stack_id)
-- match the old environment and stack against the input
core_map <- matchEnvStack env_ids stack_id core_binds
return (do_premap ids
(envStackType env_ids stack_ty)
(envStackType env_ids' stack_ty)
res_ty
core_map
core_body,
exprFreeIds core_binds `intersectVarSet` local_vars)
-- D; xs |-a ss : t
-- ----------------------------------
-- D; xs |-a do { ss } : () --> t
--
-- ---> premap (\ (env,stk) -> env) c
dsCmd ids local_vars stack_ty res_ty (HsCmdDo stmts _) env_ids = do
(core_stmts, env_ids') <- dsCmdDo ids local_vars res_ty stmts env_ids
let env_ty = mkBigCoreVarTupTy env_ids
core_fst <- mkFstExpr env_ty stack_ty
return (do_premap ids
(mkCorePairTy env_ty stack_ty)
env_ty
res_ty
core_fst
core_stmts,
env_ids')
-- D |- e :: forall e. a1 (e,stk1) t1 -> ... an (e,stkn) tn -> a (e,stk) t
-- D; xs |-a ci :: stki --> ti
-- -----------------------------------
-- D; xs |-a (|e c1 ... cn|) :: stk --> t ---> e [t_xs] c1 ... cn
dsCmd _ids local_vars _stack_ty _res_ty (HsCmdArrForm op _ args) env_ids = do
let env_ty = mkBigCoreVarTupTy env_ids
core_op <- dsLExpr op
(core_args, fv_sets) <- mapAndUnzipM (dsTrimCmdArg local_vars env_ids) args
return (mkApps (App core_op (Type env_ty)) core_args,
unionVarSets fv_sets)
dsCmd ids local_vars stack_ty res_ty (HsCmdCast coercion cmd) env_ids = do
(core_cmd, env_ids') <- dsCmd ids local_vars stack_ty res_ty cmd env_ids
wrapped_cmd <- dsHsWrapper (mkWpCast coercion) core_cmd
return (wrapped_cmd, env_ids')
dsCmd _ _ _ _ _ c = pprPanic "dsCmd" (ppr c)
-- D; ys |-a c : stk --> t (ys <= xs)
-- ---------------------
-- D; xs |-a c : stk --> t ---> premap (\ ((xs),stk) -> ((ys),stk)) c
dsTrimCmdArg
:: IdSet -- set of local vars available to this command
-> [Id] -- list of vars in the input to this command
-> LHsCmdTop Id -- command argument to desugar
-> DsM (CoreExpr, -- desugared expression
IdSet) -- subset of local vars that occur free
dsTrimCmdArg local_vars env_ids (L _ (HsCmdTop cmd stack_ty cmd_ty ids)) = do
(meth_binds, meth_ids) <- mkCmdEnv ids
(core_cmd, free_vars, env_ids') <- dsfixCmd meth_ids local_vars stack_ty cmd_ty cmd
stack_id <- newSysLocalDs stack_ty
trim_code <- matchEnvStack env_ids stack_id (buildEnvStack env_ids' stack_id)
let
in_ty = envStackType env_ids stack_ty
in_ty' = envStackType env_ids' stack_ty
arg_code = if env_ids' == env_ids then core_cmd else
do_premap meth_ids in_ty in_ty' cmd_ty trim_code core_cmd
return (mkLets meth_binds arg_code, free_vars)
-- Given D; xs |-a c : stk --> t, builds c with xs fed back.
-- Typically needs to be prefixed with arr (\(p, stk) -> ((xs),stk))
dsfixCmd
:: DsCmdEnv -- arrow combinators
-> IdSet -- set of local vars available to this command
-> Type -- type of the stack (right-nested tuple)
-> Type -- return type of the command
-> LHsCmd Id -- command to desugar
-> DsM (CoreExpr, -- desugared expression
IdSet, -- subset of local vars that occur free
[Id]) -- the same local vars as a list, fed back
dsfixCmd ids local_vars stk_ty cmd_ty cmd
= trimInput (dsLCmd ids local_vars stk_ty cmd_ty cmd)
-- Feed back the list of local variables actually used by a command,
-- for use as the input tuple of the generated arrow.
trimInput
:: ([Id] -> DsM (CoreExpr, IdSet))
-> DsM (CoreExpr, -- desugared expression
IdSet, -- subset of local vars that occur free
[Id]) -- same local vars as a list, fed back to
-- the inner function to form the tuple of
-- inputs to the arrow.
trimInput build_arrow
= fixDs (\ ~(_,_,env_ids) -> do
(core_cmd, free_vars) <- build_arrow env_ids
return (core_cmd, free_vars, varSetElems free_vars))
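-- Added note (commentary, not from the original source): the lazy pattern in
-- the argument to 'fixDs' is what makes this knot-tying work: 'build_arrow'
-- is handed the very 'env_ids' list that is computed from its own result, so
-- it may mention that list but must not force it while building the
-- expression.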
{-
Translation of command judgements of the form
D |-a do { ss } : t
-}
dsCmdDo :: DsCmdEnv -- arrow combinators
-> IdSet -- set of local vars available to this statement
-> Type -- return type of the statement
-> [CmdLStmt Id] -- statements to desugar
-> [Id] -- list of vars in the input to this statement
-- This is typically fed back,
-- so don't pull on it too early
-> DsM (CoreExpr, -- desugared expression
IdSet) -- subset of local vars that occur free
dsCmdDo _ _ _ [] _ = panic "dsCmdDo"
-- D; xs |-a c : () --> t
-- --------------------------
-- D; xs |-a do { c } : t
--
-- ---> premap (\ (xs) -> ((xs), ())) c
dsCmdDo ids local_vars res_ty [L _ (LastStmt body _)] env_ids = do
(core_body, env_ids') <- dsLCmd ids local_vars unitTy res_ty body env_ids
let env_ty = mkBigCoreVarTupTy env_ids
env_var <- newSysLocalDs env_ty
let core_map = Lam env_var (mkCorePairExpr (Var env_var) mkCoreUnitExpr)
return (do_premap ids
env_ty
(mkCorePairTy env_ty unitTy)
res_ty
core_map
core_body,
env_ids')
dsCmdDo ids local_vars res_ty (stmt:stmts) env_ids = do
let
bound_vars = mkVarSet (collectLStmtBinders stmt)
local_vars' = bound_vars `unionVarSet` local_vars
(core_stmts, _, env_ids') <- trimInput (dsCmdDo ids local_vars' res_ty stmts)
(core_stmt, fv_stmt) <- dsCmdLStmt ids local_vars env_ids' stmt env_ids
return (do_compose ids
(mkBigCoreVarTupTy env_ids)
(mkBigCoreVarTupTy env_ids')
res_ty
core_stmt
core_stmts,
fv_stmt)
{-
A statement maps one local environment to another, and is represented
as an arrow from one tuple type to another. A statement sequence is
translated to a composition of such arrows.
-}
dsCmdLStmt :: DsCmdEnv -> IdSet -> [Id] -> CmdLStmt Id -> [Id]
-> DsM (CoreExpr, IdSet)
dsCmdLStmt ids local_vars out_ids cmd env_ids
= dsCmdStmt ids local_vars out_ids (unLoc cmd) env_ids
dsCmdStmt
:: DsCmdEnv -- arrow combinators
-> IdSet -- set of local vars available to this statement
-> [Id] -- list of vars in the output of this statement
-> CmdStmt Id -- statement to desugar
-> [Id] -- list of vars in the input to this statement
-- This is typically fed back,
-- so don't pull on it too early
-> DsM (CoreExpr, -- desugared expression
IdSet) -- subset of local vars that occur free
-- D; xs1 |-a c : () --> t
-- D; xs' |-a do { ss } : t'
-- ------------------------------
-- D; xs |-a do { c; ss } : t'
--
-- ---> premap (\ ((xs)) -> (((xs1),()),(xs')))
-- (first c >>> arr snd) >>> ss
dsCmdStmt ids local_vars out_ids (BodyStmt cmd _ _ c_ty) env_ids = do
(core_cmd, fv_cmd, env_ids1) <- dsfixCmd ids local_vars unitTy c_ty cmd
core_mux <- matchEnv env_ids
(mkCorePairExpr
(mkCorePairExpr (mkBigCoreVarTup env_ids1) mkCoreUnitExpr)
(mkBigCoreVarTup out_ids))
let
in_ty = mkBigCoreVarTupTy env_ids
in_ty1 = mkCorePairTy (mkBigCoreVarTupTy env_ids1) unitTy
out_ty = mkBigCoreVarTupTy out_ids
before_c_ty = mkCorePairTy in_ty1 out_ty
after_c_ty = mkCorePairTy c_ty out_ty
snd_fn <- mkSndExpr c_ty out_ty
return (do_premap ids in_ty before_c_ty out_ty core_mux $
do_compose ids before_c_ty after_c_ty out_ty
(do_first ids in_ty1 c_ty out_ty core_cmd) $
do_arr ids after_c_ty out_ty snd_fn,
extendVarSetList fv_cmd out_ids)
-- D; xs1 |-a c : () --> t
-- D; xs' |-a do { ss } : t' xs2 = xs' - defs(p)
-- -----------------------------------
-- D; xs |-a do { p <- c; ss } : t'
--
-- ---> premap (\ (xs) -> (((xs1),()),(xs2)))
-- (first c >>> arr (\ (p, (xs2)) -> (xs'))) >>> ss
--
-- It would be simpler and more consistent to do this using second,
-- but that's likely to be defined in terms of first.
dsCmdStmt ids local_vars out_ids (BindStmt pat cmd _ _) env_ids = do
(core_cmd, fv_cmd, env_ids1) <- dsfixCmd ids local_vars unitTy (hsLPatType pat) cmd
let
pat_ty = hsLPatType pat
pat_vars = mkVarSet (collectPatBinders pat)
env_ids2 = varSetElems (mkVarSet out_ids `minusVarSet` pat_vars)
env_ty2 = mkBigCoreVarTupTy env_ids2
-- multiplexing function
-- \ (xs) -> (((xs1),()),(xs2))
core_mux <- matchEnv env_ids
(mkCorePairExpr
(mkCorePairExpr (mkBigCoreVarTup env_ids1) mkCoreUnitExpr)
(mkBigCoreVarTup env_ids2))
-- projection function
-- \ (p, (xs2)) -> (xs')
env_id <- newSysLocalDs env_ty2
uniqs <- newUniqueSupply
let
after_c_ty = mkCorePairTy pat_ty env_ty2
out_ty = mkBigCoreVarTupTy out_ids
body_expr = coreCaseTuple uniqs env_id env_ids2 (mkBigCoreVarTup out_ids)
fail_expr <- mkFailExpr (StmtCtxt DoExpr) out_ty
pat_id <- selectSimpleMatchVarL pat
match_code <- matchSimply (Var pat_id) (StmtCtxt DoExpr) pat body_expr fail_expr
pair_id <- newSysLocalDs after_c_ty
let
proj_expr = Lam pair_id (coreCasePair pair_id pat_id env_id match_code)
-- put it all together
let
in_ty = mkBigCoreVarTupTy env_ids
in_ty1 = mkCorePairTy (mkBigCoreVarTupTy env_ids1) unitTy
in_ty2 = mkBigCoreVarTupTy env_ids2
before_c_ty = mkCorePairTy in_ty1 in_ty2
return (do_premap ids in_ty before_c_ty out_ty core_mux $
do_compose ids before_c_ty after_c_ty out_ty
(do_first ids in_ty1 pat_ty in_ty2 core_cmd) $
do_arr ids after_c_ty out_ty proj_expr,
fv_cmd `unionVarSet` (mkVarSet out_ids `minusVarSet` pat_vars))
-- D; xs' |-a do { ss } : t
-- --------------------------------------
-- D; xs |-a do { let binds; ss } : t
--
-- ---> arr (\ (xs) -> let binds in (xs')) >>> ss
dsCmdStmt ids local_vars out_ids (LetStmt binds) env_ids = do
-- build a new environment using the let bindings
core_binds <- dsLocalBinds binds (mkBigCoreVarTup out_ids)
-- match the old environment against the input
core_map <- matchEnv env_ids core_binds
return (do_arr ids
(mkBigCoreVarTupTy env_ids)
(mkBigCoreVarTupTy out_ids)
core_map,
exprFreeIds core_binds `intersectVarSet` local_vars)
-- D; ys |-a do { ss; returnA -< ((xs1), (ys2)) } : ...
-- D; xs' |-a do { ss' } : t
-- ------------------------------------
-- D; xs |-a do { rec ss; ss' } : t
--
-- xs1 = xs' /\ defs(ss)
-- xs2 = xs' - defs(ss)
-- ys1 = ys - defs(ss)
-- ys2 = ys /\ defs(ss)
--
-- ---> arr (\(xs) -> ((ys1),(xs2))) >>>
-- first (loop (arr (\((ys1),~(ys2)) -> (ys)) >>> ss)) >>>
-- arr (\((xs1),(xs2)) -> (xs')) >>> ss'
dsCmdStmt ids local_vars out_ids
(RecStmt { recS_stmts = stmts
, recS_later_ids = later_ids, recS_rec_ids = rec_ids
, recS_later_rets = later_rets, recS_rec_rets = rec_rets })
env_ids = do
let
env2_id_set = mkVarSet out_ids `minusVarSet` mkVarSet later_ids
env2_ids = varSetElems env2_id_set
env2_ty = mkBigCoreVarTupTy env2_ids
-- post_loop_fn = \((later_ids),(env2_ids)) -> (out_ids)
uniqs <- newUniqueSupply
env2_id <- newSysLocalDs env2_ty
let
later_ty = mkBigCoreVarTupTy later_ids
post_pair_ty = mkCorePairTy later_ty env2_ty
post_loop_body = coreCaseTuple uniqs env2_id env2_ids (mkBigCoreVarTup out_ids)
post_loop_fn <- matchEnvStack later_ids env2_id post_loop_body
--- loop (...)
(core_loop, env1_id_set, env1_ids)
<- dsRecCmd ids local_vars stmts later_ids later_rets rec_ids rec_rets
-- pre_loop_fn = \(env_ids) -> ((env1_ids),(env2_ids))
let
env1_ty = mkBigCoreVarTupTy env1_ids
pre_pair_ty = mkCorePairTy env1_ty env2_ty
pre_loop_body = mkCorePairExpr (mkBigCoreVarTup env1_ids)
(mkBigCoreVarTup env2_ids)
pre_loop_fn <- matchEnv env_ids pre_loop_body
-- arr pre_loop_fn >>> first (loop (...)) >>> arr post_loop_fn
let
env_ty = mkBigCoreVarTupTy env_ids
out_ty = mkBigCoreVarTupTy out_ids
core_body = do_premap ids env_ty pre_pair_ty out_ty
pre_loop_fn
(do_compose ids pre_pair_ty post_pair_ty out_ty
(do_first ids env1_ty later_ty env2_ty
core_loop)
(do_arr ids post_pair_ty out_ty
post_loop_fn))
return (core_body, env1_id_set `unionVarSet` env2_id_set)
dsCmdStmt _ _ _ _ s = pprPanic "dsCmdStmt" (ppr s)
-- loop (premap (\ ((env1_ids), ~(rec_ids)) -> (env_ids))
-- (ss >>> arr (\ (out_ids) -> ((later_rets),(rec_rets))))) >>>
dsRecCmd
:: DsCmdEnv -- arrow combinators
-> IdSet -- set of local vars available to this statement
-> [CmdLStmt Id] -- list of statements inside the RecCmd
-> [Id] -- list of vars defined here and used later
-> [HsExpr Id] -- expressions corresponding to later_ids
-> [Id] -- list of vars fed back through the loop
-> [HsExpr Id] -- expressions corresponding to rec_ids
-> DsM (CoreExpr, -- desugared statement
IdSet, -- subset of local vars that occur free
[Id]) -- same local vars as a list
dsRecCmd ids local_vars stmts later_ids later_rets rec_ids rec_rets = do
let
later_id_set = mkVarSet later_ids
rec_id_set = mkVarSet rec_ids
local_vars' = rec_id_set `unionVarSet` later_id_set `unionVarSet` local_vars
-- mk_pair_fn = \ (out_ids) -> ((later_rets),(rec_rets))
core_later_rets <- mapM dsExpr later_rets
core_rec_rets <- mapM dsExpr rec_rets
let
-- possibly polymorphic version of vars of later_ids and rec_ids
out_ids = varSetElems (unionVarSets (map exprFreeIds (core_later_rets ++ core_rec_rets)))
out_ty = mkBigCoreVarTupTy out_ids
later_tuple = mkBigCoreTup core_later_rets
later_ty = mkBigCoreVarTupTy later_ids
rec_tuple = mkBigCoreTup core_rec_rets
rec_ty = mkBigCoreVarTupTy rec_ids
out_pair = mkCorePairExpr later_tuple rec_tuple
out_pair_ty = mkCorePairTy later_ty rec_ty
mk_pair_fn <- matchEnv out_ids out_pair
-- ss
(core_stmts, fv_stmts, env_ids) <- dsfixCmdStmts ids local_vars' out_ids stmts
-- squash_pair_fn = \ ((env1_ids), ~(rec_ids)) -> (env_ids)
rec_id <- newSysLocalDs rec_ty
let
env1_id_set = fv_stmts `minusVarSet` rec_id_set
env1_ids = varSetElems env1_id_set
env1_ty = mkBigCoreVarTupTy env1_ids
in_pair_ty = mkCorePairTy env1_ty rec_ty
core_body = mkBigCoreTup (map selectVar env_ids)
where
selectVar v
| v `elemVarSet` rec_id_set
= mkTupleSelector rec_ids v rec_id (Var rec_id)
| otherwise = Var v
squash_pair_fn <- matchEnvStack env1_ids rec_id core_body
-- loop (premap squash_pair_fn (ss >>> arr mk_pair_fn))
let
env_ty = mkBigCoreVarTupTy env_ids
core_loop = do_loop ids env1_ty later_ty rec_ty
(do_premap ids in_pair_ty env_ty out_pair_ty
squash_pair_fn
(do_compose ids env_ty out_ty out_pair_ty
core_stmts
(do_arr ids out_ty out_pair_ty mk_pair_fn)))
return (core_loop, env1_id_set, env1_ids)
{-
A sequence of statements (as in a rec) is desugared to an arrow between
two environments (no stack)
-}
dsfixCmdStmts
:: DsCmdEnv -- arrow combinators
-> IdSet -- set of local vars available to this statement
-> [Id] -- output vars of these statements
-> [CmdLStmt Id] -- statements to desugar
-> DsM (CoreExpr, -- desugared expression
IdSet, -- subset of local vars that occur free
[Id]) -- same local vars as a list
dsfixCmdStmts ids local_vars out_ids stmts
= trimInput (dsCmdStmts ids local_vars out_ids stmts)
dsCmdStmts
:: DsCmdEnv -- arrow combinators
-> IdSet -- set of local vars available to this statement
-> [Id] -- output vars of these statements
-> [CmdLStmt Id] -- statements to desugar
-> [Id] -- list of vars in the input to these statements
-> DsM (CoreExpr, -- desugared expression
IdSet) -- subset of local vars that occur free
dsCmdStmts ids local_vars out_ids [stmt] env_ids
= dsCmdLStmt ids local_vars out_ids stmt env_ids
dsCmdStmts ids local_vars out_ids (stmt:stmts) env_ids = do
let
bound_vars = mkVarSet (collectLStmtBinders stmt)
local_vars' = bound_vars `unionVarSet` local_vars
(core_stmts, _fv_stmts, env_ids') <- dsfixCmdStmts ids local_vars' out_ids stmts
(core_stmt, fv_stmt) <- dsCmdLStmt ids local_vars env_ids' stmt env_ids
return (do_compose ids
(mkBigCoreVarTupTy env_ids)
(mkBigCoreVarTupTy env_ids')
(mkBigCoreVarTupTy out_ids)
core_stmt
core_stmts,
fv_stmt)
dsCmdStmts _ _ _ [] _ = panic "dsCmdStmts []"
-- Match a list of expressions against a list of patterns, left-to-right.
matchSimplys :: [CoreExpr] -- Scrutinees
-> HsMatchContext Name -- Match kind
-> [LPat Id] -- Patterns they should match
-> CoreExpr -- Return this if they all match
-> CoreExpr -- Return this if they don't
-> DsM CoreExpr
matchSimplys [] _ctxt [] result_expr _fail_expr = return result_expr
matchSimplys (exp:exps) ctxt (pat:pats) result_expr fail_expr = do
match_code <- matchSimplys exps ctxt pats result_expr fail_expr
matchSimply exp ctxt pat match_code fail_expr
matchSimplys _ _ _ _ _ = panic "matchSimplys"
-- List of leaf expressions, with set of variables bound in each
leavesMatch :: LMatch Id (Located (body Id)) -> [(Located (body Id), IdSet)]
leavesMatch (L _ (Match _ pats _ (GRHSs grhss binds)))
= let
defined_vars = mkVarSet (collectPatsBinders pats)
`unionVarSet`
mkVarSet (collectLocalBinders binds)
in
[(body,
mkVarSet (collectLStmtsBinders stmts)
`unionVarSet` defined_vars)
| L _ (GRHS stmts body) <- grhss]
-- Replace the leaf commands in a match
replaceLeavesMatch
:: Type -- new result type
-> [Located (body' Id)] -- replacement leaf expressions of that type
-> LMatch Id (Located (body Id)) -- the matches of a case command
-> ([Located (body' Id)], -- remaining leaf expressions
LMatch Id (Located (body' Id))) -- updated match
replaceLeavesMatch _res_ty leaves (L loc (Match mf pat mt (GRHSs grhss binds)))
= let
(leaves', grhss') = mapAccumL replaceLeavesGRHS leaves grhss
in
(leaves', L loc (Match mf pat mt (GRHSs grhss' binds)))
replaceLeavesGRHS
:: [Located (body' Id)] -- replacement leaf expressions of that type
-> LGRHS Id (Located (body Id)) -- rhss of a case command
-> ([Located (body' Id)], -- remaining leaf expressions
LGRHS Id (Located (body' Id))) -- updated GRHS
replaceLeavesGRHS (leaf:leaves) (L loc (GRHS stmts _))
= (leaves, L loc (GRHS stmts leaf))
replaceLeavesGRHS [] _ = panic "replaceLeavesGRHS []"
-- Balanced fold of a non-empty list.
foldb :: (a -> a -> a) -> [a] -> a
foldb _ [] = error "foldb of empty list"
foldb _ [x] = x
foldb f xs = foldb f (fold_pairs xs)
where
fold_pairs [] = []
fold_pairs [x] = [x]
fold_pairs (x1:x2:xs) = f x1 x2:fold_pairs xs
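-- A worked illustration (added commentary, not from the original source):
-- on a five-element list the pairing proceeds as
--   foldb f [a,b,c,d,e]
--     = foldb f [f a b, f c d, e]
--     = foldb f [f (f a b) (f c d), e]
--     = f (f (f a b) (f c d)) e
-- so the result is a tree of depth O(log n) rather than the O(n) of foldr1.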
{-
Note [Dictionary binders in ConPatOut] See also same Note in HsUtils
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
The following functions to collect value variables from patterns are
copied from HsUtils, with one change: we also collect the dictionary
bindings (pat_binds) from ConPatOut. We need them for cases like
h :: Arrow a => Int -> a (Int,Int) Int
h x = proc (y,z) -> case compare x y of
GT -> returnA -< z+x
The type checker turns the case into
case compare x y of
GT { p77 = plusInt } -> returnA -< p77 z x
Here p77 is a local binding for the (+) operation.
See comments in HsUtils for why the other version does not include
these bindings.
-}
collectPatBinders :: LPat Id -> [Id]
collectPatBinders pat = collectl pat []
collectPatsBinders :: [LPat Id] -> [Id]
collectPatsBinders pats = foldr collectl [] pats
---------------------
collectl :: LPat Id -> [Id] -> [Id]
-- See Note [Dictionary binders in ConPatOut]
collectl (L _ pat) bndrs
= go pat
where
go (VarPat var) = var : bndrs
go (WildPat _) = bndrs
go (LazyPat pat) = collectl pat bndrs
go (BangPat pat) = collectl pat bndrs
go (AsPat (L _ a) pat) = a : collectl pat bndrs
go (ParPat pat) = collectl pat bndrs
go (ListPat pats _ _) = foldr collectl bndrs pats
go (PArrPat pats _) = foldr collectl bndrs pats
go (TuplePat pats _ _) = foldr collectl bndrs pats
go (ConPatIn _ ps) = foldr collectl bndrs (hsConPatArgs ps)
go (ConPatOut {pat_args=ps, pat_binds=ds}) =
collectEvBinders ds
++ foldr collectl bndrs (hsConPatArgs ps)
go (LitPat _) = bndrs
go (NPat _ _ _) = bndrs
go (NPlusKPat (L _ n) _ _ _) = n : bndrs
go (SigPatIn pat _) = collectl pat bndrs
go (SigPatOut pat _) = collectl pat bndrs
go (CoPat _ pat _) = collectl (noLoc pat) bndrs
go (ViewPat _ pat _) = collectl pat bndrs
go p@(SplicePat {}) = pprPanic "collectl/go" (ppr p)
go p@(QuasiQuotePat {}) = pprPanic "collectl/go" (ppr p)
collectEvBinders :: TcEvBinds -> [Id]
collectEvBinders (EvBinds bs) = foldrBag add_ev_bndr [] bs
collectEvBinders (TcEvBinds {}) = panic "ToDo: collectEvBinders"
add_ev_bndr :: EvBind -> [Id] -> [Id]
add_ev_bndr (EvBind { eb_lhs = b }) bs | isId b = b:bs
| otherwise = bs
-- A worry: what about coercion variable binders??
collectLStmtsBinders :: [LStmt Id body] -> [Id]
collectLStmtsBinders = concatMap collectLStmtBinders
collectLStmtBinders :: LStmt Id body -> [Id]
collectLStmtBinders = collectStmtBinders . unLoc
collectStmtBinders :: Stmt Id body -> [Id]
collectStmtBinders (BindStmt pat _ _ _) = collectPatBinders pat
collectStmtBinders (LetStmt binds) = collectLocalBinders binds
collectStmtBinders (BodyStmt {}) = []
collectStmtBinders (LastStmt {}) = []
collectStmtBinders (ParStmt xs _ _) = collectLStmtsBinders
$ [ s | ParStmtBlock ss _ _ <- xs, s <- ss]
collectStmtBinders (TransStmt { trS_stmts = stmts }) = collectLStmtsBinders stmts
collectStmtBinders (RecStmt { recS_later_ids = later_ids }) = later_ids
|
green-haskell/ghc
|
compiler/deSugar/DsArrows.hs
|
bsd-3-clause
| 46,047 | 0 | 22 | 13,598 | 9,630 | 4,963 | 4,667 | 704 | 20 |
{-# LANGUAGE TypeOperators
, DataKinds
, PolyKinds
, TypeFamilies
, GADTs
, UndecidableInstances
, RankNTypes
, ScopedTypeVariables
#-}
{-# OPTIONS_GHC -Wall #-}
{-# OPTIONS_GHC -Werror #-}
{-# OPTIONS_GHC -O1 -fspec-constr #-}
{-
ghc-stage2: panic! (the 'impossible' happened)
(GHC version 7.11.20150723 for x86_64-unknown-linux):
Template variable unbound in rewrite rule
-}
module List (sFoldr1) where
data Proxy t
data family Sing (a :: k)
data TyFun (a :: *) (b :: *)
type family Apply (f :: TyFun k1 k2 -> *) (x :: k1) :: k2
data instance Sing (f :: TyFun k1 k2 -> *) =
SLambda { applySing :: forall t. Sing t -> Sing (Apply f t) }
type SingFunction1 f = forall t. Sing t -> Sing (Apply f t)
type SingFunction2 f = forall t. Sing t -> SingFunction1 (Apply f t)
singFun2 :: Proxy f -> SingFunction2 f -> Sing f
singFun2 _ f = SLambda (\x -> SLambda (f x))
data (:$$) (j :: a) (i :: TyFun [a] [a])
type instance Apply ((:$$) j) i = (:) j i
data (:$) (l :: TyFun a (TyFun [a] [a] -> *))
type instance Apply (:$) l = (:$$) l
data instance Sing (z :: [a])
= z ~ '[] =>
SNil
| forall (m :: a)
(n :: [a]). z ~ (:) m n =>
SCons (Sing m) (Sing n)
data ErrorSym0 (t1 :: TyFun k1 k2)
type Let1627448493XsSym4 t_afee t_afef t_afeg t_afeh = Let1627448493Xs t_afee t_afef t_afeg t_afeh
type Let1627448493Xs f_afe9
x_afea
wild_1627448474_afeb
wild_1627448476_afec =
Apply (Apply (:$) wild_1627448474_afeb) wild_1627448476_afec
type Foldr1Sym2 (t_afdY :: TyFun a_afdP (TyFun a_afdP a_afdP -> *)
-> *)
(t_afdZ :: [a_afdP]) =
Foldr1 t_afdY t_afdZ
data Foldr1Sym1 (l_afe3 :: TyFun a_afdP (TyFun a_afdP a_afdP -> *)
-> *)
(l_afe2 :: TyFun [a_afdP] a_afdP)
type instance Apply (Foldr1Sym1 l_afe3) l_afe2 = Foldr1Sym2 l_afe3 l_afe2
data Foldr1Sym0 (l_afe0 :: TyFun (TyFun a_afdP (TyFun a_afdP a_afdP
-> *)
-> *) (TyFun [a_afdP] a_afdP -> *))
type instance Apply Foldr1Sym0 l = Foldr1Sym1 l
type family Foldr1 (a_afe5 :: TyFun a_afdP (TyFun a_afdP a_afdP
-> *)
-> *)
(a_afe6 :: [a_afdP]) :: a_afdP where
Foldr1 _z_afe7 '[x_afe8] = x_afe8
Foldr1 f_afe9 ((:) x_afea ((:) wild_1627448474_afeb wild_1627448476_afec)) = Apply (Apply f_afe9 x_afea) (Apply (Apply Foldr1Sym0 f_afe9) (Let1627448493XsSym4 f_afe9 x_afea wild_1627448474_afeb wild_1627448476_afec))
Foldr1 _z_afew '[] = Apply ErrorSym0 "Data.Singletons.List.foldr1: empty list"
sFoldr1 ::
forall (x :: TyFun a_afdP (TyFun a_afdP a_afdP -> *) -> *)
(y :: [a_afdP]).
Sing x
-> Sing y -> Sing (Apply (Apply Foldr1Sym0 x) y)
sFoldr1 _ (SCons _sX SNil) = undefined
sFoldr1 sF (SCons sX (SCons sWild_1627448474 sWild_1627448476))
= let
lambda_afeC ::
forall f_afe9 x_afea wild_1627448474_afeb wild_1627448476_afec.
Sing f_afe9
-> Sing x_afea
-> Sing wild_1627448474_afeb
-> Sing wild_1627448476_afec
-> Sing (Apply (Apply Foldr1Sym0 f_afe9) (Apply (Apply (:$) x_afea) (Apply (Apply (:$) wild_1627448474_afeb) wild_1627448476_afec)))
lambda_afeC f_afeD x_afeE wild_1627448474_afeF wild_1627448476_afeG
= let
sXs ::
Sing (Let1627448493XsSym4 f_afe9 x_afea wild_1627448474_afeb wild_1627448476_afec)
sXs
= applySing
(applySing
(singFun2 (undefined :: Proxy (:$)) SCons) wild_1627448474_afeF)
wild_1627448476_afeG
in
applySing
(applySing f_afeD x_afeE)
(applySing
(applySing (singFun2 (undefined :: Proxy Foldr1Sym0) sFoldr1) f_afeD)
sXs)
in lambda_afeC sF sX sWild_1627448474 sWild_1627448476
sFoldr1 _ SNil = undefined
|
gridaphobe/ghc
|
testsuite/tests/simplCore/should_compile/T10689a.hs
|
bsd-3-clause
| 4,120 | 0 | 22 | 1,288 | 1,214 | 668 | 546 | -1 | -1 |
{-# LANGUAGE CPP #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE UndecidableInstances #-}
-----------------------------------------------------------------------------
-- |
-- Module : Geometry.ThreeD.Camera
-- Copyright : (c) 2013-2017 diagrams team (see LICENSE)
-- License : BSD-style (see LICENSE)
-- Maintainer : [email protected]
--
-- Types to specify viewpoint for 3D rendering.
--
-----------------------------------------------------------------------------
module Geometry.ThreeD.Camera
(
-- * Cameras
Camera -- do not export constructor
, CameraLens (..)
, cameraLocation
, cameraAngle
, cameraView
, cameraLoc
, mm50Camera
-- * Perspective lens
, PerspectiveLens(..)
, mm50
, mm50Wide
, mm50Narrow
, fovx
-- * Orthographic lens
, OrthoLens(..)
, orthoBounds
-- , horizontalFieldOfView, verticalFieldOfView
-- , orthoWidth, orthoHeight
-- , camLoc, camForward, camUp, camRight, camLens
-- , facing_ZCamera, mm50Camera
-- , mm50, mm50Wide, mm50Narrow
-- , aspect, camAspect
, camForwardRight
, camForward
, camUp
, cameraLens
)
where
import Control.Lens
import Data.Typeable
import Geometry.Angle
import Geometry.Points
import Geometry.Space
import Geometry.ThreeD.Transform
import Geometry.ThreeD.Types
import Linear.Matrix (M44, mkTransformationMat, transpose,
(!*))
import Linear.Projection
import Linear.Vector
-- | A @Camera@ specifies a 3D viewpoint for rendering. It is
-- parameterized on the lens type, so backends can express which
-- lenses they handle.
--
-- Note that the constructor is intentionally not exported; to
-- construct a @Camera@, XXX?
data Camera l n = Camera
{ cameraLocation :: !(P3 n)
, cameraAngle :: !(Euler n)
, _cameraUp :: !(V3 n)
, camLens :: !(l n)
} deriving Typeable
type instance V (Camera l n) = V3
type instance N (Camera l n) = n
-- instance Num n => Transformable (Camera l n) where
-- transform t (Camera p f u l) =
-- Camera (transform t p)
-- (transform t f)
-- (transform t u)
-- l
class Typeable l => CameraLens l where
-- | The natural aspect ratio of the projection.
aspect :: Floating n => l n -> n
-- | The projection of a lens as a homogeneous transformation matrix.
lensProjection :: Floating n => l n -> M44 n
-- | The inverse projection of a lens as a homogeneous transformation
-- matrix.
inverseLensProjection :: Floating n => l n -> M44 n
instance Rotational (Camera l) where
euler f cam = f (cameraAngle cam) <&> \e -> cam {cameraAngle = e}
-- | The homogeneous view matrix for a camera, /not/ including the lens
-- projection.
cameraView :: RealFloat n => Camera l n -> M44 n
cameraView cam = mkTransformationMat m v
where
-- To get the view matrix we want the inverse of translating and then
-- rotating the camera. The inverse of a rotation matrix is its
-- transpose and the camera location is negated.
m = transpose (rotationMatrix cam)
v = m !* (-cam^.cameraLoc._Point)
cameraLoc :: Lens' (Camera l n) (P3 n)
cameraLoc f cam = f (cameraLocation cam) <&> \p -> cam {cameraLocation = p}
instance CameraLens l => CameraLens (Camera l) where
aspect = aspect . camLens
lensProjection = lensProjection . camLens
inverseLensProjection = inverseLensProjection . camLens
-- Perspective ---------------------------------------------------------
-- | A perspective projection
data PerspectiveLens n = PerspectiveLens
{ _fovx :: !(Angle n) -- ^ Horizontal field of view
, _fovy :: !(Angle n) -- ^ Vertical field of view
, _nearz :: !n -- ^ near clipping plane
, _farz :: !n -- ^ far clipping plane
}
deriving Typeable
makeLenses ''PerspectiveLens
type instance V (PerspectiveLens n) = V3
type instance N (PerspectiveLens n) = n
instance CameraLens PerspectiveLens where
aspect (PerspectiveLens h v _ _) = angleRatio h v
lensProjection l = perspective (l^.fovy.rad) (aspect l) (l^.nearz) (l^.farz)
inverseLensProjection l = inversePerspective (l^.fovy.rad) (aspect l) (l^.nearz) (l^.farz)
-- | mm50 has the field of view of a 50mm lens on standard 35mm film,
-- hence an aspect ratio of 3:2.
mm50 :: Floating n => PerspectiveLens n
mm50 = PerspectiveLens (40.5 @@ deg) (27 @@ deg) 0.1 1000
-- | mm50Wide has the same vertical field of view as mm50, but an
-- aspect ratio of 1.6, suitable for wide screen computer monitors.
mm50Wide :: Floating n => PerspectiveLens n
mm50Wide = PerspectiveLens (43.2 @@ deg) (27 @@ deg) 0.1 1000
-- | mm50Narrow has the same vertical field of view as mm50, but an
-- aspect ratio of 4:3, for VGA and similar computer resolutions.
mm50Narrow :: Floating n => PerspectiveLens n
mm50Narrow = PerspectiveLens (36 @@ deg) (27 @@ deg) 0.1 1000
-- Orthographic --------------------------------------------------------
-- | An orthographic projection
data OrthoLens n = OrthoLens
{ _orthoWidth :: n -- ^ Width
, _orthoHeight :: n -- ^ Height
, _orthoBounds :: V3 (n,n)
}
deriving Typeable
makeLenses ''OrthoLens
-- orthoRight, orthoLeft, orthoTop, orthoBottom, orthoNearZ, orthoFarZ
type instance V (OrthoLens n) = V3
type instance N (OrthoLens n) = n
instance CameraLens OrthoLens where
aspect o = o^.orthoHeight / o^.orthoWidth
lensProjection orthoLens = ortho l r b t n f where
V3 (l,r) (b,t) (n,f) = orthoLens^.orthoBounds
inverseLensProjection orthoLens = inverseOrtho l r b t n f where
V3 (l,r) (b,t) (n,f) = orthoLens^.orthoBounds
-- | A camera at the origin facing along the negative Z axis, with its
-- up-axis coincident with the positive Y axis. The field of view is
-- chosen to match a 50mm camera on 35mm film. Note that Cameras take
-- up no space in the Diagram.
mm50Camera :: Floating n => Camera PerspectiveLens n
mm50Camera = facing_ZCamera mm50
-- | 'facing_ZCamera l' is a camera at the origin facing along the
-- negative Z axis, with its up-axis coincident with the positive Y
-- axis, with the projection defined by l.
facing_ZCamera :: Num n => l n -> Camera l n
facing_ZCamera = Camera origin (Euler zero zero zero) (V3 0 1 0)
{-# ANN facing_ZCamera ("HLint: ignore Use camelCase" :: String) #-}
-- | The unit forward and right directions.
camForwardRight :: RealFloat n => Camera l n -> (V3 n, V3 n)
camForwardRight cam = (fw, V3 cy 0 (-sy))
where
fw = V3 (-sy*cp) sp (-cy*cp) -- - ^/ sqrt (1 + sp*sp)
y = cam^.yaw
p = cam^.pitch
sy = sinA y
cy = cosA y
sp = sinA p
cp = cosA p
{-# INLINE camForwardRight #-}
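-- A quick sanity check (added illustration, not from the original source):
-- with yaw = pitch = 0, as set up by 'facing_ZCamera', this yields
--   fw    = V3 0 0 (-1)   -- looking down the negative Z axis
--   right = V3 1 0 0      -- positive X
-- i.e. right is fw `cross` up for the up vector V3 0 1 0.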
camUp :: RealFloat n => Lens' (Camera l n) (V3 n)
camUp f (Camera loc angle up l) = f up <&> \up' -> Camera loc angle up' l
camForward :: RealFloat n => Lens' (Camera l n) (V3 n)
camForward f cam = f (fst $ camForwardRight cam) <&> \v ->
cam & pitch .~ atan2A (v^._x) (-v^._z)
& yaw .~ acosA (v^._z)
-- | The lens used for the camera.
cameraLens :: Lens (Camera l n) (Camera l' n) (l n) (l' n)
cameraLens f (Camera loc angle up l) = f l <&> Camera loc angle up
|
cchalmers/geometry
|
src/Geometry/ThreeD/Camera.hs
|
bsd-3-clause
| 7,467 | 0 | 13 | 1,725 | 1,688 | 927 | 761 | -1 | -1 |
{-# LANGUAGE ScopedTypeVariables #-}
{-# OPTIONS_GHC -fno-warn-partial-type-signatures #-}
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE PartialTypeSignatures #-}
{-# LANGUAGE RankNTypes #-}
module Haskell.Ide.HaRePlugin where
import Control.Monad.State
import Control.Monad.Trans.Control
import Data.Aeson
import Data.Monoid
import qualified Data.Text as T
import Exception
import Haskell.Ide.Engine.PluginDescriptor
import Haskell.Ide.Engine.PluginUtils
import Haskell.Ide.Engine.SemanticTypes
import qualified Language.Haskell.GhcMod.Monad as GM
import qualified Language.Haskell.GhcMod.Error as GM
import Language.Haskell.Refact.HaRe
import Language.Haskell.Refact.Utils.Monad
import Language.Haskell.Refact.Utils.Types
import Language.Haskell.Refact.Utils.Utils
import System.FilePath
-- ---------------------------------------------------------------------
hareDescriptor :: TaggedPluginDescriptor _
hareDescriptor = PluginDescriptor
{
pdUIShortName = "HaRe"
, pdUIOverview = "A Haskell 2010 refactoring tool. HaRe supports the full \
\Haskell 2010 standard, through making use of the GHC API. HaRe attempts to \
\operate in a safe way, by first writing new files with proposed changes, and \
\only swapping these with the originals when the change is accepted. "
, pdCommands =
buildCommand demoteCmd (Proxy :: Proxy "demote") "Move a definition one level down"
[".hs"] (SCtxPoint :& RNil) RNil SaveAll
:& buildCommand dupdefCmd (Proxy :: Proxy "dupdef") "Duplicate a definition"
[".hs"] (SCtxPoint :& RNil)
( SParamDesc (Proxy :: Proxy "name") (Proxy :: Proxy "the new name") SPtText SRequired
:& RNil) SaveAll
:& buildCommand iftocaseCmd (Proxy :: Proxy "iftocase") "Converts an if statement to a case statement"
[".hs"] (SCtxRegion :& RNil) RNil SaveAll
:& buildCommand liftonelevelCmd (Proxy :: Proxy "liftonelevel") "Move a definition one level up from where it is now"
[".hs"] (SCtxPoint :& RNil) RNil SaveAll
:& buildCommand lifttotoplevelCmd (Proxy :: Proxy "lifttotoplevel") "Move a definition to the top level from where it is now"
[".hs"] (SCtxPoint :& RNil) RNil SaveAll
:& buildCommand renameCmd (Proxy :: Proxy "rename") "rename a variable or type"
[".hs"] (SCtxPoint :& RNil)
( SParamDesc (Proxy :: Proxy "name") (Proxy :: Proxy "the new name") SPtText SRequired
:& RNil) SaveAll
:& RNil
, pdExposedServices = []
, pdUsedServices = []
}
-- ---------------------------------------------------------------------
demoteCmd :: CommandFunc RefactorResult
demoteCmd = CmdSync $ \_ctxs req ->
case getParams (IdFile "file" :& IdPos "start_pos" :& RNil) req of
Left err -> return err
Right (ParamFile fileName :& ParamPos pos :& RNil) ->
runHareCommand "demote" (compDemote (T.unpack fileName) (unPos pos))
Right _ -> return $ IdeResponseError (IdeError InternalError
"HaRePlugin.demoteCmd: ghc’s exhaustiveness checker is broken" Null)
-- compDemote :: FilePath -> SimpPos -> IO [FilePath]
-- ---------------------------------------------------------------------
dupdefCmd :: CommandFunc RefactorResult
dupdefCmd = CmdSync $ \_ctxs req ->
case getParams (IdFile "file" :& IdPos "start_pos" :& IdText "name" :& RNil) req of
Left err -> return err
Right (ParamFile fileName :& ParamPos pos :& ParamText name :& RNil) ->
runHareCommand "dupdef" (compDuplicateDef (T.unpack fileName) (T.unpack name) (unPos pos))
Right _ -> return $ IdeResponseError (IdeError InternalError
"HaRePlugin.dupdefCmd: ghc’s exhaustiveness checker is broken" Null)
-- compDuplicateDef :: FilePath -> String -> SimpPos -> IO [FilePath]
-- ---------------------------------------------------------------------
iftocaseCmd :: CommandFunc RefactorResult
iftocaseCmd = CmdSync $ \_ctxs req ->
case getParams (IdFile "file" :& IdPos "start_pos" :& IdPos "end_pos" :& RNil) req of
Left err -> return err
Right (ParamFile fileName :& ParamPos startPos :& ParamPos endPos :& RNil) ->
runHareCommand "iftocase" (compIfToCase (T.unpack fileName) (unPos startPos) (unPos endPos))
Right _ -> return $ IdeResponseError (IdeError InternalError
"HaRePlugin.ifToCaseCmd: ghc’s exhaustiveness checker is broken" Null)
-- compIfToCase :: FilePath -> SimpPos -> SimpPos -> IO [FilePath]
-- ---------------------------------------------------------------------
liftonelevelCmd :: CommandFunc RefactorResult
liftonelevelCmd = CmdSync $ \_ctxs req ->
case getParams (IdFile "file" :& IdPos "start_pos" :& RNil) req of
Left err -> return err
Right (ParamFile fileName :& ParamPos pos :& RNil) ->
runHareCommand "liftonelevel" (compLiftOneLevel (T.unpack fileName) (unPos pos))
Right _ -> return $ IdeResponseError (IdeError InternalError
"HaRePlugin.liftOneLevel: ghc’s exhaustiveness checker is broken" Null)
-- compLiftOneLevel :: FilePath -> SimpPos -> IO [FilePath]
-- ---------------------------------------------------------------------
lifttotoplevelCmd :: CommandFunc RefactorResult
lifttotoplevelCmd = CmdSync $ \_ctxs req ->
case getParams (IdFile "file" :& IdPos "start_pos" :& RNil) req of
Left err -> return err
Right (ParamFile fileName :& ParamPos pos :& RNil) ->
runHareCommand "lifttotoplevel" (compLiftToTopLevel (T.unpack fileName) (unPos pos))
Right _ -> return $ IdeResponseError (IdeError InternalError
"HaRePlugin.liftToTopLevel: ghc’s exhaustiveness checker is broken" Null)
-- compLiftToTopLevel :: FilePath -> SimpPos -> IO [FilePath]
-- ---------------------------------------------------------------------
renameCmd :: CommandFunc RefactorResult
renameCmd = CmdSync $ \_ctxs req ->
case getParams (IdFile "file" :& IdPos "start_pos" :& IdText "name" :& RNil) req of
Left err -> return err
Right (ParamFile fileName :& ParamPos pos :& ParamText name :& RNil) ->
runHareCommand "rename" (compRename (T.unpack fileName) (T.unpack name) (unPos pos))
Right _ -> return $ IdeResponseError (IdeError InternalError
"HaRePlugin.renameCmd: ghc’s exhaustiveness checker is broken" Null)
-- compRename :: FilePath -> String -> SimpPos -> IO [FilePath]
-- ---------------------------------------------------------------------
makeRefactorResult :: [FilePath] -> IO RefactorResult
makeRefactorResult changedFiles = do
let
diffOne f1 = do
let (baseFileName,ext) = splitExtension f1
f2 = (baseFileName ++ ".refactored" ++ ext)
diffFiles f1 f2
diffs <- mapM diffOne changedFiles
return (RefactorResult diffs)
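-- For example (added illustration, not from the original source): a changed
-- file "src/Foo.hs" is diffed against "src/Foo.refactored.hs", which is
-- presumably where HaRe has written the proposed version of the file.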
-- ---------------------------------------------------------------------
runHareCommand :: String -> RefactGhc [ApplyRefacResult]
-> IdeM (IdeResponse RefactorResult)
runHareCommand name cmd =
do let initialState =
RefSt {rsSettings = defaultSettings
,rsUniqState = 1
,rsSrcSpanCol = 1
,rsFlags = RefFlags False
,rsStorage = StorageNone
,rsCurrentTarget = Nothing
,rsModule = Nothing}
let cmd' = unRefactGhc cmd
embeddedCmd =
GM.unGmlT $
hoist (liftIO . flip evalStateT initialState)
(GM.GmlT cmd')
handlers
:: Applicative m
=> [GM.GHandler m (Either String a)]
handlers =
[GM.GHandler (\(ErrorCall e) -> pure (Left e))
,GM.GHandler (\(err :: GM.GhcModError) -> pure (Left (show err)))]
eitherRes <- fmap Right embeddedCmd `GM.gcatches` handlers
case eitherRes of
Left err ->
pure (IdeResponseFail
(IdeError PluginError
(T.pack $ name <> ": \"" <> err <> "\"")
Null))
Right res ->
do liftIO $
writeRefactoredFiles (rsetVerboseLevel defaultSettings)
res
let files = modifiedFiles res
refactRes <- liftIO $ makeRefactorResult files
pure (IdeResponseOk refactRes)
-- | This is like hoist from the mmorph package, but built on
-- `MonadTransControl` since we don’t have an `MFunctor` instance.
hoist
:: (MonadTransControl t,Monad (t m'),Applicative m',Monad m',Monad m)
=> (forall b. m b -> m' b) -> t m a -> t m' a
hoist f a =
liftWith (\run ->
let b = run a
c = f b
in pure c) >>=
restoreT
|
ankhers/haskell-ide-engine
|
hie-hare/Haskell/Ide/HaRePlugin.hs
|
bsd-3-clause
| 8,868 | 0 | 20 | 2,126 | 2,024 | 1,037 | 987 | 150 | 3 |
module LogInstances (isSorted) where
-- Export isSorted and instances, not 'build'
import Log
import Test.Tasty.QuickCheck
import Data.List (sortOn)
-- The library provides Arbitrary instances for built-in types like Int & String
-- This module tells QuickCheck how to pick an arbitrary value of our new types.
instance Arbitrary MessageType where
arbitrary = oneof
[pure Info
, pure Warning
, fmap Error (getNonNegative <$> arbitrary) ] -- An Error with an arbitrary Int
-- This instance never makes Unknown messages
instance Arbitrary LogMessage where
arbitrary = LogMessage <$> arbitrary <*> ( getNonNegative <$> arbitrary) <*> arbitrary
-- This instance always makes sorted MessageTrees
instance Arbitrary MessageTree where
arbitrary = fmap (build . sortOn (\(LogMessage _ t _) -> t) ) arbitrary
build :: [LogMessage] -> MessageTree
build [] = Leaf
build msgs = let (l,(x:r)) = splitAt (length msgs `div` 2) msgs in
Node (build l) x (build r)
isSorted :: [LogMessage] -> Bool
isSorted [] = True
isSorted [_] = True
isSorted (LogMessage _ t1 _ : LogMessage _ t2 _ : rest)
= t1 <= t2 && isSorted rest
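-- A minimal usage sketch (added illustration, not part of the original
-- module): with the Arbitrary instances above, QuickCheck can state that any
-- generated MessageTree is in order. 'inOrder' is a helper assumed here for
-- the illustration only; it is not exported.
inOrder :: MessageTree -> [LogMessage]
inOrder Leaf = []
inOrder (Node l x r) = inOrder l ++ [x] ++ inOrder r

prop_treeIsSorted :: MessageTree -> Bool
prop_treeIsSorted = isSorted . inOrder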
|
zzwick/cs2-adts-logging
|
LogInstances.hs
|
bsd-3-clause
| 1,134 | 0 | 13 | 211 | 323 | 173 | 150 | 22 | 1 |
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE DeriveAnyClass #-}
{-# LANGUAGE TupleSections #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE ViewPatterns #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE NoImplicitPrelude #-}
module Tom.When
(
When(..),
WhenParser,
WhenParserData(..),
getWhenParserData,
runWhenParser,
runWhenParser',
parseWhen,
parseWhen',
displayWhen,
-- * Individual parsers
momentP,
wildcardP,
durationMomentP,
periodicP,
)
where
-- General
import BasePrelude hiding (try, second)
-- Lenses
import Lens.Micro.Platform
-- Monads & monad transformers
import Control.Monad.Reader
-- Text
import Data.Text (Text)
-- Parsing
import Text.Megaparsec
import Text.Megaparsec.Prim
import Text.Megaparsec.Lexer
-- Time
import Data.Time
import Data.Time.Clock.TAI
import Data.Time.Calendar.MonthDay
import Data.Time.Zones -- tz
-- acid-state & safecopy
import Data.SafeCopy
-- Binary
import Data.Binary
import Data.Binary.Orphans () -- binary-orphans
-- Tom-specific
import Tom.Utils
-- | A type for specifying moments of time when a reminder should fire.
data When
= Mask {
year :: Maybe Integer,
month :: Maybe Int,
day :: Maybe Int,
hour :: Maybe Int,
minute :: Maybe Int,
second :: Maybe Int,
weekdays :: Maybe [Int], -- ^ Numbers between 1 and 7
timezone :: Maybe String } -- ^ 'Nothing' = always use local timezone;
-- otherwise the timezone is stored
-- as a string in Olson format (e.g.
-- “Europe/Paris”)
| Moment {
moment :: AbsoluteTime }
| Periodic {
start :: AbsoluteTime, -- ^ 1st time the reminder should fire
period :: DiffTime } -- ^ Period
deriving (Eq, Generic, Binary)
data When_v0
= Mask_v0 {
year_v0 :: Maybe Integer,
month_v0 :: Maybe Int,
day_v0 :: Maybe Int,
hour_v0 :: Maybe Int,
minute_v0 :: Maybe Int,
second_v0 :: Maybe Int,
weekdays_v0 :: Maybe [Int],
timezone_v0 :: Maybe String }
| Moment_v0 {
moment_v0 :: AbsoluteTime }
deriveSafeCopy 0 'base ''When_v0
deriveSafeCopy 1 'extension ''When
instance Migrate When where
type MigrateFrom When = When_v0
migrate Mask_v0{..} = Mask {
year = year_v0,
month = month_v0,
day = day_v0,
hour = hour_v0,
minute = minute_v0,
second = second_v0,
weekdays = weekdays_v0,
timezone = timezone_v0 }
migrate Moment_v0{..} = Moment {
moment = moment_v0 }
{-
Examples of format used by the Show instance of Mask:
* xxxx-xx-03,13.xx:56
* 2015-xx-xx[6,7],12.00:00(UTC)
TODO: document other constructors
-}
instance Show When where
show Mask{..} = do
-- Show something with padding. If not set, show some “x”s; if set, show
-- and pad with zeroes.
--
-- >>> mb 2 Nothing
-- "xx"
--
-- >>> mb 2 (Just 3)
-- "03"
let mb :: (Show a, Integral a) => Int -> Maybe a -> String
mb n Nothing = replicate n 'x'
mb n (Just x) = let s = show x
in replicate (n - length s) '0' ++ s
printf "%s-%s-%s%s,%s.%s:%s%s"
(mb 4 year) (mb 2 month) (mb 2 day)
(maybe "" show weekdays)
(mb 2 hour) (mb 2 minute) (mb 2 second)
(maybe "" (\s -> "(" ++ olsonToTZName s ++ ")") timezone)
show Moment{..} = "moment " ++ showAbsoluteTime moment
show Periodic{..} = printf "every %s from %s"
(showDiffTime period) (showAbsoluteTime start)
displayWhen :: When -> String
displayWhen x = case x of
Mask{} -> show x
Moment{..} -> showAbsoluteTime moment
Periodic{..} -> printf "every %s" (showDiffTime period)
showAbsoluteTime :: AbsoluteTime -> String
showAbsoluteTime = ("TAI " ++) . show .
utcToLocalTime utc . taiToUTCTime (const 0)
showDiffTime :: DiffTime -> String
showDiffTime (realToFrac -> seconds_) = do
let hours, minutes :: Integer
seconds :: Double
(minutes_, seconds) = divMod' seconds_ 60
(hours, minutes) = divMod' minutes_ 60
concat [if hours /= 0
then show hours ++ "h" else "",
if minutes /= 0 || (hours /= 0 && seconds /= 0)
then show minutes ++ "m" else "",
case properFraction seconds of
(0 :: Integer, 0) -> ""
(s, 0) -> printf "%ds" s
_ -> printf "%.3fs" seconds]
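-- For example (added illustration, not from the original source):
--   showDiffTime 4800 == "1h20m"
--   showDiffTime 90.5 == "1m30.500s"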
----------------------------------------------------------------------------
-- Parsing utilities
----------------------------------------------------------------------------
{- |
A parser for time specifiers ('When').
The parser is given access to the current time, timezone, and IO (might be needed to query some timezone from the timezone data file – look for 'loadSystemTZ').
-}
type WhenParser = ParsecT Dec Text (ReaderT WhenParserData IO) When
data WhenParserData = WhenParserData {
_currentTime :: UTCTime,
_localTZ :: TZ }
makeLenses ''WhenParserData
getWhenParserData :: IO WhenParserData
getWhenParserData = WhenParserData <$> getCurrentTime <*> loadLocalTZ
runWhenParser' :: WhenParserData -> WhenParser -> Text -> IO (Either (ParseError Char Dec) When)
runWhenParser' pData p s = do
runReaderT (runParserT (p <* eof) "" s) pData
runWhenParser :: WhenParser -> Text -> IO (Either (ParseError Char Dec) When)
runWhenParser p s = do
pData <- getWhenParserData
runWhenParser' pData p s
parseWhen' :: WhenParserData -> Text -> IO (Either (ParseError Char Dec) When)
parseWhen' pData = runWhenParser' pData p
where
p = choice $ map (\x -> try (x <* eof))
[momentP, wildcardP, durationMomentP, periodicP]
parseWhen :: Text -> IO (Either (ParseError Char Dec) When)
parseWhen s = do
pData <- getWhenParserData
parseWhen' pData s
-- | A parser for “am”/“pm”, returning the function to apply to hours to get
-- the corrected version.
parseAMPM :: (MonadParsec Dec Text m, Num a, Ord a) => m (a -> a)
parseAMPM = do
-- 1. “pm” means “add 12 to hour”.
-- 2. Unless it's 12pm, in which case it doesn't.
-- 3. And “am” can't be ignored, because 12am ≠ 12.00.
-- 4. 13am shall mean 1.00. For consistency.
let fromPM x = if x >= 12 then x else x + 12
fromAM x = if x < 12 then x else x - 12
choice [
string "pm" *> pure fromPM,
string "am" *> pure fromAM,
pure id ]
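-- Added illustration (not from the original source) of the returned
-- correction functions:
--   12 parsed with "am" becomes 0,   12 with "pm" stays 12,
--    9 parsed with "pm" becomes 21,  13 with "am" becomes 1.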
{- |
Parses year, accounting for the “assume current millenium” shortcut.
* @"10"@ → 2010
* @"112"@ → 2112
* @"2020"@ → 2020
* @"3388"@ → 3388
-}
yearP :: MonadParsec Dec Text m => m Integer
yearP = do
s <- some digitChar
return $ if length s < 4 then 2000 + read s else read s
-- | Timezone name parser. Returns already queried Olson name.
timezoneP :: MonadParsec Dec Text m => m String
timezoneP = do
name <- some (letterChar <|> digitChar <|> oneOf "-+")
case tzNameToOlson name of
Nothing -> fail $ printf "unknown timezone name ‘%s’" name
Just tz -> return tz
{- |
A parser for moments in time.
All numbers can be specified using any number of digits. Date and time are
separated with a slash. Date can be omitted.
Date format:
* Y-M-D
* M-D
* D
Time format: [H][.MM][:SS][am|pm][,timezone] (all components can be omitted).
The next suitable time is always chosen. For instance, if it's past 9pm
already, then “9pm” will resolve to 9pm of the next day. In the same way,
“2-29/8am” refers to morning of the next February 29 (which may happen in as
much as 4 years).
If the resulting time is always in the past, the function will fail.
A timezone can be specified as an abbreviation. At the moment, only a handful
of abbreviations are supported.
-}
momentP :: WhenParser
momentP = do
-- Parsing date: it's either “Y-M-D”, “M-D”, “D”, or nothing (and then
-- there is no slash). We use Just to denote that year/month/day is set.
(mbYear, mbMonth, mbDay) <- choice $ map try
[ do y <- yearP <* string "-"
m <- nonnegative <* string "-"
d <- nonnegative <* string "/"
return (Just y, Just m, Just d)
, do m <- nonnegative <* string "-"
d <- nonnegative <* string "/"
return (Nothing, Just m, Just d)
, do d <- nonnegative <* string "/"
return (Nothing, Nothing, Just d)
, return (Nothing, Nothing, Nothing)
]
-- We parse [hour][.minute][:second][am|pm][,timezone], where every
-- component is optional.
(mbHour, mbMinute, mbSecond, mbTZ) <- do
h <- optional nonnegative
-- If hour is set and minute isn't, it's assumed to be 0, *not*
-- omitted. For instance, “10am” really means “10.00:00”.
m <- option (if isJust h then Just 0 else Nothing)
(Just <$> (char '.' *> nonnegative))
-- Same for minute/second.
s <- option (if isJust m then Just 0 else Nothing)
(Just <$> (char ':' *> nonnegative))
-- “am”/“pm”.
fromAMPM <- parseAMPM
-- Timezone.
tz <- optional (string "," *> timezoneP)
-- And now we can return parsed hour, minute, second, and timezone.
return (fromAMPM <$> h, m, s, tz)
-- Finally, we have to fill in the blanks (“Nothing”) so that the result is
-- the *least* possible time which is still bigger than the current time.
-- Let's turn time and timezone into something more usable – namely,
-- year/month/day/etc.
time <- view currentTime
tz <- case mbTZ of
Nothing -> view localTZ
Just x -> liftIO $ loadSystemTZ x
let ((cYear,cMonth,cDay),(cHour,cMinute,cSecond)) = expandTime tz time
-- These are lowest possible second, minute+second, hour+minute+second,
-- etc. (where “possible” means “don't contradict filled parts of the
-- time mask”). For convenience, they are built as nested tuples.
let zeroSecond = fromMaybe 0 mbSecond
zeroMinute = ( fromMaybe 0 mbMinute , zeroSecond )
zeroHour = ( fromMaybe 0 mbHour , zeroMinute )
zeroDay = ( fromMaybe 1 mbDay , zeroHour )
zeroMonth = ( fromMaybe 1 mbMonth , zeroDay )
-- The following functions compute the lowest possible second,
-- minute+second, hour+minute+second, etc. which is *bigger than
-- given*. They return Nothing if the given time can't be incremented.
let nextSecond x = case mbSecond of
Just y -> if x < y then Just y else Nothing
Nothing -> if x <= 58 then Just (x+1) else Nothing
let nextMinute (x, rest) = case mbMinute of
-- If we *have* to use mbMinute, we only decide whether we have to
-- increment the second or not.
Just y
| x < y -> Just (y, zeroSecond)
| x == y -> (y,) <$> nextSecond rest
| otherwise -> Nothing
-- If we don't have to use mbMinute, we try to increment the second
-- first, and increase the minute if we can't increment the second.
Nothing -> (x,) <$> nextSecond rest <|>
(guard (x <= 58) >> return (x+1, zeroSecond))
-- nextHour is the same as nextMinute.
let nextHour (x, rest) = case mbHour of
Just y
| x < y -> Just (y, zeroMinute)
| x == y -> (y,) <$> nextMinute rest
| otherwise -> Nothing
Nothing -> (x,) <$> nextMinute rest <|>
(guard (x <= 22) >> return (x+1, zeroMinute))
-- nextDay is special – whether you can increment the day or not depends
-- on the month. Therefore, it has to be passed the number of days in the
-- month. Moreover, it's possible that the day from the mask ('mbDay')
-- can't fit into month – in this case we return Nothing.
let nextDay days (x, rest) = do
-- It's easier to compute first and check whether the day fits
-- afterwards.
(x', rest') <- case mbDay of
Just y
| x < y -> Just (y, zeroHour)
| x == y -> (y,) <$> nextHour rest
| otherwise -> Nothing
Nothing -> (x,) <$> nextHour rest <|>
Just (x+1, zeroHour)
guard (x' <= days)
return (x', rest')
-- nextMonth is special too, because the amount of days in the month
-- depends on the year. So, we have to know whether the year is
-- leap. Moreover, additional trouble comes with the fact that it may not
-- be enough to increment once – we try incrementing both once and twice
-- to make sure we touch at least one month with 31 days in it.
let nextMonth isLeap (x, rest) = case mbMonth of
Just y
| x < y -> Just (y, zeroDay)
| x == y -> (y,) <$> nextDay (monthLength isLeap y) rest
| otherwise -> Nothing
Nothing -> asum
-- Increment day.
[ (x,) <$> nextDay (monthLength isLeap x) rest
-- Increment month once, and check whether the day fits.
, do guard (x <= 11)
guard (fst zeroDay <= monthLength isLeap (x+1))
return (x+1, zeroDay)
-- Increment month twice, and check.
, do guard (x <= 10)
guard (fst zeroDay <= monthLength isLeap (x+2))
return (x+2, zeroDay)
]
-- Phew, almost done.
let nextYear (x, rest) = case mbYear of
Just y
| x < y -> Just (y, zeroMonth)
| x == y -> (y,) <$> nextMonth (isLeapYear y) rest
| otherwise -> Nothing
Nothing -> asum
-- Increment month.
[ (x,) <$> nextMonth (isLeapYear x) rest
-- Increment year once, see if day and month fit. (They can only
-- not fit if the year isn't leap but it's February 29.)
, do let (m,(d,_)) = zeroMonth
guard $ not (m == 2 && d == 29 && not (isLeapYear (x+1)))
return (x+1, zeroMonth)
-- Increment year until it's a leap one.
, return (fromJust (find isLeapYear [x+2..]), zeroMonth)
]
-- Now we simply use nextYear to increment the current time, which would
-- give us next time which fits the mask.
(year', (month', (day', (hour', (minute', second'))))) <-
-- TODO: figure out when this actually happens and add to tests
case nextYear (cYear, (cMonth, (cDay, (cHour, (cMinute, cSecond))))) of
Nothing -> fail "time is always in the past"
Just x -> return x
return Mask {
year = Just year',
month = Just month',
day = Just day',
hour = Just hour',
minute = Just minute',
second = Just second',
weekdays = Nothing,
timezone = mbTZ }
{- |
A parser for masks with wildcards.
All numbers can be specified using any number of digits. Date and time are
separated with a slash. Date can be omitted.
Date format:
* Y-M-D
* M-D
* D
Time format: [H][.MM][:SS][am|pm][,timezone] (all components can be omitted).
You can use any amount of “x”s instead of a number to specify that it can be
any number.
Omitted minute/second aren't assumed to be wildcards, so it's safe to do
“x.30” without it meaning “x.30:x”.
am/pm isn't really part of the mask, so “xpm” does not mean “the 2nd half of
the day”.
A timezone can be specified as an abbreviation. At the moment, only a handful
of abbreviations are supported.
-}
wildcardP :: WhenParser
wildcardP = do
-- A function to turn any parser into a parser which accepts a wildcard
-- (and returns Nothing in that case).
let wild p = (some (char 'x') *> pure Nothing) <|> (Just <$> p)
-- Parsing date: it's either “Y-M-D”, “M-D”, “D”, or nothing (and then
-- there is no slash).
(mbYear, mbMonth, mbDay) <- choice $ map try
[ do y <- wild yearP <* string "-"
m <- wild nonnegative <* string "-"
d <- wild nonnegative <* string "/"
return (y, m, d)
, do m <- wild nonnegative <* string "-"
d <- wild nonnegative <* string "/"
return (Nothing, m, d)
, do d <- wild nonnegative <* string "/"
return (Nothing, Nothing, d)
, return (Nothing, Nothing, Nothing)
]
-- [hour][.minute][:second][am|pm][,timezone]
(mbHour, mbMinute, mbSecond, mbTZ) <- do
h <- option Nothing (wild nonnegative)
m <- option (Just 0) (char '.' *> wild nonnegative)
s <- option (Just 0) (char ':' *> wild nonnegative)
-- “am”/“pm”.
fromAMPM <- parseAMPM
-- Timezone.
tz <- optional (string "," *> timezoneP)
-- And now we can return parsed hour, minute, second, and timezone.
return (fromAMPM <$> h, m, s, tz)
return Mask {
year = mbYear,
month = mbMonth,
day = mbDay,
hour = mbHour,
minute = mbMinute,
second = mbSecond,
weekdays = Nothing,
timezone = mbTZ }
{- |
A parser for moments coming after some time (like “1h20m”).
-}
durationMomentP :: WhenParser
durationMomentP = do
dur <- duration
time <- view currentTime
return $ Moment {
moment = addAbsoluteTime dur (utcToAbsoluteTime time) }
{- |
A parser for periodic stuff: “every 1h20m”.
-}
periodicP :: WhenParser
periodicP = do
string "every"
skipSome spaceChar
dur <- duration
time <- view currentTime
return $ Periodic {
start = addAbsoluteTime dur (utcToAbsoluteTime time),
period = dur }
nonnegative :: (MonadParsec Dec Text m, Integral a) => m a
nonnegative = fromInteger <$> integer
{- |
'duration' parses strings consisting of one or more components, each being a number followed by one of:
* “h” (hours)
* “m” (minutes)
* “s” (seconds)
-}
duration :: MonadParsec Dec Text m => m DiffTime
duration = fmap (fromInteger . sum) $ some $ do
n <- nonnegative
choice [
string "h" *> pure (n*3600),
string "m" *> pure (n*60),
string "s" *> pure n ]
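-- For example (added illustration, not from the original source):
--   "1h20m" parses to a DiffTime of 4800 seconds,
--   "2m30s" to 150 seconds, and "90s" to 90 seconds.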
|
aelve/tom
|
lib/Tom/When.hs
|
bsd-3-clause
| 17,994 | 11 | 26 | 4,916 | 4,268 | 2,235 | 2,033 | 309 | 13 |
-----------------------------------------------------------------------------
--
-- Pretty-printing assembly language
--
-- (c) The University of Glasgow 1993-2005
--
-----------------------------------------------------------------------------
{-# OPTIONS_GHC -fno-warn-orphans #-}
module X86.Ppr (
pprNatCmmDecl,
pprBasicBlock,
pprSectionHeader,
pprData,
pprInstr,
pprSize,
pprImm,
pprDataItem,
)
where
#include "HsVersions.h"
#include "nativeGen/NCG.h"
import X86.Regs
import X86.Instr
import X86.Cond
import Instruction
import Size
import Reg
import PprBase
import BlockId
import BasicTypes (Alignment)
import OldCmm
import CLabel
import Unique ( pprUnique, Uniquable(..) )
import Platform
import FastString
import Outputable
import Data.Word
import Data.Bits
-- -----------------------------------------------------------------------------
-- Printing this stuff out
pprNatCmmDecl :: NatCmmDecl (Alignment, CmmStatics) Instr -> SDoc
pprNatCmmDecl (CmmData section dats) =
pprSectionHeader section $$ pprDatas dats
pprNatCmmDecl proc@(CmmProc top_info lbl (ListGraph blocks)) =
case topInfoTable proc of
Nothing ->
case blocks of
[] -> -- special case for split markers:
pprLabel lbl
blocks -> -- special case for code without info table:
pprSectionHeader Text $$
pprLabel lbl $$ -- blocks guaranteed not null, so label needed
vcat (map (pprBasicBlock top_info) blocks) $$
pprSizeDecl lbl
Just (Statics info_lbl _) ->
sdocWithPlatform $ \platform ->
(if platformHasSubsectionsViaSymbols platform
then pprSectionHeader Text $$
ppr (mkDeadStripPreventer info_lbl) <> char ':'
else empty) $$
vcat (map (pprBasicBlock top_info) blocks) $$
-- above: Even the first block gets a label, because with branch-chain
-- elimination, it might be the target of a goto.
(if platformHasSubsectionsViaSymbols platform
then
-- If we are using the .subsections_via_symbols directive
-- (available on recent versions of Darwin),
-- we have to make sure that there is some kind of reference
-- from the entry code to a label on the _top_ of the info table,
-- so that the linker will not think it is unreferenced and dead-strip
-- it. That's why the label is called a DeadStripPreventer (_dsp).
text "\t.long "
<+> ppr info_lbl
<+> char '-'
<+> ppr (mkDeadStripPreventer info_lbl)
else empty) $$
pprSizeDecl info_lbl
-- | Output the ELF .size directive.
pprSizeDecl :: CLabel -> SDoc
pprSizeDecl lbl
= sdocWithPlatform $ \platform ->
if osElfTarget (platformOS platform)
then ptext (sLit "\t.size") <+> ppr lbl
<> ptext (sLit ", .-") <> ppr lbl
else empty
pprBasicBlock :: BlockEnv CmmStatics -> NatBasicBlock Instr -> SDoc
pprBasicBlock info_env (BasicBlock blockid instrs)
= maybe_infotable $$
pprLabel (mkAsmTempLabel (getUnique blockid)) $$
vcat (map pprInstr instrs)
where
maybe_infotable = case mapLookup blockid info_env of
Nothing -> empty
Just (Statics info_lbl info) ->
pprSectionHeader Text $$
vcat (map pprData info) $$
pprLabel info_lbl
pprDatas :: (Alignment, CmmStatics) -> SDoc
pprDatas (align, (Statics lbl dats))
= vcat (pprAlign align : pprLabel lbl : map pprData dats)
-- TODO: could remove if align == 1
pprData :: CmmStatic -> SDoc
pprData (CmmString str) = pprASCII str
pprData (CmmUninitialised bytes)
= sdocWithPlatform $ \platform ->
if platformOS platform == OSDarwin then ptext (sLit ".space ") <> int bytes
else ptext (sLit ".skip ") <> int bytes
pprData (CmmStaticLit lit) = pprDataItem lit
pprGloblDecl :: CLabel -> SDoc
pprGloblDecl lbl
| not (externallyVisibleCLabel lbl) = empty
| otherwise = ptext (sLit ".globl ") <> ppr lbl
pprTypeAndSizeDecl :: CLabel -> SDoc
pprTypeAndSizeDecl lbl
= sdocWithPlatform $ \platform ->
if osElfTarget (platformOS platform) && externallyVisibleCLabel lbl
then ptext (sLit ".type ") <> ppr lbl <> ptext (sLit ", @object")
else empty
pprLabel :: CLabel -> SDoc
pprLabel lbl = pprGloblDecl lbl
$$ pprTypeAndSizeDecl lbl
$$ (ppr lbl <> char ':')
pprASCII :: [Word8] -> SDoc
pprASCII str
= vcat (map do1 str) $$ do1 0
where
do1 :: Word8 -> SDoc
do1 w = ptext (sLit "\t.byte\t") <> int (fromIntegral w)
pprAlign :: Int -> SDoc
pprAlign bytes
= sdocWithPlatform $ \platform ->
ptext (sLit ".align ") <> int (alignment platform)
where
alignment platform = if platformOS platform == OSDarwin
then log2 bytes
else bytes
log2 :: Int -> Int -- cache the common ones
log2 1 = 0
log2 2 = 1
log2 4 = 2
log2 8 = 3
log2 n = 1 + log2 (n `quot` 2)
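-- As a rough illustration of the Darwin special case above: pprAlign 8
-- renders as ".align 3" on Darwin (the directive there takes a power of two)
-- and as ".align 8" elsewhere (a plain byte count).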
-- -----------------------------------------------------------------------------
-- pprInstr: print an 'Instr'
instance Outputable Instr where
ppr instr = pprInstr instr
pprReg :: Size -> Reg -> SDoc
pprReg s r
= case r of
RegReal (RealRegSingle i) ->
sdocWithPlatform $ \platform ->
if target32Bit platform then ppr32_reg_no s i
else ppr64_reg_no s i
RegReal (RealRegPair _ _) -> panic "X86.Ppr: no reg pairs on this arch"
RegVirtual (VirtualRegI u) -> text "%vI_" <> pprUnique u
RegVirtual (VirtualRegHi u) -> text "%vHi_" <> pprUnique u
RegVirtual (VirtualRegF u) -> text "%vF_" <> pprUnique u
RegVirtual (VirtualRegD u) -> text "%vD_" <> pprUnique u
RegVirtual (VirtualRegSSE u) -> text "%vSSE_" <> pprUnique u
where
ppr32_reg_no :: Size -> Int -> SDoc
ppr32_reg_no II8 = ppr32_reg_byte
ppr32_reg_no II16 = ppr32_reg_word
ppr32_reg_no _ = ppr32_reg_long
ppr32_reg_byte i = ptext
(case i of {
0 -> sLit "%al"; 1 -> sLit "%bl";
2 -> sLit "%cl"; 3 -> sLit "%dl";
_ -> sLit "very naughty I386 byte register"
})
ppr32_reg_word i = ptext
(case i of {
0 -> sLit "%ax"; 1 -> sLit "%bx";
2 -> sLit "%cx"; 3 -> sLit "%dx";
4 -> sLit "%si"; 5 -> sLit "%di";
6 -> sLit "%bp"; 7 -> sLit "%sp";
_ -> sLit "very naughty I386 word register"
})
ppr32_reg_long i = ptext
(case i of {
0 -> sLit "%eax"; 1 -> sLit "%ebx";
2 -> sLit "%ecx"; 3 -> sLit "%edx";
4 -> sLit "%esi"; 5 -> sLit "%edi";
6 -> sLit "%ebp"; 7 -> sLit "%esp";
_ -> ppr_reg_float i
})
ppr64_reg_no :: Size -> Int -> SDoc
ppr64_reg_no II8 = ppr64_reg_byte
ppr64_reg_no II16 = ppr64_reg_word
ppr64_reg_no II32 = ppr64_reg_long
ppr64_reg_no _ = ppr64_reg_quad
ppr64_reg_byte i = ptext
(case i of {
0 -> sLit "%al"; 1 -> sLit "%bl";
2 -> sLit "%cl"; 3 -> sLit "%dl";
4 -> sLit "%sil"; 5 -> sLit "%dil"; -- new 8-bit regs!
6 -> sLit "%bpl"; 7 -> sLit "%spl";
8 -> sLit "%r8b"; 9 -> sLit "%r9b";
10 -> sLit "%r10b"; 11 -> sLit "%r11b";
12 -> sLit "%r12b"; 13 -> sLit "%r13b";
14 -> sLit "%r14b"; 15 -> sLit "%r15b";
_ -> sLit "very naughty x86_64 byte register"
})
ppr64_reg_word i = ptext
(case i of {
0 -> sLit "%ax"; 1 -> sLit "%bx";
2 -> sLit "%cx"; 3 -> sLit "%dx";
4 -> sLit "%si"; 5 -> sLit "%di";
6 -> sLit "%bp"; 7 -> sLit "%sp";
8 -> sLit "%r8w"; 9 -> sLit "%r9w";
10 -> sLit "%r10w"; 11 -> sLit "%r11w";
12 -> sLit "%r12w"; 13 -> sLit "%r13w";
14 -> sLit "%r14w"; 15 -> sLit "%r15w";
_ -> sLit "very naughty x86_64 word register"
})
ppr64_reg_long i = ptext
(case i of {
0 -> sLit "%eax"; 1 -> sLit "%ebx";
2 -> sLit "%ecx"; 3 -> sLit "%edx";
4 -> sLit "%esi"; 5 -> sLit "%edi";
6 -> sLit "%ebp"; 7 -> sLit "%esp";
8 -> sLit "%r8d"; 9 -> sLit "%r9d";
10 -> sLit "%r10d"; 11 -> sLit "%r11d";
12 -> sLit "%r12d"; 13 -> sLit "%r13d";
14 -> sLit "%r14d"; 15 -> sLit "%r15d";
_ -> sLit "very naughty x86_64 register"
})
ppr64_reg_quad i = ptext
(case i of {
0 -> sLit "%rax"; 1 -> sLit "%rbx";
2 -> sLit "%rcx"; 3 -> sLit "%rdx";
4 -> sLit "%rsi"; 5 -> sLit "%rdi";
6 -> sLit "%rbp"; 7 -> sLit "%rsp";
8 -> sLit "%r8"; 9 -> sLit "%r9";
10 -> sLit "%r10"; 11 -> sLit "%r11";
12 -> sLit "%r12"; 13 -> sLit "%r13";
14 -> sLit "%r14"; 15 -> sLit "%r15";
_ -> ppr_reg_float i
})
ppr_reg_float :: Int -> LitString
ppr_reg_float i = case i of
16 -> sLit "%fake0"; 17 -> sLit "%fake1"
18 -> sLit "%fake2"; 19 -> sLit "%fake3"
20 -> sLit "%fake4"; 21 -> sLit "%fake5"
24 -> sLit "%xmm0"; 25 -> sLit "%xmm1"
26 -> sLit "%xmm2"; 27 -> sLit "%xmm3"
28 -> sLit "%xmm4"; 29 -> sLit "%xmm5"
30 -> sLit "%xmm6"; 31 -> sLit "%xmm7"
32 -> sLit "%xmm8"; 33 -> sLit "%xmm9"
34 -> sLit "%xmm10"; 35 -> sLit "%xmm11"
36 -> sLit "%xmm12"; 37 -> sLit "%xmm13"
38 -> sLit "%xmm14"; 39 -> sLit "%xmm15"
_ -> sLit "very naughty x86 register"
pprSize :: Size -> SDoc
pprSize x
= ptext (case x of
II8 -> sLit "b"
II16 -> sLit "w"
II32 -> sLit "l"
II64 -> sLit "q"
FF32 -> sLit "ss" -- "scalar single-precision float" (SSE2)
FF64 -> sLit "sd" -- "scalar double-precision float" (SSE2)
FF80 -> sLit "t"
)
pprSize_x87 :: Size -> SDoc
pprSize_x87 x
= ptext $ case x of
FF32 -> sLit "s"
FF64 -> sLit "l"
FF80 -> sLit "t"
_ -> panic "X86.Ppr.pprSize_x87"
pprCond :: Cond -> SDoc
pprCond c
= ptext (case c of {
GEU -> sLit "ae"; LU -> sLit "b";
EQQ -> sLit "e"; GTT -> sLit "g";
GE -> sLit "ge"; GU -> sLit "a";
LTT -> sLit "l"; LE -> sLit "le";
LEU -> sLit "be"; NE -> sLit "ne";
NEG -> sLit "s"; POS -> sLit "ns";
CARRY -> sLit "c"; OFLO -> sLit "o";
PARITY -> sLit "p"; NOTPARITY -> sLit "np";
ALWAYS -> sLit "mp"})
pprImm :: Imm -> SDoc
pprImm (ImmInt i) = int i
pprImm (ImmInteger i) = integer i
pprImm (ImmCLbl l) = ppr l
pprImm (ImmIndex l i) = ppr l <> char '+' <> int i
pprImm (ImmLit s) = s
pprImm (ImmFloat _) = ptext (sLit "naughty float immediate")
pprImm (ImmDouble _) = ptext (sLit "naughty double immediate")
pprImm (ImmConstantSum a b) = pprImm a <> char '+' <> pprImm b
pprImm (ImmConstantDiff a b) = pprImm a <> char '-'
<> lparen <> pprImm b <> rparen
pprAddr :: AddrMode -> SDoc
pprAddr (ImmAddr imm off)
= let pp_imm = pprImm imm
in
if (off == 0) then
pp_imm
else if (off < 0) then
pp_imm <> int off
else
pp_imm <> char '+' <> int off
pprAddr (AddrBaseIndex base index displacement)
= sdocWithPlatform $ \platform ->
let
pp_disp = ppr_disp displacement
pp_off p = pp_disp <> char '(' <> p <> char ')'
pp_reg r = pprReg (archWordSize (target32Bit platform)) r
in
case (base, index) of
(EABaseNone, EAIndexNone) -> pp_disp
(EABaseReg b, EAIndexNone) -> pp_off (pp_reg b)
(EABaseRip, EAIndexNone) -> pp_off (ptext (sLit "%rip"))
(EABaseNone, EAIndex r i) -> pp_off (comma <> pp_reg r <> comma <> int i)
(EABaseReg b, EAIndex r i) -> pp_off (pp_reg b <> comma <> pp_reg r
<> comma <> int i)
_ -> panic "X86.Ppr.pprAddr: no match"
where
ppr_disp (ImmInt 0) = empty
ppr_disp imm = pprImm imm
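-- A sketch of the AT&T syntax this produces (register numbers chosen for
-- illustration): a 32-bit AddrBaseIndex with base %ebx, index %ecx, scale 4
-- and displacement 8 prints as
--     8(%ebx,%ecx,4)
-- while an ImmAddr with a negative offset prints as e.g. "lbl-4".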
pprSectionHeader :: Section -> SDoc
pprSectionHeader seg
= sdocWithPlatform $ \platform ->
case platformOS platform of
OSDarwin
| target32Bit platform ->
case seg of
Text -> ptext (sLit ".text\n\t.align 2")
Data -> ptext (sLit ".data\n\t.align 2")
ReadOnlyData -> ptext (sLit ".const\n.align 2")
RelocatableReadOnlyData -> ptext (sLit ".const_data\n.align 2")
UninitialisedData -> ptext (sLit ".data\n\t.align 2")
ReadOnlyData16 -> ptext (sLit ".const\n.align 4")
OtherSection _ -> panic "X86.Ppr.pprSectionHeader: unknown section"
| otherwise ->
case seg of
Text -> ptext (sLit ".text\n.align 3")
Data -> ptext (sLit ".data\n.align 3")
ReadOnlyData -> ptext (sLit ".const\n.align 3")
RelocatableReadOnlyData -> ptext (sLit ".const_data\n.align 3")
UninitialisedData -> ptext (sLit ".data\n\t.align 3")
ReadOnlyData16 -> ptext (sLit ".const\n.align 4")
OtherSection _ -> panic "PprMach.pprSectionHeader: unknown section"
_
| target32Bit platform ->
case seg of
Text -> ptext (sLit ".text\n\t.align 4,0x90")
Data -> ptext (sLit ".data\n\t.align 4")
ReadOnlyData -> ptext (sLit ".section .rodata\n\t.align 4")
RelocatableReadOnlyData -> ptext (sLit ".section .data\n\t.align 4")
UninitialisedData -> ptext (sLit ".section .bss\n\t.align 4")
ReadOnlyData16 -> ptext (sLit ".section .rodata\n\t.align 16")
OtherSection _ -> panic "X86.Ppr.pprSectionHeader: unknown section"
| otherwise ->
case seg of
Text -> ptext (sLit ".text\n\t.align 8")
Data -> ptext (sLit ".data\n\t.align 8")
ReadOnlyData -> ptext (sLit ".section .rodata\n\t.align 8")
RelocatableReadOnlyData -> ptext (sLit ".section .data\n\t.align 8")
UninitialisedData -> ptext (sLit ".section .bss\n\t.align 8")
ReadOnlyData16 -> ptext (sLit ".section .rodata.cst16\n\t.align 16")
OtherSection _ -> panic "PprMach.pprSectionHeader: unknown section"
pprDataItem :: CmmLit -> SDoc
pprDataItem lit = sdocWithPlatform $ \platform -> pprDataItem' platform lit
pprDataItem' :: Platform -> CmmLit -> SDoc
pprDataItem' platform lit
= vcat (ppr_item (cmmTypeSize $ cmmLitType lit) lit)
where
imm = litToImm lit
-- These seem to be common:
ppr_item II8 _ = [ptext (sLit "\t.byte\t") <> pprImm imm]
ppr_item II16 _ = [ptext (sLit "\t.word\t") <> pprImm imm]
ppr_item II32 _ = [ptext (sLit "\t.long\t") <> pprImm imm]
ppr_item FF32 (CmmFloat r _)
= let bs = floatToBytes (fromRational r)
in map (\b -> ptext (sLit "\t.byte\t") <> pprImm (ImmInt b)) bs
ppr_item FF64 (CmmFloat r _)
= let bs = doubleToBytes (fromRational r)
in map (\b -> ptext (sLit "\t.byte\t") <> pprImm (ImmInt b)) bs
ppr_item II64 _
= case platformOS platform of
OSDarwin
| target32Bit platform ->
case lit of
CmmInt x _ ->
[ptext (sLit "\t.long\t")
<> int (fromIntegral (fromIntegral x :: Word32)),
ptext (sLit "\t.long\t")
<> int (fromIntegral
(fromIntegral (x `shiftR` 32) :: Word32))]
_ -> panic "X86.Ppr.ppr_item: no match for II64"
| otherwise ->
[ptext (sLit "\t.quad\t") <> pprImm imm]
_
| target32Bit platform ->
[ptext (sLit "\t.quad\t") <> pprImm imm]
| otherwise ->
-- x86_64: binutils can't handle the R_X86_64_PC64
-- relocation type, which means we can't do
-- pc-relative 64-bit addresses. Fortunately we're
-- assuming the small memory model, in which all such
-- offsets will fit into 32 bits, so we have to stick
-- to 32-bit offset fields and modify the RTS
-- appropriately
--
-- See Note [x86-64-relative] in includes/rts/storage/InfoTables.h
--
case lit of
-- A relative relocation:
CmmLabelDiffOff _ _ _ ->
[ptext (sLit "\t.long\t") <> pprImm imm,
ptext (sLit "\t.long\t0")]
_ ->
[ptext (sLit "\t.quad\t") <> pprImm imm]
ppr_item _ _
= panic "X86.Ppr.ppr_item: no match"
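-- A rough example of the II64 case above (literal chosen for illustration):
-- the 64-bit literal 0x100000002 on 32-bit Darwin is emitted as its two
-- 32-bit halves, low word first:
--     .long 2
--     .long 1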
pprInstr :: Instr -> SDoc
pprInstr (COMMENT _) = empty -- nuke 'em
{-
pprInstr (COMMENT s) = ptext (sLit "# ") <> ftext s
-}
pprInstr (DELTA d)
= pprInstr (COMMENT (mkFastString ("\tdelta = " ++ show d)))
pprInstr (NEWBLOCK _)
= panic "PprMach.pprInstr: NEWBLOCK"
pprInstr (LDATA _ _)
= panic "PprMach.pprInstr: LDATA"
{-
pprInstr (SPILL reg slot)
= hcat [
ptext (sLit "\tSPILL"),
char ' ',
pprUserReg reg,
comma,
ptext (sLit "SLOT") <> parens (int slot)]
pprInstr (RELOAD slot reg)
= hcat [
ptext (sLit "\tRELOAD"),
char ' ',
ptext (sLit "SLOT") <> parens (int slot),
comma,
pprUserReg reg]
-}
pprInstr (MOV size src dst)
= pprSizeOpOp (sLit "mov") size src dst
pprInstr (MOVZxL II32 src dst) = pprSizeOpOp (sLit "mov") II32 src dst
-- 32-to-64 bit zero extension on x86_64 is accomplished by a simple
-- movl. But we represent it as a MOVZxL instruction, because
-- the reg alloc would tend to throw away a plain reg-to-reg
-- move, and we still want it to do that.
pprInstr (MOVZxL sizes src dst) = pprSizeOpOpCoerce (sLit "movz") sizes II32 src dst
-- zero-extension only needs to extend to 32 bits: on x86_64,
-- the remaining zero-extension to 64 bits is automatic, and the 32-bit
-- instruction is shorter.
pprInstr (MOVSxL sizes src dst)
= sdocWithPlatform $ \platform ->
pprSizeOpOpCoerce (sLit "movs") sizes (archWordSize (target32Bit platform)) src dst
-- here we do some patching, since the physical registers are only set late
-- in the code generation.
pprInstr (LEA size (OpAddr (AddrBaseIndex (EABaseReg reg1) (EAIndex reg2 1) (ImmInt 0))) dst@(OpReg reg3))
| reg1 == reg3
= pprSizeOpOp (sLit "add") size (OpReg reg2) dst
pprInstr (LEA size (OpAddr (AddrBaseIndex (EABaseReg reg1) (EAIndex reg2 1) (ImmInt 0))) dst@(OpReg reg3))
| reg2 == reg3
= pprSizeOpOp (sLit "add") size (OpReg reg1) dst
pprInstr (LEA size (OpAddr (AddrBaseIndex (EABaseReg reg1) EAIndexNone displ)) dst@(OpReg reg3))
| reg1 == reg3
= pprInstr (ADD size (OpImm displ) dst)
pprInstr (LEA size src dst) = pprSizeOpOp (sLit "lea") size src dst
pprInstr (ADD size (OpImm (ImmInt (-1))) dst)
= pprSizeOp (sLit "dec") size dst
pprInstr (ADD size (OpImm (ImmInt 1)) dst)
= pprSizeOp (sLit "inc") size dst
pprInstr (ADD size src dst)
= pprSizeOpOp (sLit "add") size src dst
pprInstr (ADC size src dst)
= pprSizeOpOp (sLit "adc") size src dst
pprInstr (SUB size src dst) = pprSizeOpOp (sLit "sub") size src dst
pprInstr (IMUL size op1 op2) = pprSizeOpOp (sLit "imul") size op1 op2
{- A hack. The Intel documentation says that "The two and three
operand forms [of IMUL] may also be used with unsigned operands
because the lower half of the product is the same regardless if
(sic) the operands are signed or unsigned. The CF and OF flags,
however, cannot be used to determine if the upper half of the
result is non-zero." So there.
-}
pprInstr (AND size src dst) = pprSizeOpOp (sLit "and") size src dst
pprInstr (OR size src dst) = pprSizeOpOp (sLit "or") size src dst
pprInstr (XOR FF32 src dst) = pprOpOp (sLit "xorps") FF32 src dst
pprInstr (XOR FF64 src dst) = pprOpOp (sLit "xorpd") FF64 src dst
pprInstr (XOR size src dst) = pprSizeOpOp (sLit "xor") size src dst
pprInstr (POPCNT size src dst) = pprOpOp (sLit "popcnt") size src (OpReg dst)
pprInstr (NOT size op) = pprSizeOp (sLit "not") size op
pprInstr (NEGI size op) = pprSizeOp (sLit "neg") size op
pprInstr (SHL size src dst) = pprShift (sLit "shl") size src dst
pprInstr (SAR size src dst) = pprShift (sLit "sar") size src dst
pprInstr (SHR size src dst) = pprShift (sLit "shr") size src dst
pprInstr (BT size imm src) = pprSizeImmOp (sLit "bt") size imm src
pprInstr (CMP size src dst)
| is_float size = pprSizeOpOp (sLit "ucomi") size src dst -- SSE2
| otherwise = pprSizeOpOp (sLit "cmp") size src dst
where
-- This predicate is needed here and nowhere else
is_float FF32 = True
is_float FF64 = True
is_float FF80 = True
is_float _ = False
pprInstr (TEST size src dst) = pprSizeOpOp (sLit "test") size src dst
pprInstr (PUSH size op) = pprSizeOp (sLit "push") size op
pprInstr (POP size op) = pprSizeOp (sLit "pop") size op
-- both unused (SDM):
-- pprInstr PUSHA = ptext (sLit "\tpushal")
-- pprInstr POPA = ptext (sLit "\tpopal")
pprInstr NOP = ptext (sLit "\tnop")
pprInstr (CLTD II32) = ptext (sLit "\tcltd")
pprInstr (CLTD II64) = ptext (sLit "\tcqto")
pprInstr (SETCC cond op) = pprCondInstr (sLit "set") cond (pprOperand II8 op)
pprInstr (JXX cond blockid)
= pprCondInstr (sLit "j") cond (ppr lab)
where lab = mkAsmTempLabel (getUnique blockid)
pprInstr (JXX_GBL cond imm) = pprCondInstr (sLit "j") cond (pprImm imm)
pprInstr (JMP (OpImm imm) _) = ptext (sLit "\tjmp ") <> pprImm imm
pprInstr (JMP op _) = sdocWithPlatform $ \platform ->
ptext (sLit "\tjmp *") <> pprOperand (archWordSize (target32Bit platform)) op
pprInstr (JMP_TBL op _ _ _) = pprInstr (JMP op [])
pprInstr (CALL (Left imm) _) = ptext (sLit "\tcall ") <> pprImm imm
pprInstr (CALL (Right reg) _) = sdocWithPlatform $ \platform ->
ptext (sLit "\tcall *") <> pprReg (archWordSize (target32Bit platform)) reg
pprInstr (IDIV sz op) = pprSizeOp (sLit "idiv") sz op
pprInstr (DIV sz op) = pprSizeOp (sLit "div") sz op
pprInstr (IMUL2 sz op) = pprSizeOp (sLit "imul") sz op
-- x86_64 only
pprInstr (MUL size op1 op2) = pprSizeOpOp (sLit "mul") size op1 op2
pprInstr (MUL2 size op) = pprSizeOp (sLit "mul") size op
pprInstr (FDIV size op1 op2) = pprSizeOpOp (sLit "div") size op1 op2
pprInstr (CVTSS2SD from to) = pprRegReg (sLit "cvtss2sd") from to
pprInstr (CVTSD2SS from to) = pprRegReg (sLit "cvtsd2ss") from to
pprInstr (CVTTSS2SIQ sz from to) = pprSizeSizeOpReg (sLit "cvttss2si") FF32 sz from to
pprInstr (CVTTSD2SIQ sz from to) = pprSizeSizeOpReg (sLit "cvttsd2si") FF64 sz from to
pprInstr (CVTSI2SS sz from to) = pprSizeOpReg (sLit "cvtsi2ss") sz from to
pprInstr (CVTSI2SD sz from to) = pprSizeOpReg (sLit "cvtsi2sd") sz from to
-- FETCHGOT for PIC on ELF platforms
pprInstr (FETCHGOT reg)
= vcat [ ptext (sLit "\tcall 1f"),
hcat [ ptext (sLit "1:\tpopl\t"), pprReg II32 reg ],
hcat [ ptext (sLit "\taddl\t$_GLOBAL_OFFSET_TABLE_+(.-1b), "),
pprReg II32 reg ]
]
-- FETCHPC for PIC on Darwin/x86
-- get the instruction pointer into a register
-- (Terminology note: the IP is called Program Counter on PPC,
-- and it's a good thing to use the same name on both platforms)
pprInstr (FETCHPC reg)
= vcat [ ptext (sLit "\tcall 1f"),
hcat [ ptext (sLit "1:\tpopl\t"), pprReg II32 reg ]
]
-- -----------------------------------------------------------------------------
-- i386 floating-point
-- Simulating a flat register set on the x86 FP stack is tricky.
-- you have to free %st(7) before pushing anything on the FP reg stack
-- so as to preclude the possibility of a FP stack overflow exception.
pprInstr g@(GMOV src dst)
| src == dst
= empty
| otherwise
= pprG g (hcat [gtab, gpush src 0, gsemi, gpop dst 1])
-- GLD sz addr dst ==> FLDsz addr ; FSTP (dst+1)
pprInstr g@(GLD sz addr dst)
= pprG g (hcat [gtab, text "fld", pprSize_x87 sz, gsp,
pprAddr addr, gsemi, gpop dst 1])
-- GST sz src addr ==> FLD dst ; FSTPsz addr
pprInstr g@(GST sz src addr)
| src == fake0 && sz /= FF80 -- fstt instruction doesn't exist
= pprG g (hcat [gtab,
text "fst", pprSize_x87 sz, gsp, pprAddr addr])
| otherwise
= pprG g (hcat [gtab, gpush src 0, gsemi,
text "fstp", pprSize_x87 sz, gsp, pprAddr addr])
pprInstr g@(GLDZ dst)
= pprG g (hcat [gtab, text "fldz ; ", gpop dst 1])
pprInstr g@(GLD1 dst)
= pprG g (hcat [gtab, text "fld1 ; ", gpop dst 1])
pprInstr (GFTOI src dst)
= pprInstr (GDTOI src dst)
pprInstr g@(GDTOI src dst)
= pprG g (vcat [
hcat [gtab, text "subl $8, %esp ; fnstcw 4(%esp)"],
hcat [gtab, gpush src 0],
hcat [gtab, text "movzwl 4(%esp), ", reg,
text " ; orl $0xC00, ", reg],
hcat [gtab, text "movl ", reg, text ", 0(%esp) ; fldcw 0(%esp)"],
hcat [gtab, text "fistpl 0(%esp)"],
hcat [gtab, text "fldcw 4(%esp) ; movl 0(%esp), ", reg],
hcat [gtab, text "addl $8, %esp"]
])
where
reg = pprReg II32 dst
pprInstr (GITOF src dst)
= pprInstr (GITOD src dst)
pprInstr g@(GITOD src dst)
= pprG g (hcat [gtab, text "pushl ", pprReg II32 src,
text " ; fildl (%esp) ; ",
gpop dst 1, text " ; addl $4,%esp"])
pprInstr g@(GDTOF src dst)
= pprG g (vcat [gtab <> gpush src 0,
gtab <> text "subl $4,%esp ; fstps (%esp) ; flds (%esp) ; addl $4,%esp ;",
gtab <> gpop dst 1])
{- Gruesome swamp follows. If you're unfortunate enough to have ventured
this far into the jungle AND you give a Rat's Ass (tm) what's going
on, here's the deal. Generate code to do a floating point comparison
of src1 and src2, of kind cond, and set the Zero flag if true.
The complications are to do with handling NaNs correctly. We want the
property that if either argument is NaN, then the result of the
comparison is False ... except if we're comparing for inequality,
in which case the answer is True.
Here's how the general (non-inequality) case works. As an
example, consider generating an equality test:
pushl %eax -- we need to mess with this
<get src1 to top of FPU stack>
fcomp <src2 location in FPU stack> and pop pushed src1
-- Result of comparison is in FPU Status Register bits
-- C3 C2 and C0
fstsw %ax -- Move FPU Status Reg to %ax
sahf -- move C3 C2 C0 from %ax to integer flag reg
-- now the serious magic begins
setpo %ah -- %ah = if comparable(neither arg was NaN) then 1 else 0
sete %al -- %al = if arg1 == arg2 then 1 else 0
andb %ah,%al -- %al &= %ah
-- so %al == 1 iff (comparable && same); else it holds 0
decb %al -- %al == 0, ZeroFlag=1 iff (comparable && same);
else %al == 0xFF, ZeroFlag=0
-- the zero flag is now set as we desire.
popl %eax
The special case of inequality differs thusly:
setpe %ah -- %ah = if incomparable(either arg was NaN) then 1 else 0
setne %al -- %al = if arg1 /= arg2 then 1 else 0
orb %ah,%al -- %al = if (incomparable || different) then 1 else 0
decb %al -- if (incomparable || different) then (%al == 0, ZF=1)
else (%al == 0xFF, ZF=0)
-}
pprInstr g@(GCMP cond src1 src2)
| case cond of { NE -> True; _ -> False }
= pprG g (vcat [
hcat [gtab, text "pushl %eax ; ",gpush src1 0],
hcat [gtab, text "fcomp ", greg src2 1,
text "; fstsw %ax ; sahf ; setpe %ah"],
hcat [gtab, text "setne %al ; ",
text "orb %ah,%al ; decb %al ; popl %eax"]
])
| otherwise
= pprG g (vcat [
hcat [gtab, text "pushl %eax ; ",gpush src1 0],
hcat [gtab, text "fcomp ", greg src2 1,
text "; fstsw %ax ; sahf ; setpo %ah"],
hcat [gtab, text "set", pprCond (fix_FP_cond cond), text " %al ; ",
text "andb %ah,%al ; decb %al ; popl %eax"]
])
where
{- On the 486, the flags set by FP compare are the unsigned ones!
(This looks like a HACK to me. WDP 96/03)
-}
fix_FP_cond :: Cond -> Cond
fix_FP_cond GE = GEU
fix_FP_cond GTT = GU
fix_FP_cond LTT = LU
fix_FP_cond LE = LEU
fix_FP_cond EQQ = EQQ
fix_FP_cond NE = NE
fix_FP_cond _ = panic "X86.Ppr.fix_FP_cond: no match"
-- there should be no others
pprInstr g@(GABS _ src dst)
= pprG g (hcat [gtab, gpush src 0, text " ; fabs ; ", gpop dst 1])
pprInstr g@(GNEG _ src dst)
= pprG g (hcat [gtab, gpush src 0, text " ; fchs ; ", gpop dst 1])
pprInstr g@(GSQRT sz src dst)
= pprG g (hcat [gtab, gpush src 0, text " ; fsqrt"] $$
hcat [gtab, gcoerceto sz, gpop dst 1])
pprInstr g@(GSIN sz l1 l2 src dst)
= pprG g (pprTrigOp "fsin" False l1 l2 src dst sz)
pprInstr g@(GCOS sz l1 l2 src dst)
= pprG g (pprTrigOp "fcos" False l1 l2 src dst sz)
pprInstr g@(GTAN sz l1 l2 src dst)
= pprG g (pprTrigOp "fptan" True l1 l2 src dst sz)
-- In the translations for GADD, GMUL, GSUB and GDIV,
-- the first two cases are mere optimisations. The otherwise clause
-- generates correct code under all circumstances.
pprInstr g@(GADD _ src1 src2 dst)
| src1 == dst
= pprG g (text "\t#GADD-xxxcase1" $$
hcat [gtab, gpush src2 0,
text " ; faddp %st(0),", greg src1 1])
| src2 == dst
= pprG g (text "\t#GADD-xxxcase2" $$
hcat [gtab, gpush src1 0,
text " ; faddp %st(0),", greg src2 1])
| otherwise
= pprG g (hcat [gtab, gpush src1 0,
text " ; fadd ", greg src2 1, text ",%st(0)",
gsemi, gpop dst 1])
pprInstr g@(GMUL _ src1 src2 dst)
| src1 == dst
= pprG g (text "\t#GMUL-xxxcase1" $$
hcat [gtab, gpush src2 0,
text " ; fmulp %st(0),", greg src1 1])
| src2 == dst
= pprG g (text "\t#GMUL-xxxcase2" $$
hcat [gtab, gpush src1 0,
text " ; fmulp %st(0),", greg src2 1])
| otherwise
= pprG g (hcat [gtab, gpush src1 0,
text " ; fmul ", greg src2 1, text ",%st(0)",
gsemi, gpop dst 1])
pprInstr g@(GSUB _ src1 src2 dst)
| src1 == dst
= pprG g (text "\t#GSUB-xxxcase1" $$
hcat [gtab, gpush src2 0,
text " ; fsubrp %st(0),", greg src1 1])
| src2 == dst
= pprG g (text "\t#GSUB-xxxcase2" $$
hcat [gtab, gpush src1 0,
text " ; fsubp %st(0),", greg src2 1])
| otherwise
= pprG g (hcat [gtab, gpush src1 0,
text " ; fsub ", greg src2 1, text ",%st(0)",
gsemi, gpop dst 1])
pprInstr g@(GDIV _ src1 src2 dst)
| src1 == dst
= pprG g (text "\t#GDIV-xxxcase1" $$
hcat [gtab, gpush src2 0,
text " ; fdivrp %st(0),", greg src1 1])
| src2 == dst
= pprG g (text "\t#GDIV-xxxcase2" $$
hcat [gtab, gpush src1 0,
text " ; fdivp %st(0),", greg src2 1])
| otherwise
= pprG g (hcat [gtab, gpush src1 0,
text " ; fdiv ", greg src2 1, text ",%st(0)",
gsemi, gpop dst 1])
pprInstr GFREE
= vcat [ ptext (sLit "\tffree %st(0) ;ffree %st(1) ;ffree %st(2) ;ffree %st(3)"),
ptext (sLit "\tffree %st(4) ;ffree %st(5)")
]
pprInstr _
= panic "X86.Ppr.pprInstr: no match"
pprTrigOp :: String -> Bool -> CLabel -> CLabel
-> Reg -> Reg -> Size -> SDoc
pprTrigOp op -- fsin, fcos or fptan
isTan -- we need a couple of extra steps if we're doing tan
l1 l2 -- internal labels for us to use
src dst sz
= -- We'll be needing %eax later on
hcat [gtab, text "pushl %eax;"] $$
-- tan is going to use an extra space on the FP stack
(if isTan then hcat [gtab, text "ffree %st(6)"] else empty) $$
-- First put the value in %st(0) and try to apply the op to it
hcat [gpush src 0, text ("; " ++ op)] $$
-- Now look to see if C2 was set (overflow, |value| >= 2^63)
hcat [gtab, text "fnstsw %ax"] $$
hcat [gtab, text "test $0x400,%eax"] $$
-- If we were in bounds then jump to the end
hcat [gtab, text "je " <> ppr l1] $$
-- Otherwise we need to shrink the value. Start by
-- loading pi, doubling it (by adding it to itself),
-- and then swapping pi with the value, so the value we
-- want to apply op to is in %st(0) again
hcat [gtab, text "ffree %st(7); fldpi"] $$
hcat [gtab, text "fadd %st(0),%st"] $$
hcat [gtab, text "fxch %st(1)"] $$
-- Now we have a loop in which we make the value smaller,
-- see if it's small enough, and loop if not
(ppr l2 <> char ':') $$
hcat [gtab, text "fprem1"] $$
-- My Debian libc uses fstsw here for the tan code, but I can't
-- see any reason why it should need to be different for tan.
hcat [gtab, text "fnstsw %ax"] $$
hcat [gtab, text "test $0x400,%eax"] $$
hcat [gtab, text "jne " <> ppr l2] $$
hcat [gtab, text "fstp %st(1)"] $$
hcat [gtab, text op] $$
(ppr l1 <> char ':') $$
-- Pop the 1.0 tan gave us
(if isTan then hcat [gtab, text "fstp %st(0)"] else empty) $$
-- Restore %eax
hcat [gtab, text "popl %eax;"] $$
-- And finally make the result the right size
hcat [gtab, gcoerceto sz, gpop dst 1]
--------------------------
-- coerce %st(0) to the specified size
gcoerceto :: Size -> SDoc
gcoerceto FF64 = empty
gcoerceto FF32 = empty --text "subl $4,%esp ; fstps (%esp) ; flds (%esp) ; addl $4,%esp ; "
gcoerceto _ = panic "X86.Ppr.gcoerceto: no match"
gpush :: Reg -> RegNo -> SDoc
gpush reg offset
= hcat [text "fld ", greg reg offset]
gpop :: Reg -> RegNo -> SDoc
gpop reg offset
= hcat [text "fstp ", greg reg offset]
greg :: Reg -> RegNo -> SDoc
greg reg offset = text "%st(" <> int (gregno reg - firstfake+offset) <> char ')'
gsemi :: SDoc
gsemi = text " ; "
gtab :: SDoc
gtab = char '\t'
gsp :: SDoc
gsp = char ' '
gregno :: Reg -> RegNo
gregno (RegReal (RealRegSingle i)) = i
gregno _ = --pprPanic "gregno" (ppr other)
999 -- bogus; only needed for debug printing
pprG :: Instr -> SDoc -> SDoc
pprG fake actual
= (char '#' <> pprGInstr fake) $$ actual
pprGInstr :: Instr -> SDoc
pprGInstr (GMOV src dst) = pprSizeRegReg (sLit "gmov") FF64 src dst
pprGInstr (GLD sz src dst) = pprSizeAddrReg (sLit "gld") sz src dst
pprGInstr (GST sz src dst) = pprSizeRegAddr (sLit "gst") sz src dst
pprGInstr (GLDZ dst) = pprSizeReg (sLit "gldz") FF64 dst
pprGInstr (GLD1 dst) = pprSizeReg (sLit "gld1") FF64 dst
pprGInstr (GFTOI src dst) = pprSizeSizeRegReg (sLit "gftoi") FF32 II32 src dst
pprGInstr (GDTOI src dst) = pprSizeSizeRegReg (sLit "gdtoi") FF64 II32 src dst
pprGInstr (GITOF src dst) = pprSizeSizeRegReg (sLit "gitof") II32 FF32 src dst
pprGInstr (GITOD src dst) = pprSizeSizeRegReg (sLit "gitod") II32 FF64 src dst
pprGInstr (GDTOF src dst) = pprSizeSizeRegReg (sLit "gdtof") FF64 FF32 src dst
pprGInstr (GCMP co src dst) = pprCondRegReg (sLit "gcmp_") FF64 co src dst
pprGInstr (GABS sz src dst) = pprSizeRegReg (sLit "gabs") sz src dst
pprGInstr (GNEG sz src dst) = pprSizeRegReg (sLit "gneg") sz src dst
pprGInstr (GSQRT sz src dst) = pprSizeRegReg (sLit "gsqrt") sz src dst
pprGInstr (GSIN sz _ _ src dst) = pprSizeRegReg (sLit "gsin") sz src dst
pprGInstr (GCOS sz _ _ src dst) = pprSizeRegReg (sLit "gcos") sz src dst
pprGInstr (GTAN sz _ _ src dst) = pprSizeRegReg (sLit "gtan") sz src dst
pprGInstr (GADD sz src1 src2 dst) = pprSizeRegRegReg (sLit "gadd") sz src1 src2 dst
pprGInstr (GSUB sz src1 src2 dst) = pprSizeRegRegReg (sLit "gsub") sz src1 src2 dst
pprGInstr (GMUL sz src1 src2 dst) = pprSizeRegRegReg (sLit "gmul") sz src1 src2 dst
pprGInstr (GDIV sz src1 src2 dst) = pprSizeRegRegReg (sLit "gdiv") sz src1 src2 dst
pprGInstr _ = panic "X86.Ppr.pprGInstr: no match"
pprDollImm :: Imm -> SDoc
pprDollImm i = ptext (sLit "$") <> pprImm i
pprOperand :: Size -> Operand -> SDoc
pprOperand s (OpReg r) = pprReg s r
pprOperand _ (OpImm i) = pprDollImm i
pprOperand _ (OpAddr ea) = pprAddr ea
pprMnemonic_ :: LitString -> SDoc
pprMnemonic_ name =
char '\t' <> ptext name <> space
pprMnemonic :: LitString -> Size -> SDoc
pprMnemonic name size =
char '\t' <> ptext name <> pprSize size <> space
pprSizeImmOp :: LitString -> Size -> Imm -> Operand -> SDoc
pprSizeImmOp name size imm op1
= hcat [
pprMnemonic name size,
char '$',
pprImm imm,
comma,
pprOperand size op1
]
pprSizeOp :: LitString -> Size -> Operand -> SDoc
pprSizeOp name size op1
= hcat [
pprMnemonic name size,
pprOperand size op1
]
pprSizeOpOp :: LitString -> Size -> Operand -> Operand -> SDoc
pprSizeOpOp name size op1 op2
= hcat [
pprMnemonic name size,
pprOperand size op1,
comma,
pprOperand size op2
]
pprOpOp :: LitString -> Size -> Operand -> Operand -> SDoc
pprOpOp name size op1 op2
= hcat [
pprMnemonic_ name,
pprOperand size op1,
comma,
pprOperand size op2
]
pprSizeReg :: LitString -> Size -> Reg -> SDoc
pprSizeReg name size reg1
= hcat [
pprMnemonic name size,
pprReg size reg1
]
pprSizeRegReg :: LitString -> Size -> Reg -> Reg -> SDoc
pprSizeRegReg name size reg1 reg2
= hcat [
pprMnemonic name size,
pprReg size reg1,
comma,
pprReg size reg2
]
pprRegReg :: LitString -> Reg -> Reg -> SDoc
pprRegReg name reg1 reg2
= sdocWithPlatform $ \platform ->
hcat [
pprMnemonic_ name,
pprReg (archWordSize (target32Bit platform)) reg1,
comma,
pprReg (archWordSize (target32Bit platform)) reg2
]
pprSizeOpReg :: LitString -> Size -> Operand -> Reg -> SDoc
pprSizeOpReg name size op1 reg2
= sdocWithPlatform $ \platform ->
hcat [
pprMnemonic name size,
pprOperand size op1,
comma,
pprReg (archWordSize (target32Bit platform)) reg2
]
pprCondRegReg :: LitString -> Size -> Cond -> Reg -> Reg -> SDoc
pprCondRegReg name size cond reg1 reg2
= hcat [
char '\t',
ptext name,
pprCond cond,
space,
pprReg size reg1,
comma,
pprReg size reg2
]
pprSizeSizeRegReg :: LitString -> Size -> Size -> Reg -> Reg -> SDoc
pprSizeSizeRegReg name size1 size2 reg1 reg2
= hcat [
char '\t',
ptext name,
pprSize size1,
pprSize size2,
space,
pprReg size1 reg1,
comma,
pprReg size2 reg2
]
pprSizeSizeOpReg :: LitString -> Size -> Size -> Operand -> Reg -> SDoc
pprSizeSizeOpReg name size1 size2 op1 reg2
= hcat [
pprMnemonic name size2,
pprOperand size1 op1,
comma,
pprReg size2 reg2
]
pprSizeRegRegReg :: LitString -> Size -> Reg -> Reg -> Reg -> SDoc
pprSizeRegRegReg name size reg1 reg2 reg3
= hcat [
pprMnemonic name size,
pprReg size reg1,
comma,
pprReg size reg2,
comma,
pprReg size reg3
]
pprSizeAddrReg :: LitString -> Size -> AddrMode -> Reg -> SDoc
pprSizeAddrReg name size op dst
= hcat [
pprMnemonic name size,
pprAddr op,
comma,
pprReg size dst
]
pprSizeRegAddr :: LitString -> Size -> Reg -> AddrMode -> SDoc
pprSizeRegAddr name size src op
= hcat [
pprMnemonic name size,
pprReg size src,
comma,
pprAddr op
]
pprShift :: LitString -> Size -> Operand -> Operand -> SDoc
pprShift name size src dest
= hcat [
pprMnemonic name size,
pprOperand II8 src, -- src is 8-bit sized
comma,
pprOperand size dest
]
pprSizeOpOpCoerce :: LitString -> Size -> Size -> Operand -> Operand -> SDoc
pprSizeOpOpCoerce name size1 size2 op1 op2
= hcat [ char '\t', ptext name, pprSize size1, pprSize size2, space,
pprOperand size1 op1,
comma,
pprOperand size2 op2
]
pprCondInstr :: LitString -> Cond -> SDoc -> SDoc
pprCondInstr name cond arg
= hcat [ char '\t', ptext name, pprCond cond, space, arg]
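-- As a rough illustration of the helpers above (label name invented):
--   pprCondInstr (sLit "j") EQQ (text "_lbl")  renders as  "\tje _lbl"
-- and pprSizeOpOp (sLit "mov") II32 prints "\tmovl " followed by the two
-- operands separated by a comma.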
|
nomeata/ghc
|
compiler/nativeGen/X86/Ppr.hs
|
bsd-3-clause
| 41,666 | 0 | 28 | 12,983 | 12,248 | 6,099 | 6,149 | 799 | 97 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveAnyClass #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE DuplicateRecordFields #-} -- User and Signin share the username/password field names
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE StandaloneDeriving #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE TypeSynonymInstances #-}
module AuthServer where
import System.Random
import Control.Monad.Trans.Except
import Control.Monad.Trans.Resource
import Control.Monad.IO.Class
import Data.Aeson
import Data.Aeson.TH
import Data.Bson.Generic
import GHC.Generics
import Network.Wai hiding(Response)
import Network.Wai.Handler.Warp
import Network.Wai.Logger
import Servant
import Servant.API
import Servant.Client
import System.IO
import System.Directory
import System.Environment (getArgs, getProgName, lookupEnv)
import System.Log.Formatter
import System.Log.Handler (setFormatter)
import System.Log.Handler.Simple
import System.Log.Handler.Syslog
import System.Log.Logger
import Data.Bson.Generic
import qualified Data.List as DL
import Data.Maybe (catMaybes)
import Data.Text (pack, unpack)
import Data.Time.Clock (UTCTime, getCurrentTime)
import Data.Time.Format (defaultTimeLocale, formatTime)
import Database.MongoDB
import Control.Monad (when)
import Network.HTTP.Client (newManager, defaultManagerSettings)
import Data.UUID
import Data.Time
data Response = Response{
response :: String
} deriving (Eq, Show, Generic)
instance ToJSON Response
instance FromJSON Response
data User = User{
username :: String,
password :: String,
timeout :: Int,
token :: String
} deriving (Eq, Show, Generic)
data Signin = Signin{
username :: String,
password :: String
} deriving (Eq, Show, Generic)
instance ToJSON User
instance FromJSON User
instance ToJSON Signin
instance FromJSON Signin
instance ToBSON User
instance FromBSON User
type ApiHandler = ExceptT ServantErr IO
serverport :: String
serverport = "8082"
serverhost :: String
serverhost = "localhost"
type AuthApi =
  "signin" :> ReqBody '[JSON] Signin :> Post '[JSON] User :<|>
  "register" :> ReqBody '[JSON] Signin :> Post '[JSON] Response
-- "isvalid" :> Capture "token" User :> Get '[JSON] Response
authApi :: Proxy AuthApi
authApi = Proxy
server :: Server AuthApi
server =
login :<|>
  newuser -- :<|>
-- checkToken
authApp :: Application
authApp = serve authApi server
mkApp :: IO()
mkApp = do
run (read (serverport) ::Int) authApp
login :: Signin -> ApiHandler User
login (Signin uname psswrd) = liftIO $ do
  warnLog $ "Searching for user record for key: " ++ uname
  users <- withMongoDbConnection $ do
    docs <- find (select ["uname" =: uname, "password" =: psswrd] "USER_RECORD") >>= drainCursor
    return $ catMaybes $ DL.map (\ b -> fromBSON b :: Maybe User) docs
  when (null users) $ warnLog $ "No matching user record found for key: " ++ uname
  tok <- randomIO :: IO UUID
  let fiveMinutes = 5 * 60 -- session lifetime in seconds (the User record stores the timeout as an Int)
  let user = User uname psswrd fiveMinutes (toString tok)
  warnLog $ "Storing session key under key " ++ uname ++ "."
  withMongoDbConnection $ upsert (select ["id" =: uname] "SESSION_RECORD") $ toBSON user
  return user
newuser :: Signin -> ApiHandler Response
newuser (Signin uname psswrd) = liftIO $ do
  warnLog $ "Storing value under key: " ++ uname
  let user = User uname psswrd 0 "" -- no session yet: zero timeout and an empty token
  withMongoDbConnection $ upsert (select ["uname" =: uname, "password" =: psswrd] "USER_RECORD") $ toBSON user
  return (Response "Success")
-- | Logging stuff
iso8601 :: UTCTime -> String
iso8601 = formatTime defaultTimeLocale "%FT%T%q%z"
-- global logging functions
debugLog, warnLog, errorLog, noticeLog :: String -> IO ()
debugLog = doLog debugM
warnLog = doLog warningM
errorLog = doLog errorM
noticeLog = doLog noticeM
doLog f s = getProgName >>= \ p -> do
t <- getCurrentTime
f p $ (iso8601 t) ++ " " ++ s
withLogging act = withStdoutLogger $ \aplogger -> do
lname <- getProgName
llevel <- logLevel
updateGlobalLogger lname
(setLevel $ case llevel of
"WARNING" -> WARNING
"ERROR" -> ERROR
_ -> DEBUG)
act aplogger
-- | Mongodb helpers...
-- | helper to open connection to mongo database and run action
-- generally run as follows:
-- withMongoDbConnection $ do ...
--
withMongoDbConnection :: Action IO a -> IO a
withMongoDbConnection act = do
ip <- mongoDbIp
port <- mongoDbPort
database <- mongoDbDatabase
pipe <- connect (host ip)
ret <- runResourceT $ liftIO $ access pipe master (pack database) act
Database.MongoDB.close pipe
return ret
-- | helper method to ensure we force extraction of all results
-- note how it is defined recursively - meaning that drainCursor' calls itself.
-- the purpose is to iterate through all documents returned if the connection is
-- returning the documents in batch mode, meaning in batches of returned results with more
-- to come on each call. The function recurses until there are no results left, building an
-- array of returned [Document]
drainCursor :: Cursor -> Action IO [Document]
drainCursor cur = drainCursor' cur []
where
drainCursor' cur res = do
batch <- nextBatch cur
if null batch
then return res
else drainCursor' cur (res ++ batch)
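-- A typical use (mirroring the lookup in 'login'; the collection name is the
-- one used above):
--   docs <- withMongoDbConnection $
--             find (select ["uname" =: uname] "USER_RECORD") >>= drainCursor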
-- | Environment variable functions, that return the environment variable if set, or
-- default values if not set.
-- | The IP address of the mongoDB database that devnostics-rest uses to store and access data
mongoDbIp :: IO String
mongoDbIp = defEnv "MONGODB_IP" Prelude.id "127.0.0.1" True
-- | The port number of the mongoDB database that devnostics-rest uses to store and access data
mongoDbPort :: IO Integer
mongoDbPort = defEnv "MONGODB_PORT" read 27017 False -- 27017 is the default mongodb port
-- | The name of the mongoDB database that devnostics-rest uses to store and access data
mongoDbDatabase :: IO String
mongoDbDatabase = defEnv "MONGODB_DATABASE" Prelude.id "USEHASKELLDB" True
-- | Determines log reporting level. Set to "DEBUG", "WARNING" or "ERROR" as preferred. Logging is
-- provided by the hslogger library.
logLevel :: IO String
logLevel = defEnv "LOG_LEVEL" Prelude.id "DEBUG" True
-- | Helper function to simplify the setting of environment variables
-- function that looks up environment variable and returns the result of running function fn over it
-- or if the environment variable does not exist, returns the value def. The function will optionally log a
-- warning based on Boolean tag
defEnv :: Show a
=> String -- Environment Variable name
-> (String -> a) -- function to process variable string (set as 'id' if not needed)
-> a -- default value to use if environment variable is not set
-> Bool -- True if we should warn if environment variable is not set
-> IO a
defEnv env fn def doWarn = lookupEnv env >>= \ e -> case e of
Just s -> return $ fn s
Nothing -> do
when doWarn (doLog warningM $ "Environment variable: " ++ env ++
" is not set. Defaulting to " ++ (show def))
return def
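-- Example of how a further (hypothetical) setting could be declared with defEnv:
--   sessionTimeout :: IO Integer
--   sessionTimeout = defEnv "SESSION_TIMEOUT" read 300 False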
|
Garygunn94/DFS
|
AuthServer/.stack-work/intero/intero5744AMY.hs
|
bsd-3-clause
| 7,881 | 121 | 13 | 2,057 | 1,608 | 888 | 720 | -1 | -1 |
module TestsCommon
( module X
, pMapM_
, rndSelect
, testFieldSpec
) where
import GalFld.Core.FiniteField
-- from hspec
import Test.Hspec as X
import Control.Exception as X
-- from monad-parallel
import qualified Control.Monad.Parallel as P
import System.Random
import Control.Monad (replicateM)
rndSelect xs n = do
gen <- getStdGen
return $ take n [xs!!x | x <- randomRs (0, length xs - 1) gen]
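-- For example, rndSelect "abcdefg" 3 might return "gbd": the indices are
-- drawn independently, so the same element can appear more than once
-- (selection with replacement).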
pMapM_ f = P.sequence_ . map f
testFieldSpec e = testFieldSpec' $ elems e
testFieldSpec' es = do
it "Assoziativität"
(testAsso es `shouldBe` True)
it "Kommutativität"
(testKommu es `shouldBe` True)
it "Einheiten"
(testEinh es `shouldBe` True)
it "Inversen"
(testInv es `shouldBe` True)
it "Distributivität"
(testDist es `shouldBe` True)
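-- A minimal usage sketch (assuming some finite field element e whose type
-- provides 'elems'):
--   main = hspec $ describe "field laws" $ testFieldSpec e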
|
maximilianhuber/softwareProjekt
|
test/TestsCommon.hs
|
bsd-3-clause
| 791 | 0 | 15 | 165 | 267 | 144 | 123 | 27 | 1 |
module Main where
import Development.Shake
import Data.Monoid ((<>))
import Control.Arrow ((>>>))
import Control.Monad.Extra
import System.FilePath ((</>), takeDirectory)
import qualified System.Directory as Directory
import qualified GHC.Conc as Conc
import qualified Data.ByteString as ByteString
import qualified Network.HostName as HostName
import qualified TalkBank.Media as Media
-- | Hidden Shake build directory for this project.
getShakeBuildDir :: IO FilePath
getShakeBuildDir = (</> ".update-chat-site") <$> Directory.getHomeDirectory
-- | Where to find "data-orig" and "media".
defaultRootDir :: HostName.HostName -> FilePath
defaultRootDir "childes.talkbank.org" = "/web/childes"
defaultRootDir "talkbank.talkbank.org" = "/TalkBank"
defaultRootDir "homebank.talkbank.org" = "/HomeBank"
defaultRootDir hostName = error $ "Unknown host " <> hostName
main :: IO ()
main = do
-- Check all the dirs first.
rootDir <- defaultRootDir <$> HostName.getHostName
unlessM (Directory.doesDirectoryExist rootDir) $
fail $ "root dir " <> rootDir <> " does not exist"
let dataOrigDir = rootDir </> "data-orig"
unlessM (liftIO $ Directory.doesDirectoryExist dataOrigDir) $
fail $ "data-orig dir " <> dataOrigDir <> " does not exist"
let mediaDir = rootDir </> "media"
unlessM (liftIO $ Directory.doesDirectoryExist mediaDir) $
fail $ "media dir " <> mediaDir <> " does not exist"
buildDir <- getShakeBuildDir
numProcessors <- Conc.getNumProcessors
shakeArgs shakeOptions { shakeFiles = buildDir
, shakeThreads = numProcessors
, shakeProgress = progressSimple
} $ do
action $ checkMediaFiles dataOrigDir mediaDir
-- | For each CHAT file, check as needed for the existence of video or audio
-- if not marked as "missing".
--
-- Print out anything not found.
checkMediaFiles :: FilePath -- ^ data-orig
-> FilePath -- ^ media
-> Action ()
checkMediaFiles dataOrigDir mediaDir = do
getDirectoryFiles dataOrigDir ["//*.cha"]
>>= (mapM_ (reportChatFileDeps dataOrigDir mediaDir) >>> liftIO)
-- | Report on existence of relevant dependent files (media, pic).
reportChatFileDeps :: FilePath -- ^ data-orig
-> FilePath -- ^ media
-> FilePath -- ^ CHAT path relative to root
-> IO ()
reportChatFileDeps dataOrigDir mediaDir relativeChatPath = do
let chatPath = dataOrigDir </> relativeChatPath
let chatMediaDir = mediaDir </> takeDirectory relativeChatPath
text <- ByteString.readFile chatPath
checkMedia chatPath chatMediaDir (Media.typeExpected text)
mapM_ (checkPic chatPath chatMediaDir) (Media.picRelativePaths text)
-- | Report whether a specific dependent file exists.
reportExistence :: FilePath -- ^ CHAT path
-> FilePath -- ^ an external dep path
-> IO ()
reportExistence chatPath depPath = do
unlessM (Directory.doesFileExist depPath) $
putStrLn $ chatPath <> ": cannot find " <> depPath
videoExtension :: FilePath
videoExtension = ".mp4"
audioExtension :: FilePath
audioExtension = ".mp3"
-- | Check whether what is expected exists.
checkMedia :: FilePath -- ^ CHAT path
-> FilePath -- ^ CHAT media dir
-> Media.ExpectedType
-> IO ()
checkMedia _ _ Media.Skip = pure ()
checkMedia chatPath chatMediaDir (Media.Video name) =
reportExistence chatPath (chatMediaDir </> name <> videoExtension)
checkMedia chatPath chatMediaDir (Media.Audio name) = do
reportExistence chatPath (chatMediaDir </> name <> audioExtension)
checkPic :: FilePath -- ^ CHAT path
-> FilePath -- ^ Chat media dir
-> FilePath -- ^ pic relative path
-> IO ()
checkPic chatPath chatMediaDir picRelativePath =
reportExistence chatPath (chatMediaDir </> picRelativePath)
|
TalkBank/update-chat-site
|
app/Main.hs
|
bsd-3-clause
| 3,872 | 0 | 15 | 824 | 820 | 433 | 387 | 77 | 1 |
--------------------------------------------------------------------------------
module Firefly.Input.Internal
( Key (..)
, MouseButton (..)
) where
--------------------------------------------------------------------------------
import Foreign.C.Types (CInt)
--------------------------------------------------------------------------------
-- | See "Firefly.Input.Keys" for a list
newtype Key = Key CInt
--------------------------------------------------------------------------------
-- | See "Firefly.Input.MouseButtons" for a list
newtype MouseButton = MouseButton CInt
|
jaspervdj/firefly
|
src/Firefly/Input/Internal.hs
|
bsd-3-clause
| 602 | 0 | 5 | 69 | 56 | 39 | 17 | 6 | 0 |
{-# LANGUAGE OverloadedStrings #-}
module FinishCmd (
finishCmd, finishCmdOpts
) where
-- standard libraries
import Data.Text (Text)
import qualified Data.Text as T
import Text.Printf
-- import System.Exit
-- import Control.Monad
import Data.Time
import Data.Maybe (mapMaybe, isJust, fromJust)
-- import Data.Either (either)
--
-- -- friends
import GetOpt
import Time
import Errors
import RecordSet
import ParseOpts
--
import Record (Record)
import qualified Record as R
--
-- | Flags for the "finish" command
--
data FinishCmdFlag =
FinishCmdTime UTCTime
--
-- Given a ZonedTime (to determine what day it is and the local time zone), returns option
-- descriptions for the finish command
--
finishCmdOpts :: ZonedTime -> [OptDescr (Either String FinishCmdFlag)]
finishCmdOpts zt =
[ Option "t" ["time"] (OptArg timeToFinishCmd "time") "finish at time" ]
where
timeToFinishCmd :: Maybe String -> Either String FinishCmdFlag
timeToFinishCmd = maybe (Left "You have not provided a time argument.")
(either Left (Right . FinishCmdTime) . parseTimeFlag zt)
finishCmd :: ZonedTime -> [String] -> IO ()
finishCmd zt args = do
let (opts, nonOpts, errors) = getOptEither Permute (finishCmdOpts zt) args
exitWithErrorIf' (length errors > 0) (unlines errors)
exitWithErrorIf (length nonOpts > 0) (printf "Unknown parameters '%s'" (show nonOpts))
rs <- readRecordSet
exitWithErrorIf (not . R.isCurrent $ rs) "There is no current task. Run 'task start'."
let current = fromJust . R.current $ rs
finish <- getFinishTime opts
exitWithErrorIf (R.crecStart current >= finish)
(printf "Finish time is the same as or before start time")
rs' <- R.finishCurrent rs finish
writeRecordSet rs'
printf "Finishing current task '%s' at '%s'\n" (T.unpack $ R.crecDescr current)
(prettyTime (zonedTimeZone zt) finish)
getFinishTime :: [FinishCmdFlag] -> IO UTCTime
getFinishTime flags
| null utcTimes = getCurrentTime
| otherwise = return . head $ utcTimes
where
utcTimes = mapMaybe f flags
f (FinishCmdTime t) = Just t
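-- A usage sketch (the accepted time syntax is whatever 'parseTimeFlag'
-- understands):
--   task finish --time=17:30
-- parses the flag via 'finishCmdOpts', resolves it to a UTCTime in
-- 'getFinishTime' and records it as the finish time of the current task;
-- without the flag the current wall-clock time is used.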
|
sseefried/task
|
src/FinishCmd.hs
|
bsd-3-clause
| 2,123 | 0 | 12 | 437 | 558 | 291 | 267 | 44 | 1 |
-- Copyright (c) 2016-present, Facebook, Inc.
-- All rights reserved.
--
-- This source code is licensed under the BSD-style license found in the
-- LICENSE file in the root directory of this source tree. An additional grant
-- of patent rights can be found in the PATENTS file in the same directory.
module Duckling.Ordinal.HR.Tests
( tests ) where
import Data.String
import Prelude
import Test.Tasty
import Duckling.Dimensions.Types
import Duckling.Ordinal.HR.Corpus
import Duckling.Testing.Asserts
tests :: TestTree
tests = testGroup "HR Tests"
[ makeCorpusTest [This Ordinal] corpus
]
|
rfranek/duckling
|
tests/Duckling/Ordinal/HR/Tests.hs
|
bsd-3-clause
| 599 | 0 | 9 | 95 | 80 | 51 | 29 | 11 | 1 |
{-|
Copyright : (c) Dave Laing, 2017
License : BSD3
Maintainer : [email protected]
Stability : experimental
Portability : non-portable
-}
module Fragment.KiArr.Ast (
module X
) where
import Fragment.KiArr.Ast.Kind as X
import Fragment.KiArr.Ast.Error as X
|
dalaing/type-systems
|
src/Fragment/KiArr/Ast.hs
|
bsd-3-clause
| 278 | 0 | 4 | 51 | 32 | 24 | 8 | 4 | 0 |
{-# LANGUAGE CPP #-}
{-# LANGUAGE Rank2Types #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE NoMonomorphismRestriction #-}
#ifdef TRUSTWORTHY
{-# LANGUAGE Trustworthy #-}
#endif
#if __GLASGOW_HASKELL__ >= 710
{-# LANGUAGE PatternSynonyms #-}
{-# LANGUAGE ViewPatterns #-}
#endif
#include "lens-common.h"
#if !(MIN_VERSION_exceptions(0,4,0))
#define MonadThrow MonadCatch
#endif
-----------------------------------------------------------------------------
-- |
-- Module : Control.Exception.Lens
-- Copyright : (C) 2012-16 Edward Kmett
-- License : BSD-style (see the file LICENSE)
-- Maintainer : Edward Kmett <[email protected]>
-- Stability : provisional
-- Portability : Control.Exception
--
-- @Control.Exception@ provides an example of a large open hierarchy
-- that we can model with prisms and isomorphisms.
--
-- Additional combinators for working with 'IOException' results can
-- be found in "System.IO.Error.Lens".
--
-- The combinators in this module have been generalized to work with
-- 'MonadCatch' instead of just 'IO'. This enables them to be used
-- more easily in 'Monad' transformer stacks.
----------------------------------------------------------------------------
module Control.Exception.Lens
(
-- * Handling
catching, catching_
, handling, handling_
-- * Trying
, trying, trying_
-- * Throwing
, throwing
, throwing_
, throwingM
, throwingTo
-- * Mapping
, mappedException, mappedException'
-- * Exceptions
, exception
#if __GLASGOW_HASKELL__ >= 710
, pattern Exception
#endif
-- * Exception Handlers
, Handleable(..)
-- ** IOExceptions
, AsIOException(..)
#if __GLASGOW_HASKELL__ >= 710
, pattern IOException_
#endif
-- ** Arithmetic Exceptions
, AsArithException(..)
, _Overflow, _Underflow, _LossOfPrecision, _DivideByZero, _Denormal
#if MIN_VERSION_base(4,6,0)
, _RatioZeroDenominator
#endif
#if __GLASGOW_HASKELL__ >= 710
, pattern ArithException_
, pattern Overflow_
, pattern Underflow_
, pattern LossOfPrecision_
, pattern DivideByZero_
, pattern Denormal_
, pattern RatioZeroDenominator_
#endif
-- ** Array Exceptions
, AsArrayException(..)
, _IndexOutOfBounds
, _UndefinedElement
#if __GLASGOW_HASKELL__ >= 710
, pattern ArrayException_
, pattern IndexOutOfBounds_
, pattern UndefinedElement_
#endif
-- ** Assertion Failed
, AsAssertionFailed(..)
#if __GLASGOW_HASKELL__ >= 710
, pattern AssertionFailed__
, pattern AssertionFailed_
#endif
-- ** Async Exceptions
, AsAsyncException(..)
, _StackOverflow
, _HeapOverflow
, _ThreadKilled
, _UserInterrupt
#if __GLASGOW_HASKELL__ >= 710
, pattern AsyncException_
, pattern StackOverflow_
, pattern HeapOverflow_
, pattern ThreadKilled_
, pattern UserInterrupt_
#endif
-- ** Non-Termination
, AsNonTermination(..)
#if __GLASGOW_HASKELL__ >= 710
, pattern NonTermination__
, pattern NonTermination_
#endif
-- ** Nested Atomically
, AsNestedAtomically(..)
#if __GLASGOW_HASKELL__ >= 710
, pattern NestedAtomically__
, pattern NestedAtomically_
#endif
-- ** Blocked Indefinitely
-- *** on MVar
, AsBlockedIndefinitelyOnMVar(..)
#if __GLASGOW_HASKELL__ >= 710
, pattern BlockedIndefinitelyOnMVar__
, pattern BlockedIndefinitelyOnMVar_
#endif
-- *** on STM
, AsBlockedIndefinitelyOnSTM(..)
#if __GLASGOW_HASKELL__ >= 710
, pattern BlockedIndefinitelyOnSTM__
, pattern BlockedIndefinitelyOnSTM_
#endif
-- ** Deadlock
, AsDeadlock(..)
#if __GLASGOW_HASKELL__ >= 710
, pattern Deadlock__
, pattern Deadlock_
#endif
-- ** No Such Method
, AsNoMethodError(..)
#if __GLASGOW_HASKELL__ >= 710
, pattern NoMethodError__
, pattern NoMethodError_
#endif
-- ** Pattern Match Failure
, AsPatternMatchFail(..)
#if __GLASGOW_HASKELL__ >= 710
, pattern PatternMatchFail__
, pattern PatternMatchFail_
#endif
-- ** Record
, AsRecConError(..)
, AsRecSelError(..)
, AsRecUpdError(..)
#if __GLASGOW_HASKELL__ >= 710
, pattern RecConError__
, pattern RecConError_
, pattern RecSelError__
, pattern RecSelError_
, pattern RecUpdError__
, pattern RecUpdError_
#endif
-- ** Error Call
, AsErrorCall(..)
#if __GLASGOW_HASKELL__ >= 710
, pattern ErrorCall__
, pattern ErrorCall_
#endif
#if MIN_VERSION_base(4,8,0)
-- ** Allocation Limit Exceeded
, AsAllocationLimitExceeded(..)
, pattern AllocationLimitExceeded__
, pattern AllocationLimitExceeded_
#endif
#if MIN_VERSION_base(4,9,0)
-- ** Type Error
, AsTypeError(..)
, pattern TypeError__
, pattern TypeError_
#endif
#if MIN_VERSION_base(4,10,0)
-- ** Compaction Failed
, AsCompactionFailed(..)
, pattern CompactionFailed__
, pattern CompactionFailed_
#endif
-- * Handling Exceptions
, AsHandlingException(..)
#if __GLASGOW_HASKELL__ >= 710
, pattern HandlingException__
, pattern HandlingException_
#endif
) where
import Control.Applicative
import Control.Monad
import Control.Monad.IO.Class
import Control.Monad.Catch as Catch
import Control.Exception as Exception hiding (try, tryJust, catchJust)
import Control.Lens
import Control.Lens.Internal.Exception
import Data.Monoid
import GHC.Conc (ThreadId)
import Prelude
( const, either, flip, id
, (.)
, Maybe(..), Either(..), String
#if __GLASGOW_HASKELL__ >= 710
, Bool(..)
#endif
)
#ifdef HLINT
{-# ANN module "HLint: ignore Use Control.Exception.catch" #-}
#endif
-- $setup
-- >>> :set -XNoOverloadedStrings
-- >>> :m + Control.Exception Control.Monad Data.List Prelude
------------------------------------------------------------------------------
-- Exceptions as Prisms
------------------------------------------------------------------------------
-- | Traverse the strongly typed 'Exception' contained in 'SomeException' where the type of your function matches
-- the desired 'Exception'.
--
-- @
-- 'exception' :: ('Applicative' f, 'Exception' a)
-- => (a -> f a) -> 'SomeException' -> f 'SomeException'
-- @
exception :: Exception a => Prism' SomeException a
exception = prism' toException fromException
{-# INLINE exception #-}
#if __GLASGOW_HASKELL__ >= 710
pattern Exception e <- (preview exception -> Just e) where
Exception e = review exception e
#endif
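-- A small usage sketch for the 'exception' prism (values chosen for
-- illustration):
--
-- @
-- 'preview' 'exception' ('toException' DivideByZero) ≡ 'Just' DivideByZero
-- 'review'  'exception' DivideByZero :: 'SomeException'
-- @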
------------------------------------------------------------------------------
-- Catching
------------------------------------------------------------------------------
-- | Catch exceptions that match a given 'Prism' (or any 'Fold', really).
--
-- >>> catching _AssertionFailed (assert False (return "uncaught")) $ \ _ -> return "caught"
-- "caught"
--
-- @
-- 'catching' :: 'MonadCatch' m => 'Prism'' 'SomeException' a -> m r -> (a -> m r) -> m r
-- 'catching' :: 'MonadCatch' m => 'Lens'' 'SomeException' a -> m r -> (a -> m r) -> m r
-- 'catching' :: 'MonadCatch' m => 'Traversal'' 'SomeException' a -> m r -> (a -> m r) -> m r
-- 'catching' :: 'MonadCatch' m => 'Iso'' 'SomeException' a -> m r -> (a -> m r) -> m r
-- 'catching' :: 'MonadCatch' m => 'Getter' 'SomeException' a -> m r -> (a -> m r) -> m r
-- 'catching' :: 'MonadCatch' m => 'Fold' 'SomeException' a -> m r -> (a -> m r) -> m r
-- @
catching :: MonadCatch m => Getting (First a) SomeException a -> m r -> (a -> m r) -> m r
catching l = catchJust (preview l)
{-# INLINE catching #-}
-- | Catch exceptions that match a given 'Prism' (or any 'Getter'), discarding
-- the information about the match. This is particularly useful when you have
-- a @'Prism'' e ()@ where the result of the 'Prism' or 'Fold' isn't
-- particularly valuable, just the fact that it matches.
--
-- >>> catching_ _AssertionFailed (assert False (return "uncaught")) $ return "caught"
-- "caught"
--
-- @
-- 'catching_' :: 'MonadCatch' m => 'Prism'' 'SomeException' a -> m r -> m r -> m r
-- 'catching_' :: 'MonadCatch' m => 'Lens'' 'SomeException' a -> m r -> m r -> m r
-- 'catching_' :: 'MonadCatch' m => 'Traversal'' 'SomeException' a -> m r -> m r -> m r
-- 'catching_' :: 'MonadCatch' m => 'Iso'' 'SomeException' a -> m r -> m r -> m r
-- 'catching_' :: 'MonadCatch' m => 'Getter' 'SomeException' a -> m r -> m r -> m r
-- 'catching_' :: 'MonadCatch' m => 'Fold' 'SomeException' a -> m r -> m r -> m r
-- @
catching_ :: MonadCatch m => Getting (First a) SomeException a -> m r -> m r -> m r
catching_ l a b = catchJust (preview l) a (const b)
{-# INLINE catching_ #-}
------------------------------------------------------------------------------
-- Handling
------------------------------------------------------------------------------
-- | A version of 'catching' with the arguments swapped around; useful in
-- situations where the code for the handler is shorter.
--
-- >>> handling _NonTermination (\_ -> return "caught") $ throwIO NonTermination
-- "caught"
--
-- @
-- 'handling' :: 'MonadCatch' m => 'Prism'' 'SomeException' a -> (a -> m r) -> m r -> m r
-- 'handling' :: 'MonadCatch' m => 'Lens'' 'SomeException' a -> (a -> m r) -> m r -> m r
-- 'handling' :: 'MonadCatch' m => 'Traversal'' 'SomeException' a -> (a -> m r) -> m r -> m r
-- 'handling' :: 'MonadCatch' m => 'Iso'' 'SomeException' a -> (a -> m r) -> m r -> m r
-- 'handling' :: 'MonadCatch' m => 'Fold' 'SomeException' a -> (a -> m r) -> m r -> m r
-- 'handling' :: 'MonadCatch' m => 'Getter' 'SomeException' a -> (a -> m r) -> m r -> m r
-- @
handling :: MonadCatch m => Getting (First a) SomeException a -> (a -> m r) -> m r -> m r
handling l = flip (catching l)
{-# INLINE handling #-}
-- | A version of 'catching_' with the arguments swapped around; useful in
-- situations where the code for the handler is shorter.
--
-- >>> handling_ _NonTermination (return "caught") $ throwIO NonTermination
-- "caught"
--
-- @
-- 'handling_' :: 'MonadCatch' m => 'Prism'' 'SomeException' a -> m r -> m r -> m r
-- 'handling_' :: 'MonadCatch' m => 'Lens'' 'SomeException' a -> m r -> m r -> m r
-- 'handling_' :: 'MonadCatch' m => 'Traversal'' 'SomeException' a -> m r -> m r -> m r
-- 'handling_' :: 'MonadCatch' m => 'Iso'' 'SomeException' a -> m r -> m r -> m r
-- 'handling_' :: 'MonadCatch' m => 'Getter' 'SomeException' a -> m r -> m r -> m r
-- 'handling_' :: 'MonadCatch' m => 'Fold' 'SomeException' a -> m r -> m r -> m r
-- @
handling_ :: MonadCatch m => Getting (First a) SomeException a -> m r -> m r -> m r
handling_ l = flip (catching_ l)
{-# INLINE handling_ #-}
------------------------------------------------------------------------------
-- Trying
------------------------------------------------------------------------------
-- | A variant of 'Control.Exception.try' that takes a 'Prism' (or any 'Fold') to select which
-- exceptions are caught (c.f. 'Control.Exception.tryJust', 'Control.Exception.catchJust'). If the
-- 'Exception' does not match the predicate, it is re-thrown.
--
-- @
-- 'trying' :: 'MonadCatch' m => 'Prism'' 'SomeException' a -> m r -> m ('Either' a r)
-- 'trying' :: 'MonadCatch' m => 'Lens'' 'SomeException' a -> m r -> m ('Either' a r)
-- 'trying' :: 'MonadCatch' m => 'Traversal'' 'SomeException' a -> m r -> m ('Either' a r)
-- 'trying' :: 'MonadCatch' m => 'Iso'' 'SomeException' a -> m r -> m ('Either' a r)
-- 'trying' :: 'MonadCatch' m => 'Getter' 'SomeException' a -> m r -> m ('Either' a r)
-- 'trying' :: 'MonadCatch' m => 'Fold' 'SomeException' a -> m r -> m ('Either' a r)
-- @
trying :: MonadCatch m => Getting (First a) SomeException a -> m r -> m (Either a r)
trying l = tryJust (preview l)
{-# INLINE trying #-}
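-- For example, a usage sketch (using '_ErrorCall', defined later in this
-- module, to select the error calls raised by 'error'):
--
-- @
-- 'trying' '_ErrorCall' ('error' "boom") :: 'IO' ('Either' 'String' a)
-- @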
-- | A version of 'trying' that discards the specific exception thrown.
--
-- @
-- 'trying_' :: 'MonadCatch' m => 'Prism'' 'SomeException' a -> m r -> m (Maybe r)
-- 'trying_' :: 'MonadCatch' m => 'Lens'' 'SomeException' a -> m r -> m (Maybe r)
-- 'trying_' :: 'MonadCatch' m => 'Traversal'' 'SomeException' a -> m r -> m (Maybe r)
-- 'trying_' :: 'MonadCatch' m => 'Iso'' 'SomeException' a -> m r -> m (Maybe r)
-- 'trying_' :: 'MonadCatch' m => 'Getter' 'SomeException' a -> m r -> m (Maybe r)
-- 'trying_' :: 'MonadCatch' m => 'Fold' 'SomeException' a -> m r -> m (Maybe r)
-- @
trying_ :: MonadCatch m => Getting (First a) SomeException a -> m r -> m (Maybe r)
trying_ l m = preview _Right `liftM` trying l m
{-# INLINE trying_ #-}
------------------------------------------------------------------------------
-- Throwing
------------------------------------------------------------------------------
-- | Throw an 'Exception' described by a 'Prism'. Exceptions may be thrown from
-- purely functional code, but may only be caught within the 'IO' 'Monad'.
--
-- @
-- 'throwing' l ≡ 'reviews' l 'throw'
-- @
--
-- @
-- 'throwing' :: 'Prism'' 'SomeException' t -> t -> r
-- 'throwing' :: 'Iso'' 'SomeException' t -> t -> r
-- @
throwing :: AReview SomeException b -> b -> r
throwing l = reviews l Exception.throw
{-# INLINE throwing #-}
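-- For example, a small sketch (using '_Overflow', defined later in this
-- module) that raises 'Overflow' from pure code:
--
-- @
-- 'throwing' '_Overflow' () :: a
-- @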
-- | Similar to 'throwing' but specialised for the common case of
-- error constructors with no arguments.
--
-- @
-- 'throwing_' '_Overflow'       :: m a
-- 'throwing_' '_NonTermination' :: m a
-- @
throwing_ :: AReview SomeException () -> m x
throwing_ l = throwing l ()
{-# INLINE throwing_ #-}
-- | A variant of 'throwing' that can only be used within the 'IO' 'Monad'
-- (or any other 'MonadCatch' instance) to throw an 'Exception' described
-- by a 'Prism'.
--
-- Although 'throwingM' has a type that is a specialization of the type of
-- 'throwing', the two functions are subtly different:
--
-- @
-- 'throwing' l e \`seq\` x ≡ 'throwing' l e
-- 'throwingM' l e \`seq\` x ≡ x
-- @
--
-- The first example will cause the 'Exception' @e@ to be raised, whereas the
-- second one won't. In fact, 'throwingM' will only cause an 'Exception' to
-- be raised when it is used within the 'MonadCatch' instance. The 'throwingM'
-- variant should be used in preference to 'throwing' to raise an 'Exception'
-- within the 'Monad' because it guarantees ordering with respect to other
-- monadic operations, whereas 'throwing' does not.
--
-- @
-- 'throwingM' l ≡ 'reviews' l 'CatchIO.throw'
-- @
--
-- @
-- 'throwingM' :: 'MonadThrow' m => 'Prism'' 'SomeException' t -> t -> m r
-- 'throwingM' :: 'MonadThrow' m => 'Iso'' 'SomeException' t -> t -> m r
-- @
throwingM :: MonadThrow m => AReview SomeException b -> b -> m r
throwingM l = reviews l throwM
{-# INLINE throwingM #-}
-- | 'throwingTo' raises an 'Exception' specified by a 'Prism' in the target thread.
--
-- @
-- 'throwingTo' thread l ≡ 'reviews' l ('throwTo' thread)
-- @
--
-- @
-- 'throwingTo' :: 'ThreadId' -> 'Prism'' 'SomeException' t -> t -> m a
-- 'throwingTo' :: 'ThreadId' -> 'Iso'' 'SomeException' t -> t -> m a
-- @
throwingTo :: MonadIO m => ThreadId -> AReview SomeException b -> b -> m ()
throwingTo tid l = reviews l (liftIO . throwTo tid)
{-# INLINE throwingTo #-}
----------------------------------------------------------------------------
-- Mapping
----------------------------------------------------------------------------
-- | This 'Setter' can be used to purely map over the 'Exception's an
-- arbitrary expression might throw; it is a variant of 'mapException' in
-- the same way that 'mapped' is a variant of 'fmap'.
--
-- @
-- 'mapException' ≡ 'over' 'mappedException'
-- @
--
-- This view that every Haskell expression can be regarded as carrying a bag
-- of 'Exception's is detailed in “A Semantics for Imprecise Exceptions” by
-- Peyton Jones et al. at PLDI ’99.
--
-- The following maps failed assertions to arithmetic overflow:
--
-- >>> handling _Overflow (\_ -> return "caught") $ assert False (return "uncaught") & mappedException %~ \ (AssertionFailed _) -> Overflow
-- "caught"
mappedException :: (Exception e, Exception e') => Setter s s e e'
mappedException = sets mapException
{-# INLINE mappedException #-}
-- | This is a type restricted version of 'mappedException', which avoids
-- the type ambiguity in the input 'Exception' when using 'set'.
--
-- The following maps any exception to arithmetic overflow:
--
-- >>> handling _Overflow (\_ -> return "caught") $ assert False (return "uncaught") & mappedException' .~ Overflow
-- "caught"
mappedException' :: Exception e' => Setter s s SomeException e'
mappedException' = mappedException
{-# INLINE mappedException' #-}
----------------------------------------------------------------------------
-- IOException
----------------------------------------------------------------------------
-- | Exceptions that occur in the 'IO' 'Monad'. An 'IOException' records a
-- more specific error type, a descriptive string and maybe the handle that was
-- used when the error was flagged.
--
-- Due to their richer structure relative to other exceptions, these have
-- a more carefully overloaded signature.
class AsIOException t where
-- | Unfortunately the name 'ioException' is taken by @base@ for
-- throwing IOExceptions.
--
-- @
-- '_IOException' :: 'Prism'' 'IOException' 'IOException'
-- '_IOException' :: 'Prism'' 'SomeException' 'IOException'
-- @
--
-- Many combinators for working with an 'IOException' are available
-- in "System.IO.Error.Lens".
_IOException :: Prism' t IOException
instance AsIOException IOException where
_IOException = id
{-# INLINE _IOException #-}
instance AsIOException SomeException where
_IOException = exception
{-# INLINE _IOException #-}
#if __GLASGOW_HASKELL__ >= 710
pattern IOException_ a <- (preview _IOException -> Just a) where
IOException_ a = review _IOException a
#endif
----------------------------------------------------------------------------
-- ArithException
----------------------------------------------------------------------------
-- | Arithmetic exceptions.
class AsArithException t where
-- |
-- @
-- '_ArithException' :: 'Prism'' 'ArithException' 'ArithException'
-- '_ArithException' :: 'Prism'' 'SomeException' 'ArithException'
-- @
_ArithException :: Prism' t ArithException
#if __GLASGOW_HASKELL__ >= 710
pattern ArithException_ a <- (preview _ArithException -> Just a) where
ArithException_ a = review _ArithException a
#endif
instance AsArithException ArithException where
_ArithException = id
{-# INLINE _ArithException #-}
instance AsArithException SomeException where
_ArithException = exception
{-# INLINE _ArithException #-}
-- | Handle arithmetic '_Overflow'.
--
-- @
-- '_Overflow' ≡ '_ArithException' '.' '_Overflow'
-- @
--
-- @
-- '_Overflow' :: 'Prism'' 'ArithException' 'ArithException'
-- '_Overflow' :: 'Prism'' 'SomeException' 'ArithException'
-- @
_Overflow :: AsArithException t => Prism' t ()
_Overflow = _ArithException . dimap seta (either id id) . right' . rmap (Overflow <$) where
seta Overflow = Right ()
seta t = Left (pure t)
{-# INLINE _Overflow #-}
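-- The definition above hand-inlines the prism for efficiency; an equivalent
-- but more direct spelling would be the following sketch (not the definition
-- used here):
--
-- @
-- _Overflow = _ArithException . prism' (const Overflow) sel
--   where sel Overflow = Just (); sel _ = Nothing
-- @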
#if __GLASGOW_HASKELL__ >= 710
pattern Overflow_ <- (has _Overflow -> True) where
Overflow_ = review _Overflow ()
#endif
-- | Handle arithmetic '_Underflow'.
--
-- @
-- '_Underflow' ≡ '_ArithException' '.' '_Underflow'
-- @
--
-- @
-- '_Underflow' :: 'Prism'' 'ArithException' 'ArithException'
-- '_Underflow' :: 'Prism'' 'SomeException' 'ArithException'
-- @
_Underflow :: AsArithException t => Prism' t ()
_Underflow = _ArithException . dimap seta (either id id) . right' . rmap (Underflow <$) where
seta Underflow = Right ()
seta t = Left (pure t)
{-# INLINE _Underflow #-}
#if __GLASGOW_HASKELL__ >= 710
pattern Underflow_ <- (has _Underflow -> True) where
Underflow_ = review _Underflow ()
#endif
-- | Handle arithmetic loss of precision.
--
-- @
-- '_LossOfPrecision' ≡ '_ArithException' '.' '_LossOfPrecision'
-- @
--
-- @
-- '_LossOfPrecision' :: 'Prism'' 'ArithException' 'ArithException'
-- '_LossOfPrecision' :: 'Prism'' 'SomeException' 'ArithException'
-- @
_LossOfPrecision :: AsArithException t => Prism' t ()
_LossOfPrecision = _ArithException . dimap seta (either id id) . right' . rmap (LossOfPrecision <$) where
seta LossOfPrecision = Right ()
seta t = Left (pure t)
{-# INLINE _LossOfPrecision #-}
#if __GLASGOW_HASKELL__ >= 710
pattern LossOfPrecision_ <- (has _LossOfPrecision -> True) where
LossOfPrecision_ = review _LossOfPrecision ()
#endif
-- | Handle division by zero.
--
-- @
-- '_DivideByZero' ≡ '_ArithException' '.' '_DivideByZero'
-- @
--
-- @
-- '_DivideByZero' :: 'Prism'' 'ArithException' 'ArithException'
-- '_DivideByZero' :: 'Prism'' 'SomeException' 'ArithException'
-- @
_DivideByZero :: AsArithException t => Prism' t ()
_DivideByZero = _ArithException . dimap seta (either id id) . right' . rmap (DivideByZero <$) where
seta DivideByZero = Right ()
seta t = Left (pure t)
{-# INLINE _DivideByZero #-}
#if __GLASGOW_HASKELL__ >= 710
pattern DivideByZero_ <- (has _DivideByZero -> True) where
DivideByZero_ = review _DivideByZero ()
#endif
-- | Handle exceptional denormalized floating point values.
--
-- @
-- '_Denormal' ≡ '_ArithException' '.' '_Denormal'
-- @
--
-- @
-- '_Denormal' :: 'Prism'' 'ArithException' 'ArithException'
-- '_Denormal' :: 'Prism'' 'SomeException' 'ArithException'
-- @
_Denormal :: AsArithException t => Prism' t ()
_Denormal = _ArithException . dimap seta (either id id) . right' . rmap (Denormal <$) where
seta Denormal = Right ()
seta t = Left (pure t)
{-# INLINE _Denormal #-}
#if __GLASGOW_HASKELL__ >= 710
pattern Denormal_ <- (has _Denormal -> True) where
Denormal_ = review _Denormal ()
#endif
#if MIN_VERSION_base(4,6,0)
-- | Added in @base@ 4.6 in response to this library's discussion:
--
-- <http://haskell.1045720.n5.nabble.com/Data-Ratio-and-exceptions-td5711246.html>
--
-- @
-- '_RatioZeroDenominator' ≡ '_ArithException' '.' '_RatioZeroDenominator'
-- @
--
-- @
-- '_RatioZeroDenominator' :: 'Prism'' 'ArithException' 'ArithException'
-- '_RatioZeroDenominator' :: 'Prism'' 'SomeException' 'ArithException'
-- @
_RatioZeroDenominator :: AsArithException t => Prism' t ()
_RatioZeroDenominator = _ArithException . dimap seta (either id id) . right' . rmap (RatioZeroDenominator <$) where
seta RatioZeroDenominator = Right ()
seta t = Left (pure t)
{-# INLINE _RatioZeroDenominator #-}
#if __GLASGOW_HASKELL__ >= 710
pattern RatioZeroDenominator_ <- (has _RatioZeroDenominator -> True) where
RatioZeroDenominator_ = review _RatioZeroDenominator ()
#endif
#endif
----------------------------------------------------------------------------
-- ArrayException
----------------------------------------------------------------------------
-- | Exceptions generated by array operations.
class AsArrayException t where
-- | Extract information about an 'ArrayException'.
--
-- @
-- '_ArrayException' :: 'Prism'' 'ArrayException' 'ArrayException'
-- '_ArrayException' :: 'Prism'' 'SomeException' 'ArrayException'
-- @
_ArrayException :: Prism' t ArrayException
instance AsArrayException ArrayException where
_ArrayException = id
{-# INLINE _ArrayException #-}
instance AsArrayException SomeException where
_ArrayException = exception
{-# INLINE _ArrayException #-}
#if __GLASGOW_HASKELL__ >= 710
pattern ArrayException_ e <- (preview _ArrayException -> Just e) where
ArrayException_ e = review _ArrayException e
#endif
-- | An attempt was made to index an array outside its declared bounds.
--
-- @
-- '_IndexOutOfBounds' ≡ '_ArrayException' '.' '_IndexOutOfBounds'
-- @
--
-- @
-- '_IndexOutOfBounds' :: 'Prism'' 'ArrayException' 'String'
-- '_IndexOutOfBounds' :: 'Prism'' 'SomeException' 'String'
-- @
_IndexOutOfBounds :: AsArrayException t => Prism' t String
_IndexOutOfBounds = _ArrayException . dimap seta (either id id) . right' . rmap (fmap IndexOutOfBounds) where
seta (IndexOutOfBounds r) = Right r
seta t = Left (pure t)
{-# INLINE _IndexOutOfBounds #-}
#if __GLASGOW_HASKELL__ >= 710
pattern IndexOutOfBounds_ e <- (preview _IndexOutOfBounds -> Just e) where
IndexOutOfBounds_ e = review _IndexOutOfBounds e
#endif
-- | An attempt was made to evaluate an element of an array that had not been initialized.
--
-- @
-- '_UndefinedElement' ≡ '_ArrayException' '.' '_UndefinedElement'
-- @
--
-- @
-- '_UndefinedElement' :: 'Prism'' 'ArrayException' 'String'
-- '_UndefinedElement' :: 'Prism'' 'SomeException' 'String'
-- @
_UndefinedElement :: AsArrayException t => Prism' t String
_UndefinedElement = _ArrayException . dimap seta (either id id) . right' . rmap (fmap UndefinedElement) where
seta (UndefinedElement r) = Right r
seta t = Left (pure t)
{-# INLINE _UndefinedElement #-}
#if __GLASGOW_HASKELL__ >= 710
pattern UndefinedElement_ e <- (preview _UndefinedElement -> Just e) where
UndefinedElement_ e = review _UndefinedElement e
#endif
----------------------------------------------------------------------------
-- AssertionFailed
----------------------------------------------------------------------------
-- | 'assert' was applied to 'Prelude.False'.
class AsAssertionFailed t where
-- |
-- @
-- '__AssertionFailed' :: 'Prism'' 'AssertionFailed' 'AssertionFailed'
-- '__AssertionFailed' :: 'Prism'' 'SomeException' 'AssertionFailed'
-- @
__AssertionFailed :: Prism' t AssertionFailed
-- | This 'Exception' provides information about what assertion failed in the 'String'.
--
-- >>> handling _AssertionFailed (\ xs -> "caught" <$ guard ("<interactive>" `isInfixOf` xs) ) $ assert False (return "uncaught")
-- "caught"
--
-- @
-- '_AssertionFailed' :: 'Prism'' 'AssertionFailed' 'String'
-- '_AssertionFailed' :: 'Prism'' 'SomeException' 'String'
-- @
_AssertionFailed :: Prism' t String
_AssertionFailed = __AssertionFailed._AssertionFailed
{-# INLINE _AssertionFailed #-}
instance AsAssertionFailed AssertionFailed where
__AssertionFailed = id
{-# INLINE __AssertionFailed #-}
_AssertionFailed = _Wrapping AssertionFailed
{-# INLINE _AssertionFailed #-}
instance AsAssertionFailed SomeException where
__AssertionFailed = exception
{-# INLINE __AssertionFailed #-}
#if __GLASGOW_HASKELL__ >= 710
pattern AssertionFailed__ e <- (preview __AssertionFailed -> Just e) where
AssertionFailed__ e = review __AssertionFailed e
pattern AssertionFailed_ e <- (preview _AssertionFailed -> Just e) where
AssertionFailed_ e = review _AssertionFailed e
#endif
----------------------------------------------------------------------------
-- AsyncException
----------------------------------------------------------------------------
-- | Asynchronous exceptions.
class AsAsyncException t where
-- | There are several types of 'AsyncException'.
--
-- @
-- '_AsyncException' :: 'Equality'' 'AsyncException' 'AsyncException'
-- '_AsyncException' :: 'Prism'' 'SomeException' 'AsyncException'
-- @
_AsyncException :: Prism' t AsyncException
instance AsAsyncException AsyncException where
_AsyncException = id
{-# INLINE _AsyncException #-}
instance AsAsyncException SomeException where
_AsyncException = exception
{-# INLINE _AsyncException #-}
#if __GLASGOW_HASKELL__ >= 710
pattern AsyncException_ e <- (preview _AsyncException -> Just e) where
AsyncException_ e = review _AsyncException e
#endif
-- | The current thread's stack exceeded its limit. Since an 'Exception' has
-- been raised, the thread's stack will certainly be below its limit again,
-- but the programmer should take remedial action immediately.
--
-- @
-- '_StackOverflow' :: 'Prism'' 'AsyncException' ()
-- '_StackOverflow' :: 'Prism'' 'SomeException' ()
-- @
_StackOverflow :: AsAsyncException t => Prism' t ()
_StackOverflow = _AsyncException . dimap seta (either id id) . right' . rmap (StackOverflow <$) where
seta StackOverflow = Right ()
seta t = Left (pure t)
{-# INLINE _StackOverflow #-}
#if __GLASGOW_HASKELL__ >= 710
pattern StackOverflow_ <- (has _StackOverflow -> True) where
StackOverflow_ = review _StackOverflow ()
#endif
-- | The program's heap is reaching its limit, and the program should take action
-- to reduce the amount of live data it has.
--
-- Notes:
--
-- * It is undefined which thread receives this 'Exception'.
--
-- * GHC currently does not throw 'HeapOverflow' exceptions.
--
-- @
-- '_HeapOverflow' :: 'Prism'' 'AsyncException' ()
-- '_HeapOverflow' :: 'Prism'' 'SomeException' ()
-- @
_HeapOverflow :: AsAsyncException t => Prism' t ()
_HeapOverflow = _AsyncException . dimap seta (either id id) . right' . rmap (HeapOverflow <$) where
seta HeapOverflow = Right ()
seta t = Left (pure t)
{-# INLINE _HeapOverflow #-}
#if __GLASGOW_HASKELL__ >= 710
pattern HeapOverflow_ <- (has _HeapOverflow -> True) where
HeapOverflow_ = review _HeapOverflow ()
#endif
-- | This 'Exception' is raised by another thread calling
-- 'Control.Concurrent.killThread', or by the system if it needs to terminate
-- the thread for some reason.
--
-- @
-- '_ThreadKilled' :: 'Prism'' 'AsyncException' ()
-- '_ThreadKilled' :: 'Prism'' 'SomeException' ()
-- @
_ThreadKilled :: AsAsyncException t => Prism' t ()
_ThreadKilled = _AsyncException . dimap seta (either id id) . right' . rmap (ThreadKilled <$) where
seta ThreadKilled = Right ()
seta t = Left (pure t)
{-# INLINE _ThreadKilled #-}
#if __GLASGOW_HASKELL__ >= 710
pattern ThreadKilled_ <- (has _ThreadKilled -> True) where
ThreadKilled_ = review _ThreadKilled ()
#endif
-- | This 'Exception' is raised by default in the main thread of the program when
-- the user requests to terminate the program via the usual mechanism(s)
-- (/e.g./ Control-C in the console).
--
-- @
-- '_UserInterrupt' :: 'Prism'' 'AsyncException' ()
-- '_UserInterrupt' :: 'Prism'' 'SomeException' ()
-- @
_UserInterrupt :: AsAsyncException t => Prism' t ()
_UserInterrupt = _AsyncException . dimap seta (either id id) . right' . rmap (UserInterrupt <$) where
seta UserInterrupt = Right ()
seta t = Left (pure t)
{-# INLINE _UserInterrupt #-}
#if __GLASGOW_HASKELL__ >= 710
pattern UserInterrupt_ <- (has _UserInterrupt -> True) where
UserInterrupt_ = review _UserInterrupt ()
#endif
----------------------------------------------------------------------------
-- AsyncException
----------------------------------------------------------------------------
-- | Thrown when the runtime system detects that the computation is guaranteed
-- not to terminate. Note that there is no guarantee that the runtime system
-- will notice whether any given computation is guaranteed to terminate or not.
class AsNonTermination t where
-- |
-- @
-- '__NonTermination' :: 'Prism'' 'NonTermination' 'NonTermination'
-- '__NonTermination' :: 'Prism'' 'SomeException' 'NonTermination'
-- @
__NonTermination :: Prism' t NonTermination
-- | There is no additional information carried in a 'NonTermination' 'Exception'.
--
-- @
-- '_NonTermination' :: 'Prism'' 'NonTermination' ()
-- '_NonTermination' :: 'Prism'' 'SomeException' ()
-- @
_NonTermination :: Prism' t ()
_NonTermination = __NonTermination._NonTermination
{-# INLINE _NonTermination #-}
instance AsNonTermination NonTermination where
__NonTermination = id
{-# INLINE __NonTermination #-}
_NonTermination = trivial NonTermination
{-# INLINE _NonTermination #-}
instance AsNonTermination SomeException where
__NonTermination = exception
{-# INLINE __NonTermination #-}
#if __GLASGOW_HASKELL__ >= 710
pattern NonTermination__ e <- (preview __NonTermination -> Just e) where
NonTermination__ e = review __NonTermination e
pattern NonTermination_ <- (has _NonTermination -> True) where
NonTermination_ = review _NonTermination ()
#endif
----------------------------------------------------------------------------
-- NestedAtomically
----------------------------------------------------------------------------
-- | Thrown when the program attempts to call atomically, from the
-- 'Control.Monad.STM' package, inside another call to atomically.
class AsNestedAtomically t where
-- |
-- @
-- '__NestedAtomically' :: 'Prism'' 'NestedAtomically' 'NestedAtomically'
-- '__NestedAtomically' :: 'Prism'' 'SomeException' 'NestedAtomically'
-- @
__NestedAtomically :: Prism' t NestedAtomically
-- | There is no additional information carried in a 'NestedAtomically' 'Exception'.
--
-- @
-- '_NestedAtomically' :: 'Prism'' 'NestedAtomically' ()
-- '_NestedAtomically' :: 'Prism'' 'SomeException' ()
-- @
_NestedAtomically :: Prism' t ()
_NestedAtomically = __NestedAtomically._NestedAtomically
{-# INLINE _NestedAtomically #-}
instance AsNestedAtomically NestedAtomically where
__NestedAtomically = id
{-# INLINE __NestedAtomically #-}
_NestedAtomically = trivial NestedAtomically
{-# INLINE _NestedAtomically #-}
instance AsNestedAtomically SomeException where
__NestedAtomically = exception
{-# INLINE __NestedAtomically #-}
#if __GLASGOW_HASKELL__ >= 710
pattern NestedAtomically__ e <- (preview __NestedAtomically -> Just e) where
NestedAtomically__ e = review __NestedAtomically e
pattern NestedAtomically_ <- (has _NestedAtomically -> True) where
NestedAtomically_ = review _NestedAtomically ()
#endif
----------------------------------------------------------------------------
-- BlockedIndefinitelyOnMVar
----------------------------------------------------------------------------
-- | The thread is blocked on an 'Control.Concurrent.MVar.MVar', but there
-- are no other references to the 'Control.Concurrent.MVar.MVar' so it can't
-- ever continue.
class AsBlockedIndefinitelyOnMVar t where
-- |
-- @
-- '__BlockedIndefinitelyOnMVar' :: 'Prism'' 'BlockedIndefinitelyOnMVar' 'BlockedIndefinitelyOnMVar'
-- '__BlockedIndefinitelyOnMVar' :: 'Prism'' 'SomeException' 'BlockedIndefinitelyOnMVar'
-- @
__BlockedIndefinitelyOnMVar :: Prism' t BlockedIndefinitelyOnMVar
-- | There is no additional information carried in a 'BlockedIndefinitelyOnMVar' 'Exception'.
--
-- @
-- '_BlockedIndefinitelyOnMVar' :: 'Prism'' 'BlockedIndefinitelyOnMVar' ()
-- '_BlockedIndefinitelyOnMVar' :: 'Prism'' 'SomeException' ()
-- @
_BlockedIndefinitelyOnMVar :: Prism' t ()
_BlockedIndefinitelyOnMVar = __BlockedIndefinitelyOnMVar._BlockedIndefinitelyOnMVar
{-# INLINE _BlockedIndefinitelyOnMVar #-}
instance AsBlockedIndefinitelyOnMVar BlockedIndefinitelyOnMVar where
__BlockedIndefinitelyOnMVar = id
{-# INLINE __BlockedIndefinitelyOnMVar #-}
_BlockedIndefinitelyOnMVar = trivial BlockedIndefinitelyOnMVar
{-# INLINE _BlockedIndefinitelyOnMVar #-}
instance AsBlockedIndefinitelyOnMVar SomeException where
__BlockedIndefinitelyOnMVar = exception
{-# INLINE __BlockedIndefinitelyOnMVar #-}
#if __GLASGOW_HASKELL__ >= 710
pattern BlockedIndefinitelyOnMVar__ e <- (preview __BlockedIndefinitelyOnMVar -> Just e) where
BlockedIndefinitelyOnMVar__ e = review __BlockedIndefinitelyOnMVar e
pattern BlockedIndefinitelyOnMVar_ <- (has _BlockedIndefinitelyOnMVar -> True) where
BlockedIndefinitelyOnMVar_ = review _BlockedIndefinitelyOnMVar ()
#endif
----------------------------------------------------------------------------
-- BlockedIndefinitelyOnSTM
----------------------------------------------------------------------------
-- | The thread is waiting to retry an 'Control.Monad.STM.STM' transaction,
-- but there are no other references to any TVars involved, so it can't ever
-- continue.
class AsBlockedIndefinitelyOnSTM t where
-- |
-- @
-- '__BlockedIndefinitelyOnSTM' :: 'Prism'' 'BlockedIndefinitelyOnSTM' 'BlockedIndefinitelyOnSTM'
-- '__BlockedIndefinitelyOnSTM' :: 'Prism'' 'SomeException' 'BlockedIndefinitelyOnSTM'
-- @
__BlockedIndefinitelyOnSTM :: Prism' t BlockedIndefinitelyOnSTM
-- | There is no additional information carried in a 'BlockedIndefinitelyOnSTM' 'Exception'.
--
-- @
-- '_BlockedIndefinitelyOnSTM' :: 'Prism'' 'BlockedIndefinitelyOnSTM' ()
-- '_BlockedIndefinitelyOnSTM' :: 'Prism'' 'SomeException' ()
-- @
_BlockedIndefinitelyOnSTM :: Prism' t ()
_BlockedIndefinitelyOnSTM = __BlockedIndefinitelyOnSTM._BlockedIndefinitelyOnSTM
{-# INLINE _BlockedIndefinitelyOnSTM #-}
instance AsBlockedIndefinitelyOnSTM BlockedIndefinitelyOnSTM where
__BlockedIndefinitelyOnSTM = id
{-# INLINE __BlockedIndefinitelyOnSTM #-}
_BlockedIndefinitelyOnSTM = trivial BlockedIndefinitelyOnSTM
{-# INLINE _BlockedIndefinitelyOnSTM #-}
instance AsBlockedIndefinitelyOnSTM SomeException where
__BlockedIndefinitelyOnSTM = exception
{-# INLINE __BlockedIndefinitelyOnSTM #-}
#if __GLASGOW_HASKELL__ >= 710
pattern BlockedIndefinitelyOnSTM__ e <- (preview __BlockedIndefinitelyOnSTM -> Just e) where
BlockedIndefinitelyOnSTM__ e = review __BlockedIndefinitelyOnSTM e
pattern BlockedIndefinitelyOnSTM_ <- (has _BlockedIndefinitelyOnSTM -> True) where
BlockedIndefinitelyOnSTM_ = review _BlockedIndefinitelyOnSTM ()
#endif
----------------------------------------------------------------------------
-- Deadlock
----------------------------------------------------------------------------
-- | There are no runnable threads, so the program is deadlocked. The
-- 'Deadlock' 'Exception' is raised in the main thread only.
class AsDeadlock t where
-- |
-- @
-- '__Deadlock' :: 'Prism'' 'Deadlock' 'Deadlock'
-- '__Deadlock' :: 'Prism'' 'SomeException' 'Deadlock'
-- @
__Deadlock :: Prism' t Deadlock
-- | There is no information carried in a 'Deadlock' 'Exception'.
--
-- @
-- '_Deadlock' :: 'Prism'' 'Deadlock' ()
-- '_Deadlock' :: 'Prism'' 'SomeException' ()
-- @
_Deadlock :: Prism' t ()
_Deadlock = __Deadlock._Deadlock
{-# INLINE _Deadlock #-}
instance AsDeadlock Deadlock where
__Deadlock = id
{-# INLINE __Deadlock #-}
_Deadlock = trivial Deadlock
{-# INLINE _Deadlock #-}
instance AsDeadlock SomeException where
__Deadlock = exception
{-# INLINE __Deadlock #-}
#if __GLASGOW_HASKELL__ >= 710
pattern Deadlock__ e <- (preview __Deadlock -> Just e) where
Deadlock__ e = review __Deadlock e
pattern Deadlock_ <- (has _Deadlock -> True) where
Deadlock_ = review _Deadlock ()
#endif
----------------------------------------------------------------------------
-- NoMethodError
----------------------------------------------------------------------------
-- | A class method without a definition (neither a default definition,
-- nor a definition in the appropriate instance) was called.
class AsNoMethodError t where
-- |
-- @
-- '__NoMethodError' :: 'Prism'' 'NoMethodError' 'NoMethodError'
-- '__NoMethodError' :: 'Prism'' 'SomeException' 'NoMethodError'
-- @
__NoMethodError :: Prism' t NoMethodError
-- | Extract a description of the missing method.
--
-- @
-- '_NoMethodError' :: 'Prism'' 'NoMethodError' 'String'
-- '_NoMethodError' :: 'Prism'' 'SomeException' 'String'
-- @
_NoMethodError :: Prism' t String
_NoMethodError = __NoMethodError._NoMethodError
{-# INLINE _NoMethodError #-}
instance AsNoMethodError NoMethodError where
__NoMethodError = id
{-# INLINE __NoMethodError #-}
_NoMethodError = _Wrapping NoMethodError
{-# INLINE _NoMethodError #-}
instance AsNoMethodError SomeException where
__NoMethodError = exception
{-# INLINE __NoMethodError #-}
#if __GLASGOW_HASKELL__ >= 710
pattern NoMethodError__ e <- (preview __NoMethodError -> Just e) where
NoMethodError__ e = review __NoMethodError e
pattern NoMethodError_ e <- (preview _NoMethodError -> Just e) where
NoMethodError_ e = review _NoMethodError e
#endif
----------------------------------------------------------------------------
-- PatternMatchFail
----------------------------------------------------------------------------
-- | A pattern match failed.
class AsPatternMatchFail t where
-- |
-- @
-- '__PatternMatchFail' :: 'Prism'' 'PatternMatchFail' 'PatternMatchFail'
-- '__PatternMatchFail' :: 'Prism'' 'SomeException' 'PatternMatchFail'
-- @
__PatternMatchFail :: Prism' t PatternMatchFail
-- | Information about the source location of the pattern.
--
-- @
-- '_PatternMatchFail' :: 'Prism'' 'PatternMatchFail' 'String'
-- '_PatternMatchFail' :: 'Prism'' 'SomeException' 'String'
-- @
_PatternMatchFail :: Prism' t String
_PatternMatchFail = __PatternMatchFail._PatternMatchFail
{-# INLINE _PatternMatchFail #-}
instance AsPatternMatchFail PatternMatchFail where
__PatternMatchFail = id
{-# INLINE __PatternMatchFail #-}
_PatternMatchFail = _Wrapping PatternMatchFail
{-# INLINE _PatternMatchFail #-}
instance AsPatternMatchFail SomeException where
__PatternMatchFail = exception
{-# INLINE __PatternMatchFail #-}
#if __GLASGOW_HASKELL__ >= 710
pattern PatternMatchFail__ e <- (preview __PatternMatchFail -> Just e) where
PatternMatchFail__ e = review __PatternMatchFail e
pattern PatternMatchFail_ e <- (preview _PatternMatchFail -> Just e) where
PatternMatchFail_ e = review _PatternMatchFail e
#endif
----------------------------------------------------------------------------
-- RecConError
----------------------------------------------------------------------------
-- | An uninitialised record field was used.
class AsRecConError t where
-- |
-- @
-- '__RecConError' :: 'Prism'' 'RecConError' 'RecConError'
-- '__RecConError' :: 'Prism'' 'SomeException' 'RecConError'
-- @
__RecConError :: Prism' t RecConError
-- | Information about the source location where the record was
-- constructed.
--
-- @
-- '_RecConError' :: 'Prism'' 'RecConError' 'String'
-- '_RecConError' :: 'Prism'' 'SomeException' 'String'
-- @
_RecConError :: Prism' t String
_RecConError = __RecConError._RecConError
{-# INLINE _RecConError #-}
instance AsRecConError RecConError where
__RecConError = id
{-# INLINE __RecConError #-}
_RecConError = _Wrapping RecConError
{-# INLINE _RecConError #-}
instance AsRecConError SomeException where
__RecConError = exception
{-# INLINE __RecConError #-}
#if __GLASGOW_HASKELL__ >= 710
pattern RecConError__ e <- (preview __RecConError -> Just e) where
RecConError__ e = review __RecConError e
pattern RecConError_ e <- (preview _RecConError -> Just e) where
RecConError_ e = review _RecConError e
#endif
----------------------------------------------------------------------------
-- RecSelError
----------------------------------------------------------------------------
-- | A record selector was applied to a constructor without the appropriate
-- field. This can only happen with a datatype with multiple constructors,
-- where some fields are in one constructor but not another.
class AsRecSelError t where
-- |
-- @
-- '__RecSelError' :: 'Prism'' 'RecSelError' 'RecSelError'
-- '__RecSelError' :: 'Prism'' 'SomeException' 'RecSelError'
-- @
__RecSelError :: Prism' t RecSelError
-- | Information about the source location where the record selection occurred.
--
-- @
-- '_RecSelError' :: 'Prism'' 'RecSelError' 'String'
-- '_RecSelError' :: 'Prism'' 'SomeException' 'String'
-- @
_RecSelError :: Prism' t String
_RecSelError = __RecSelError._RecSelError
{-# INLINE _RecSelError #-}
instance AsRecSelError RecSelError where
__RecSelError = id
{-# INLINE __RecSelError #-}
_RecSelError = _Wrapping RecSelError
{-# INLINE _RecSelError #-}
instance AsRecSelError SomeException where
__RecSelError = exception
{-# INLINE __RecSelError #-}
#if __GLASGOW_HASKELL__ >= 710
pattern RecSelError__ e <- (preview __RecSelError -> Just e) where
RecSelError__ e = review __RecSelError e
pattern RecSelError_ e <- (preview _RecSelError -> Just e) where
RecSelError_ e = review _RecSelError e
#endif
----------------------------------------------------------------------------
-- RecUpdError
----------------------------------------------------------------------------
-- | A record update was performed on a constructor without the
-- appropriate field. This can only happen with a datatype with multiple
-- constructors, where some fields are in one constructor but not another.
class AsRecUpdError t where
-- |
-- @
-- '__RecUpdError' :: 'Prism'' 'RecUpdError' 'RecUpdError'
-- '__RecUpdError' :: 'Prism'' 'SomeException' 'RecUpdError'
-- @
__RecUpdError :: Prism' t RecUpdError
-- | Information about the source location where the record was updated.
--
-- @
-- '_RecUpdError' :: 'Prism'' 'RecUpdError' 'String'
-- '_RecUpdError' :: 'Prism'' 'SomeException' 'String'
-- @
_RecUpdError :: Prism' t String
_RecUpdError = __RecUpdError._RecUpdError
{-# INLINE _RecUpdError #-}
instance AsRecUpdError RecUpdError where
__RecUpdError = id
{-# INLINE __RecUpdError #-}
_RecUpdError = _Wrapping RecUpdError
{-# INLINE _RecUpdError #-}
instance AsRecUpdError SomeException where
__RecUpdError = exception
{-# INLINE __RecUpdError #-}
#if __GLASGOW_HASKELL__ >= 710
pattern RecUpdError__ e <- (preview __RecUpdError -> Just e) where
RecUpdError__ e = review __RecUpdError e
pattern RecUpdError_ e <- (preview _RecUpdError -> Just e) where
RecUpdError_ e = review _RecUpdError e
#endif
----------------------------------------------------------------------------
-- ErrorCall
----------------------------------------------------------------------------
-- | This is thrown when the user calls 'Prelude.error'.
class AsErrorCall t where
-- |
-- @
-- '__ErrorCall' :: 'Prism'' 'ErrorCall' 'ErrorCall'
-- '__ErrorCall' :: 'Prism'' 'SomeException' 'ErrorCall'
-- @
__ErrorCall :: Prism' t ErrorCall
-- | Retrieve the argument given to 'Prelude.error'.
--
-- 'ErrorCall' is isomorphic to a 'String'.
--
-- >>> catching _ErrorCall (error "touch down!") return
-- "touch down!"
--
-- @
-- '_ErrorCall' :: 'Prism'' 'ErrorCall' 'String'
-- '_ErrorCall' :: 'Prism'' 'SomeException' 'String'
-- @
_ErrorCall :: Prism' t String
_ErrorCall = __ErrorCall._ErrorCall
{-# INLINE _ErrorCall #-}
instance AsErrorCall ErrorCall where
__ErrorCall = id
{-# INLINE __ErrorCall #-}
_ErrorCall = _Wrapping ErrorCall
{-# INLINE _ErrorCall #-}
instance AsErrorCall SomeException where
__ErrorCall = exception
{-# INLINE __ErrorCall #-}
#if __GLASGOW_HASKELL__ >= 710
pattern ErrorCall__ e <- (preview __ErrorCall -> Just e) where
ErrorCall__ e = review __ErrorCall e
pattern ErrorCall_ e <- (preview _ErrorCall -> Just e) where
ErrorCall_ e = review _ErrorCall e
#endif
#if MIN_VERSION_base(4,8,0)
----------------------------------------------------------------------------
-- AllocationLimitExceeded
----------------------------------------------------------------------------
-- | This thread has exceeded its allocation limit.
class AsAllocationLimitExceeded t where
-- |
-- @
-- '__AllocationLimitExceeded' :: 'Prism'' 'AllocationLimitExceeded' 'AllocationLimitExceeded'
-- '__AllocationLimitExceeded' :: 'Prism'' 'SomeException' 'AllocationLimitExceeded'
-- @
__AllocationLimitExceeded :: Prism' t AllocationLimitExceeded
-- | There is no additional information carried in an
-- 'AllocationLimitExceeded' 'Exception'.
--
-- @
-- '_AllocationLimitExceeded' :: 'Prism'' 'AllocationLimitExceeded' ()
-- '_AllocationLimitExceeded' :: 'Prism'' 'SomeException' ()
-- @
_AllocationLimitExceeded :: Prism' t ()
_AllocationLimitExceeded = __AllocationLimitExceeded._AllocationLimitExceeded
{-# INLINE _AllocationLimitExceeded #-}
instance AsAllocationLimitExceeded AllocationLimitExceeded where
__AllocationLimitExceeded = id
{-# INLINE __AllocationLimitExceeded #-}
_AllocationLimitExceeded = trivial AllocationLimitExceeded
{-# INLINE _AllocationLimitExceeded #-}
instance AsAllocationLimitExceeded SomeException where
__AllocationLimitExceeded = exception
{-# INLINE __AllocationLimitExceeded #-}
pattern AllocationLimitExceeded__ e <- (preview __AllocationLimitExceeded -> Just e) where
AllocationLimitExceeded__ e = review __AllocationLimitExceeded e
pattern AllocationLimitExceeded_ <- (has _AllocationLimitExceeded -> True) where
AllocationLimitExceeded_ = review _AllocationLimitExceeded ()
#endif
#if MIN_VERSION_base(4,9,0)
----------------------------------------------------------------------------
-- TypeError
----------------------------------------------------------------------------
-- | An expression that didn't typecheck during compile time was called.
-- This is only possible with @-fdefer-type-errors@.
class AsTypeError t where
-- |
-- @
-- '__TypeError' :: 'Prism'' 'TypeError' 'TypeError'
-- '__TypeError' :: 'Prism'' 'SomeException' 'TypeError'
-- @
__TypeError :: Prism' t TypeError
-- | Details about the failed type check.
--
-- @
-- '_TypeError' :: 'Prism'' 'TypeError' 'String'
-- '_TypeError' :: 'Prism'' 'SomeException' 'String'
-- @
_TypeError :: Prism' t String
_TypeError = __TypeError._TypeError
{-# INLINE _TypeError #-}
instance AsTypeError TypeError where
__TypeError = id
{-# INLINE __TypeError #-}
_TypeError = _Wrapping TypeError
{-# INLINE _TypeError #-}
instance AsTypeError SomeException where
__TypeError = exception
{-# INLINE __TypeError #-}
pattern TypeError__ e <- (preview __TypeError -> Just e) where
TypeError__ e = review __TypeError e
pattern TypeError_ e <- (preview _TypeError -> Just e) where
TypeError_ e = review _TypeError e
#endif
#if MIN_VERSION_base(4,10,0)
----------------------------------------------------------------------------
-- CompactionFailed
----------------------------------------------------------------------------
-- | Compaction found an object that cannot be compacted.
-- Functions cannot be compacted, nor can mutable objects or pinned objects.
class AsCompactionFailed t where
-- |
-- @
-- '__CompactionFailed' :: 'Prism'' 'CompactionFailed' 'CompactionFailed'
-- '__CompactionFailed' :: 'Prism'' 'SomeException' 'CompactionFailed'
-- @
__CompactionFailed :: Prism' t CompactionFailed
-- | Information about why a compaction failed.
--
-- @
-- '_CompactionFailed' :: 'Prism'' 'CompactionFailed' 'String'
-- '_CompactionFailed' :: 'Prism'' 'SomeException' 'String'
-- @
_CompactionFailed :: Prism' t String
_CompactionFailed = __CompactionFailed._CompactionFailed
{-# INLINE _CompactionFailed #-}
instance AsCompactionFailed CompactionFailed where
__CompactionFailed = id
{-# INLINE __CompactionFailed #-}
_CompactionFailed = _Wrapping CompactionFailed
{-# INLINE _CompactionFailed #-}
instance AsCompactionFailed SomeException where
__CompactionFailed = exception
{-# INLINE __CompactionFailed #-}
pattern CompactionFailed__ e <- (preview __CompactionFailed -> Just e) where
CompactionFailed__ e = review __CompactionFailed e
pattern CompactionFailed_ e <- (preview _CompactionFailed -> Just e) where
CompactionFailed_ e = review _CompactionFailed e
#endif
------------------------------------------------------------------------------
-- HandlingException
------------------------------------------------------------------------------
-- | This 'Exception' is thrown by @lens@ when the user somehow manages to rethrow
-- an internal 'HandlingException'.
class AsHandlingException t where
-- |
-- @
-- '__HandlingException' :: 'Prism'' 'HandlingException' 'HandlingException'
-- '__HandlingException' :: 'Prism'' 'SomeException' 'HandlingException'
-- @
__HandlingException :: Prism' t HandlingException
-- | There is no information carried in a 'HandlingException'.
--
-- @
-- '_HandlingException' :: 'Prism'' 'HandlingException' ()
-- '_HandlingException' :: 'Prism'' 'SomeException' ()
-- @
_HandlingException :: Prism' t ()
_HandlingException = __HandlingException._HandlingException
{-# INLINE _HandlingException #-}
instance AsHandlingException HandlingException where
__HandlingException = id
{-# INLINE __HandlingException #-}
_HandlingException = trivial HandlingException
{-# INLINE _HandlingException #-}
instance AsHandlingException SomeException where
__HandlingException = exception
{-# INLINE __HandlingException #-}
#if __GLASGOW_HASKELL__ >= 710
pattern HandlingException__ e <- (preview __HandlingException -> Just e) where
HandlingException__ e = review __HandlingException e
pattern HandlingException_ <- (has _HandlingException -> True) where
HandlingException_ = review _HandlingException ()
#endif
------------------------------------------------------------------------------
-- Helper Functions
------------------------------------------------------------------------------
trivial :: t -> Iso' t ()
trivial t = const () `iso` const t
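-- A quick sanity sketch of 'trivial' (illustrative only): for an exception
-- value with no payload, such as 'NonTermination',
--
-- @
-- view (trivial NonTermination) NonTermination ≡ ()
-- review (trivial NonTermination) ()           ≡ NonTermination
-- @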
|
ddssff/lens
|
src/Control/Exception/Lens.hs
|
bsd-3-clause
| 52,743 | 0 | 11 | 8,532 | 6,313 | 3,656 | 2,657 | 352 | 2 |
{-# LANGUAGE GADTs #-}
{-# LANGUAGE OverloadedStrings #-}
module Data.Aeson.Forms.Internal.Types
(
-- * Types
Form (..)
, Result (..)
, Errors (..)
, Field
) where
import Control.Applicative
import Data.Aeson (Value (..), ToJSON (..), object, (.=))
import Data.HashMap.Strict (HashMap)
import qualified Data.HashMap.Strict as HashMap
import Data.Monoid
import Data.Text (Text)
------------------------------------------------------------------------------
-- | 'Form' represents a computation that when given a JSON 'Value' will
-- yield either 'Success' with the parsed data value or 'Failed' with
-- validation errors.
data Form m a where
Form :: Monad m => (Maybe Value -> m (Result a)) -> Form m a
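-- A minimal sketch of a hand-written 'Form' (illustrative only; the field
-- combinators the package actually exposes live in other modules):
--
-- > ageForm :: Monad m => Form m Int
-- > ageForm = Form $ \json -> return $ case json of
-- >   Just (Number n) -> Success (truncate n)
-- >   _               -> Failed (Errors (HashMap.singleton "age" ["expected a number"]))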
------------------------------------------------------------------------------
instance Functor (Form m) where
fmap f (Form action) = Form $ \json -> do
g <- action json
return $ fmap f g
------------------------------------------------------------------------------
instance Monad m => Applicative (Form m) where
pure x = Form $ \_ -> return $ Success x
Form f <*> Form g = Form $ \json -> do
f' <- f json
g' <- g json
return $ f' <*> g'
------------------------------------------------------------------------------
instance Monad m => Alternative (Form m) where
empty = Form $ \_ -> return empty
Form f <|> Form g = Form $ \json -> do
f' <- f json
g' <- g json
return $ f' <|> g'
------------------------------------------------------------------------------
-- | The result of running a 'Form'. If parsing is successful, Success is
-- returned along with the parsed value, else Failed is returned with a
-- HashMap of validation Errors.
data Result a =
Success a
| Failed Errors
deriving (Show, Eq)
------------------------------------------------------------------------------
instance Functor Result where
fmap f (Success a) = Success $ f a
fmap _ (Failed a) = Failed a
------------------------------------------------------------------------------
instance Applicative Result where
pure = Success
Success a <*> Success b = Success (a b)
Success _ <*> Failed b = Failed b
Failed a <*> Success _ = Failed a
Failed a <*> Failed b = Failed (a <> b)
------------------------------------------------------------------------------
instance Alternative Result where
empty = Failed . Errors $ HashMap.empty
Success a <|> _ = Success a
Failed _ <|> b = b
------------------------------------------------------------------------------
instance Monad Result where
return = pure
Failed x >>= _ = Failed x
Success x >>= f = f x
------------------------------------------------------------------------------
-- | Validation errors, keyed by 'Field' name.
newtype Errors = Errors (HashMap Field [Text]) deriving (Show, Eq)
------------------------------------------------------------------------------
instance Monoid Errors where
mempty = Errors HashMap.empty
Errors a `mappend` Errors b = Errors (a `combine` b)
where
combine = HashMap.unionWith (++)
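-- For example (a sketch, relying on the derived 'Eq' and @OverloadedStrings@):
--
-- > Errors (HashMap.fromList [("age", ["required"])])
-- >   `mappend` Errors (HashMap.fromList [("age", ["not a number"])])
-- >     == Errors (HashMap.fromList [("age", ["required", "not a number"])])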
------------------------------------------------------------------------------
instance ToJSON Errors where
toJSON (Errors errors) = object ["errors" .= toJSON errors]
------------------------------------------------------------------------------
-- | The name of a JSON field.
type Field = Text
|
lukerandall/aeson-forms
|
src/Data/Aeson/Forms/Internal/Types.hs
|
bsd-3-clause
| 3,551 | 0 | 11 | 743 | 852 | 441 | 411 | 61 | 0 |
-- Copyright (c) 2018 Leandro T. C. Melo ([email protected])
-- License: GPLv3
-- This implementation focuses on readability and formalism.
{-# LANGUAGE NamedFieldPuns #-}
import Control.Monad
import Control.Monad.Except
import Control.Monad.State
import Data.List
import Data.Map (Map)
import Data.Set (Set)
import qualified Data.List as List
import qualified Data.Map as Map
import qualified Data.Set as Set
import Debug.Trace
import System.Environment
import System.Exit
import System.IO
import Text.ParserCombinators.Parsec
import Text.ParserCombinators.Parsec.Expr
import Text.ParserCombinators.Parsec.Language
import qualified Text.ParserCombinators.Parsec.Token as Token
import qualified Text.PrettyPrint.HughesPJ as PP
----------------
-- The driver --
----------------
main :: IO ()
main = do
putStrLn "compile muC program"
args <- getArgs
case args of
[file] -> do
src <- readFile file
src' <- compile src
putStrLn $ "\n\n" ++ src'
let (name, _) = break (== '.') file
writeFile ("new_" ++ name ++ ".c") src'
_ -> error "invalid argument"
compile :: String -> IO String
compile src =
case parseSource src of
Left err -> return err
Right p -> do
debug "AST" (show (fmt 0 p))
let m = buildLattice p (M $ Map.empty)
debug "lattice of shapes" (show $ ppM m)
k <- generateConstraints p m
debug "K" (show $ ppK k)
let
phi_i = Map.empty
psi_i = Map.empty
theta_i = (Map.fromList
[ (hat IntTy, IntTy),
(hat DoubleTy, DoubleTy),
(hat VoidTy, VoidTy) ])
cfg = Config phi_i psi_i theta_i k [] [] [] []
cfg'@(Config { phi, psi , theta }) <- solveConstraints k cfg
debug "final config" (showConfig cfg')
let ok = satisfyK (phi, psi, theta) k
debug "semantics" (if ok then "OK" else error "does NOT hold\n")
let ts = verifyTyping cfg' Map.empty p
debug "typing" ("OK " ++ show ts)
let preamble = rewriteInC (theta Map.\\ theta_i)
src' = preamble ++ src
return src'
---------------------------
-- The definition of muC --
---------------------------
data Stamp = Stamp Int deriving (Eq, Ord, Show)
newtype Ident = Ident { _x :: String } deriving (Eq, Ord, Show)
data Type = IntTy
| DoubleTy
| VoidTy
| PtrTy Type
| ConstTy Type
| ArrowTy Type [Type]
| RecTy [Decl] Ident
| NamedTy Ident
| TyVar Stamp
deriving (Eq, Ord, Show)
data BinOptr = Add
| Divide
| Multiply
| And
| Or
| Assign
deriving (Eq, Ord, Show)
data Lit = IntLit Int
| DoubleLit Double
deriving (Eq, Ord, Show)
data Expr = NumLit Lit
| Var Ident
| FldAcc Expr Ident
| Deref Expr
| AddrOf Expr
| BinExpr BinOptr Expr Expr
deriving (Eq, Ord, Show)
data Stmt = ExprStmt Expr
| DeclStmt Decl
| RetStmt Expr
deriving (Eq, Ord, Show)
data Decl = Decl { _ft :: Type, _fx :: Ident } deriving (Eq, Ord, Show)
data FunDef = FunDef Type Ident [Decl] [Stmt] deriving (Eq, Ord, Show)
data TypeDef = TypeDef Type Type deriving (Eq, Ord, Show)
data Prog = Prog [TypeDef] [FunDef] deriving (Eq, Ord, Show)
----------------------------
-- The constraints syntax --
----------------------------
data K = T
| B
| K :&: K
| Exists [Type] K
| Def Ident Type K
| Fun Ident Type K
| TypeOf Ident Type
| Syn Type Type
| Has Type Decl
| Type :=: Type
| Type :<=: Type
deriving (Eq, Ord, Show)
-------------------
-- Substitutions --
-------------------
data Subst = Stamp :-> Type
| Trivial
deriving (Eq, Ord, Show)
class Substitutable a where
-- | Apply a single substitution.
apply :: Subst -> a -> a
-- | Apply multiple substitutions at once.
applyMany :: [Subst] -> a -> a
applyMany sl a = foldr (\s acc -> apply s acc) a sl
-- | Obtain the free type variables.
ftv :: a -> [Stamp]
instance Substitutable a => Substitutable [a] where
apply s = map (apply s)
ftv = foldr (union . ftv) []
instance Substitutable Type where
apply Trivial t = t
apply s t@(IntTy) = t
apply s t@(DoubleTy) = t
apply s t@(VoidTy) = t
apply s (PtrTy t) = PtrTy (apply s t)
apply s (ConstTy t) = ConstTy (apply s t)
apply s (ArrowTy rt pt) = ArrowTy (apply s rt) (apply s pt)
apply s (RecTy fs x) = RecTy (apply s fs) x
apply s t@(NamedTy _) = t
apply (st :-> t) t'@(TyVar st') = if st == st' then t else t'
ftv IntTy = []
ftv DoubleTy = []
ftv VoidTy = []
ftv (PtrTy t) = ftv t
ftv (ConstTy t) = ftv t
ftv (ArrowTy rt pt) = ftv rt `union` ftv pt
ftv (RecTy fs _) = ftv fs
ftv (NamedTy _) = []
ftv (TyVar st) = [st]
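-- As an illustration (not part of the original sources), applying a single
-- substitution to a type replaces exactly the matching type variable:
--
--   apply (Stamp 0 :-> IntTy) (PtrTy (TyVar (Stamp 0)))  ==  PtrTy IntTy
--   ftv (ArrowTy (TyVar (Stamp 1)) [IntTy])              ==  [Stamp 1]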
instance Substitutable K where
apply Trivial k = k
apply _ T = T
apply _ B = B
apply s (k1 :&: k2) = (apply s k1) :&: (apply s k2)
apply s (Exists t k) = Exists (apply s t) (apply s k)
apply s (Def x t@(TyVar _) k) = Def x (apply s t) (apply s k)
apply s (Fun f t@(ArrowTy rt pl) k) = Fun f (apply s t) (apply s k)
apply s (TypeOf x t) = TypeOf x (apply s t)
apply s (Syn t1 t2) = Syn (apply s t1) (apply s t2)
apply s (Has t fld) = Has (apply s t) (apply s fld)
apply s (t1 :=: t2) = (apply s t1) :=: (apply s t2)
apply s (t1 :<=: t2) = (apply s t1) :<=: (apply s t2)
ftv _ = []
instance Substitutable Decl where
apply s (Decl t x) = Decl (apply s t) x
ftv (Decl t _) = ftv t
instance Substitutable v => Substitutable (Map k v) where
apply s = Map.map (apply s)
ftv = Map.foldr (union . ftv) []
-- This function exists for presentation purposes.
foreachValue :: Substitutable a => [Subst] -> Map k a -> Map k a
foreachValue s = Map.map (applyMany s)
-------------------------
-- Type identification --
-------------------------
newtype TypeId = TypeId { _id :: String } deriving (Eq, Ord, Show)
-- | Compute the typeid of a type.
hat :: Type -> TypeId
hat IntTy = TypeId "int"
hat DoubleTy = TypeId "double"
hat VoidTy = TypeId "void"
hat (PtrTy t) = TypeId $ (_id (hat t) ++ "*")
hat (ConstTy t) = TypeId $ "const[" ++ (_id (hat t)) ++ "]"
hat (ArrowTy rt pt) = TypeId $
"[" ++ (_id (hat rt)) ++ "(*)(" ++
(foldr (\t acc -> (_id (hat t)) ++ acc) ")" pt) ++ "]"
hat (RecTy _ x) = TypeId (_x x)
hat (NamedTy x) = TypeId (_x x)
hat (TyVar (Stamp n)) = TypeId $ "α" ++ (show n)
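-- As an illustration (not part of the original sources), a few typeids:
--
--   hat (PtrTy (ConstTy IntTy))   ==  TypeId "const[int]*"
--   hat (ArrowTy VoidTy [IntTy])  ==  TypeId "[void(*)(int)]"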
------------------------------
-- The mappings ψ, Φ, and θ --
------------------------------
type Phi = Map Stamp Type
type Psi = Map Ident Type
type Theta = Map TypeId Type
-- | Find, in Psi, the type mapped to an identifier.
findInPsi :: Ident -> Psi -> Type
findInPsi x psi =
case Map.lookup x psi of
Just t -> t
_ -> error $ "no τ for identifier " ++ show (ppK x) ++ " in ψ\n"
-- | Find, in Phi, the type mapped to a stamp.
findInPhi :: Stamp -> Phi -> Type
findInPhi st phi =
case Map.lookup st phi of
Just t -> t
_ -> error $
"no τ for stamp " ++ show (ppK st) ++ " in Φ\n"
-- | Find, in Theta, the type mapped to a typeid.
findInTheta :: TypeId -> Theta -> Type
findInTheta hat theta =
case Map.lookup hat theta of
Just t -> t
_ -> error $
"no τ for typeid " ++ show (ppK hat) ++ " in θ\n"
-- | Add, to Psi, an identifier to type mapping.
addToPsi :: Ident -> Type -> Psi -> Psi
addToPsi x t psi =
case Map.lookup x psi of
Just t -> error $
show (ppK t) ++ ", indexed by " ++
show (ppK x) ++ ", is already in ψ\n"
_ -> Map.insert x t psi
-- | Add, to Phi, a stamp to type mapping.
addToPhi :: Stamp -> Type -> Phi -> Phi
addToPhi st t phi =
case Map.lookup st phi of
Just t -> error $
show (ppK t) ++ ", indexed by " ++
show (ppK st) ++ ", is already in Φ\n"
_ -> Map.insert st t phi
-- | Add, to Theta, a type id to type mapping.
addToTheta :: TypeId -> Type -> Theta -> Theta
addToTheta tid t theta =
case Map.lookup tid theta of
Just t -> error $
show (ppK t) ++ ", indexed by " ++
show (ppK tid) ++ ", is already in θ\n"
_ -> Map.insert tid t theta
----------------------------------
-- The semantics of constraints --
----------------------------------
trace_Sema = False
satisfyK :: (Phi, Psi, Theta) -> K -> Bool
-- | KT
satisfyK (_, _, _) T = True
-- | KAnd
satisfyK d@(phi, psi, theta) k@(k1 :&: k2) =
let check1 = satisfyK (phi, psi, theta) k1
check2 = satisfyK (phi, psi, theta) k2
in if trace_Sema
then trace ("\nsatisfy " ++ show (ppK k) ++ "\n" ++ show (formatPhiPsiTheta d) ++
"k1 " ++ show (ppK k1) ++ "\nk2 " ++ show (ppK k2)) (check1 && check2)
else check1 && check2
-- | KEx
satisfyK (phi, psi, theta) kk@(Exists tl k) =
let go st t = isSubTy phi (findInPhi st phi) t
in foldr (\(TyVar st) acc -> (Map.foldr (\t acc_ -> go st t || acc_) False phi) && acc) True tl
&& satisfyK (phi, psi, theta) k
-- | KDef
satisfyK (phi, psi, theta) (Def x (TyVar st) k) =
findInPsi x psi == findInPhi st phi
&& satisfyK (phi, psi, theta) k
-- | KFun
satisfyK (phi, psi, theta) (Fun f (ArrowTy rt@(TyVar st) p) k) =
findInPsi f psi == ArrowTy (findInPhi st phi) p
&& satisfyK (phi, psi, theta) k
-- | KInst
satisfyK (phi, psi, theta) k@(TypeOf x t@(TyVar _)) =
satisfyK (phi, psi, theta) ((findInPsi x psi) :=: t)
-- | KSyn
satisfyK (phi, psi, theta) (Syn t a@(TyVar _)) =
let t' = findInTheta (hat t) theta
in satisfyK (phi, psi, theta) (t' :=: a)
-- | KHas
satisfyK (phi, psi, theta) (Has (TyVar st) (Decl t x)) =
let t' = case findInPhi st phi of
(TyVar _) -> t
gt -> field x (findInTheta (hat gt) theta)
in satisfyK (phi, psi, theta) (t' :=: t)
-- | KEq
satisfyK (phi, _, _) k@(t1 :=: t2) = isSubTy phi t1 t2 && isSubTy phi t2 t1
-- | KIq
satisfyK (phi, _, _) k@(t1 :<=: t2) = isSubTy phi t1 t2
satisfyC_ :: (Config, Config) -> K -> Config
satisfyC_ (cfg, cfg') k =
if satisfyK ((phi cfg), (psi cfg), (theta cfg)) k
then cfg'
else error $ "entailment failed"
satisfyC :: Config -> Config
satisfyC cfg =
if satisfyK ((phi cfg), (psi cfg), (theta cfg)) (groupK cfg)
then cfg
else error $ "entailment failed"
-- | Return the type of the field in a record.
field :: Ident -> Type -> Type
field x (RecTy ds _) =
let ds' = filter (\(Decl _ x') -> x == x') ds
in case length ds' of
1 -> _ft (ds' !! 0)
_ -> error $ "record has no field " ++ show (ppK x) ++ "\n"
field _ t = error $ "type " ++ show (ppK t) ++ " is not a record\n"
-- | Return whether the type is a ground type.
isGround :: Type -> Bool
isGround t = ftv t == []
------------------------
-- The type predicate --
------------------------
trace_Pred = False
isSubTy :: Phi -> Type -> Type -> Bool
isSubTy phi t1@(TyVar st1) t2 =
if (isIdentity phi t1)
then True
else (isGround (findInPhi st1 phi))
&& isSubTy phi (findInPhi st1 phi) t2
isSubTy phi t1 t2@(TyVar st2) =
if (isIdentity phi t2)
then True
else (isGround (findInPhi st2 phi))
&& isSubTy phi t1 (findInPhi st2 phi)
isSubTy phi (ConstTy t1) (ConstTy t2) =
isSubTy phi t1 t2
isSubTy phi (ConstTy t1) t2 =
isSubTy phi t1 t2
isSubTy phi t@(PtrTy t1) t'@(PtrTy t2) =
isSubTyPtr phi t1 t2
isSubTy _ IntTy IntTy = True
isSubTy _ IntTy DoubleTy = True
isSubTy _ DoubleTy DoubleTy = True
isSubTy _ VoidTy VoidTy = True
isSubTy _ (NamedTy x1) (NamedTy x2) = x1 == x2
isSubTy _ t1@(RecTy _ _) t2@(RecTy _ _) = t1 == t2
isSubTy phi t1 t2 =
if trace_Pred
then trace ("unknown (value) subtyping " ++ show (ppK t1) ++ "<:" ++ show (ppK t2)) False
else False
isSubTyPtr :: Phi -> Type -> Type -> Bool
isSubTyPtr phi t1@(TyVar st1) t2 =
if (isIdentity phi t1)
then True
else (isGround (findInPhi st1 phi))
&& isSubTyPtr phi (findInPhi st1 phi) t2
isSubTyPtr phi t1 t2@(TyVar st2) =
if (isIdentity phi t2)
then True
else (isGround (findInPhi st2 phi))
&& isSubTyPtr phi t1 (findInPhi st2 phi)
isSubTyPtr phi (ConstTy t1) (ConstTy t2) =
isSubTyPtr phi t1 t2
isSubTyPtr phi t1 (ConstTy t2) =
isSubTyPtr phi t1 t2
isSubTyPtr _ _ VoidTy = True
isSubTyPtr _ IntTy IntTy = True
isSubTyPtr _ DoubleTy DoubleTy = True
isSubTyPtr _ (NamedTy x1) (NamedTy x2) = x1 == x2
isSubTyPtr _ t1@(RecTy _ _) t2@(RecTy _ _) = t1 == t2
isSubTyPtr phi t1 t2 =
if trace_Pred
then trace ("unknown (pointer) subtyping " ++ show (ppK t1) ++ "<:" ++ show (ppK t2)) False
else False
-- | Subtyping predicate for ground types.
isSubTy' :: Type -> Type -> Bool
isSubTy' (TyVar _) _ = error $ "expected ground type "
isSubTy' _ (TyVar _) = error $ "expected ground type "
isSubTy' (ConstTy t1) (ConstTy t2) =
isSubTy' t1 t2
isSubTy' (ConstTy t1) t2 =
isSubTy' t1 t2
isSubTy' (PtrTy t1) (PtrTy t2) =
isSubTyPtr' t1 t2
isSubTy' IntTy IntTy = True
isSubTy' DoubleTy DoubleTy = True
isSubTy' VoidTy VoidTy = True
isSubTy' IntTy DoubleTy = True
isSubTy' (NamedTy x1) (NamedTy x2) = x1 == x2
isSubTy' t1@(RecTy _ _) t2@(RecTy _ _) = t1 == t2
isSubTy' t1 t2 =
if trace_Pred
then trace ("unknown (value/ground) subtyping " ++ show (ppK t1) ++ "<:" ++ show (ppK t2)) False
else False
isSubTyPtr' :: Type -> Type -> Bool
isSubTyPtr' (TyVar _) _ = error $ "expected ground type "
isSubTyPtr' _ (TyVar _) = error $ "expected ground type "
isSubTyPtr' (ConstTy t1) (ConstTy t2) =
isSubTyPtr' t1 t2
isSubTyPtr' t1 (ConstTy t2) =
isSubTyPtr' t1 t2
isSubTyPtr' (PtrTy t1) (PtrTy t2) =
  isSubTyPtr' t1 t2
isSubTyPtr' _ VoidTy = True
isSubTyPtr' IntTy IntTy = True
isSubTyPtr' DoubleTy DoubleTy = True
isSubTyPtr' (NamedTy x1) (NamedTy x2) = x1 == x2
isSubTyPtr' t1@(RecTy _ _) t2@(RecTy _ _) = t1 == t2
isSubTyPtr' t1 t2 =
if trace_Pred
then trace ("unknown (pointer/ground) subtyping " ++ show (ppK t1) ++ "<:" ++ show (ppK t2)) False
else False
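-- A few illustrative cases of the ground predicates (not part of the
-- original sources):
--
--   isSubTy' IntTy DoubleTy                ==  True   -- int widens to double
--   isSubTy' (PtrTy IntTy) (PtrTy VoidTy)  ==  True   -- any pointer converts to void*
--   isSubTy' DoubleTy IntTy                ==  False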
-- | Whether we have an identity relation.
isIdentity phi t@(TyVar st) = t == findInPhi st phi
---------------------------
-- Constraint generation --
---------------------------
generateConstraints :: Prog -> M -> IO K
generateConstraints p m = do
(c, _) <- runStateT (genProg p m) 0
return c
-- | The generator is typed as a monad to allow isolation of
-- the fresh variable supply.
type GenMonad a = StateT Int IO a
fresh :: GenMonad Type
fresh = do
n <- get
put (n + 1)
return $ TyVar (Stamp n)
-- | Constraint generation for a program.
genProg :: Prog -> M -> GenMonad K
genProg (Prog _ []) _ = return T
genProg (Prog [] ((FunDef rt f d s):fs)) m = do
a <- fresh
syn <- buildSyn rt a
let pt = foldl (\acc (Decl t _) -> t:acc) [] d
k <- genFun d s a m
k' <- genProg (Prog [] fs) m
return $
Exists [a] $
syn :&:
Fun f (ArrowTy a pt) k :&:
k'
genProg (Prog ((TypeDef t1 t2):tds) fs) m = do
a <- fresh
k <- genProg (Prog tds fs) m
return $
Exists [a] $
(Syn t2 a) :&:
(a :=: t1) :&:
k
-- | Constraint generation for functions.
genFun :: [Decl] -> [Stmt] -> Type -> M -> GenMonad K
genFun [] s rt m = genStmt s rt m
genFun ((Decl t x):dx) s rt m = do
a <- fresh
syn <- buildSyn t a
k <- genFun dx s rt m
return $
Exists [a] $
syn :&:
Def x a k
-- | Constraint generation for statements.
genStmt :: [Stmt] -> Type -> M -> GenMonad K
genStmt ((DeclStmt (Decl t x)):sl) rt m = do
a <- fresh
syn <- buildSyn t a
k <- genStmt sl rt m
return $
Exists [a] $
syn :&:
Def x a k
genStmt ((ExprStmt e):sl) rt m = do
a <- fresh
k1 <- genExpr e a m
k2 <- genStmt sl rt m
return $
Exists [a] k1 :&:
k2
genStmt ((RetStmt e):[]) rt m = do
a <- fresh
k <- genExpr e a m
return $
Exists [a] $
keepOrDrop (shapeOf FunRole m) rt (shapeOf (ValRole e) m) a Assign :&:
k
-- | Constraint generation for expressions.
genExpr :: Expr -> Type -> M -> GenMonad K
genExpr (NumLit l) t _ = return (rho l :=: t)
genExpr (Var x) t _ = return (TypeOf x t)
genExpr (FldAcc e x) t m = do
a1 <- fresh
a2 <- fresh
a3 <- fresh
k <- genExpr e a1 m
return $
Exists [a1, a2, a3] $
(Has a2 (Decl a3 x)) :&:
(a1 :=: (PtrTy a2)) :&:
(a3 :=: t) :&:
k
genExpr (Deref e) t m = do
a <- fresh
k <- genExpr e a m
return $
Exists [a] $
(a :=: PtrTy t) :&:
k
genExpr (AddrOf e) t m = do
a1 <- fresh
a2 <- fresh
k <- genExpr e a2 m
return $
Exists [a1, a2] $
(a1 :=: PtrTy a2) :&:
(a1 :=: t) :&:
k
genExpr e@(BinExpr op e1 e2) t m = do
a1 <- fresh
a2 <- fresh
k1 <- genExpr e1 a1 m
k2 <- genExpr e2 a2 m
return $
Exists [a1, a2] $
k1 :&:
k2 :&:
keepOrDrop (shapeOf (ValRole e1) m) a1 (shapeOf (ValRole e2) m) a2 op :&:
select (shapeOf (ValRole e1) m) a1 (shapeOf (ValRole e2) m) a2 t op
-- | The type of a literal.
rho :: Lit -> Type
rho (IntLit _) = IntTy
rho (DoubleLit _) = DoubleTy
---------------------
-- Synonym builder --
---------------------
-- | Recursively build type synonyms.
buildSyn :: Type -> Type -> GenMonad K
buildSyn t@(PtrTy tt) a = do
b <- fresh
syn <- buildSyn tt b
return $
Exists [b] $
Syn t a :&:
Syn tt b :&:
((PtrTy b) :=: a) :&:
syn
buildSyn t@(ConstTy tt) a = do
b <- fresh
syn <- buildSyn tt b
return $
Exists [b] $
Syn t a :&:
Syn tt b :&:
((ConstTy b) :=: a) :&:
syn
buildSyn t a =
return $ Syn t a
--------------------------
-- Auxiliary generators --
--------------------------
-- | Keep or drop a constraint.
keepOrDrop :: Shape -> Type -> Shape -> Type -> BinOptr -> K
keepOrDrop sp1 a1 sp2 a2 op =
if (fst sp1 /= fst sp2
&& (fst sp1 == P || fst sp2 == P)
&& fst sp1 /= U
&& fst sp2 /= U)
then T
else if (op == Assign)
then (a2 :<=: a1)
else if (fst sp1 == I && fst sp2 == FP)
then (a1 :<=: a2)
else if (fst sp1 == FP && fst sp2 == I)
then (a2 :<=: a1)
else if (snd sp1 == snd sp2)
then (a1 :=: a2)
-- The shape carries an annotation, use it accordingly.
else if (snd sp1 == (ConstTy (snd sp2)))
then (a1 :<=: a2)
else (a2 :<=: a1)
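-- For illustration (a plain-language reading of the cases above, added as
-- a sketch): the constraint is dropped (T) when exactly one operand is
-- known to be a pointer and neither shape is undefined; an assignment
-- yields a2 ≤ a1 (RHS below LHS); mixed integral/floating operands place
-- the integral side below the floating one; otherwise the annotated types
-- decide between an equality and a constness-directed inequality.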
-- | Select operands and result types.
select :: Shape -> Type -> Shape -> Type -> Type -> BinOptr -> K
select sp1 a1 sp2 a2 t op =
case op of
Add -> select_Add sp1 a1 sp2 a2 t
Assign -> (t :=: a1)
And -> select_AndOr sp1 a1 sp2 a2 t
Or -> select_AndOr sp1 a1 sp2 a2 t
Divide -> select_DivideMultiply sp1 a1 sp2 a2 t
Multiply -> select_DivideMultiply sp1 a1 sp2 a2 t
select_Add :: Shape -> Type -> Shape -> Type -> Type -> K
select_Add sp1 a1 sp2 a2 t =
if (fst sp1 == P)
then (a1 :=: t) :&: ((ConstTy IntTy) :<=: a2)
else if (fst sp2 == P)
then (a2 :=: t) :&: ((ConstTy IntTy) :<=: a1)
else (t :<=: DoubleTy)
select_AndOr :: Shape -> Type -> Shape -> Type -> Type -> K
select_AndOr sp1 a1 sp2 a2 t = t :=: IntTy
select_DivideMultiply :: Shape -> Type -> Shape -> Type -> Type -> K
select_DivideMultiply sp1 a1 sp2 a2 t =
if (fst sp1 == I && fst sp2 == I)
then (t :=: IntTy)
:&: ((ConstTy IntTy) :<=: a1)
:&: ((ConstTy IntTy) :<=: a2)
else if (fst sp1 == I)
then (t :<=: DoubleTy)
:&: ((ConstTy IntTy) :<=: a1)
:&: (a2 :<=: DoubleTy)
else if (fst sp2 == I)
then (t :<=: DoubleTy)
:&: (a1 :<=: DoubleTy)
:&: ((ConstTy IntTy) :<=: a2)
else (t :<=: DoubleTy)
:&: (a1 :<=: DoubleTy)
:&: (a2 :<=: DoubleTy)
---------------------------
-- The lattice of shapes --
---------------------------
data ShapeKey = U
| S
| P
| N
| I
| FP
deriving (Eq, Ord, Show)
type Shape = (ShapeKey, Type)
-- | Create a shape based on its use.
shapeFromUse :: ShapeKey -> Shape
shapeFromUse sk = (sk, NamedTy $ Ident "<empty type>")
-- | Create a shape out of a type.
shapeFromTy :: Type -> Shape
shapeFromTy t@IntTy = (I, t)
shapeFromTy t@DoubleTy = (FP, t)
shapeFromTy t@(ConstTy t') = (fst (shapeFromTy t'), t)
shapeFromTy t@(PtrTy _) = (P, t)
shapeFromTy _ = shapeFromUse U
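-- A few illustrative cases (a sketch restating the equations above):
-- shapeFromTy IntTy == (I, IntTy);
-- shapeFromTy (ConstTy IntTy) == (I, ConstTy IntTy) -- the key looks through
-- the qualifier, while the carried type keeps it;
-- shapeFromTy (PtrTy IntTy) == (P, PtrTy IntTy);
-- any other type falls back to the Undefined shape.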
data SyntaxRole = ValRole Expr
| FunRole
deriving (Eq, Ord, Show)
-- | The table M.
newtype M = M { _shapes :: Map SyntaxRole Shape } deriving (Eq, Ord, Show)
insertOrUpdate :: SyntaxRole -> Shape -> M -> (Shape, M)
insertOrUpdate ro sp m =
(sp', M $ Map.insert ro sp' (_shapes m))
where
sp' = case Map.lookup ro (_shapes m) of
Just sp'' ->
case fst sp'' of
P -> sp''
I -> sp''
FP -> sp''
N -> if (fst sp == I || fst sp == FP)
then sp
else sp''
S -> if (fst sp == I
|| fst sp == FP
|| fst sp == P)
then sp
else sp''
U -> sp
Nothing -> sp
shapeOf :: SyntaxRole -> M -> Shape
shapeOf ro m =
case Map.lookup ro (_shapes m) of
Just sp -> sp
Nothing -> shapeFromUse U
classifyE :: Expr -> Shape -> M -> (Shape, M)
classifyE e@(NumLit v) _ m =
insertOrUpdate (ValRole e) sp m
where
sp = case v of
(IntLit 0) -> shapeFromUse S
(IntLit _) -> shapeFromUse I
_ -> shapeFromUse FP
classifyE e@(Var _) sp m =
insertOrUpdate (ValRole e) sp m
classifyE e@(FldAcc e' x) sp m =
insertOrUpdate (ValRole e) sp m'
where
(_, m') = classifyE e' (shapeFromUse P) m
classifyE e@(Deref e') sp m =
insertOrUpdate (ValRole e) sp m'
where
(_, m') = classifyE e' (shapeFromUse P) m
classifyE e@(AddrOf e') sp m =
insertOrUpdate (ValRole e) (shapeFromUse P) m'
where
(_, m') = classifyE e' sp m
classifyE e@(BinExpr Add e1 e2) sp m =
insertOrUpdate (ValRole e) sp'''' m''
where
sp' = if (sp == shapeFromUse I
|| sp == shapeFromUse FP
|| sp == shapeFromUse N)
then sp
else shapeFromUse S
(sp1, m') = classifyE e1 sp' m
sp'' = if (fst sp1 == P)
then shapeFromUse I
else if (sp == shapeFromUse I
|| sp == shapeFromUse FP
|| sp == shapeFromUse N)
then sp
else shapeFromUse S
(sp2, m'') = classifyE e2 sp'' m'
sp''' = if (fst sp2 == P)
then shapeFromUse I
else sp''
(sp3, m''') = classifyE e1 sp''' m''
sp'''' = if (fst sp3 == P || fst sp2 == P)
then shapeFromUse P
else shapeFromUse N
classifyE e@(BinExpr Divide e1 e2) sp m =
insertOrUpdate (ValRole e) (shapeFromUse N) m''
where
(_, m') = classifyE e1 (shapeFromUse N) m
(_, m'') = classifyE e2 (shapeFromUse N) m'
classifyE e@(BinExpr Multiply e1 e2) sp m =
insertOrUpdate (ValRole e) (shapeFromUse N) m''
where
(_, m') = classifyE e1 (shapeFromUse N) m
(_, m'') = classifyE e2 (shapeFromUse N) m'
classifyE e@(BinExpr And e1 e2) sp m =
insertOrUpdate (ValRole e) (shapeFromUse I) m''
where
(_, m') = classifyE e1 (shapeFromUse S) m
(_, m'') = classifyE e2 (shapeFromUse S) m'
classifyE e@(BinExpr Or e1 e2) sp m =
insertOrUpdate (ValRole e) (shapeFromUse I) m''
where
(_, m') = classifyE e1 (shapeFromUse S) m
(_, m'') = classifyE e2 (shapeFromUse S) m'
classifyE e@(BinExpr Assign e1 e2) sp m =
insertOrUpdate (ValRole e) sp1 m''
where
(sp2, m') = classifyE e2 sp m
(sp1, m'') = classifyE e1 sp2 m'
classifyD :: Decl -> M -> (Shape, M)
classifyD (Decl { _ft = t, _fx = x }) m =
insertOrUpdate (ValRole (Var x)) (shapeFromTy t) m
-- | Build lattice of shapes until stabilization.
buildLattice :: Prog -> M -> M
buildLattice p@(Prog _ fs) m =
let
go ((DeclStmt d):xs) acc =
let (sp, m) = classifyD d acc
in go xs m
go ((ExprStmt e):xs) acc =
let (sp, m) = classifyE e (shapeOf (ValRole e) acc) acc
in go xs m
go ((RetStmt e):[]) acc = snd $ classifyE e (shapeOf (ValRole e) acc) acc
handleParam ds = map (\d -> DeclStmt d) ds
handleRet rt m = M $ Map.insert FunRole (shapeFromTy rt) (_shapes m)
m' = foldr (\(FunDef rt _ ds ss) acc -> go
((handleParam ds) ++ ss) (handleRet rt acc)) m fs
in if (m' == m)
then m'
else buildLattice p m'
-----------------
-- Unification --
-----------------
data StratMode = Relax
| Enforce
deriving (Eq, Ord, Show)
class Substitutable a => UnifiableC a where
uC :: a -> a -> [Subst]
uS :: a -> a -> StratMode -> [Subst]
instance UnifiableC Type where
uC (TyVar st) t2 =
let s = st :-> t2
in if (trace_UC)
then trace("uC " ++ show (ppK s)) [s]
else [s]
uC t1 t2@(TyVar _) = uC t2 t1
uC IntTy IntTy = [Trivial]
uC DoubleTy DoubleTy = [Trivial]
uC VoidTy VoidTy = [Trivial]
uC t1@(NamedTy x1) t2@(NamedTy x2)
| x1 == x2 = [Trivial]
| otherwise = error $ "can't (classic) unify named types " ++
(show $ ppK t1) ++ "::" ++ (show $ ppK t2)
uC (ConstTy t1) (ConstTy t2) = uC t1 t2
uC (PtrTy t1) (PtrTy t2) = uC t1 t2
uC t1@(RecTy fs1 x1) t2@(RecTy fs2 x2) = undefined
uC (ArrowTy rt1 [pt1]) (ArrowTy rt2 [pt2]) = undefined
uC t1 t2 = error $ "unknown (classic) unification from " ++
(show $ ppK t1) ++ " to " ++ (show $ ppK t2)
uS t1@(PtrTy _) (TyVar st) _ = [st :-> t1]
uS t1 t2@(TyVar st) sm
| sm == Enforce = [st :-> t1]
| otherwise = [st :-> (relax t1)]
uS (TyVar st) t2 _ = [st :-> (relax t2)]
uS IntTy IntTy _ = [Trivial]
uS IntTy DoubleTy Relax = [Trivial]
uS DoubleTy DoubleTy _ = [Trivial]
uS VoidTy VoidTy _ = [Trivial]
uS t1@(NamedTy x1) t2@(NamedTy x2) _
| x1 == x2 = [Trivial]
| otherwise = error $ "can't (directional) unify named types " ++
(show $ ppK t1) ++ "::" ++ (show $ ppK t2)
uS (ConstTy t1) (ConstTy t2) sm = uS t1 t2 sm
uS (ConstTy t1) t2 Relax = uS t1 t2 Relax
uS t1 (ConstTy t2) Enforce = uS t1 t2 Enforce
uS (PtrTy (TyVar st)) (PtrTy t2@VoidTy) _ = [st :-> t2]
uS (PtrTy _) (PtrTy VoidTy) _ = [Trivial]
uS (PtrTy t1) (PtrTy t2) _ = uS t1 t2 Enforce
uS t1@(RecTy fs1 x1) t2@(RecTy fs2 x2) _ = undefined
uS (ArrowTy rt1 [pt1]) (ArrowTy rt2 [pt2]) _ = undefined
uS t1 t2 _ = error $ "unknown (directional) unification from " ++
(show $ ppK t1) ++ " to " ++ (show $ ppK t2)
instance UnifiableC Decl where
uC (Decl t1 x1) (Decl t2 x2)
| x1 == x2 = uC t1 t2
| otherwise = error $ "can't unify decl " ++
(show x1) ++ "::" ++ (show x2)
uS (Decl t1 x1) (Decl t2 x2) m
| x1 == x2 = uS t1 t2 m
| otherwise = error $ "can't unify decl " ++
(show x1) ++ "::" ++ (show x2)
instance UnifiableC a => UnifiableC [a] where
uC [] [] = [Trivial]
uC _ [] = error "can't unify lists, different lengths"
uC [] x = uC x []
uC (a1:as1) (a2:as2) =
let s = uC a1 a2
s' = uC (applyMany s as1) (applyMany s as2)
in s ++ s'
uS [] [] _ = [Trivial]
uS _ [] _ = error "can't unify lists, different lengths"
uS [] x m = uS x [] m
uS (a1:as1) (a2:as2) m =
let s = uS a1 a2 m
s' = uS (applyMany s as1) (applyMany s as2) m
in s ++ s'
trace_UC = False
----------------------------------
-- The constness-relax function --
----------------------------------
-- | Relax constness.
relax :: Type -> Type
relax (ConstTy t) = t
relax (PtrTy t) = PtrTy (relax t)
relax t = t
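-- A minimal illustration of 'relax' (a sketch; the equalities below just
-- restate the equations above):
-- relax (ConstTy IntTy) == IntTy
-- relax (PtrTy (ConstTy IntTy)) == PtrTy IntTy
-- relax (PtrTy (PtrTy (ConstTy DoubleTy))) == PtrTy (PtrTy DoubleTy)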
------------------------------
-- The solver configuration --
------------------------------
data Config = Config
{ phi :: Phi,
psi :: Psi,
theta :: Theta,
k :: K,
kE :: [K],
kI :: [K],
kW :: [K],
kF :: [K]
}
deriving (Eq, Ord, Show)
groupK :: Config -> K
groupK cfg@(Config { k, kE, kI, kW, kF }) =
let go el acc = acc :&: el
in foldr go k (kE ++ kI ++ kW ++ kF)
---------------------------
-- Solver: preprocessing --
---------------------------
preprocess :: Config -> Config
-- | PP-and
preprocess cfg@(Config { k = k1 :&: k2 }) =
let cfg' = preprocess (cfg { k = k1 })
cfg'' = satisfyC cfg'
in preprocess $ (cfg'' { k = k2 })
-- | PP-ex
preprocess cfg@(Config { k = Exists ts k' }) =
let self t@(TyVar st) acc = addToPhi st t acc
phi' = foldr self (phi cfg) ts
in preprocess $ cfg { phi = phi', k = k' }
-- | PP-syn
preprocess cfg@(Config { k = Syn t a, theta } ) =
let theta' = if Map.member (hat t) theta
then theta
else addToTheta (hat t) a theta
cfg' = cfg { theta = theta', k = ((findInTheta (hat t) theta') :=: a) }
in preprocess $ cfg'
-- | PP-def
preprocess cfg@(Config { k = Def x (TyVar st) k' }) =
let psi' = addToPsi x (findInPhi st (phi cfg)) (psi cfg)
in preprocess $ cfg { psi = psi', k = k' }
-- | PP-fun
preprocess cfg@(Config { k = Fun f (ArrowTy (TyVar st) t) k' }) =
let psi' = addToPsi f (ArrowTy (findInPhi st (phi cfg)) t) (psi cfg)
in preprocess $ cfg { psi = psi', k = k' }
-- | PP-inst
preprocess cfg@(Config { k = TypeOf x t }) =
let k' = findInPsi x (psi cfg) :=: t
in preprocess $ cfg { k = k' }
-- | PP-eq
preprocess cfg@(Config { k = k'@(t1 :=: t2), kE }) =
preprocess $ cfg { k = T, kE = k':kE }
-- | PP-has
preprocess cfg@(Config { k = k'@(Has _ _), kF }) =
preprocess $ cfg { k = T, kF = k':kF }
-- | PP-iq
preprocess cfg@(Config { k = k'@(t1 :<=: t2), kI } ) =
preprocess $ cfg { k = T, kI = k':kI }
-- | PP-end
preprocess cfg@(Config { k = T }) =
if (not trace_PP)
then cfg
else trace (showConfig cfg ++ "\n") cfg
trace_PP = False
-----------------------------------
-- Solver: 1st unification round --
-----------------------------------
trace_U = False
unifyEq :: Config -> Config
-- | UE-base
unifyEq cfg@(Config { kE = k@(t1 :=: t2):kE_ }) =
let s = uC t1 t2
phi' = foreachValue s (phi cfg)
psi' = foreachValue s (psi cfg)
theta' = foreachValue s (theta cfg)
kE' = applyMany s kE_
kF' = applyMany s (kF cfg)
kI' = applyMany s (kI cfg)
cfg' = cfg { phi = phi',
psi = psi',
theta = theta',
kE = kE',
kF = kF',
kI = kI' }
rw = unifyEq (satisfyC_ (cfg, cfg') k)
in if (not trace_U)
then rw
else trace("uC: " ++ show (ppK s) ++
"\n≡' " ++ show (ppK kE') ++
"\n≤' " ++ show (ppK kI') ++ "\n") rw
-- | UE-end
unifyEq cfg@(Config { kE = [] }) = cfg
-----------------------------------
-- Solver: 2nd unification round --
-----------------------------------
splitOrderLift :: Config -> Config
-- | SOL
splitOrderLift cfg =
let (kI', kW') = splitWob ((kI cfg) ++ [B]) []
kI'' = orderSub (kI' ++ [B]) []
kI''' = liftSub(kI'' ++ [B]) []
in cfg { kI = kI''', kW = kW'}
unifyIq :: Config -> Config
-- | UI-base
unifyIq cfg@(Config {
kI = k@(t1 :<=: t2):kI_ }) =
let s = uS t1 t2 Relax
phi' = foreachValue s (phi cfg)
psi' = foreachValue s (psi cfg)
theta' = foreachValue s (theta cfg)
kI' = applyMany s kI_
kF' = applyMany s (kF cfg)
kW' = applyMany s (kW cfg)
(kI'', kW'') = splitWob (kI' ++ kW' ++ [B]) []
kI''' = orderSub (kI'' ++ [B]) []
kI'''' = liftSub (kI''' ++ [B]) []
cfg' = cfg { phi = phi',
psi = psi',
theta = theta',
kI = kI'''',
kF = kF',
kW = kW'' }
rw = unifyIq (satisfyC_ (cfg, cfg') k)
in if (not trace_U)
then rw
else trace("uS: " ++ show (ppK s) ++
"\n≤' " ++ show (ppK kI') ++
"\n≤'' " ++ show (ppK kI'') ++
"\n≤''' " ++ show (ppK kI''') ++
"\n≤'''' " ++ show (ppK kI'''') ++ "\n") rw
-- | UI-end
unifyIq cfg = cfg
unifyWb :: Config -> Config
-- | UW-base
unifyWb cfg@(Config { kI = k@(t1 :<=: t2):kI_ }) =
let s = uS t1 t2 Relax
phi' = foreachValue s (phi cfg)
psi' = foreachValue s (psi cfg)
theta' = foreachValue s (theta cfg)
kI_' = applyMany s kI_
kF' = applyMany s (kF cfg)
cfg' = cfg { phi = phi',
psi = psi',
theta = theta',
kI = kI_',
kF = kF' }
rw = unifyWb (satisfyC_ (cfg, cfg') k)
in if (not trace_U)
then rw
else trace("uS (wobbly): " ++ show (ppK s) ++ "\n... " ++ show (ppK kI_') ++ "\n") rw
-- | UW-end
unifyWb cfg = cfg
----------------------------------
-- Splitting, ordering, lifting --
----------------------------------
-- | Split wobbly relations.
splitWob :: [K] -> [K] -> ([K], [K])
splitWob (w@((TyVar _) :<=: (TyVar _)):k) kW =
splitWob k (w:kW)
splitWob (nw@(t1 :<=: t2):k) kW =
splitWob (k ++ [nw]) kW
splitWob (B:k) kW =
(k, kW)
-- | Order inequality constraints.
orderSub :: [K] -> [K] -> [K]
orderSub ((t1@(PtrTy (ConstTy _)) :<=: t2):kW) kS =
orderSub kW ((t1 :<=: t2):kS)
orderSub ((t1 :<=: t2@(PtrTy _)):kW) kS =
orderSub kW ((t1 :<=: t2):kS)
orderSub ((t1@DoubleTy :<=: t2):kW) kS =
orderSub kW ((t1 :<=: t2):kS)
orderSub ((t1 :<=: t2@IntTy):kW) kS =
orderSub kW ((t1 :<=: t2):kS)
orderSub ((t1 :<=: t2):kW) kS =
orderSub (kW ++ [t1 :<=: t2]) kS
orderSub (B:kW) kS =
kS ++ kW
-- | Detect presence of top type.
liftSub :: [K] -> [K] -> [K]
liftSub (k1@((PtrTy t1) :<=: t2@(PtrTy (TyVar (Stamp n)))):
k2@((PtrTy t1') :<=: t2'@(PtrTy (TyVar (Stamp n')))):k) kn
| n == n'
&& ((unqualTy t1) /= (unqualTy t1'))
&& (isGround t1)
&& (isGround t1') =
-- Check t1 only, since `const' pointers (when existing) appear first.
let t = case t1 of
ConstTy _ -> (PtrTy (ConstTy VoidTy))
_ -> PtrTy VoidTy
in liftSub k (kn ++ ((t :<=: t2):k1:[k2]))
| otherwise = liftSub (k2:k) (kn ++ [k1])
liftSub (k1@( _ :<=: _):k) kn =
liftSub k (kn ++ [k1])
liftSub (B:k) kn =
kn ++ k
-- | Ensure unqualified type.
unqualTy :: Type -> Type
unqualTy (ConstTy t) = t
unqualTy t = t
------------------------------
-- Membership normalization --
------------------------------
-- | SH
sortHas :: Config -> Config
sortHas cfg =
let criteria k1@(Has t1 (Decl _ x1)) k2@(Has t2 (Decl _ x2))
| t1 == t2 = compare x1 x2
| otherwise = compare t1 t2
in cfg { kF = sortBy criteria (kF cfg) }
-- | MN-join
normFlds :: Config -> Config
normFlds cfg@(Config {
kE,
kF = h@(Has t1 (Decl ft1 x1)):kF_@(Has t2 (Decl ft2 x2):_) }) =
let kE' = if (t1 == t2) && (x1 == x2)
then (ft1 :=: ft2):kE
else kE
in normFlds cfg { kE = kE', kF = kF_ ++ [h] }
-- | MN-skip
normFlds cfg@(Config { kF = h@(Has _ _):B:kF_ }) =
cfg { kF = kF_ ++ [h] }
-- | MN-nfld
normFlds cfg@(Config { kE = [], kF = [B] }) =
cfg { kF = [] }
-------------------------------------
-- Convergence of field membership --
-------------------------------------
-- | Convergence of has constraints
converge :: Config -> IO Config
converge cfg = do
let cfg' = sortHas cfg
debug "sort-membership" (showConfig cfg')
let cfg'' = normFlds cfg' { kF = (kF cfg') ++ [B] }
debug "equalize-fields" (showConfig cfg'')
let cfg''' = unifyEq cfg''
debug "unify-fields" (showConfig cfg''')
if (kF cfg'') == (kF cfg''')
then return $ cfg'''
else converge cfg'''
--------------------------------
-- Solver: record composition --
--------------------------------
composeRecs :: Config -> Config
-- | RC-inst
composeRecs cfg@(Config {
phi,
psi,
theta,
kF = k@(Has t@(TyVar st@(Stamp n)) d):kF_ }) =
let x = Ident $ "TYPE_" ++ (show n)
t = NamedTy x
s = st :-> t
theta' = (addToTheta (hat t) (RecTy [d] x) theta)
in composeRecs cfg { phi = apply s phi,
psi = apply s psi,
theta = apply s theta',
kF = apply s kF_ }
-- | RC-upd
composeRecs cfg@(Config {
theta,
kF = k@(Has t@(NamedTy _) d):kF_ }) =
case findInTheta (hat t) theta of
r@(RecTy dl x) ->
let r' = if (elem d dl) then r else (RecTy (d:dl) x)
in composeRecs cfg { kF = kF_,
theta = getsUpdate (hat t) r' theta }
_ -> error $ "can't recognize record " ++ show (ppK k)
-- | RC-end
composeRecs cfg = cfg
-- | Update the type id to type mapping of Theta.
getsUpdate :: TypeId -> Type -> Theta -> Theta
getsUpdate tid t theta =
case Map.lookup tid theta of
Just _ -> Map.alter (\_ -> Just t) tid theta
_ -> error $ show (ppK tid) ++ " can't be found in Θ for update\n"
------------------------------
-- Solver: de-orphanization --
------------------------------
deorph :: Config -> Config
-- | DO
deorph cfg@Config { phi, psi, theta } =
cfg { phi = phi', psi = psi', theta = theta' }
where
bind ((tid, TyVar st@(Stamp n)):l) =
if "struct " `isPrefixOf` (_id tid)
then (st :-> (RecTy [Decl IntTy (Ident "dummy")] (Ident (_id tid)))):(bind l)
else (st :-> NamedTy (Ident "int/*orphan*/ ")):(bind l)
bind (_:l) = bind l
bind [] = []
s = bind (Map.toList theta)
phi' = foreachValue s phi
psi' = foreachValue s psi
theta' = foreachValue s theta
---------------------
-- Complete solver --
---------------------
solveConstraints :: K -> Config -> IO Config
solveConstraints k cfg = do
let cfgPP = preprocess cfg
debug "preprocessing" (showConfig cfgPP)
let cfgUE = unifyEq cfgPP
debug "unify-equivalences" (showConfig cfgUE)
let cfgSOL = splitOrderLift cfgUE
debug "split-order-lift" (showConfig cfgSOL)
let cfgUI = unifyIq cfgSOL
debug "unify-inequalities" (showConfig cfgUI)
let cfgUP = unifyWb cfgUI { kI = (kW cfgUI), kW = [] }
debug "unify-wobbly" (showConfig cfgUP)
cfgUF <- converge cfgUP
let cfgCR = composeRecs cfgUF
debug "compose-records" (showConfig cfgCR)
let cfgOR = deorph cfgCR
debug "deorph" (showConfig cfgOR)
return cfgOR
debug msg content = do
putStrLn $ "\n<<< " ++ msg ++ " >>>\n" ++ content
writeFile (msg ++ ".log") content
------------------
-- Typing rules --
------------------
type Gamma = Map Ident Type
verifyTyping :: Config -> Gamma -> Prog -> [Type]
verifyTyping c g p = typeProg c g p
-- | TCPrg
typeProg :: Config -> Gamma -> Prog -> [Type]
typeProg c gam (Prog _ fl) =
foldr (\f acc -> (typeFunDef c gam f):acc) [] fl
-- | TCFun
typeFunDef :: Config -> Gamma -> FunDef -> Type
typeFunDef c gam (FunDef rt f dl s) =
typeParam c gam dl s rt
-- | TCPar
typeParam :: Config -> Gamma -> [Decl] -> [Stmt] -> Type -> Type
typeParam c gam [] s rt = typeStmt c gam s rt
typeParam c gam ((Decl t x):dl) s rt =
let t' = findInPsi x (psi c)
gam' = addToGamma x t' gam
in typeParam c gam' dl s rt
-- Type checking signature for statements.
typeStmt :: Config -> Gamma -> [Stmt] -> Type -> Type
-- | TCDcl
typeStmt c gam ((DeclStmt (Decl t x)):sl) rt =
let t' = findInPsi x (psi c)
gam' = addToGamma x t' gam
in typeStmt c gam' sl rt
-- | TCExp
typeStmt c gam ((ExprStmt e):sl) rt =
let t = typeExpr c gam e
in if (isGround t)
then typeStmt c gam sl rt
else error $ "expected ground type " ++ show (ppC t)
++ " for expression " ++ show e
-- | TCRetZr
typeStmt c gam ((RetStmt (NumLit (IntLit 0))):[]) rt =
let rt' = (findInTheta (hat rt) (theta c))
in if isScaTy rt'
then rt'
else error $ "0 doesn't type with " ++ show (ppC rt') ++ " as return"
-- | TCRet
typeStmt c gam ((RetStmt e):[]) rt =
let t = typeExpr c gam e
rt' = (findInTheta (hat rt) (theta c))
in if isSubTy' t rt'
then rt'
else error $ "return doesn't type "
++ show (ppC rt) ++ "::" ++ show (ppC rt')
-- Type checking signature for expressions.
typeExpr :: Config -> Gamma -> Expr -> Type
-- | TCLit
typeExpr _ _ (NumLit l) = rho l
-- | TCVar
typeExpr c gam e@(Var x) =
let t = findInPsi x (psi c)
in if (t == findInGamma x gam && isGround t)
then t
else error $ "Γ and C type mismatch " ++ (show e)
-- | TCFld
typeExpr c gam (FldAcc e x) =
let pt = typeExpr c gam e
in case pt of
PtrTy rt ->
case findInTheta (hat rt) (theta c) of
t@(RecTy dl _) -> field x t
_ -> error $ "expected record in Θ " ++ show (ppC rt)
_ -> error $ "expected " ++ show (ppC pt) ++ " typed as pointer"
-- | TCDrf
typeExpr c gam (Deref e) =
let t = typeExpr c gam e
in case t of
PtrTy t' -> t'
_ -> error $ "dereference doesn't type check"
-- | TCAdr
typeExpr c gam (AddrOf e) =
PtrTy (typeExpr c gam e)
-- | TCAsgZr
typeExpr c gam (BinExpr Assign e1 (NumLit (IntLit 0))) =
let lht = typeExpr c gam e1
in if isScaTy lht
then lht
else error $ "assignment to 0 doesn't type check"
-- | TCAsg
typeExpr c gam (BinExpr Assign e1 e2) =
let lht = typeExpr c gam e1
rht = typeExpr c gam e2
in if isSubTy' rht lht
then rht
else error $ "assignment doesn't type check"
-- | TCAdd
typeExpr c gam (BinExpr Add e1 e2) =
let lht = typeExpr c gam e1
lht' = case lht of {(ConstTy t) -> t; _ -> lht}
rht = typeExpr c gam e2
rht' = case rht of {(ConstTy t) -> t; _ -> rht}
in case lht' of
PtrTy _ -> if rht' == IntTy
then lht
else error $ "expected int as RHS of +"
_ -> case rht' of
PtrTy _ -> if lht' == IntTy
then rht
else error $ "expected int as LHS of +"
_ -> if isAriTy lht && isAriTy rht
then highRank lht rht
else error $ "incompatible types in + (Add)"
-- | TCAnd
typeExpr c gam (BinExpr And e1 e2) =
let lht = typeExpr c gam e1
rht = typeExpr c gam e2
in if isScaTy lht && isScaTy rht
then IntTy
else error $ "incompatible types in && (logical AND)"
-- | TCOr
typeExpr c gam (BinExpr Or e1 e2) =
let lht = typeExpr c gam e1
rht = typeExpr c gam e2
in if isScaTy lht && isScaTy rht
then IntTy
else error $ "incompatible types in || (logical OR)"
-- | TCDiv
typeExpr c gam (BinExpr Divide e1 e2) =
let lht = typeExpr c gam e1
rht = typeExpr c gam e2
in if isAriTy lht && isAriTy rht
then highRank lht rht
else error $ "incompatible types in / (division)"
-- | TCMul
typeExpr c gam (BinExpr Multiply e1 e2) =
let lht = typeExpr c gam e1
rht = typeExpr c gam e2
in if isAriTy lht && isAriTy rht
then highRank lht rht
else error $ "incompatible types in * (multiplication)"
-- | Find, in Gamma, the type mapped to an identifier.
findInGamma :: Ident -> Gamma -> Type
findInGamma x gam =
case Map.lookup x gam of
Just t -> t
_ -> error $
"no τ for identifier " ++ show (ppK x) ++ " in Γ\n"
-- | Add, to Gamma, an identifier and its mapped type.
addToGamma :: Ident -> Type -> Gamma -> Gamma
addToGamma x t gam =
case Map.lookup x gam of
Just t -> error $
show (ppK t) ++ ", indexed by " ++
show (ppK x) ++ ", is already in Γ\n"
_ -> Map.insert x t gam
--------------------
-- Typing support --
--------------------
-- | Return whether the type is an arithmetic type.
isAriTy :: Type -> Bool
isAriTy (ConstTy t) = isAriTy t
isAriTy IntTy = True
isAriTy DoubleTy = True
isAriTy _ = error $ "expected arithmetic type"
-- | Return whether the type is scalar.
isScaTy :: Type -> Bool
isScaTy (ConstTy t) = isScaTy t
isScaTy (PtrTy _) = True
isScaTy IntTy = True
isScaTy DoubleTy = True
isScaTy _ = error $ "expected scalar type"
-- | Return whether the type is a pointer.
isPtrTy :: Type -> Bool
isPtrTy (ConstTy t) = isPtrTy t
isPtrTy (PtrTy _) = True
isPtrTy _ = error $ "expected pointer type"
-- | Return the highest ranked of 2 arithmetic types.
highRank :: Type -> Type -> Type
highRank t1 t2 =
case t1 of
(ConstTy t1') -> highRank t1' t2
IntTy -> t2
_ -> t1
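-- For illustration: highRank IntTy DoubleTy == DoubleTy,
-- highRank DoubleTy IntTy == DoubleTy, and constness on the first argument
-- is looked through, e.g. highRank (ConstTy IntTy) DoubleTy == DoubleTy.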
-------------------------------------
-- Pretty printing for constraints --
-------------------------------------
class PrettyK a where
ppK :: a -> PP.Doc
instance PrettyK a => PrettyK [a] where
ppK v = foldr (\x acc -> ppK x PP.<+> PP.text " " PP.<+> acc )
PP.empty v
instance PrettyK Ident where
ppK = PP.text . _x
instance PrettyK TypeId where
ppK = PP.text . _id
instance PrettyK Type where
ppK IntTy = PP.text "int"
ppK DoubleTy = PP.text "double"
ppK VoidTy = PP.text "void"
ppK (PtrTy t) = ppK t PP.<> PP.text "*"
ppK (ConstTy t) = PP.text "const " PP.<> ppK t
ppK (ArrowTy rt ps) =
PP.text " (" PP.<>
(PP.hcat $ PP.punctuate (PP.text ", ") (map ppK ps)) PP.<>
PP.text " )" PP.<>
PP.text "⟶ " PP.<> ppK rt
ppK (RecTy flds x) = PP.char '@' PP.<> ppK x PP.<> PP.char '@'
ppK (NamedTy x) = ppK x
ppK (TyVar (Stamp n)) = PP.text "α" PP.<> PP.text (show n)
instance PrettyK Subst where
ppK Trivial = PP.text "[]"
ppK (st :-> t) = ppK st PP.<> PP.text "->" PP.<> ppK t
instance PrettyK K where
ppK T = PP.text "⊤"
ppK B = PP.text "⊥"
ppK (k1 :&: k2) = ppK k1 PP.<+> PP.text " ^ " PP.<+> ppK k2
ppK (Exists t k) =
PP.text "∃" PP.<>
(foldl (\acc t@(TyVar _) -> acc PP.<> ppK t) PP.empty t) PP.<>
PP.text ". " PP.<> ppK k
ppK (Def x t k) =
PP.text "def" PP.<+> ppK x PP.<> PP.colon PP.<> ppK t PP.<+>
PP.text "in" PP.<+> ppK k
ppK (Fun f t@(ArrowTy _ _) k) =
PP.text "fun" PP.<+> ppK f PP.<> PP.colon PP.<> ppK t PP.<+>
PP.text "in " PP.<+> ppK k
ppK (TypeOf x t) =
PP.text "typeof(" PP.<> ppK x PP.<> PP.text ","
PP.<> ppK t PP.<> PP.char ')'
ppK (Syn t1 t2) =
PP.text "syn " PP.<> ppK t1 PP.<> PP.text " as " PP.<> ppK t2
ppK (Has t fld) =
PP.text "has" PP.<>
PP.parens (ppK t PP.<> PP.comma PP.<+>
ppK (_fx fld) PP.<> PP.colon PP.<>
ppK (_ft fld))
ppK (t1 :=: t2) = ppK t1 PP.<> PP.text "≡" PP.<> ppK t2
ppK (t1 :<=: t2) = ppK t1 PP.<> PP.text "≤" PP.<> ppK t2
instance PrettyK Stamp where
ppK (Stamp n) = PP.text "α" PP.<> PP.text (show n)
--------------------------------
-- Pretty printing of lattice --
--------------------------------
class PrettyM a where
ppM :: a -> PP.Doc
instance PrettyM ShapeKey where
ppM U = PP.text "<Undefined>"
ppM S = PP.text "<Scalar>"
ppM P = PP.text "<Pointer>"
ppM I = PP.text "<Integral>"
ppM FP = PP.text "<FloatingPoint>"
ppM N = PP.text "<Numeric>"
instance PrettyM SyntaxRole where
ppM (ValRole e) = ppM e
ppM FunRole = PP.text "... ⟶ "
instance PrettyM Expr where
ppM (NumLit v) = PP.text $ show v
ppM (Var x) = PP.text (_x x)
ppM (FldAcc e x) = ppM e PP.<> PP.text "->" PP.<> PP.text (_x x)
ppM (Deref e) = PP.char '*' PP.<> ppM e
ppM (AddrOf e) = PP.char '&' PP.<> ppM e
ppM (BinExpr Add e1 e2) = ppM e1 PP.<> PP.char '+' PP.<> ppM e2
ppM (BinExpr Divide e1 e2) = ppM e1 PP.<> PP.char '/' PP.<> ppM e2
ppM (BinExpr Multiply e1 e2) = ppM e1 PP.<> PP.char '*' PP.<> ppM e2
ppM (BinExpr And e1 e2) = ppM e1 PP.<> PP.text "&&" PP.<> ppM e2
ppM (BinExpr Or e1 e2) = ppM e1 PP.<> PP.text "||" PP.<> ppM e2
ppM (BinExpr Assign e1 e2) = ppM e1 PP.<> PP.char '=' PP.<> ppM e2
instance PrettyM M where
ppM m = Map.foldrWithKey (\k v acc -> ppM k PP.<+> ppM (fst v) PP.$$ acc)
PP.empty
(_shapes m)
----------------------------
-- Pretty printing of AST --
----------------------------
class PrettyAST a where
fmt :: Int -> a -> PP.Doc
instance PrettyAST a => PrettyAST [a] where
fmt _ [] = PP.empty
fmt n (s:sl) =
fmt n s PP.<> (foldr (\s d -> fmt n s PP.<> d) PP.empty sl)
instance PrettyAST Lit where
fmt _ (IntLit l) =
PP.char '`' PP.<>
PP.text (show l) PP.<>
PP.char '\''
fmt _ (DoubleLit l) =
PP.char '`' PP.<>
PP.text (show l) PP.<>
PP.char '\''
instance PrettyAST Expr where
fmt n e@(NumLit l) = indent n PP.<> PP.text "NumLit" PP.<+> fmt 0 l
fmt n e@(Var x) =
indent n PP.<>
PP.text "Var" PP.<+>
PP.char '`' PP.<>
PP.text (_x x) PP.<>
PP.char '\''
fmt n e@(FldAcc x t) = indent n PP.<> PP.text "FieldAccess"
fmt n e@(Deref e1) = indent n PP.<> PP.text "Deref" PP.<> fmt (n + 1) e1
fmt n e@(AddrOf e1) = indent n PP.<> PP.text "AddrOf" PP.<> fmt (n + 1) e1
fmt n e@(BinExpr op e1 e2) =
indent n PP.<>
PP.text "BinExpr" PP.<+>
PP.char '`' PP.<> PP.text (show op) PP.<> PP.char '\'' PP.<+>
fmt (n + 1) e1 PP.<>
fmt (n + 1) e2
instance PrettyAST Stmt where
fmt n (DeclStmt d) = indent n PP.<> PP.text "DeclStmt" PP.<> fmt (n + 1) d
fmt n (ExprStmt e) = indent n PP.<> PP.text "ExprStmt" PP.<> fmt (n + 1) e
fmt n (RetStmt e) = indent n PP.<> PP.text "RetStmt" PP.<> fmt (n + 1) e
instance PrettyAST Decl where
fmt n (Decl _ x) =
indent n PP.<>
PP.text "Decl" PP.<+>
PP.char '`' PP.<>
PP.text (_x x) PP.<>
PP.char '\''
instance PrettyAST FunDef where
fmt n f@(FunDef _ _ ps sl) =
PP.text "Function" PP.<>
(foldr (\p d -> fmt (n + 1) p PP.<> d) PP.empty ps) PP.<>
fmt (n + 1) sl
instance PrettyAST Prog where
fmt n p@(Prog _ fs) =
(foldr (\f d -> fmt (n + 1) f PP.<> d) PP.empty fs)
indent :: Int -> PP.Doc
indent n = PP.char '\n' PP.<> PP.text (replicate n ' ')
---------------------------------
-- Pretty printing for C types --
---------------------------------
class PrettyC a where
ppC :: a -> PP.Doc
instance PrettyC TypeId where
ppC tn = PP.text $ _id tn
instance PrettyC Ident where
ppC x = PP.text $ _x x
instance PrettyC Decl where
ppC (Decl t x) = PP.space PP.<> ppC t PP.<+> ppC x PP.<> PP.semi
instance PrettyC Type where
ppC IntTy = PP.text "int"
ppC DoubleTy = PP.text "double"
ppC VoidTy = PP.text "void"
ppC (PtrTy t) = ppC t PP.<> PP.char '*'
ppC (ConstTy t) = PP.text "const" PP.<+> ppC t
ppC (ArrowTy rt pt) =
ppC rt PP.<> PP.text "(*)" PP.<>
PP.parens (PP.hcat $ PP.punctuate (PP.text ", ") (map ppC pt))
ppC (RecTy fld x) =
let prefix = if "struct " `isPrefixOf` (_x x)
then ""
else "struct "
in PP.text prefix PP.<> PP.text (_x x) PP.<+>
PP.braces (PP.hcat $ (map ppC fld))
ppC (NamedTy x) = PP.text $ _x x
ppC (TyVar (Stamp n)) = PP.text "α" PP.<> PP.text (show n)
formatMap m = show $
Map.foldrWithKey
(\k v acc -> PP.lbrace PP.<> PP.space PP.<> ppK k PP.<>
PP.comma PP.<> PP.space PP.<> ppC v PP.<>
PP.rbrace PP.<> PP.comma PP.<> PP.space PP.<> acc)
PP.empty m
formatPhiPsiTheta (phi, psi, theta) =
PP.text (" Φ = { " ++ formatMap phi) PP.<+> PP.text "}\n" PP.<>
PP.text (" ψ = { " ++ formatMap psi) PP.<+> PP.text "}\n" PP.<>
PP.text (" Θ = { " ++ formatMap theta) PP.<+> PP.text "}\n"
showConfig cfg@(Config { phi, psi, theta, k, kE, kF, kI, kW }) =
show $
formatPhiPsiTheta (phi, psi, theta) PP.<>
PP.text " [Ke] = " PP.<> ppK kE PP.<+> PP.text "\n" PP.<>
PP.text " [Kf] = " PP.<> ppK kF PP.<+> PP.text "\n" PP.<>
PP.text " [Ki] = " PP.<> ppK kI PP.<+> PP.text "\n" PP.<>
PP.text " [Kw] = " PP.<> ppK kW
--------------------
-- Parser for muC --
--------------------
langDef = emptyDef {
Token.identStart = letter,
Token.identLetter = alphaNum <|> char '_',
Token.reservedNames =
[ "int", "double", "void", "const", "return", "struct", "typedef" ],
Token.reservedOpNames =
[ "*", "/", "+", "&&", "||", "=", "&", "->" ]
}
lexer = Token.makeTokenParser langDef
identifier = Token.identifier lexer
reserved = Token.reserved lexer
reservedOp = Token.reservedOp lexer
parens = Token.parens lexer
braces = Token.braces lexer
integer = Token.integer lexer
float = Token.float lexer
semi = Token.semi lexer
whiteSpace = Token.whiteSpace lexer
comma = Token.comma lexer
symbol = Token.symbol lexer
parseSource :: String -> Either String Prog
parseSource = either (Left . show) Right . parse progParser ""
progParser :: Parser Prog
progParser = Prog <$> many tydefParser <*> many funParser
tydefParser :: Parser TypeDef
tydefParser = TypeDef <$> (reserved "typedef" *> qualPtrTyParser) <*> tyParser <* semi
funParser :: Parser FunDef
funParser = FunDef
<$> qualPtrTyParser
<*> identParser
<*> parens (declParser `sepBy` comma)
<*> stmtListParser
qualPtrTyParser :: Parser Type
qualPtrTyParser = f <$> tyParser <*> (optionMaybe (reserved "const"))
where
f t Nothing = t
f t _ = ConstTy t
tyParser :: Parser Type
tyParser = f <$> nonPtrTyParser <*> (many starParser)
where
f t ts = foldr (\_ ac -> PtrTy ac) t ts
nonPtrTyParser :: Parser Type
nonPtrTyParser = try (qualTyParser intTyParser)
<|> try (qualTyParser fpTyParser)
<|> try (qualTyParser namedTyParser)
<|> qualTyParser recTyParser
starParser :: Parser ()
starParser = () <$ symbol "*"
intTyParser :: Parser Type
intTyParser = IntTy <$ reserved "int"
fpTyParser :: Parser Type
fpTyParser = DoubleTy <$ reserved "double"
voidTyParser :: Parser Type
voidTyParser = VoidTy <$ reserved "void"
namedTyParser :: Parser Type
namedTyParser = f <$> (optionMaybe (reserved "struct")) <*> identParser
where
f Nothing n = NamedTy n
f _ n = NamedTy (Ident ("struct " ++ (_x n)))
qualTyParser :: Parser Type -> Parser Type
qualTyParser p = f <$> (optionMaybe (reserved "const")) <*> p
where
f Nothing t = t
f _ t = ConstTy t
recTyParser :: Parser Type
recTyParser = RecTy
<$> braces (declParser `sepBy` semi)
<*> identParser
declParser :: Parser Decl
declParser = Decl <$> qualPtrTyParser <*> identParser
exprParser :: Parser Expr
exprParser = buildExpressionParser table baseExprParser
where
table =
[ [ Prefix (reservedOp "*" >> return Deref) ],
[ Prefix (reservedOp "&" >> return AddrOf) ],
[ Infix (reservedOp "/" >> return (BinExpr Divide)) AssocLeft ],
[ Infix (reservedOp "*" >> return (BinExpr Multiply)) AssocLeft ],
[ Infix (reservedOp "+" >> return (BinExpr Add)) AssocLeft ],
[ Infix (reservedOp "&&" >> return (BinExpr And)) AssocLeft ],
[ Infix (reservedOp "||" >> return (BinExpr Or)) AssocLeft ],
[ Infix (reservedOp "=" >> return (BinExpr Assign)) AssocLeft ] ]
baseExprParser :: Parser Expr
baseExprParser = f <$> fldAccParser <*> (many (reservedOp "->" *> identParser))
where
f fld xs = foldr (\ x acc -> FldAcc acc x) fld xs
fldAccParser :: Parser Expr
fldAccParser = f <$> primExprParser <*> option id fldAcc
where
f x expr = expr x
fldAcc = flip FldAcc <$> (reservedOp "->" *> identParser)
intParser :: Parser Lit
intParser = IntLit <$> (fromInteger <$> integer)
fpParser :: Parser Lit
fpParser = DoubleLit <$> float
primExprParser :: Parser Expr
primExprParser = NumLit <$> (try fpParser <|> intParser)
<|> Var <$> identParser
stmtParser :: Parser Stmt
stmtParser = RetStmt <$> (reserved "return" *> exprParser)
<|> try (DeclStmt <$> declParser)
<|> ExprStmt <$> exprParser
stmtListParser :: Parser [Stmt]
stmtListParser = braces (stmtParser `endBy` semi)
identParser :: Parser Ident
identParser = Ident <$> identifier
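-- A small usage sketch of the parser entry point (hypothetical input, shown
-- only to illustrate the shape of the result; the AST is abbreviated):
--
-- >>> parseSource "int f(int x) { return x + 1; }"
-- Right (Prog [] [FunDef IntTy (Ident "f") [Decl IntTy (Ident "x")]
--                        [RetStmt (BinExpr Add (Var (Ident "x")) (NumLit (IntLit 1)))]])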
------------------
-- Rewrite to C --
------------------
-- | Rewrite inferred types to their C form. This function is simplified.
-- Check PsycheC for a complete implementation.
rewriteInC :: Map TypeId Type -> String
rewriteInC theta =
let
p (tid@(TypeId id), RecTy _ _ )
| "struct " `isPrefixOf` id = False
| otherwise = True
p (tid@(TypeId id), t)
| "const " `isPrefixOf` id = False
| "*" `isSuffixOf` id = False
| tid == (hat t) = False -- Duplicate: self-definition.
| otherwise = True
filtered = filter p (Map.toList theta)
print (t1, t2) acc = "typedef " ++
PP.render (ppC t2) ++ " " ++
PP.render (ppC t1) ++ ";\n" ++ acc
tydefs = foldr print "" filtered
in
(rewriteInC' theta) ++ tydefs
rewriteInC' :: Map TypeId Type -> String
rewriteInC' theta =
let print' (RecTy _ (Ident x)) = "typedef struct " ++ x ++ " " ++ x ++ ";\n"
print' (PtrTy t) = print' t
print' (ConstTy t) = print' t
print' _ = ""
print (tid@(TypeId id), t) acc
| "struct " `isPrefixOf` id = PP.render (ppC t) ++ ";\n"
| otherwise = print' t ++ acc
in foldr print "" (Map.toList theta)
|
ltcmelo/psychec
|
formalism/muC.hs
|
bsd-3-clause
| 56,627 | 0 | 22 | 15,749 | 23,940 | 12,248 | 11,692 | 1,444 | 18 |
{-# LANGUAGE TypeOperators, TypeFamilies, UndecidableInstances, CPP
, FlexibleContexts, DeriveFunctor, StandaloneDeriving
, GADTs
#-}
{-# OPTIONS_GHC -Wall -fno-warn-orphans #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-} -- TEMP
-- {-# OPTIONS_GHC -fno-warn-unused-imports #-} -- TEMP
----------------------------------------------------------------------
-- |
-- Module : FunctorCombo.StrictMemo
-- Copyright : (c) Conal Elliott 2010-2012
-- License : BSD3
--
-- Maintainer : [email protected]
-- Stability : experimental
--
-- Functor-based memo tries (strict for now)
--
----------------------------------------------------------------------
module FunctorCombo.StrictMemo
(
HasTrie(..),(:->:),(!),memo,memo2,memo3,idTrie
, onUntrie, onUntrie2
, TrieTree(..)
) where
import Data.Functor ((<$>))
import Data.Foldable (Foldable(..),toList)
import Data.Traversable (Traversable(..))
import Control.Applicative (Applicative(..),liftA2)
-- import Control.Arrow (first)
-- import Data.Tree
import qualified Data.IntTrie as IT -- data-inttrie
import Data.Tree
-- import Control.Compose (result,(<~)) -- TypeCompose
import TypeUnary.Vec (Z,S,Vec(..),IsNat(..),Nat(..))
-- import FunctorCombo.Strict
import FunctorCombo.Functor
import FunctorCombo.Pair
import FunctorCombo.Regular
{--------------------------------------------------------------------
Class
--------------------------------------------------------------------}
infixr 0 :->:
-- | Memo trie from k to v
type k :->: v = Trie k v
#define FunctorSuperClass
#ifdef FunctorSuperClass
#define HasTrieContext(Ty) Functor (Trie(Ty))
#define HF(Ty) HasTrie (Ty)
#else
#define HasTrieContext(Ty) ()
#define HF(Ty) HasTrie (Ty), Functor (Trie (Ty))
#endif
-- | Domain types with associated memo tries
class HasTrieContext(k) => HasTrie k where
-- | Representation of trie with domain type @a@
type Trie k :: * -> *
-- | Create the trie for the entire domain of a function
trie :: (k -> v) -> (k :->: v)
-- | Convert a trie to a function, i.e., access a field of the trie
untrie :: (k :->: v) -> (k -> v)
-- -- | List the trie elements. Order of keys (@:: k@) is always the same.
-- enumerate :: (k :->: v) -> [(k,v)]
-- | Indexing. Synonym for 'untrie'.
(!) :: HasTrie k => (k :->: v) -> k -> v
(!) = untrie
-- -- | Domain elements of a trie
-- domain :: HasTrie a => [a]
-- domain = map fst (enumerate (trie (const oops)))
-- where
-- oops = error "Data.MemoTrie.domain: range element evaluated."
-- Identity trie. To do: make idTrie the method, and define trie via idTrie.
idTrie :: HasTrie k => k :->: k
idTrie = trie id
-- | List the trie elements. Order of keys (@:: k@) is always the same.
enumerate :: (Foldable (Trie k), HasTrie k) => (k :->: v) -> [(k,v)]
enumerate = zip (toList idTrie) . toList
-- TODO: Improve this implementation, using an interface from Edward
-- Kmett. Something about collections with keys, so that I can efficiently
-- implement `(k :->: v) -> (k :->: (k,v))`.
{--------------------------------------------------------------------
Memo functions
--------------------------------------------------------------------}
-- | Trie-based function memoizer
memo :: HasTrie k => Unop (k -> v)
memo = untrie . trie
-- | Memoize a binary function, on its first argument and then on its
-- second. Take care to exploit any partial evaluation.
memo2 :: (HasTrie s,HasTrie t) => Unop (s -> t -> a)
-- | Memoize a ternary function on successive arguments. Take care to
-- exploit any partial evaluation.
memo3 :: (HasTrie r,HasTrie s,HasTrie t) => Unop (r -> s -> t -> a)
-- | Lift a memoizer to work with one more argument.
mup :: HasTrie t => (b -> c) -> (t -> b) -> (t -> c)
mup mem f = memo (mem . f)
memo2 = mup memo
memo3 = mup memo2
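-- A small usage sketch (illustrative only; 'slowSquare' is a hypothetical
-- stand-in for an expensive function, and memoising at Int relies on the
-- HasTrie Int instance given further below via HasTrieIntegral):
--
-- slowSquare :: Int -> Int
-- slowSquare n = n * n -- imagine something costly here
--
-- fastSquare :: Int -> Int
-- fastSquare = memo slowSquare -- repeated arguments are looked up in the trie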
{--------------------------------------------------------------------
Instances
--------------------------------------------------------------------}
instance HasTrie () where
type Trie () = Id
trie f = Id (f ())
untrie (Id v) = \ () -> v
-- enumerate (Id a) = [((),a)]
instance (HasTrie a, HasTrie b) => HasTrie (Either a b) where
type Trie (Either a b) = Trie a :*: Trie b
trie f = trie (f . Left) :*: trie (f . Right)
untrie (ta :*: tb) = untrie ta `either` untrie tb
-- enumerate (ta :*: tb) = enum' Left ta `weave` enum' Right tb
-- enum' :: (HasTrie a) => (a -> a') -> (a :->: b) -> [(a', b)]
-- enum' f = (fmap.first) f . enumerate
weave :: [a] -> [a] -> [a]
[] `weave` as = as
as `weave` [] = as
(a:as) `weave` bs = a : (bs `weave` as)
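-- For illustration, 'weave' interleaves two lists starting with the left
-- one, e.g. [1,3,5] `weave` [2,4] == [1,2,3,4,5].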
instance (HF(a), HasTrie b) => HasTrie (a , b) where
type Trie (a , b) = Trie a :. Trie b
trie f = O (trie (trie . curry f))
-- untrie (O tt) = uncurry (untrie . untrie tt)
untrie (O tt) = uncurry (untrie (fmap untrie tt))
-- With the first form of untrie, I only need HasTrie a, not also
-- Functor (Trie a) in the case of FunctorSuperClass
-- enumerate (O tt) =
-- [ ((a,b),x) | (a,t) <- enumerate tt , (b,x) <- enumerate t ]
#define HasTrieIsomorph(Context,Type,IsoType,toIso,fromIso) \
instance Context => HasTrie (Type) where {\
type Trie (Type) = Trie (IsoType); \
trie f = trie (f . (fromIso)); \
untrie t = untrie t . (toIso); \
}
-- enumerate = (result.fmap.first) (fromIso) enumerate;
-- HasTrieIsomorph( (), Bool, Either () ()
-- , bool (Right ()) (Left ())
-- , either (\ () -> False) (\ () -> True))
instance HasTrie Bool where
type Trie Bool = Pair
trie f = (f False :# f True)
untrie (f :# t) c = if c then t else f
HasTrieIsomorph( (HF(a),HF(b), HasTrie c)
, (a,b,c), ((a,b),c)
, \ (a,b,c) -> ((a,b),c), \ ((a,b),c) -> (a,b,c))
HasTrieIsomorph( (HF(a),HF(b),HF(c), HasTrie d)
, (a,b,c,d), ((a,b,c),d)
, \ (a,b,c,d) -> ((a,b,c),d), \ ((a,b,c),d) -> (a,b,c,d))
-- As well as the functor combinators themselves
HasTrieIsomorph( HasTrie x, Const x a, x, getConst, Const )
HasTrieIsomorph( HasTrie a, Id a, a, unId, Id )
HasTrieIsomorph( ( HF(f a), HasTrie (g a) )
, (f :*: g) a, (f a,g a)
, \ (fa :*: ga) -> (fa,ga), \ (fa,ga) -> (fa :*: ga) )
HasTrieIsomorph( (HasTrie (f a), HasTrie (g a))
, (f :+: g) a, Either (f a) (g a)
, eitherF Left Right, either InL InR )
HasTrieIsomorph( HasTrie (g (f a))
, (g :. f) a, g (f a) , unO, O )
-- newtype ListTrie a v = ListTrie (PF [a] [a] :->: v)
-- instance (HF(a)) => HasTrie [a] where
-- type Trie [a] = ListTrie a
-- trie f = ListTrie (trie (f . wrap))
-- untrie (ListTrie t) = untrie t . unwrap
-- enumerate (ListTrie t) = (result.fmap.first) wrap enumerate $ t
-- deriving instance Functor (Trie a) => Functor (ListTrie a)
-- HasTrieIsomorph( HasTrie (PF ([a]) ([a]) :->: v)
-- , ListTrie a v, PF ([a]) ([a]) :->: v
-- , \ (ListTrie w) -> w, ListTrie )
-- instance HasTrie (PF ([a]) ([a]) :->: v) => HasTrie (ListTrie a v) where
-- type Trie (ListTrie a v) = Trie (PF ([a]) ([a]) :->: v)
-- trie f = trie (f . ListTrie)
-- untrie t = untrie t . \ (ListTrie w) -> w
-- instance (HasTrie (PF ([a]) ([a]) :->: v)) => HasTrie (ListTrie a v) where
-- type Trie (ListTrie a v) = Trie (PF ([a]) ([a]) :->: v)
-- instance (Functor (Trie v), HasTrie (PF ([a]) ([a]) :->: v)) => HasTrie (ListTrie a v) where
-- type Trie (ListTrie a v) = Trie (PF ([a]) ([a]) :->: v)
-- Could not deduce (Functor
-- (Trie (Trie (Const a [a]) (ListTrie a v))))
-- from the context (Functor (Trie v), HasTrie (PF [a] [a] :->: v))
-- arising from the superclasses of an instance declaration
-- Functor (Trie (Trie (Const a [a]) (ListTrie a v)))
-- Functor (Trie (Const a [a] :->: ListTrie a v))
-- Const a [a] :->: ListTrie a v
-- a :->: ListTrie a v
-- instance (Functor (Trie a), Functor (Trie v), HasTrie (PF ([a]) ([a]) :->: v)) => HasTrie (ListTrie a v) where
-- type Trie (ListTrie a v) = Trie (PF ([a]) ([a]) :->: v)
-- Could not deduce (Functor (Trie (Trie a (ListTrie a v)))) ...
-- arising from the superclasses of an instance declaration
-- newtype ListTrie a v = ListTrie (PF [a] [a] :->: v)
-- instance HasTrie a => HasTrie [a] where
-- type Trie [a] = ListTrie a
-- trie f = ListTrie (trie (f . wrap))
-- untrie (ListTrie t) = untrie t . unwrap
-- enumerate (ListTrie t) = (result.fmap.first) wrap enumerate $ t
-- HasTrieIsomorph( HasTrie (PF ([a]) ([a]) :->: v)
-- , ListTrie a v, PF ([a]) ([a]) :->: v
-- , \ (ListTrie w) -> w, ListTrie )
-- deriving instance Functor (Trie a) => Functor (ListTrie a)
-- newtype ListTrie a v = ListTrie (PF ([a]) ([a]) :->: v); \
-- instance HasTrie a => HasTrie ([a]) where { \
-- type Trie ([a]) = ListTrie a; \
-- trie f = ListTrie (trie (f . wrap)); \
-- untrie (ListTrie t) = untrie t . unwrap; \
-- enumerate (ListTrie t) = (result.fmap.first) wrap enumerate t; \
-- }; \
-- HasTrieIsomorph( HasTrie (PF ([a]) ([a]) :->: v) \
-- , ListTrie a v, PF ([a]) ([a]) :->: v \
-- , \ (ListTrie w) -> w, ListTrie )
-- deriving instance Functor (Trie a) => Functor (ListTrie a)
-- Works. Now abstract into a macro
#define HasTrieRegular(Context,Type,TrieType,TrieCon) \
newtype TrieType v = TrieCon (PF (Type) (Type) :->: v); \
instance Context => HasTrie (Type) where { \
type Trie (Type) = TrieType; \
trie f = TrieCon (trie (f . wrap)); \
untrie (TrieCon t) = untrie t . unwrap; \
}; \
HasTrieIsomorph( HasTrie (PF (Type) (Type) :->: v) \
, TrieType v, PF (Type) (Type) :->: v \
, \ (TrieCon w) -> w, TrieCon )
-- enumerate (TrieCon t) = (result.fmap.first) wrap enumerate t;
-- For instance,
-- HasTrieRegular(HasTrie a, [a] , ListTrie a, ListTrie)
-- -- deriving instance Functor (Trie a) => Functor (ListTrie a)
-- HasTrieRegular(HasTrie a, Tree a, TreeTrie a, TreeTrie)
-- -- deriving instance Functor (Trie a) => Functor (TreeTrie a)
-- Simplify a bit with a macro for unary regular types.
-- Make similar defs for binary etc as needed.
#define HasTrieRegular1(TypeCon,TrieCon) \
HasTrieRegular((HF(a)), TypeCon a, TrieCon a, TrieCon); \
deriving instance Functor (Trie a) => Functor (TrieCon a)
HasTrieRegular1([] , ListTrie)
HasTrieRegular1(Tree, TreeTrie)
-- HasTrieIsomorph(Context,Type,IsoType,toIso,fromIso)
-- HasTrieIsomorph( HasTrie (PF [a] [a] :->: v)
-- , ListTrie a v, PF [a] [a] :->: v
-- , \ (ListTrie w) -> w, ListTrie )
-- enumerateEnum :: (Enum k, Num k, HasTrie k) => (k :->: v) -> [(k,v)]
-- enumerateEnum t = [(k, f k) | k <- [0 ..] `weave` [-1, -2 ..]]
-- where
-- f = untrie t
#define HasTrieIntegral(Type) \
instance HasTrie Type where { \
type Trie Type = IT.IntTrie; \
trie = (<$> IT.identity); \
untrie = IT.apply; \
}
-- enumerate = enumerateEnum;
HasTrieIntegral(Int)
HasTrieIntegral(Integer)
-- Memoizing higher-order functions
HasTrieIsomorph((HasTrie a, HasTrie (a :->: b)), a -> b, a :->: b, trie, untrie)
-- -- Convenience Pair functor
-- instance HasTrie a => HasTrie (Pair a) where
-- type Trie (Pair a) = Trie a :. Trie a
-- trie f = O (trie (\ a -> trie (\ b -> f (a :# b))))
-- untrie (O tt) (a :# b) = untrie (untrie tt a) b
HasTrieIsomorph((HF(a))
, Pair a, (a,a)
, \ (a :# a') -> (a,a'), \ (a,a') -> (a :# a'))
{--------------------------------------------------------------------
Misc
--------------------------------------------------------------------}
type Unop a = a -> a
bool :: a -> a -> Bool -> a
bool t e b = if b then t else e
{--------------------------------------------------------------------
Testing
--------------------------------------------------------------------}
fib :: Integer -> Integer
fib m = mfib m
where
mfib = memo fib'
fib' 0 = 0
fib' 1 = 1
fib' n = mfib (n-1) + mfib (n-2)
-- The eta-redex in fib is important to prevent a CAF.
ft1 :: (Bool -> a) -> [a]
ft1 f = [f False, f True]
f1 :: Bool -> Int
f1 False = 0
f1 True = 1
trie1a :: (HF(a)) => (Bool -> a) :->: [a]
trie1a = trie ft1
trie1b :: (HF(a)) => (Bool :->: a) :->: [a]
trie1b = trie1a
trie1c :: (HF(a)) => (Either () () :->: a) :->: [a]
trie1c = trie1a
trie1d :: (HF(a)) => ((Trie () :*: Trie ()) a) :->: [a]
trie1d = trie1a
trie1e :: (HF(a)) => (Trie () a, Trie () a) :->: [a]
trie1e = trie1a
trie1f :: (HF(a)) => (() :->: a, () :->: a) :->: [a]
trie1f = trie1a
trie1g :: (HF(a)) => (a, a) :->: [a]
trie1g = trie1a
trie1h :: (HF(a)) => (Trie a :. Trie a) [a]
trie1h = trie1a
trie1i :: (HF(a)) => a :->: a :->: [a]
trie1i = unO trie1a
ft2 :: ([Bool] -> Int) -> Int
ft2 f = f (alts 15)
alts :: Int -> [Bool]
alts n = take n (cycle [True,False])
f2 :: [Bool] -> Int
f2 = length . filter id
-- Memoization fails:
-- *FunctorCombo.MemoTrie> ft2 f2
-- 8
-- *FunctorCombo.MemoTrie> memo ft2 f2
-- ... (hang forever) ...
-- Would nonstrict memoization work? <http://conal.net/blog/posts/nonstrict-memoization/>
{--------------------------------------------------------------------
Regular instances.
--------------------------------------------------------------------}
-- Re-think where to put these instances. I want different versions for
-- list, depending on whether I'm taking care with bottoms.
instance Regular [a] where
type PF [a] = Unit :+: Const a :*: Id
unwrap [] = InL (Const ())
unwrap (a:as) = InR (Const a :*: Id as)
wrap (InL (Const ())) = []
wrap (InR (Const a :*: Id as)) = a:as
-- Rose tree (from Data.Tree)
--
-- data Tree a = Node a [Tree a]
-- instance Functor Tree where
-- fmap f (Node a ts) = Node (f a) (fmap f ts)
instance Regular (Tree a) where
type PF (Tree a) = Const a :*: []
unwrap (Node a ts) = Const a :*: ts
wrap (Const a :*: ts) = Node a ts
{--------------------------------------------------------------------
Acting on function
--------------------------------------------------------------------}
onUntrie :: (HasTrie a, HasTrie b) =>
((a -> a') -> (b -> b'))
-> ((a :->: a') -> (b :->: b'))
onUntrie = trie <~ untrie
onUntrie2 :: (HasTrie a, HasTrie b, HasTrie c) =>
((a -> a') -> (b -> b') -> (c -> c'))
-> ((a :->: a') -> (b :->: b') -> (c :->: c'))
onUntrie2 = onUntrie <~ untrie
{--------------------------------------------------------------------
Vector tries
--------------------------------------------------------------------}
data TrieTree :: * -> * -> * -> * where
L :: a -> TrieTree Z k a
B :: (k :->: TrieTree n k a) -> TrieTree (S n) k a
-- deriving instance Show a => Show (TrieTree n k a)
-- instance Show a => Show (T n a) where
-- showsPrec p (L a) = showsApp1 "L" p a
-- showsPrec p (B uv) = showsApp1 "B" p uv
instance Functor (Trie k) => Functor (TrieTree n k) where
fmap f (L a ) = L (f a)
fmap f (B ts) = B ((fmap.fmap) f ts)
instance (Applicative (Trie k), IsNat n) => Applicative (TrieTree n k) where
pure = pureV nat
(<*>) = apV nat
apV :: Applicative (Trie k) => Nat n -> TrieTree n k (a -> b) -> TrieTree n k a -> TrieTree n k b
apV Zero (L f ) (L x ) = L (f x)
apV (Succ n) (B fs) (B xs) = B (liftA2 (apV n) fs xs)
apV _ _ _ = error "apV: Impossible, but GHC doesn't know it"
-- joinV :: TrieTree n k (TrieTree n k a) -> TrieTree n k a
-- joinV = ...
-- TODO: Maybe redo these instances via the semantic instances.
-- Define instance templates in StrictMemo.
pureV :: Applicative (Trie k) => Nat n -> a -> TrieTree n k a
pureV Zero = L
pureV (Succ n) = B . pure . pureV n
instance Foldable (Trie k) => Foldable (TrieTree n k) where
foldMap f (L a) = f a
foldMap f (B ts) = (foldMap.foldMap) f ts
instance (Functor (Trie k), Foldable (Trie k), Traversable (Trie k)) =>
Traversable (TrieTree n k) where
traverse f (L a) = L <$> f a
traverse f (B ts) = B <$> (traverse.traverse) f ts
instance (HasTrie k, Functor (Trie k), IsNat n) => HasTrie (Vec n k) where
type Trie (Vec n k) = TrieTree n k
untrie = untrieV nat
trie = trieV nat
untrieV :: (HasTrie k) =>
Nat n -> (Vec n k :->: v) -> (Vec n k -> v)
untrieV Zero (L a ) ZVec = a
untrieV (Succ n) (B ts) (k :< ks) = untrieV n (untrie ts k) ks
untrieV _ _ _ = error "untrieV: Impossible, but GHC doesn't know it"
trieV :: HasTrie k =>
Nat n -> (Vec n k -> v) -> (Vec n k :->: v)
trieV Zero f = L (f ZVec)
trieV (Succ _) f = B (unO (trie (f . uncurry (:<))))
-- f :: Vec (S n) k -> v
-- f . uncurry (:<) :: k :* Vec n k -> v
-- trie (f . uncurry (:<)) :: k :* Vec n k :->: v
-- :: (Trie k :. Trie (Vec n k)) v
-- :: (Trie k :. TrieTree n k) v
-- unO (trie (f . uncurry (:<))) :: k :->: TrieTree n k v
-- B (unO (trie (f . uncurry (:<)))) :: TrieTree (S n) k v
-- :: Vec (S n) k :->: v
|
conal/functor-combo
|
src/FunctorCombo/StrictMemo.hs
|
bsd-3-clause
| 17,087 | 4 | 12 | 4,028 | 3,939 | 2,196 | 1,743 | -1 | -1 |
module Main where
import Syntax.BexpSpec
import Syntax.CommonSpec
import Syntax.ControlSpec
import Syntax.FuncSpec
import Syntax.IdentifierSpec
import Syntax.ParserSpec
import Syntax.VariableExprSpec
import State.TapeSpec
import State.ConfigSpec
import State.TreeSpec
import Semantics.BexpSpec
import Semantics.StmSpec
import Semantics.ProgramSpec
import Test.Hspec
main :: IO ()
main = hspec specs where
specs = do
bexpSpec
commonSpec
controlSpec
funcSpec
identifierSpec
parserSpec
variableExprSpec
tapeSpec
configSpec
treeSpec
bexpValSpec
stmSpec
programSpec
|
BakerSmithA/Metal
|
test/Tests.hs
|
mit
| 671 | 0 | 8 | 166 | 138 | 71 | 67 | 31 | 1 |
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-matches #-}
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- |
-- Module : Network.AWS.StorageGateway.ListGateways
-- Copyright : (c) 2013-2015 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- This operation lists gateways owned by an AWS account in a region
-- specified in the request. The returned list is ordered by gateway Amazon
-- Resource Name (ARN).
--
-- By default, the operation returns a maximum of 100 gateways. This
-- operation supports pagination that allows you to optionally reduce the
-- number of gateways returned in a response.
--
-- If you have more gateways than are returned in a response-that is, the
-- response returns only a truncated list of your gateways-the response
-- contains a marker that you can specify in your next request to fetch the
-- next page of gateways.
--
-- /See:/ <http://docs.aws.amazon.com/storagegateway/latest/APIReference/API_ListGateways.html AWS API Reference> for ListGateways.
--
-- This operation returns paginated results.
module Network.AWS.StorageGateway.ListGateways
(
-- * Creating a Request
listGateways
, ListGateways
-- * Request Lenses
, lgMarker
, lgLimit
-- * Destructuring the Response
, listGatewaysResponse
, ListGatewaysResponse
-- * Response Lenses
, lgrsMarker
, lgrsGateways
, lgrsResponseStatus
) where
import Network.AWS.Pager
import Network.AWS.Prelude
import Network.AWS.Request
import Network.AWS.Response
import Network.AWS.StorageGateway.Types
import Network.AWS.StorageGateway.Types.Product
-- | A JSON object containing zero or more of the following fields:
--
-- - ListGatewaysInput$Limit
-- - ListGatewaysInput$Marker
--
-- /See:/ 'listGateways' smart constructor.
data ListGateways = ListGateways'
{ _lgMarker :: !(Maybe Text)
, _lgLimit :: !(Maybe Nat)
} deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Creates a value of 'ListGateways' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'lgMarker'
--
-- * 'lgLimit'
listGateways
:: ListGateways
listGateways =
ListGateways'
{ _lgMarker = Nothing
, _lgLimit = Nothing
}
-- | An opaque string that indicates the position at which to begin the
-- returned list of gateways.
lgMarker :: Lens' ListGateways (Maybe Text)
lgMarker = lens _lgMarker (\ s a -> s{_lgMarker = a});
-- | Specifies that the list of gateways returned be limited to the specified
-- number of items.
lgLimit :: Lens' ListGateways (Maybe Natural)
lgLimit = lens _lgLimit (\ s a -> s{_lgLimit = a}) . mapping _Nat;
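-- Usage sketch (not part of the generated API): assuming the usual amazonka
-- runners 'runResourceT', 'runAWS' and 'send' from "Network.AWS", an 'Env'
-- value named @env@, and the lens operators @(&)@ and @(?~)@ in scope, a
-- single page of at most 25 gateways could be fetched with:
--
-- > runResourceT . runAWS env $ send (listGateways & lgLimit ?~ 25)
--
-- Because 'ListGateways' has an 'AWSPager' instance (defined below),
-- 'paginate' can be used in place of 'send' to stream every page.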
instance AWSPager ListGateways where
page rq rs
| stop (rs ^. lgrsMarker) = Nothing
| stop (rs ^. lgrsGateways) = Nothing
| otherwise =
Just $ rq & lgMarker .~ rs ^. lgrsMarker
instance AWSRequest ListGateways where
type Rs ListGateways = ListGatewaysResponse
request = postJSON storageGateway
response
= receiveJSON
(\ s h x ->
ListGatewaysResponse' <$>
(x .?> "Marker") <*> (x .?> "Gateways" .!@ mempty)
<*> (pure (fromEnum s)))
instance ToHeaders ListGateways where
toHeaders
= const
(mconcat
["X-Amz-Target" =#
("StorageGateway_20130630.ListGateways" ::
ByteString),
"Content-Type" =#
("application/x-amz-json-1.1" :: ByteString)])
instance ToJSON ListGateways where
toJSON ListGateways'{..}
= object
(catMaybes
[("Marker" .=) <$> _lgMarker,
("Limit" .=) <$> _lgLimit])
instance ToPath ListGateways where
toPath = const "/"
instance ToQuery ListGateways where
toQuery = const mempty
-- | /See:/ 'listGatewaysResponse' smart constructor.
data ListGatewaysResponse = ListGatewaysResponse'
{ _lgrsMarker :: !(Maybe Text)
, _lgrsGateways :: !(Maybe [GatewayInfo])
, _lgrsResponseStatus :: !Int
} deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Creates a value of 'ListGatewaysResponse' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'lgrsMarker'
--
-- * 'lgrsGateways'
--
-- * 'lgrsResponseStatus'
listGatewaysResponse
:: Int -- ^ 'lgrsResponseStatus'
-> ListGatewaysResponse
listGatewaysResponse pResponseStatus_ =
ListGatewaysResponse'
{ _lgrsMarker = Nothing
, _lgrsGateways = Nothing
, _lgrsResponseStatus = pResponseStatus_
}
-- | Undocumented member.
lgrsMarker :: Lens' ListGatewaysResponse (Maybe Text)
lgrsMarker = lens _lgrsMarker (\ s a -> s{_lgrsMarker = a});
-- | Undocumented member.
lgrsGateways :: Lens' ListGatewaysResponse [GatewayInfo]
lgrsGateways = lens _lgrsGateways (\ s a -> s{_lgrsGateways = a}) . _Default . _Coerce;
-- | The response status code.
lgrsResponseStatus :: Lens' ListGatewaysResponse Int
lgrsResponseStatus = lens _lgrsResponseStatus (\ s a -> s{_lgrsResponseStatus = a});
|
fmapfmapfmap/amazonka
|
amazonka-storagegateway/gen/Network/AWS/StorageGateway/ListGateways.hs
|
mpl-2.0
| 5,700 | 0 | 13 | 1,343 | 868 | 512 | 356 | 102 | 1 |
{-# LANGUAGE DeriveDataTypeable, DeriveGeneric #-}
-- |
-- Module : Statistics.Distribution.Hypergeometric
-- Copyright : (c) 2009 Bryan O'Sullivan
-- License : BSD3
--
-- Maintainer : [email protected]
-- Stability : experimental
-- Portability : portable
--
-- The Hypergeometric distribution. This is the discrete probability
-- distribution that measures the probability of /k/ successes in /l/
-- trials, without replacement, from a finite population.
--
-- The parameters of the distribution describe /k/ elements chosen
-- from a population of /l/, with /m/ elements of one type, and
-- /l/-/m/ of the other (all are positive integers).
module Statistics.Distribution.Hypergeometric
(
HypergeometricDistribution
-- * Constructors
, hypergeometric
-- ** Accessors
, hdM
, hdL
, hdK
) where
import Data.Aeson (FromJSON, ToJSON)
import Data.Binary (Binary)
import Data.Data (Data, Typeable)
import GHC.Generics (Generic)
import Numeric.MathFunctions.Constants (m_epsilon)
import Numeric.SpecFunctions (choose)
import qualified Statistics.Distribution as D
import Data.Binary (put, get)
import Control.Applicative ((<$>), (<*>))
data HypergeometricDistribution = HD {
hdM :: {-# UNPACK #-} !Int
, hdL :: {-# UNPACK #-} !Int
, hdK :: {-# UNPACK #-} !Int
} deriving (Eq, Read, Show, Typeable, Data, Generic)
instance FromJSON HypergeometricDistribution
instance ToJSON HypergeometricDistribution
instance Binary HypergeometricDistribution where
get = HD <$> get <*> get <*> get
put (HD x y z) = put x >> put y >> put z
instance D.Distribution HypergeometricDistribution where
cumulative = cumulative
instance D.DiscreteDistr HypergeometricDistribution where
probability = probability
instance D.Mean HypergeometricDistribution where
mean = mean
instance D.Variance HypergeometricDistribution where
variance = variance
instance D.MaybeMean HypergeometricDistribution where
maybeMean = Just . D.mean
instance D.MaybeVariance HypergeometricDistribution where
maybeStdDev = Just . D.stdDev
maybeVariance = Just . D.variance
instance D.Entropy HypergeometricDistribution where
entropy = directEntropy
instance D.MaybeEntropy HypergeometricDistribution where
maybeEntropy = Just . D.entropy
variance :: HypergeometricDistribution -> Double
variance (HD m l k) = (k' * ml) * (1 - ml) * (l' - k') / (l' - 1)
where m' = fromIntegral m
l' = fromIntegral l
k' = fromIntegral k
ml = m' / l'
mean :: HypergeometricDistribution -> Double
mean (HD m l k) = fromIntegral k * fromIntegral m / fromIntegral l
directEntropy :: HypergeometricDistribution -> Double
directEntropy d@(HD m _ _) =
negate . sum $
takeWhile (< negate m_epsilon) $
dropWhile (not . (< negate m_epsilon)) $
[ let x = probability d n in x * log x | n <- [0..m]]
hypergeometric :: Int -- ^ /m/
-> Int -- ^ /l/
-> Int -- ^ /k/
-> HypergeometricDistribution
hypergeometric m l k
| not (l > 0) = error $ msg ++ "l must be positive"
| not (m >= 0 && m <= l) = error $ msg ++ "m must lie in [0,l] range"
| not (k > 0 && k <= l) = error $ msg ++ "k must lie in (0,l] range"
| otherwise = HD m l k
where
msg = "Statistics.Distribution.Hypergeometric.hypergeometric: "
-- Naive implementation
probability :: HypergeometricDistribution -> Int -> Double
probability (HD mi li ki) n
| n < max 0 (mi+ki-li) || n > min mi ki = 0
| otherwise =
choose mi n * choose (li - mi) (ki - n) / choose li ki
cumulative :: HypergeometricDistribution -> Double -> Double
cumulative d@(HD mi li ki) x
| isNaN x = error "Statistics.Distribution.Hypergeometric.cumulative: NaN argument"
| isInfinite x = if x > 0 then 1 else 0
| n < minN = 0
| n >= maxN = 1
| otherwise = D.sumProbabilities d minN n
where
n = floor x
minN = max 0 (mi+ki-li)
maxN = min mi ki
|
fpco/statistics
|
Statistics/Distribution/Hypergeometric.hs
|
bsd-2-clause
| 4,019 | 0 | 14 | 924 | 1,114 | 590 | 524 | 83 | 2 |
{-# LANGUAGE BangPatterns, GeneralizedNewtypeDeriving, StandaloneDeriving #-}
import Prelude hiding (mapM)
import Options.Applicative
import Data.Monoid ((<>))
import Control.Monad.Trans.Class
import Data.Vector (Vector)
import qualified Data.Vector.Generic as V
import Statistics.Sample (mean)
import Data.Traversable (mapM)
import qualified Data.Set as S
import Data.Set (Set)
import qualified Data.Map.Strict as M
import ReadData
import SerializeText
import qualified RunSampler as Sampler
import BayesStack.DirMulti
import BayesStack.Models.Topic.LDA
import BayesStack.UniqueKey
import qualified Data.Text as T
import qualified Data.Text.IO as TIO
import System.FilePath.Posix ((</>))
import Data.Binary
import qualified Data.ByteString as BS
import Text.Printf
import Data.Random
import System.Random.MWC
data RunOpts = RunOpts { nodesFile :: FilePath
, stopwords :: Maybe FilePath
, nTopics :: Int
, samplerOpts :: Sampler.SamplerOpts
, hyperParams :: HyperParams
}
runOpts :: Parser RunOpts
runOpts = RunOpts
<$> strOption ( long "nodes"
<> short 'n'
<> metavar "FILE"
<> help "File containing nodes and their associated items"
)
<*> nullOption ( long "stopwords"
<> short 's'
<> metavar "FILE"
<> reader (pure . Just)
<> value Nothing
<> help "Stop word list"
)
<*> option ( long "topics"
<> short 't'
<> metavar "N"
<> value 20
<> help "Number of topics"
)
<*> Sampler.samplerOpts
<*> hyperOpts
hyperOpts = HyperParams
<$> option ( long "prior-theta"
<> value 1
<> help "Dirichlet parameter for prior on theta"
)
<*> option ( long "prior-phi"
<> value 0.1
<> help "Dirichlet parameter for prior on phi"
)
mapMKeys :: (Ord k, Ord k', Monad m, Applicative m)
=> (a -> m a') -> (k -> m k') -> M.Map k a -> m (M.Map k' a')
mapMKeys f g x = M.fromList <$> (mapM (\(k,v)->(,) <$> g k <*> f v) $ M.assocs x)
termsToItems :: M.Map NodeName [Term]
-> (M.Map Node [Item], (M.Map Item Term, M.Map Node NodeName))
termsToItems nodes =
let ((d', nodeMap), itemMap) =
runUniqueKey' [Item i | i <- [0..]] $
runUniqueKeyT' [Node i | i <- [0..]] $ do
mapMKeys (mapM (lift . getUniqueKey)) getUniqueKey nodes
in (d', (itemMap, nodeMap))
netData :: HyperParams -> M.Map Node [Item] -> Int -> NetData
netData hp nodeItems nTopics =
NetData { dHypers = hp
, dItems = S.unions $ map S.fromList $ M.elems nodeItems
, dTopics = S.fromList [Topic i | i <- [1..nTopics]]
, dNodeItems = M.fromList
$ zip [NodeItem i | i <- [0..]]
$ do (n,items) <- M.assocs nodeItems
item <- items
return (n, item)
, dNodes = M.keysSet nodeItems
}
opts :: ParserInfo RunOpts
opts = info runOpts ( fullDesc
<> progDesc "Learn LDA model"
<> header "run-lda - learn LDA model"
)
instance Sampler.SamplerModel MState where
estimateHypers = reestimate
modelLikelihood = modelLikelihood
summarizeHypers ms =
" phi : "++show (dmAlpha $ snd $ M.findMin $ stPhis ms)++"\n"++
" theta: "++show (dmAlpha $ snd $ M.findMin $ stThetas ms)++"\n"
main :: IO ()
main = do
args <- execParser opts
stopWords <- case stopwords args of
Just f -> S.fromList . T.words <$> TIO.readFile f
Nothing -> return S.empty
printf "Read %d stopwords\n" (S.size stopWords)
(nodeItems, (itemMap, nodeMap)) <- termsToItems
<$> readNodeItems stopWords (nodesFile args)
Sampler.createSweeps $ samplerOpts args
let sweepsDir = Sampler.sweepsDir $ samplerOpts args
encodeFile (sweepsDir </> "item-map") itemMap
encodeFile (sweepsDir </> "node-map") nodeMap
let termCounts = V.fromListN (M.size nodeItems)
$ map length $ M.elems nodeItems :: Vector Int
printf "Read %d nodes\n" (M.size nodeItems)
printf "Mean items per node: %1.2f\n" (mean $ V.map realToFrac termCounts)
withSystemRandom $ \mwc->do
let nd = netData (hyperParams args) nodeItems (nTopics args)
encodeFile (sweepsDir </> "data") nd
mInit <- runRVar (randomInitialize nd) mwc
let m = model nd mInit
Sampler.runSampler (samplerOpts args) m (updateUnits nd)
return ()
|
beni55/bayes-stack
|
network-topic-models/RunLDA.hs
|
bsd-3-clause
| 5,209 | 0 | 16 | 1,943 | 1,466 | 755 | 711 | -1 | -1 |
{-# LANGUAGE RebindableSyntax #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE FlexibleContexts #-}
module Main ( main, write ) where
import Prelude
import qualified Control.Effect as E
import Control.Effect.State
ifThenElse :: Bool -> a -> a -> a
ifThenElse True t e = t
ifThenElse False t e = e
main :: IO ()
main = do
putStrLn $ show $ runState
( write "abc" )
( Ext (Var :-> 0 :! Eff) (Ext (Var :-> [] :! Eff) Empty) )
varC :: Var "count"
varC = Var
varS :: Var "out"
varS = Var
incC :: State '["count" :-> Int :! 'RW] Int
incC = do { x <- get varC; put varC (x + 1); return (x + 1) }
where (>>=) :: (E.Inv State f g) => State f a -> (a -> State g b) -> State (E.Plus State f g) b
(>>=) = (E.>>=)
(>>) :: (E.Inv State f g) => State f a -> State g b -> State (E.Plus State f g) b
(>>) = (E.>>)
return :: a -> State '[] a
return = E.return
fail = E.fail
writeS :: [a] -> State '["out" :-> [a] :! 'RW] ()
writeS y = do { x <- get varS; put varS (x ++ y) }
where (>>=) :: (E.Inv State f g) => State f a -> (a -> State g b) -> State (E.Plus State f g) b
(>>=) = (E.>>=)
(>>) :: (E.Inv State f g) => State f a -> State g b -> State (E.Plus State f g) b
(>>) = (E.>>)
return :: a -> State '[] a
return = E.return
fail = E.fail
write :: [a] -> State '["count" :-> Int :! 'RW, "out" :-> [a] :! 'RW] ()
write x = do { writeS x; _ <- incC; return () }
where (>>=) :: (E.Inv State f g) => State f a -> (a -> State g b) -> State (E.Plus State f g) b
(>>=) = (E.>>=)
(>>) :: (E.Inv State f g) => State f a -> State g b -> State (E.Plus State f g) b
(>>) = (E.>>)
return :: a -> State '[] a
return = E.return
fail = E.fail
|
jbracker/supermonad-plugin
|
examples/monad/effect/Main.hs
|
bsd-3-clause
| 1,838 | 0 | 16 | 546 | 937 | 505 | 432 | 48 | 1 |
-----------------------------------------------------------------------------
-- |
-- License : BSD-3-Clause
-- Maintainer : Oleg Grenrus <[email protected]>
--
module GitHub.Data.Activities where
import GitHub.Data.Id (Id, mkId)
import GitHub.Data.Repos (Repo, RepoRef)
import GitHub.Data.URL (URL)
import GitHub.Internal.Prelude
import Prelude ()
import qualified Data.Text as T
data RepoStarred = RepoStarred
{ repoStarredStarredAt :: !UTCTime
, repoStarredRepo :: !Repo
}
deriving (Show, Data, Typeable, Eq, Ord, Generic)
instance NFData RepoStarred where rnf = genericRnf
instance Binary RepoStarred
-- JSON Instances
instance FromJSON RepoStarred where
parseJSON = withObject "RepoStarred" $ \o -> RepoStarred
<$> o .: "starred_at"
<*> o .: "repo"
data Subject = Subject
{ subjectTitle :: !Text
, subjectURL :: !URL
, subjectLatestCommentURL :: !(Maybe URL)
-- https://developer.github.com/v3/activity/notifications/ doesn't indicate
-- what the possible values for this field are.
-- TODO: Make an ADT for this.
, subjectType :: !Text
}
deriving (Show, Data, Typeable, Eq, Ord, Generic)
instance NFData Subject where rnf = genericRnf
instance Binary Subject
instance FromJSON Subject where
parseJSON = withObject "Subject" $ \o -> Subject
<$> o .: "title"
<*> o .: "url"
<*> o .:? "latest_comment_url"
<*> o .: "type"
data NotificationReason
= AssignReason
| AuthorReason
| CommentReason
| InvitationReason
| ManualReason
| MentionReason
| ReviewRequestedReason
| StateChangeReason
| SubscribedReason
| TeamMentionReason
deriving (Show, Data, Enum, Bounded, Typeable, Eq, Ord, Generic)
instance NFData NotificationReason where rnf = genericRnf
instance Binary NotificationReason
instance FromJSON NotificationReason where
parseJSON = withText "NotificationReason" $ \t -> case T.toLower t of
"assign" -> pure AssignReason
"author" -> pure AuthorReason
"comment" -> pure CommentReason
"invitation" -> pure InvitationReason
"manual" -> pure ManualReason
"mention" -> pure MentionReason
"review_requested" -> pure ReviewRequestedReason
"state_change" -> pure StateChangeReason
"subscribed" -> pure SubscribedReason
"team_mention" -> pure TeamMentionReason
_ -> fail $ "Unknown NotificationReason " ++ show t
data Notification = Notification
-- XXX: The notification id field type IS in fact string. Not sure why gh
-- chose to do this when all the other ids are Numbers...
{ notificationId :: !(Id Notification)
, notificationRepo :: !RepoRef
, notificationSubject :: !Subject
, notificationReason :: !NotificationReason
, notificationUnread :: !Bool
, notificationUpdatedAt :: !(Maybe UTCTime)
, notificationLastReadAt :: !(Maybe UTCTime)
, notificationUrl :: !URL
}
deriving (Show, Data, Typeable, Eq, Ord, Generic)
instance NFData Notification where rnf = genericRnf
instance Binary Notification
instance FromJSON Notification where
parseJSON = withObject "Notification" $ \o -> Notification
<$> (mkId undefined . read <$> o .: "id")
<*> o .: "repository"
<*> o .: "subject"
<*> o .: "reason"
<*> o .: "unread"
<*> o .: "updated_at"
<*> o .: "last_read_at"
<*> o .: "url"
|
jwiegley/github
|
src/GitHub/Data/Activities.hs
|
bsd-3-clause
| 3,569 | 0 | 27 | 905 | 798 | 427 | 371 | 108 | 0 |
module RunSpec ( runSpec ) where
import TestInit
import qualified Data.Text as T
import System.IO
runSpec :: Spec
runSpec = do
describe "run" $ do
it "simple command" $ do
res <- shelly $ run "echo" [ "wibble" ]
res @?= "wibble\n"
it "with escaping" $ do
res <- shelly $ run "echo" [ "*" ]
res @?= "*\n"
it "without escaping" $ do
res <- shelly $ escaping False $ run "echo" [ "*" ]
assert $ "README.md" `elem` T.words res
it "with binary handle mode" $ do
res <- shelly $ onCommandHandles (initOutputHandles (flip hSetBinaryMode True))
$ run "cat" [ "test/data/nonascii.txt" ]
res @?= "Selbstverst\228ndlich \252berraschend\n"
-- Bash-related commands
describe "bash" $ do
it "simple command" $ do
res <- shelly $ bash "echo" [ "wibble" ]
res @?= "wibble\n"
it "without escaping" $ do
res <- shelly $ escaping False $ bash "echo" [ "*" ]
assert $ "README.md" `elem` T.words res
it "with binary handle mode" $ do
res <- shelly $ onCommandHandles (initOutputHandles (flip hSetBinaryMode True))
$ bash "cat" [ "test/data/nonascii.txt" ]
res @?= "Selbstverst\228ndlich \252berraschend\n"
{- This throws spurious errors on some systems
it "can detect failing commands in pipes" $ do
eCode <- shelly $ escaping False $ errExit False $ do
bashPipeFail
bash_ "echo" ["'foo'", "|", "ls", "\"eoueouoe\"", "2>/dev/null", "|", "echo", "'bar'" ]
lastExitCode
eCode `shouldSatisfy` (/= 0)
-}
it "preserve pipe behaviour" $ do
(eCode, res) <- shelly $ escaping False $ errExit False $ do
res <- bash "echo" [ "'foo'", "|", "echo", "'bar'" ]
eCode <- lastExitCode
return (eCode, res)
res @?= "bar\n"
eCode @?= 0
|
adinapoli/Shelly.hs
|
test/src/RunSpec.hs
|
bsd-3-clause
| 1,864 | 0 | 20 | 534 | 485 | 230 | 255 | 38 | 1 |
module FilesystemParseTest
( tests
)
where
import Test.HUnit
import Data.Time.Clock ( UTCTime )
import System.FilePath ( (</>) )
import Common
import Database.Schema.Migrations.Migration
import Database.Schema.Migrations.Filesystem
( FilesystemStoreSettings(..)
, migrationFromFile
)
tests :: IO [Test]
tests = migrationParsingTests
-- filename, result
type MigrationParsingTestCase = (FilePath, Either String Migration)
tsStr :: String
tsStr = "2009-04-15 10:02:06 UTC"
ts :: UTCTime
ts = read tsStr
valid_full :: Migration
valid_full = Migration {
mTimestamp = Just ts
, mId = "valid_full"
, mDesc = Just "A valid full migration."
, mDeps = ["another_migration"]
, mApply = "CREATE TABLE test ( a int );"
, mRevert = Just "DROP TABLE test;"
}
valid_full_comments :: Migration
valid_full_comments = Migration {
mTimestamp = Just ts
, mId = "valid_full"
, mDesc = Just "A valid full migration."
, mDeps = ["another_migration"]
, mApply = "\n-- Comment on a line\nCREATE TABLE test (\n a int -- comment inline\n);\n"
, mRevert = Just "DROP TABLE test;"
}
valid_full_colon :: Migration
valid_full_colon = Migration {
mTimestamp = Just ts
, mId = "valid_full"
, mDesc = Just "A valid full migration."
, mDeps = ["another_migration"]
, mApply = "\n-- Comment on a line with a colon:\nCREATE TABLE test (\n a int\n);\n"
, mRevert = Just "DROP TABLE test;"
}
testStorePath :: FilePath
testStorePath = testFile $ "migration_parsing"
fp :: FilePath -> FilePath
fp = (testStorePath </>)
migrationParsingTestCases :: [MigrationParsingTestCase]
migrationParsingTestCases = [ ("valid_full", Right valid_full)
, ("valid_with_comments"
, Right (valid_full { mId = "valid_with_comments" }))
, ("valid_with_comments2"
, Right (valid_full_comments { mId = "valid_with_comments2" }))
, ("valid_with_colon"
, Right (valid_full_colon { mId = "valid_with_colon" }))
, ("valid_with_multiline_deps"
, Right (valid_full { mId = "valid_with_multiline_deps"
, mDeps = ["one", "two", "three"] } ))
, ("valid_no_depends"
, Right (valid_full { mId = "valid_no_depends", mDeps = [] }))
, ("valid_no_desc"
, Right (valid_full { mId = "valid_no_desc", mDesc = Nothing }))
, ("valid_no_revert"
, Right (valid_full { mId = "valid_no_revert", mRevert = Nothing }))
, ("valid_no_timestamp"
, Right (valid_full { mId = "valid_no_timestamp", mTimestamp = Nothing }))
, ("invalid_missing_required_fields"
, Left $ "Could not parse migration " ++
(fp "invalid_missing_required_fields.txt") ++
":Error in " ++
(show $ fp "invalid_missing_required_fields.txt") ++
": missing required field(s): " ++
"[\"Depends\"]")
, ("invalid_field_name"
, Left $ "Could not parse migration " ++
(fp "invalid_field_name.txt") ++
":Error in " ++
(show $ fp "invalid_field_name.txt") ++
": unrecognized field found")
, ("invalid_syntax"
, Left $ "Could not parse migration " ++
(fp "invalid_syntax.txt") ++
":user error (syntax error: line 7, " ++
"column 0)")
, ("invalid_timestamp"
, Left $ "Could not parse migration " ++
(fp "invalid_timestamp.txt") ++
":Error in " ++
(show $ fp "invalid_timestamp.txt") ++
": unrecognized field found")
]
mkParsingTest :: MigrationParsingTestCase -> IO Test
mkParsingTest (fname, expected) = do
let store = FSStore { storePath = testStorePath }
actual <- migrationFromFile store fname
return $ test $ expected ~=? actual
migrationParsingTests :: IO [Test]
migrationParsingTests =
sequence $ map mkParsingTest migrationParsingTestCases
|
nathankot/dbmigrations
|
test/FilesystemParseTest.hs
|
bsd-3-clause
| 5,297 | 0 | 13 | 2,367 | 850 | 497 | 353 | 96 | 1 |
import Graphics.UI.Gtk
import Graphics.Rendering.Cairo
main :: IO ()
main= do
initGUI
window <- windowNew
set window [windowTitle := "Hello Cairo with Resizing",
windowDefaultWidth := 300, windowDefaultHeight := 200,
containerBorderWidth := 30 ]
frame <- frameNew
containerAdd window frame
canvas <- drawingAreaNew
containerAdd frame canvas
widgetShowAll window
onExpose canvas (\x -> do (w,h) <- widgetGetSize canvas
drw <- widgetGetDrawWindow canvas
renderWithDrawable drw
(myDraw (fromIntegral w) (fromIntegral h))
return (eventSent x))
onDestroy window mainQuit
mainGUI
myDraw :: Double -> Double -> Render ()
myDraw w h = do
setSourceRGB 1 1 1
paint
setSourceRGB 1 1 0
setLineWidth 5
moveTo (0.5 * w) (0.43 * h)
lineTo (0.33 * w) (0.71 * h)
lineTo (0.66 * w) (0.71 * h)
closePath
stroke
|
k0001/gtk2hs
|
docs/tutorial/Tutorial_Port/Example_Code/GtkApp1b.hs
|
gpl-3.0
| 1,052 | 0 | 16 | 380 | 332 | 155 | 177 | 32 | 1 |
{-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TemplateHaskell #-}
-- Create a source distribution tarball
module Stack.SDist
( getSDistTarball
) where
import qualified Codec.Archive.Tar as Tar
import qualified Codec.Archive.Tar.Entry as Tar
import qualified Codec.Compression.GZip as GZip
import Control.Applicative ((<$>))
import Control.Concurrent.Execute (ActionContext(..))
import Control.Monad (when)
import Control.Monad.Catch (MonadCatch, MonadMask)
import Control.Monad.IO.Class
import Control.Monad.Logger
import Control.Monad.Reader (MonadReader, asks)
import Control.Monad.Trans.Resource
import qualified Data.ByteString.Lazy as L
import Data.Either (partitionEithers)
import Data.List
import qualified Data.Map.Strict as Map
import Data.Monoid ((<>))
import qualified Data.Set as Set
import qualified Data.Text as T
import Network.HTTP.Client.Conduit (HasHttpManager)
import Path
import Stack.Build (mkBaseConfigOpts)
import Stack.Build.Execute
import Stack.Build.Source (loadSourceMap, localFlags)
import Stack.Build.Types
import Stack.Constants
import Stack.Package
import Stack.Types
import Stack.Types.Internal
import qualified System.FilePath as FP
import System.IO.Temp (withSystemTempDirectory)
type M env m = (MonadIO m,MonadReader env m,HasHttpManager env,HasBuildConfig env,MonadLogger m,MonadBaseControl IO m,MonadCatch m,MonadMask m,HasLogLevel env,HasEnvConfig env,HasTerminal env)
-- | Given the path to a local package, creates its source
-- distribution tarball.
--
-- While this yields a 'FilePath', the name of the tarball, this
-- tarball is not written to the disk and instead yielded as a lazy
-- bytestring.
getSDistTarball :: M env m => Path Abs Dir -> m (FilePath, L.ByteString)
getSDistTarball pkgDir = do
let pkgFp = toFilePath pkgDir
lp <- readLocalPackage pkgDir
$logInfo $ "Getting file list for " <> T.pack pkgFp
fileList <- getSDistFileList lp
$logInfo $ "Building sdist tarball for " <> T.pack pkgFp
files <- normalizeTarballPaths (lines fileList)
liftIO $ do
-- NOTE: Could make this use lazy I/O to only read files as needed
-- for upload (both GZip.compress and Tar.write are lazy).
-- However, it seems less error prone and more predictable to read
-- everything in at once, so that's what we're doing for now:
let packWith f isDir fp =
f (pkgFp FP.</> fp)
(either error id (Tar.toTarPath isDir (pkgId FP.</> fp)))
tarName = pkgId FP.<.> "tar.gz"
pkgId = packageIdentifierString (packageIdentifier (lpPackage lp))
dirEntries <- mapM (packWith Tar.packDirectoryEntry True) (dirsFromFiles files)
fileEntries <- mapM (packWith Tar.packFileEntry False) files
return (tarName, GZip.compress (Tar.write (dirEntries ++ fileEntries)))
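-- Usage sketch (assumes we are already inside the @M env m@ constraint this
-- module requires, and that @pkgDir@ is the 'Path' 'Abs' 'Dir' of a local
-- package):
--
-- > (tarName, tarBytes) <- getSDistTarball pkgDir
-- > liftIO $ L.writeFile tarName tarBytes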
-- Read in a 'LocalPackage' config. This makes some default decisions
-- about 'LocalPackage' fields that might not be appropriate for other
-- usecases.
--
-- TODO: Dedupe with similar code in "Stack.Build.Source".
readLocalPackage :: M env m => Path Abs Dir -> m LocalPackage
readLocalPackage pkgDir = do
econfig <- asks getEnvConfig
bconfig <- asks getBuildConfig
cabalfp <- getCabalFileName pkgDir
name <- parsePackageNameFromFilePath cabalfp
let config = PackageConfig
{ packageConfigEnableTests = False
, packageConfigEnableBenchmarks = False
, packageConfigFlags = localFlags Map.empty bconfig name
, packageConfigGhcVersion = envConfigGhcVersion econfig
, packageConfigPlatform = configPlatform $ getConfig bconfig
}
package <- readPackage config cabalfp
return LocalPackage
{ lpPackage = package
, lpWanted = False -- HACK: makes it so that sdist output goes to a log instead of a file.
, lpDir = pkgDir
, lpCabalFile = cabalfp
        -- NOTE: these aren't the 'correct' values, but they aren't used in
-- the usage of this function in this module.
, lpPackageFinal = package
, lpDirtyFiles = True
, lpNewBuildCache = Map.empty
, lpFiles = Set.empty
, lpComponents = Set.empty
}
getSDistFileList :: M env m => LocalPackage -> m String
getSDistFileList lp =
withSystemTempDirectory (stackProgName <> "-sdist") $ \tmpdir -> do
menv <- getMinimalEnvOverride
let bopts = defaultBuildOpts
baseConfigOpts <- mkBaseConfigOpts bopts
(_mbp, locals, _extraToBuild, sourceMap) <- loadSourceMap bopts
withExecuteEnv menv bopts baseConfigOpts locals sourceMap $ \ee -> do
withSingleContext ac ee task (Just "sdist") $ \_package _cabalfp _pkgDir cabal _announce _console _mlogFile -> do
let outFile = tmpdir FP.</> "source-files-list"
cabal False ["sdist", "--list-sources", outFile]
liftIO (readFile outFile)
where
package = lpPackage lp
ac = ActionContext Set.empty
task = Task
{ taskProvides = PackageIdentifier (packageName package) (packageVersion package)
, taskType = TTLocal lp
, taskConfigOpts = TaskConfigOpts
{ tcoMissing = Set.empty
, tcoOpts = \_ -> []
}
, taskPresent = Set.empty
}
normalizeTarballPaths :: M env m => [FilePath] -> m [FilePath]
normalizeTarballPaths fps = do
--TODO: consider whether erroring out is better - otherwise the
--user might upload an incomplete tar?
when (not (null outsideDir)) $
$logWarn $ T.concat
[ "Warning: These files are outside of the package directory, and will be omitted from the tarball: "
, T.pack (show outsideDir)]
return files
where
(outsideDir, files) = partitionEithers (map pathToEither fps)
pathToEither fp = maybe (Left fp) Right (normalizePath fp)
normalizePath :: FilePath -> (Maybe FilePath)
normalizePath = fmap FP.joinPath . go . FP.splitDirectories . FP.normalise
where
go [] = Just []
go ("..":_) = Nothing
go (_:"..":xs) = go xs
go (x:xs) = (x :) <$> go xs
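-- For instance (hand-traced through 'go' above, as a sketch):
-- @normalizePath "foo/../bar"@ gives @Just "bar"@, while
-- @normalizePath "../foo"@ gives 'Nothing', so that file would be reported
-- as lying outside the package directory.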
dirsFromFiles :: [FilePath] -> [FilePath]
dirsFromFiles dirs = Set.toAscList (Set.delete "." results)
where
results = foldl' (\s -> go s . FP.takeDirectory) Set.empty dirs
go s x
| Set.member x s = s
| otherwise = go (Set.insert x s) (FP.takeDirectory x)
|
wskplho/stack
|
src/Stack/SDist.hs
|
bsd-3-clause
| 6,706 | 0 | 20 | 1,701 | 1,583 | 852 | 731 | 119 | 4 |
#if __GLASGOW_HASKELL__ >= 701
{-# LANGUAGE Safe #-}
#endif
-- | Produces XHTML 1.0 Strict.
module Text.XHtml.Strict (
-- * Data types
Html, HtmlAttr,
-- * Classes
HTML(..), ADDATTRS(..), CHANGEATTRS(..),
-- * Primitives and basic combinators
(<<), concatHtml, (+++),
noHtml, isNoHtml, tag, itag,
htmlAttrPair, emptyAttr, intAttr, strAttr, htmlAttr,
primHtml, stringToHtmlString,
docType,
-- * Rendering
showHtml, renderHtml, renderHtmlWithLanguage, prettyHtml,
showHtmlFragment, renderHtmlFragment, prettyHtmlFragment,
module Text.XHtml.Strict.Elements,
module Text.XHtml.Strict.Attributes,
module Text.XHtml.Extras
) where
import Text.XHtml.Internals
import Text.XHtml.Strict.Elements
import Text.XHtml.Strict.Attributes
import Text.XHtml.Extras
-- | The @DOCTYPE@ for XHTML 1.0 Strict.
docType :: String
docType = "<!DOCTYPE html PUBLIC \"-//W3C//DTD XHTML 1.0 Strict//EN\""
++ " \"http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd\">"
-- | Output the HTML without adding newlines or spaces within the markup.
-- This should be the most time and space efficient way to
-- render HTML, though the output is quite unreadable.
showHtml :: HTML html => html -> String
showHtml = showHtmlInternal docType
-- | Outputs indented HTML. Because space matters in
-- HTML, the output is quite messy.
renderHtml :: HTML html => html -> String
renderHtml = renderHtmlInternal docType
-- | Outputs indented XHTML. Because space matters in
-- HTML, the output is quite messy.
renderHtmlWithLanguage :: HTML html
=> String -- ^ The code of the "dominant" language of the webpage.
-> html -- ^ All the 'Html', including a header.
-> String
renderHtmlWithLanguage l theHtml =
docType ++ "\n" ++ renderHtmlFragment code ++ "\n"
where
code = tag "html" ! [ strAttr "xmlns" "http://www.w3.org/1999/xhtml"
, strAttr "lang" l
, strAttr "xml:lang" l
] << theHtml
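-- Usage sketch (hypothetical page; 'body', 'p' and '(<<)' are all
-- re-exported by this module):
--
-- > putStr $ renderHtmlWithLanguage "en" (body << p << "Hello, world")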
-- | Outputs indented HTML, with indentation inside elements.
-- This can change the meaning of the HTML document, and
-- is mostly useful for debugging the HTML output.
-- The implementation is inefficient, and you are normally
-- better off using 'showHtml' or 'renderHtml'.
prettyHtml :: HTML html => html -> String
prettyHtml = prettyHtmlInternal docType
|
DavidAlphaFox/ghc
|
libraries/xhtml/Text/XHtml/Strict.hs
|
bsd-3-clause
| 2,478 | 0 | 10 | 587 | 351 | 218 | 133 | 36 | 1 |
{-# LANGUAGE QuasiQuotes, TypeFamilies, GeneralizedNewtypeDeriving, TemplateHaskell #-}
module Model where
import Yesod
import Data.Text (Text)
-- You can define all of your database entities in the entities file.
-- You can find more information on persistent and how to declare entities
-- at:
-- http://www.yesodweb.com/book/persistent/
share [mkPersist, mkMigrate "migrateAll"] $(persistFile "config/models")
|
tehgeekmeister/apters-web
|
Model.hs
|
agpl-3.0
| 415 | 0 | 8 | 53 | 46 | 27 | 19 | 5 | 0 |
-- !!! Wentworth's version of a program to generate
-- !!! all the expansions of a generalised regular expression
-- !!!
--
-- RJE: Modified so it only outputs the number of characters in the output,
-- rather than the output itself, thus avoiding having to generate such a
-- huge output file to get a reasonable execution time.
module Main (main) where
import Data.Char
main = interact (("Enter a generator: " ++).show.numchars.expand.head.lines)
numchars :: [String] -> Int
numchars l = sum $ map length l
expand [] = [""]
expand ('<':x) = numericRule x
expand ('[':x) = alphabeticRule x
expand x = constantRule x
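-- Worked example (a sketch, traced by hand through 'alphabeticRule' and
-- 'numericRule' below): expanding "[a-c]<1-2>" yields
-- "a1","a2","b1","b2","c1","c2", so 'numchars' reports 12.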
constantRule (c:rest) = [ c:z | z <- expand rest ]
alphabeticRule (a:'-':b:']':rest)
| a <= b = [c:z | c <- [a..b], z <- expand rest]
| otherwise = [c:z | c <- reverse [b..a], z <- expand rest]
numericRule x
= [ pad (show i) ++ z
| i <- if u < v then [u..v] else [u,u-1..v]
, z <- expand s ]
where
(p,_:q) = span (/= '-') x
(r,_:s) = span (/= '>') q
(u,v) = (mknum p, mknum r)
mknum s = foldl (\ u c -> u * 10 + (ord c - ord '0')) 0 s
pad s = [ '0' | i <- [1 .. (width-(length s))]] ++ s
width = max (length (show u)) (length (show v))
|
mightymoose/liquidhaskell
|
benchmarks/nofib/imaginary/gen_regexps/Main.hs
|
bsd-3-clause
| 1,211 | 14 | 15 | 290 | 563 | 297 | 266 | 23 | 2 |
{-# LANGUAGE Trustworthy #-}
{-# LANGUAGE CPP
, MagicHash
, UnboxedTuples
, ScopedTypeVariables
, RankNTypes
#-}
{-# OPTIONS_GHC -Wno-deprecations #-}
-- kludge for the Control.Concurrent.QSem, Control.Concurrent.QSemN
-- and Control.Concurrent.SampleVar imports.
-----------------------------------------------------------------------------
-- |
-- Module : Control.Concurrent
-- Copyright : (c) The University of Glasgow 2001
-- License : BSD-style (see the file libraries/base/LICENSE)
--
-- Maintainer : [email protected]
-- Stability : experimental
-- Portability : non-portable (concurrency)
--
-- A common interface to a collection of useful concurrency
-- abstractions.
--
-----------------------------------------------------------------------------
module Control.Concurrent (
-- * Concurrent Haskell
-- $conc_intro
-- * Basic concurrency operations
ThreadId,
myThreadId,
forkIO,
forkFinally,
forkIOWithUnmask,
killThread,
throwTo,
-- ** Threads with affinity
forkOn,
forkOnWithUnmask,
getNumCapabilities,
setNumCapabilities,
threadCapability,
-- * Scheduling
-- $conc_scheduling
yield,
-- ** Blocking
-- $blocking
-- ** Waiting
threadDelay,
threadWaitRead,
threadWaitWrite,
threadWaitReadSTM,
threadWaitWriteSTM,
-- * Communication abstractions
module Control.Concurrent.MVar,
module Control.Concurrent.Chan,
module Control.Concurrent.QSem,
module Control.Concurrent.QSemN,
-- * Bound Threads
-- $boundthreads
rtsSupportsBoundThreads,
forkOS,
forkOSWithUnmask,
isCurrentThreadBound,
runInBoundThread,
runInUnboundThread,
-- * Weak references to ThreadIds
mkWeakThreadId,
-- * GHC's implementation of concurrency
-- |This section describes features specific to GHC's
-- implementation of Concurrent Haskell.
-- ** Haskell threads and Operating System threads
-- $osthreads
-- ** Terminating the program
-- $termination
-- ** Pre-emption
-- $preemption
-- ** Deadlock
-- $deadlock
) where
import Control.Exception.Base as Exception
import GHC.Conc hiding (threadWaitRead, threadWaitWrite,
threadWaitReadSTM, threadWaitWriteSTM)
import GHC.IO ( unsafeUnmask, catchException )
import GHC.IORef ( newIORef, readIORef, writeIORef )
import GHC.Base
import System.Posix.Types ( Fd )
import Foreign.StablePtr
import Foreign.C.Types
#ifdef mingw32_HOST_OS
import Foreign.C
import System.IO
import Data.Functor ( void )
#else
import qualified GHC.Conc
#endif
import Control.Concurrent.MVar
import Control.Concurrent.Chan
import Control.Concurrent.QSem
import Control.Concurrent.QSemN
{- $conc_intro
The concurrency extension for Haskell is described in the paper
/Concurrent Haskell/
<http://www.haskell.org/ghc/docs/papers/concurrent-haskell.ps.gz>.
Concurrency is \"lightweight\", which means that both thread creation
and context switching overheads are extremely low. Scheduling of
Haskell threads is done internally in the Haskell runtime system, and
doesn't make use of any operating system-supplied thread packages.
However, if you want to interact with a foreign library that expects your
program to use the operating system-supplied thread package, you can do so
by using 'forkOS' instead of 'forkIO'.
Haskell threads can communicate via 'MVar's, a kind of synchronised
mutable variable (see "Control.Concurrent.MVar"). Several common
concurrency abstractions can be built from 'MVar's, and these are
provided by the "Control.Concurrent" library.
In GHC, threads may also communicate via exceptions.
-}
{- $conc_scheduling
Scheduling may be either pre-emptive or co-operative,
depending on the implementation of Concurrent Haskell (see below
for information related to specific compilers). In a co-operative
system, context switches only occur when you use one of the
primitives defined in this module. This means that programs such
as:
> main = forkIO (write 'a') >> write 'b'
> where write c = putChar c >> write c
will print either @aaaaaaaaaaaaaa...@ or @bbbbbbbbbbbb...@,
instead of some random interleaving of @a@s and @b@s. In
practice, cooperative multitasking is sufficient for writing
simple graphical user interfaces.
-}
{- $blocking
Different Haskell implementations have different characteristics with
regard to which operations block /all/ threads.
Using GHC without the @-threaded@ option, all foreign calls will block
all other Haskell threads in the system, although I\/O operations will
not. With the @-threaded@ option, only foreign calls with the @unsafe@
attribute will block all other threads.
-}
-- | Fork a thread and call the supplied function when the thread is about
-- to terminate, with an exception or a returned value. The function is
-- called with asynchronous exceptions masked.
--
-- > forkFinally action and_then =
-- > mask $ \restore ->
-- > forkIO $ try (restore action) >>= and_then
--
-- This function is useful for informing the parent when a child
-- terminates, for example.
--
-- @since 4.6.0.0
forkFinally :: IO a -> (Either SomeException a -> IO ()) -> IO ThreadId
forkFinally action and_then =
mask $ \restore ->
forkIO $ try (restore action) >>= and_then
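-- A usage sketch ('worker' is a hypothetical @IO@ action, not defined here):
-- log how the child thread finished, whether it returned a value or died
-- with an exception.
--
-- > tid <- forkFinally worker (\outcome -> putStrLn ("worker ended: " ++ show outcome))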
-- ---------------------------------------------------------------------------
-- Bound Threads
{- $boundthreads
#boundthreads#
Support for multiple operating system threads and bound threads as described
below is currently only available in the GHC runtime system if you use the
/-threaded/ option when linking.
Other Haskell systems do not currently support multiple operating system threads.
A bound thread is a haskell thread that is /bound/ to an operating system
thread. While the bound thread is still scheduled by the Haskell run-time
system, the operating system thread takes care of all the foreign calls made
by the bound thread.
To a foreign library, the bound thread will look exactly like an ordinary
operating system thread created using OS functions like @pthread_create@
or @CreateThread@.
Bound threads can be created using the 'forkOS' function below. All foreign
exported functions are run in a bound thread (bound to the OS thread that
called the function). Also, the @main@ action of every Haskell program is
run in a bound thread.
Why do we need this? Because if a foreign library is called from a thread
created using 'forkIO', it won't have access to any /thread-local state/ -
state variables that have specific values for each OS thread
(see POSIX's @pthread_key_create@ or Win32's @TlsAlloc@). Therefore, some
libraries (OpenGL, for example) will not work from a thread created using
'forkIO'. They work fine in threads created using 'forkOS' or when called
from @main@ or from a @foreign export@.
In terms of performance, 'forkOS' (aka bound) threads are much more
expensive than 'forkIO' (aka unbound) threads, because a 'forkOS'
thread is tied to a particular OS thread, whereas a 'forkIO' thread
can be run by any OS thread. Context-switching between a 'forkOS'
thread and a 'forkIO' thread is many times more expensive than between
two 'forkIO' threads.
Note in particular that the main program thread (the thread running
@Main.main@) is always a bound thread, so for good concurrency
performance you should ensure that the main thread is not doing
repeated communication with other threads in the system. Typically
this means forking subthreads to do the work using 'forkIO', and
waiting for the results in the main thread.
-}
-- | 'True' if bound threads are supported.
-- If @rtsSupportsBoundThreads@ is 'False', 'isCurrentThreadBound'
-- will always return 'False' and both 'forkOS' and 'runInBoundThread' will
-- fail.
foreign import ccall unsafe rtsSupportsBoundThreads :: Bool
{- |
Like 'forkIO', this sparks off a new thread to run the 'IO'
computation passed as the first argument, and returns the 'ThreadId'
of the newly created thread.
However, 'forkOS' creates a /bound/ thread, which is necessary if you
need to call foreign (non-Haskell) libraries that make use of
thread-local state, such as OpenGL (see "Control.Concurrent#boundthreads").
Using 'forkOS' instead of 'forkIO' makes no difference at all to the
scheduling behaviour of the Haskell runtime system. It is a common
misconception that you need to use 'forkOS' instead of 'forkIO' to
avoid blocking all the Haskell threads when making a foreign call;
this isn't the case. To allow foreign calls to be made without
blocking all the Haskell threads (with GHC), it is only necessary to
use the @-threaded@ option when linking your program, and to make sure
the foreign import is not marked @unsafe@.
-}
forkOS :: IO () -> IO ThreadId
foreign export ccall forkOS_entry
:: StablePtr (IO ()) -> IO ()
foreign import ccall "forkOS_entry" forkOS_entry_reimported
:: StablePtr (IO ()) -> IO ()
forkOS_entry :: StablePtr (IO ()) -> IO ()
forkOS_entry stableAction = do
action <- deRefStablePtr stableAction
action
foreign import ccall forkOS_createThread
:: StablePtr (IO ()) -> IO CInt
failNonThreaded :: IO a
failNonThreaded = fail $ "RTS doesn't support multiple OS threads "
++"(use ghc -threaded when linking)"
forkOS action0
| rtsSupportsBoundThreads = do
mv <- newEmptyMVar
b <- Exception.getMaskingState
let
-- async exceptions are masked in the child if they are masked
-- in the parent, as for forkIO (see #1048). forkOS_createThread
-- creates a thread with exceptions masked by default.
action1 = case b of
Unmasked -> unsafeUnmask action0
MaskedInterruptible -> action0
MaskedUninterruptible -> uninterruptibleMask_ action0
action_plus = catchException action1 childHandler
entry <- newStablePtr (myThreadId >>= putMVar mv >> action_plus)
err <- forkOS_createThread entry
when (err /= 0) $ fail "Cannot create OS thread."
tid <- takeMVar mv
freeStablePtr entry
return tid
| otherwise = failNonThreaded
-- | Like 'forkIOWithUnmask', but the child thread is a bound thread,
-- as with 'forkOS'.
forkOSWithUnmask :: ((forall a . IO a -> IO a) -> IO ()) -> IO ThreadId
forkOSWithUnmask io = forkOS (io unsafeUnmask)
-- | Returns 'True' if the calling thread is /bound/, that is, if it is
-- safe to use foreign libraries that rely on thread-local state from the
-- calling thread.
isCurrentThreadBound :: IO Bool
isCurrentThreadBound = IO $ \ s# ->
case isCurrentThreadBound# s# of
(# s2#, flg #) -> (# s2#, isTrue# (flg /=# 0#) #)
{- |
Run the 'IO' computation passed as the first argument. If the calling thread
is not /bound/, a bound thread is created temporarily. @runInBoundThread@
doesn't finish until the 'IO' computation finishes.
You can wrap a series of foreign function calls that rely on thread-local state
with @runInBoundThread@ so that you can use them without knowing whether the
current thread is /bound/.
-}
runInBoundThread :: IO a -> IO a
runInBoundThread action
| rtsSupportsBoundThreads = do
bound <- isCurrentThreadBound
if bound
then action
else do
ref <- newIORef undefined
let action_plus = Exception.try action >>= writeIORef ref
bracket (newStablePtr action_plus)
freeStablePtr
(\cEntry -> forkOS_entry_reimported cEntry >> readIORef ref) >>=
unsafeResult
| otherwise = failNonThreaded
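-- Usage sketch ('callIntoOpenGL' is a hypothetical foreign call that relies
-- on OS-thread-local state; it is not defined here):
--
-- > main = runInBoundThread callIntoOpenGL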
{- |
Run the 'IO' computation passed as the first argument. If the calling thread
is /bound/, an unbound thread is created temporarily using 'forkIO'.
@runInBoundThread@ doesn't finish until the 'IO' computation finishes.
Use this function /only/ in the rare case that you have actually observed a
performance loss due to the use of bound threads. A program that
doesn't need its main thread to be bound and makes /heavy/ use of concurrency
(e.g. a web server), might want to wrap its @main@ action in
@runInUnboundThread@.
Note that exceptions which are thrown to the current thread are thrown in turn
to the thread that is executing the given computation. This ensures there's
always a way of killing the forked thread.
-}
runInUnboundThread :: IO a -> IO a
runInUnboundThread action = do
bound <- isCurrentThreadBound
if bound
then do
mv <- newEmptyMVar
mask $ \restore -> do
tid <- forkIO $ Exception.try (restore action) >>= putMVar mv
let wait = takeMVar mv `catchException` \(e :: SomeException) ->
Exception.throwTo tid e >> wait
wait >>= unsafeResult
else action
unsafeResult :: Either SomeException a -> IO a
unsafeResult = either Exception.throwIO return
-- ---------------------------------------------------------------------------
-- threadWaitRead/threadWaitWrite
-- | Block the current thread until data is available to read on the
-- given file descriptor (GHC only).
--
-- This will throw an 'IOError' if the file descriptor was closed
-- while this thread was blocked. To safely close a file descriptor
-- that has been used with 'threadWaitRead', use
-- 'GHC.Conc.closeFdWith'.
threadWaitRead :: Fd -> IO ()
threadWaitRead fd
#ifdef mingw32_HOST_OS
-- we have no IO manager implementing threadWaitRead on Windows.
-- fdReady does the right thing, but we have to call it in a
-- separate thread, otherwise threadWaitRead won't be interruptible,
-- and this only works with -threaded.
| threaded = withThread (waitFd fd 0)
| otherwise = case fd of
0 -> do _ <- hWaitForInput stdin (-1)
return ()
-- hWaitForInput does work properly, but we can only
-- do this for stdin since we know its FD.
_ -> errorWithoutStackTrace "threadWaitRead requires -threaded on Windows, or use System.IO.hWaitForInput"
#else
= GHC.Conc.threadWaitRead fd
#endif
-- | Block the current thread until data can be written to the
-- given file descriptor (GHC only).
--
-- This will throw an 'IOError' if the file descriptor was closed
-- while this thread was blocked. To safely close a file descriptor
-- that has been used with 'threadWaitWrite', use
-- 'GHC.Conc.closeFdWith'.
threadWaitWrite :: Fd -> IO ()
threadWaitWrite fd
#ifdef mingw32_HOST_OS
| threaded = withThread (waitFd fd 1)
| otherwise = errorWithoutStackTrace "threadWaitWrite requires -threaded on Windows"
#else
= GHC.Conc.threadWaitWrite fd
#endif
-- | Returns an STM action that can be used to wait for data
-- to read from a file descriptor. The second returned value
-- is an IO action that can be used to deregister interest
-- in the file descriptor.
--
-- @since 4.7.0.0
threadWaitReadSTM :: Fd -> IO (STM (), IO ())
threadWaitReadSTM fd
#ifdef mingw32_HOST_OS
| threaded = do v <- newTVarIO Nothing
mask_ $ void $ forkIO $ do result <- try (waitFd fd 0)
atomically (writeTVar v $ Just result)
let waitAction = do result <- readTVar v
case result of
Nothing -> retry
Just (Right ()) -> return ()
Just (Left e) -> throwSTM (e :: IOException)
let killAction = return ()
return (waitAction, killAction)
| otherwise = errorWithoutStackTrace "threadWaitReadSTM requires -threaded on Windows"
#else
= GHC.Conc.threadWaitReadSTM fd
#endif
-- | Returns an STM action that can be used to wait until data
-- can be written to a file descriptor. The second returned value
-- is an IO action that can be used to deregister interest
-- in the file descriptor.
--
-- @since 4.7.0.0
threadWaitWriteSTM :: Fd -> IO (STM (), IO ())
threadWaitWriteSTM fd
#ifdef mingw32_HOST_OS
| threaded = do v <- newTVarIO Nothing
mask_ $ void $ forkIO $ do result <- try (waitFd fd 1)
atomically (writeTVar v $ Just result)
let waitAction = do result <- readTVar v
case result of
Nothing -> retry
Just (Right ()) -> return ()
Just (Left e) -> throwSTM (e :: IOException)
let killAction = return ()
return (waitAction, killAction)
| otherwise = errorWithoutStackTrace "threadWaitWriteSTM requires -threaded on Windows"
#else
= GHC.Conc.threadWaitWriteSTM fd
#endif
#ifdef mingw32_HOST_OS
foreign import ccall unsafe "rtsSupportsBoundThreads" threaded :: Bool
withThread :: IO a -> IO a
withThread io = do
m <- newEmptyMVar
_ <- mask_ $ forkIO $ try io >>= putMVar m
x <- takeMVar m
case x of
Right a -> return a
Left e -> throwIO (e :: IOException)
waitFd :: Fd -> CInt -> IO ()
waitFd fd write = do
throwErrnoIfMinus1_ "fdReady" $
fdReady (fromIntegral fd) write iNFINITE 0
iNFINITE :: CInt
iNFINITE = 0xFFFFFFFF -- urgh
foreign import ccall safe "fdReady"
fdReady :: CInt -> CInt -> CInt -> CInt -> IO CInt
#endif
-- ---------------------------------------------------------------------------
-- More docs
{- $osthreads
#osthreads# In GHC, threads created by 'forkIO' are lightweight threads, and
are managed entirely by the GHC runtime. Typically Haskell
threads are an order of magnitude or two more efficient (in
terms of both time and space) than operating system threads.
The downside of having lightweight threads is that only one can
run at a time, so if one thread blocks in a foreign call, for
example, the other threads cannot continue. The GHC runtime
works around this by making use of full OS threads where
necessary. When the program is built with the @-threaded@
option (to link against the multithreaded version of the
runtime), a thread making a @safe@ foreign call will not block
the other threads in the system; another OS thread will take
over running Haskell threads until the original call returns.
The runtime maintains a pool of these /worker/ threads so that
multiple Haskell threads can be involved in external calls
simultaneously.
The "System.IO" library manages multiplexing in its own way. On
Windows systems it uses @safe@ foreign calls to ensure that
threads doing I\/O operations don't block the whole runtime,
whereas on Unix systems all the currently blocked I\/O requests
are managed by a single thread (the /IO manager thread/) using
a mechanism such as @epoll@ or @kqueue@, depending on what is
provided by the host operating system.
The runtime will run a Haskell thread using any of the available
worker OS threads. If you need control over which particular OS
thread is used to run a given Haskell thread, perhaps because
you need to call a foreign library that uses OS-thread-local
state, then you need bound threads (see "Control.Concurrent#boundthreads").
If you don't use the @-threaded@ option, then the runtime does
not make use of multiple OS threads. Foreign calls will block
all other running Haskell threads until the call returns. The
"System.IO" library still does multiplexing, so there can be multiple
threads doing I\/O, and this is handled internally by the runtime using
@select@.
-}
{- $termination
In a standalone GHC program, only the main thread is
required to terminate in order for the process to terminate.
Thus all other forked threads will simply terminate at the same
time as the main thread (the terminology for this kind of
behaviour is \"daemonic threads\").
If you want the program to wait for child threads to
finish before exiting, you need to program this yourself. A
simple mechanism is to have each child thread write to an
'MVar' when it completes, and have the main
thread wait on all the 'MVar's before
exiting:
> myForkIO :: IO () -> IO (MVar ())
> myForkIO io = do
> mvar <- newEmptyMVar
> forkFinally io (\_ -> putMVar mvar ())
> return mvar
Note that we use 'forkFinally' to make sure that the
'MVar' is written to even if the thread dies or
is killed for some reason.
A better method is to keep a global list of all child
threads which we should wait for at the end of the program:
> children :: MVar [MVar ()]
> children = unsafePerformIO (newMVar [])
>
> waitForChildren :: IO ()
> waitForChildren = do
> cs <- takeMVar children
> case cs of
> [] -> return ()
> m:ms -> do
> putMVar children ms
> takeMVar m
> waitForChildren
>
> forkChild :: IO () -> IO ThreadId
> forkChild io = do
> mvar <- newEmptyMVar
> childs <- takeMVar children
> putMVar children (mvar:childs)
> forkFinally io (\_ -> putMVar mvar ())
>
> main =
> later waitForChildren $
> ...
The main thread principle also applies to calls to Haskell from
outside, using @foreign export@. When the @foreign export@ed
function is invoked, it starts a new main thread, and it returns
when this main thread terminates. If the call causes new
threads to be forked, they may remain in the system after the
@foreign export@ed function has returned.
-}
{- $preemption
GHC implements pre-emptive multitasking: the execution of
threads are interleaved in a random fashion. More specifically,
a thread may be pre-empted whenever it allocates some memory,
which unfortunately means that tight loops which do no
allocation tend to lock out other threads (this only seems to
happen with pathological benchmark-style code, however).
The rescheduling timer runs on a 20ms granularity by
default, but this may be altered using the
@-i\<n\>@ RTS option. After a rescheduling
\"tick\" the running thread is pre-empted as soon as
possible.
One final note: the
@aaaa@ @bbbb@ example may not
work too well on GHC (see Scheduling, above), due
to the locking on a 'System.IO.Handle'. Only one thread
may hold the lock on a 'System.IO.Handle' at any one
time, so if a reschedule happens while a thread is holding the
lock, the other thread won't be able to run. The upshot is that
the switch from @aaaa@ to
@bbbbb@ happens infrequently. It can be
improved by lowering the reschedule tick period. We also have a
patch that causes a reschedule whenever a thread waiting on a
lock is woken up, but haven't found it to be useful for anything
other than this example :-)
-}
{- $deadlock
GHC attempts to detect when threads are deadlocked using the garbage
collector. A thread that is not reachable (cannot be found by
following pointers from live objects) must be deadlocked, and in this
case the thread is sent an exception. The exception is either
'BlockedIndefinitelyOnMVar', 'BlockedIndefinitelyOnSTM',
'NonTermination', or 'Deadlock', depending on the way in which the
thread is deadlocked.
Note that this feature is intended for debugging, and should not be
relied on for the correct operation of your program. There is no
guarantee that the garbage collector will be accurate enough to detect
your deadlock, and no guarantee that the garbage collector will run in
a timely enough manner. Basically, the same caveats as for finalizers
apply to deadlock detection.
There is a subtle interaction between deadlock detection and
finalizers (as created by 'Foreign.Concurrent.newForeignPtr' or the
functions in "System.Mem.Weak"): if a thread is blocked waiting for a
finalizer to run, then the thread will be considered deadlocked and
sent an exception. So preferably don't do this, but if you have no
alternative then it is possible to prevent the thread from being
considered deadlocked by making a 'StablePtr' pointing to it. Don't
forget to release the 'StablePtr' later with 'freeStablePtr'.
-}
|
olsner/ghc
|
libraries/base/Control/Concurrent.hs
|
bsd-3-clause
| 24,910 | 0 | 21 | 6,070 | 1,994 | 1,052 | 942 | 133 | 3 |
module Bug387
( -- * Section1#a:section1#
test1
-- * Section2#a:section2#
, test2
) where
test1 :: Int
test1 = 223
test2 :: Int
test2 = 42
|
Helkafen/haddock
|
html-test/src/Bug387.hs
|
bsd-2-clause
| 155 | 0 | 4 | 42 | 34 | 22 | 12 | 8 | 1 |
module Interpreter where
import BasicPrelude
import Control.Monad.State
import Control.Monad.Except
import Types (Store)
import Statements (exec, Prog)
runInterpreter :: Prog -> Store -> IO (Either Text (), Store)
runInterpreter prog store = runStateT (runExceptT (exec prog)) store
|
mlitchard/squanchy
|
src/Interpreter.hs
|
isc
| 290 | 0 | 10 | 43 | 94 | 53 | 41 | 8 | 1 |
-- | This module experiments with the GLFW library along with FRP to do some
-- interactive IO.
module FRP.Jalapeno.IO where
-------------
-- Imports --
import Graphics.Rendering.OpenGL.Raw
import Graphics.Rendering.OpenGL
import Control.Monad.IO.Class
import Control.Concurrent
import Graphics.UI.GLFW
import Data.Time.Clock
import Data.IORef
import FRP.Jalapeno.Behavior
import FRP.Jalapeno.Assets
import FRP.Jalapeno.Sample
----------
-- Code --
-- | This function runs when GLFW asks the window to close, writing 'True' to
-- the closed @'IORef'@.
closeCallback :: IORef Bool -> WindowCloseCallback
closeCallback closedRef = do
writeIORef closedRef True
return True
-- | Running a given FRP network from some @'Behavior'@.
driveNetwork :: Show a => IORef Bool -> Behavior Double IO () a -> Int -> IO ()
driveNetwork closedRef b rate = do
ct <- getCurrentTime
driveNetwork' ct 0 closedRef b rate
where driveNetwork' :: Show a => UTCTime -> Double -> IORef Bool -> Behavior Double IO () a -> Int -> IO ()
driveNetwork' lt t closedRef b rate = do
closed <- readIORef closedRef
case closed of
True -> return ()
False -> do
clear [ColorBuffer, DepthBuffer]
(v, next) <- runBehavior t () b
print v
swapBuffers
pollEvents
threadDelay $ 1000000 `div` rate
ct <- getCurrentTime
driveNetwork' ct
(t + (fromRational $ toRational $ diffUTCTime ct lt))
closedRef
next
rate
-- | Running a given behavior at a given rate (after having constructed a GLFW
-- instance).
runNetwork :: Show a => Behavior Double IO () a -> Int -> IO ()
runNetwork b rate = do
succ <- initialize
case succ of
False -> putStrLn "Failed to initialize GLFW."
True -> do
openWindow (Size 640 480)
[DisplayRGBBits 8 8 8, DisplayAlphaBits 8, DisplayDepthBits 24]
Window
closedRef <- newIORef False
windowTitle $= "Testing Jalapeno"
windowCloseCallback $= closeCallback closedRef
sp <- loadShaderProgram "test" >>=
(\e -> case e of
Left err -> putStrLn err >> return 0
Right (ShaderProgram v) -> return v)
driveNetwork closedRef b rate
glDeleteProgram sp
closeWindow
terminate
|
crockeo/jalapeno
|
src/lib/FRP/Jalapeno/IO.hs
|
mit
| 2,546 | 0 | 21 | 809 | 621 | 305 | 316 | 57 | 3 |
module ByteString.TreeBuilder.Prelude
(
module Exports,
)
where
-- base
-------------------------
import Control.Applicative as Exports hiding (WrappedArrow(..))
import Control.Arrow as Exports hiding (first, second)
import Control.Category as Exports
import Control.Concurrent as Exports
import Control.Exception as Exports
import Control.Monad as Exports hiding (fail, mapM_, sequence_, forM_, msum, mapM, sequence, forM)
import Control.Monad.IO.Class as Exports
import Control.Monad.Fail as Exports
import Control.Monad.Fix as Exports hiding (fix)
import Control.Monad.ST as Exports
import Data.Bifunctor as Exports
import Data.Bits as Exports
import Data.Bool as Exports
import Data.Char as Exports
import Data.Coerce as Exports
import Data.Complex as Exports
import Data.Data as Exports
import Data.Dynamic as Exports
import Data.Either as Exports
import Data.Fixed as Exports
import Data.Foldable as Exports hiding (toList)
import Data.Function as Exports hiding (id, (.))
import Data.Functor as Exports
import Data.Functor.Compose as Exports
import Data.Int as Exports
import Data.IORef as Exports
import Data.Ix as Exports
import Data.List as Exports hiding (sortOn, isSubsequenceOf, uncons, concat, foldr, foldl1, maximum, minimum, product, sum, all, and, any, concatMap, elem, foldl, foldr1, notElem, or, find, maximumBy, minimumBy, mapAccumL, mapAccumR, foldl')
import Data.List.NonEmpty as Exports (NonEmpty(..))
import Data.Maybe as Exports
import Data.Monoid as Exports hiding (Alt, (<>))
import Data.Ord as Exports
import Data.Proxy as Exports
import Data.Ratio as Exports
import Data.Semigroup as Exports hiding (First(..), Last(..))
import Data.STRef as Exports
import Data.String as Exports
import Data.Traversable as Exports
import Data.Tuple as Exports
import Data.Unique as Exports
import Data.Version as Exports
import Data.Void as Exports
import Data.Word as Exports
import Debug.Trace as Exports
import Foreign.ForeignPtr as Exports
import Foreign.Ptr as Exports
import Foreign.StablePtr as Exports
import Foreign.Storable as Exports
import GHC.Conc as Exports hiding (orElse, withMVar, threadWaitWriteSTM, threadWaitWrite, threadWaitReadSTM, threadWaitRead)
import GHC.Exts as Exports (IsList(..), lazy, inline, sortWith, groupWith)
import GHC.Generics as Exports (Generic)
import GHC.IO.Exception as Exports
import GHC.OverloadedLabels as Exports
import Numeric as Exports
import Prelude as Exports hiding (Read, fail, concat, foldr, mapM_, sequence_, foldl1, maximum, minimum, product, sum, all, and, any, concatMap, elem, foldl, foldr1, notElem, or, mapM, sequence, id, (.))
import System.Environment as Exports
import System.Exit as Exports
import System.IO as Exports (Handle, hClose)
import System.IO.Error as Exports
import System.IO.Unsafe as Exports
import System.Mem as Exports
import System.Mem.StableName as Exports
import System.Timeout as Exports
import Text.ParserCombinators.ReadP as Exports (ReadP, readP_to_S, readS_to_P)
import Text.ParserCombinators.ReadPrec as Exports (ReadPrec, readPrec_to_P, readP_to_Prec, readPrec_to_S, readS_to_Prec)
import Text.Printf as Exports (printf, hPrintf)
import Unsafe.Coerce as Exports
-- bytestring
-------------------------
import Data.ByteString as Exports (ByteString)
| nikita-volkov/bytestring-tree-builder | library/ByteString/TreeBuilder/Prelude.hs | mit | 3,258 | 0 | 6 | 395 | 860 | 604 | 256 | 71 | 0 |
{-# LANGUAGE OverloadedStrings, DeriveDataTypeable #-}
module ViewModels.ThreadThumbnailViewModel where
import Data.Text
import Data.Data
import qualified ViewModels.ImageViewModel as IVM
data ThreadThumbnailViewModel = ThreadThumbnailViewModel {
threadId :: Text,
threadURL :: Text,
thumbnail :: IVM.ImageViewModel
} deriving (Data, Typeable)
| itsuart/fdc_archivist | src/ViewModels/ThreadThumbnailViewModel.hs | mit | 355 | 0 | 9 | 46 | 65 | 41 | 24 | 10 | 0 |
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE Rank2Types #-}
module Web.Apiary.PureScript
( I.PureScript
, I.PureScriptConfig(..)
, initPureScript
, pureScript
) where
import Web.Apiary(MonadIO(..))
import Control.Monad.Apiary.Action(ActionT)
import qualified Web.Apiary.PureScript.Internal as I
import Data.Apiary.Extension(Initializer', initializer', Has, getExt)
import Data.Proxy.Compat(Proxy(..))
initPureScript :: MonadIO m => I.PureScriptConfig -> Initializer' m I.PureScript
initPureScript = initializer' . liftIO . I.makePureScript
pureScript :: (Has I.PureScript exts, MonadIO m) => FilePath -> ActionT exts prms m ()
pureScript m = getExt (Proxy :: Proxy I.PureScript) >>= flip I.pureScript m
| philopon/apiary | apiary-purescript/src/Web/Apiary/PureScript.hs | mit | 729 | 0 | 9 | 104 | 214 | 125 | 89 | 16 | 1 |
{-# LANGUAGE PatternSynonyms, ForeignFunctionInterface, JavaScriptFFI #-}
module GHCJS.DOM.JSFFI.Generated.DocumentFragment
(js_newDocumentFragment, newDocumentFragment, js_querySelector,
querySelector, js_querySelectorAll, querySelectorAll,
DocumentFragment, castToDocumentFragment, gTypeDocumentFragment)
where
import Prelude ((.), (==), (>>=), return, IO, Int, Float, Double, Bool(..), Maybe, maybe, fromIntegral, round, fmap, Show, Read, Eq, Ord)
import Data.Typeable (Typeable)
import GHCJS.Types (JSRef(..), JSString, castRef)
import GHCJS.Foreign (jsNull)
import GHCJS.Foreign.Callback (syncCallback, asyncCallback, syncCallback1, asyncCallback1, syncCallback2, asyncCallback2, OnBlocked(..))
import GHCJS.Marshal (ToJSRef(..), FromJSRef(..))
import GHCJS.Marshal.Pure (PToJSRef(..), PFromJSRef(..))
import Control.Monad.IO.Class (MonadIO(..))
import Data.Int (Int64)
import Data.Word (Word, Word64)
import GHCJS.DOM.Types
import Control.Applicative ((<$>))
import GHCJS.DOM.EventTargetClosures (EventName, unsafeEventName)
import GHCJS.DOM.Enums
foreign import javascript unsafe
"new window[\"DocumentFragment\"]()" js_newDocumentFragment ::
IO (JSRef DocumentFragment)
-- | <https://developer.mozilla.org/en-US/docs/Web/API/DocumentFragment Mozilla DocumentFragment documentation>
newDocumentFragment :: (MonadIO m) => m DocumentFragment
newDocumentFragment
= liftIO (js_newDocumentFragment >>= fromJSRefUnchecked)
foreign import javascript unsafe "$1[\"querySelector\"]($2)"
js_querySelector ::
JSRef DocumentFragment -> JSString -> IO (JSRef Element)
-- | <https://developer.mozilla.org/en-US/docs/Web/API/DocumentFragment.querySelector Mozilla DocumentFragment.querySelector documentation>
querySelector ::
(MonadIO m, ToJSString selectors) =>
DocumentFragment -> selectors -> m (Maybe Element)
querySelector self selectors
= liftIO
((js_querySelector (unDocumentFragment self)
(toJSString selectors))
>>= fromJSRef)
foreign import javascript unsafe "$1[\"querySelectorAll\"]($2)"
js_querySelectorAll ::
JSRef DocumentFragment -> JSString -> IO (JSRef NodeList)
-- | <https://developer.mozilla.org/en-US/docs/Web/API/DocumentFragment.querySelectorAll Mozilla DocumentFragment.querySelectorAll documentation>
querySelectorAll ::
(MonadIO m, ToJSString selectors) =>
DocumentFragment -> selectors -> m (Maybe NodeList)
querySelectorAll self selectors
= liftIO
((js_querySelectorAll (unDocumentFragment self)
(toJSString selectors))
>>= fromJSRef)
| plow-technologies/ghcjs-dom | src/GHCJS/DOM/JSFFI/Generated/DocumentFragment.hs | mit | 2,668 | 20 | 11 | 422 | 583 | 345 | 238 | 47 | 1 |
module Main where
import System.Process (callCommand)
import Poi.Migrations.Migrate (prepareMigrationWithConfig, migrationStatusWithConfig, createNewMigration)
import Poi.Migrations.Types (Options(..), MigrateArgs(..), Mode(..))
import Poi.Migrations.Utils (poiArgs, readConfigForEnv, dbConfig)
main :: IO ()
main = do
poiArgs (migs)
migs :: Options -> IO ()
migs (Options (MigrateArgs mode env ver)) = do
config <- readConfigForEnv env
case mode of
Prepare -> prepareMigrationWithConfig (dbConfig config)
Up -> callCommand $ makeCommand "up"
Down -> callCommand $ makeCommand "down"
New xs ft -> createNewMigration xs ft
Redo -> callCommand $ makeCommand "redo"
Status -> migrationStatusWithConfig (dbConfig config)
where
makeCommand :: String -> String
makeCommand s = maybe ("stack Migrations.hs " ++ s ++ " --env " ++ env)
(\v -> "stack Migrations.hs " ++ s ++ " --env " ++ env ++ " --version " ++ v)
ver
| pranaysashank/poi | poi-bin/main.hs | mit | 1,008 | 0 | 14 | 226 | 310 | 164 | 146 | 22 | 6 |
{-# LANGUAGE MultiParamTypeClasses, TypeSynonymInstances, FlexibleInstances #-}
{- |
Module : $Header$
Description : COL instance of class Logic
Copyright : (c) Till Mossakowski, Uni Bremen 2002-2004
License : GPLv2 or higher, see LICENSE.txt
Maintainer : [email protected]
Stability : provisional
Portability : non-portable (via Logic)
COL instance of class Logic
-}
module COL.Logic_COL where
import COL.AS_COL
import COL.COLSign
import COL.ATC_COL ()
import COL.Parse_AS ()
import COL.StatAna
import COL.Print_AS ()
import CASL.Sign
import CASL.StaticAna
import CASL.MixfixParser
import CASL.Morphism
import CASL.SymbolMapAnalysis
import CASL.AS_Basic_CASL
import CASL.Parse_AS_Basic
import CASL.MapSentence
import CASL.SymbolParser
import CASL.Logic_CASL ()
import Logic.Logic
data COL = COL deriving Show
instance Language COL where
description _ =
"COLCASL extends CASL by constructors and observers"
type C_BASIC_SPEC = BASIC_SPEC () COL_SIG_ITEM ()
type CSign = Sign () COLSign
type COLMor = Morphism () COLSign (DefMorExt COLSign)
type COLFORMULA = FORMULA ()
instance SignExtension COLSign where
isSubSignExtension = isSubCOLSign
instance Syntax COL C_BASIC_SPEC
SYMB_ITEMS SYMB_MAP_ITEMS
where
parse_basic_spec COL = Just $ basicSpec col_reserved_words
parse_symb_items COL = Just $ symbItems col_reserved_words
parse_symb_map_items COL = Just $ symbMapItems col_reserved_words
instance Sentences COL COLFORMULA CSign COLMor Symbol where
map_sen COL m = return . mapSen (const id) m
sym_of COL = symOf
symmap_of COL = morphismToSymbMap
sym_name COL = symName
instance StaticAnalysis COL C_BASIC_SPEC COLFORMULA
SYMB_ITEMS SYMB_MAP_ITEMS
CSign
COLMor
Symbol RawSymbol where
basic_analysis COL = Just $ basicAnalysis (const return)
(const return) ana_COL_SIG_ITEM
emptyMix
stat_symb_map_items COL = statSymbMapItems
stat_symb_items COL = statSymbItems
symbol_to_raw COL = symbolToRaw
id_to_raw COL = idToRaw
matches COL = CASL.Morphism.matches
empty_signature COL = emptySign emptyCOLSign
signature_union COL sigma1 =
return . addSig addCOLSign sigma1
morphism_union COL = plainMorphismUnion addCOLSign
final_union COL = finalUnion addCOLSign
is_subsig COL = isSubSig isSubCOLSign
subsig_inclusion COL = sigInclusion emptyMorExt
cogenerated_sign COL = cogeneratedSign emptyMorExt
generated_sign COL = generatedSign emptyMorExt
induced_from_morphism COL = inducedFromMorphism emptyMorExt
induced_from_to_morphism COL =
inducedFromToMorphism emptyMorExt isSubCOLSign diffCOLSign
instance Logic COL ()
C_BASIC_SPEC COLFORMULA SYMB_ITEMS SYMB_MAP_ITEMS
CSign
COLMor
Symbol RawSymbol () where
empty_proof_tree _ = ()
| nevrenato/Hets_Fork | COL/Logic_COL.hs | gpl-2.0 | 3,103 | 0 | 9 | 790 | 594 | 306 | 288 | 70 | 0 |