| code | repo_name | path | license | size | n_ast_errors | ast_max_depth | n_whitespaces | n_ast_nodes | n_ast_terminals | n_ast_nonterminals | loc | cycloplexity |
|---|---|---|---|---|---|---|---|---|---|---|---|---|
| stringlengths 5..1.03M | stringlengths 5..90 | stringlengths 4..158 | stringclasses 15 values | int64 5..1.03M | int64 0..53.9k | int64 2..4.17k | int64 0..365k | int64 3..317k | int64 1..171k | int64 1..146k | int64 -1..37.3k | int64 -1..1.31k |
module Settings.Exception.Prettify.JSONException (
pJSONException
) where
import Control.Exception
import Network.HTTP.Simple
import Settings.Exception.Prettify.HttpException (getReqStringHost)
pJSONException :: SomeException -> Maybe String
pJSONException ex = extractor <$> fromException ex
extractor :: JSONException -> String
extractor (JSONParseException req _ _) =
"Server responded with ill-formatted JSON when we request \"" ++
getReqStringHost (show req) ++ "\"."
extractor (JSONConversionException req _ _) =
"Server responded with JSON of unexpected format when we request \"" ++
getReqStringHost (show req) ++ "\"."
| Evan-Zhao/FastCanvas | src/Settings/Exception/Prettify/JSONException.hs | bsd-3-clause | 682 | 0 | 9 | 129 | 142 | 76 | 66 | 14 | 1 |
{-
(c) The GRASP/AQUA Project, Glasgow University, 1992-1998
\section[RnExpr]{Renaming of expressions}
Basically dependency analysis.
Handles @Match@, @GRHSs@, @HsExpr@, and @Qualifier@ datatypes. In
general, all of these functions return a renamed thing, and a set of
free variables.
-}
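{-
An illustrative sketch only (toy types, not the GHC API) of the convention
described above: every renamer below returns the renamed syntax paired with
its free variables.

    import qualified Data.Set as Set

    data Expr = Var String | App Expr Expr

    rnToy :: Expr -> (Expr, Set.Set String)
    rnToy e@(Var v) = (e, Set.singleton v)
    rnToy (App f x) = let (f', fvF) = rnToy f
                          (x', fvX) = rnToy x
                      in (App f' x', fvF `Set.union` fvX)
-}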
{-# LANGUAGE CPP #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE MultiWayIf #-}
module RnExpr (
rnLExpr, rnExpr, rnStmts
) where
#include "HsVersions.h"
import RnBinds ( rnLocalBindsAndThen, rnLocalValBindsLHS, rnLocalValBindsRHS,
rnMatchGroup, rnGRHS, makeMiniFixityEnv)
import HsSyn
import TcRnMonad
import Module ( getModule )
import RnEnv
import RnSplice ( rnBracket, rnSpliceExpr, checkThLocalName )
import RnTypes
import RnPat
import DynFlags
import PrelNames
import BasicTypes
import Name
import NameSet
import RdrName
import UniqSet
import Data.List
import Util
import ListSetOps ( removeDups )
import ErrUtils
import Outputable
import SrcLoc
import FastString
import Control.Monad
import TysWiredIn ( nilDataConName )
import qualified GHC.LanguageExtensions as LangExt
import Data.Ord
import Data.Array
{-
************************************************************************
* *
\subsubsection{Expressions}
* *
************************************************************************
-}
rnExprs :: [LHsExpr RdrName] -> RnM ([LHsExpr Name], FreeVars)
rnExprs ls = rnExprs' ls emptyUniqSet
where
rnExprs' [] acc = return ([], acc)
rnExprs' (expr:exprs) acc =
do { (expr', fvExpr) <- rnLExpr expr
-- Now we do a "seq" on the free vars because typically it's small
-- or empty, especially in very long lists of constants
; let acc' = acc `plusFV` fvExpr
; (exprs', fvExprs) <- acc' `seq` rnExprs' exprs acc'
; return (expr':exprs', fvExprs) }
-- Variables. We look up the variable and return the resulting name.
rnLExpr :: LHsExpr RdrName -> RnM (LHsExpr Name, FreeVars)
rnLExpr = wrapLocFstM rnExpr
rnExpr :: HsExpr RdrName -> RnM (HsExpr Name, FreeVars)
finishHsVar :: Located Name -> RnM (HsExpr Name, FreeVars)
-- Separated from rnExpr because it's also used
-- when renaming infix expressions
finishHsVar (L l name)
= do { this_mod <- getModule
; when (nameIsLocalOrFrom this_mod name) $
checkThLocalName name
; return (HsVar (L l name), unitFV name) }
rnUnboundVar :: RdrName -> RnM (HsExpr Name, FreeVars)
rnUnboundVar v
= do { if isUnqual v
then -- Treat this as a "hole"
-- Do not fail right now; instead, return HsUnboundVar
-- and let the type checker report the error
do { let occ = rdrNameOcc v
; uv <- if startsWithUnderscore occ
then return (TrueExprHole occ)
else OutOfScope occ <$> getGlobalRdrEnv
; return (HsUnboundVar uv, emptyFVs) }
else -- Fail immediately (qualified name)
do { n <- reportUnboundName v
; return (HsVar (noLoc n), emptyFVs) } }
rnExpr (HsVar (L l v))
= do { opt_DuplicateRecordFields <- xoptM LangExt.DuplicateRecordFields
; mb_name <- lookupOccRn_overloaded opt_DuplicateRecordFields v
; case mb_name of {
Nothing -> rnUnboundVar v ;
Just (Left name)
| name == nilDataConName -- Treat [] as an ExplicitList, so that
-- OverloadedLists works correctly
-> rnExpr (ExplicitList placeHolderType Nothing [])
| otherwise
-> finishHsVar (L l name) ;
Just (Right [f@(FieldOcc (L _ fn) s)]) ->
return (HsRecFld (ambiguousFieldOcc (FieldOcc (L l fn) s))
, unitFV (selectorFieldOcc f)) ;
Just (Right fs@(_:_:_)) -> return (HsRecFld (Ambiguous (L l v)
PlaceHolder)
, mkFVs (map selectorFieldOcc fs));
Just (Right []) -> error "rnExpr/HsVar" } }
rnExpr (HsIPVar v)
= return (HsIPVar v, emptyFVs)
rnExpr (HsOverLabel v)
= return (HsOverLabel v, emptyFVs)
rnExpr (HsLit lit@(HsString src s))
= do { opt_OverloadedStrings <- xoptM LangExt.OverloadedStrings
; if opt_OverloadedStrings then
rnExpr (HsOverLit (mkHsIsString src s placeHolderType))
else do {
; rnLit lit
; return (HsLit lit, emptyFVs) } }
rnExpr (HsLit lit)
= do { rnLit lit
; return (HsLit lit, emptyFVs) }
rnExpr (HsOverLit lit)
= do { (lit', fvs) <- rnOverLit lit
; return (HsOverLit lit', fvs) }
rnExpr (HsApp fun arg)
= do { (fun',fvFun) <- rnLExpr fun
; (arg',fvArg) <- rnLExpr arg
; return (HsApp fun' arg', fvFun `plusFV` fvArg) }
rnExpr (HsAppType fun arg)
= do { (fun',fvFun) <- rnLExpr fun
; (arg',fvArg) <- rnHsWcType HsTypeCtx arg
; return (HsAppType fun' arg', fvFun `plusFV` fvArg) }
rnExpr (OpApp e1 op _ e2)
= do { (e1', fv_e1) <- rnLExpr e1
; (e2', fv_e2) <- rnLExpr e2
; (op', fv_op) <- rnLExpr op
-- Deal with fixity
-- When renaming code synthesised from "deriving" declarations
-- we used to avoid fixity stuff, but we can't easily tell any
-- more, so I've removed the test. Adding HsPars in TcGenDeriv
-- should prevent bad things happening.
; fixity <- case op' of
L _ (HsVar (L _ n)) -> lookupFixityRn n
L _ (HsRecFld f) -> lookupFieldFixityRn f
_ -> return (Fixity (show minPrecedence) minPrecedence InfixL)
-- c.f. lookupFixity for unbound
; final_e <- mkOpAppRn e1' op' fixity e2'
; return (final_e, fv_e1 `plusFV` fv_op `plusFV` fv_e2) }
rnExpr (NegApp e _)
= do { (e', fv_e) <- rnLExpr e
; (neg_name, fv_neg) <- lookupSyntaxName negateName
; final_e <- mkNegAppRn e' neg_name
; return (final_e, fv_e `plusFV` fv_neg) }
------------------------------------------
-- Template Haskell extensions
-- Don't ifdef-GHCI them because we want to fail gracefully
-- (not with an rnExpr crash) in a stage-1 compiler.
rnExpr e@(HsBracket br_body) = rnBracket e br_body
rnExpr (HsSpliceE splice) = rnSpliceExpr splice
---------------------------------------------
-- Sections
-- See Note [Parsing sections] in Parser.y
rnExpr (HsPar (L loc (section@(SectionL {}))))
= do { (section', fvs) <- rnSection section
; return (HsPar (L loc section'), fvs) }
rnExpr (HsPar (L loc (section@(SectionR {}))))
= do { (section', fvs) <- rnSection section
; return (HsPar (L loc section'), fvs) }
rnExpr (HsPar e)
= do { (e', fvs_e) <- rnLExpr e
; return (HsPar e', fvs_e) }
rnExpr expr@(SectionL {})
= do { addErr (sectionErr expr); rnSection expr }
rnExpr expr@(SectionR {})
= do { addErr (sectionErr expr); rnSection expr }
---------------------------------------------
rnExpr (HsCoreAnn src ann expr)
= do { (expr', fvs_expr) <- rnLExpr expr
; return (HsCoreAnn src ann expr', fvs_expr) }
rnExpr (HsSCC src lbl expr)
= do { (expr', fvs_expr) <- rnLExpr expr
; return (HsSCC src lbl expr', fvs_expr) }
rnExpr (HsTickPragma src info srcInfo expr)
= do { (expr', fvs_expr) <- rnLExpr expr
; return (HsTickPragma src info srcInfo expr', fvs_expr) }
rnExpr (HsLam matches)
= do { (matches', fvMatch) <- rnMatchGroup LambdaExpr rnLExpr matches
; return (HsLam matches', fvMatch) }
rnExpr (HsLamCase matches)
= do { (matches', fvs_ms) <- rnMatchGroup CaseAlt rnLExpr matches
; return (HsLamCase matches', fvs_ms) }
rnExpr (HsCase expr matches)
= do { (new_expr, e_fvs) <- rnLExpr expr
; (new_matches, ms_fvs) <- rnMatchGroup CaseAlt rnLExpr matches
; return (HsCase new_expr new_matches, e_fvs `plusFV` ms_fvs) }
rnExpr (HsLet (L l binds) expr)
= rnLocalBindsAndThen binds $ \binds' _ -> do
{ (expr',fvExpr) <- rnLExpr expr
; return (HsLet (L l binds') expr', fvExpr) }
rnExpr (HsDo do_or_lc (L l stmts) _)
= do { ((stmts', _), fvs) <-
rnStmtsWithPostProcessing do_or_lc rnLExpr
postProcessStmtsForApplicativeDo stmts
(\ _ -> return ((), emptyFVs))
; return ( HsDo do_or_lc (L l stmts') placeHolderType, fvs ) }
rnExpr (ExplicitList _ _ exps)
= do { opt_OverloadedLists <- xoptM LangExt.OverloadedLists
; (exps', fvs) <- rnExprs exps
; if opt_OverloadedLists
then do {
; (from_list_n_name, fvs') <- lookupSyntaxName fromListNName
; return (ExplicitList placeHolderType (Just from_list_n_name) exps'
, fvs `plusFV` fvs') }
else
return (ExplicitList placeHolderType Nothing exps', fvs) }
rnExpr (ExplicitPArr _ exps)
= do { (exps', fvs) <- rnExprs exps
; return (ExplicitPArr placeHolderType exps', fvs) }
rnExpr (ExplicitTuple tup_args boxity)
= do { checkTupleSection tup_args
; checkTupSize (length tup_args)
; (tup_args', fvs) <- mapAndUnzipM rnTupArg tup_args
; return (ExplicitTuple tup_args' boxity, plusFVs fvs) }
where
rnTupArg (L l (Present e)) = do { (e',fvs) <- rnLExpr e
; return (L l (Present e'), fvs) }
rnTupArg (L l (Missing _)) = return (L l (Missing placeHolderType)
, emptyFVs)
rnExpr (ExplicitSum alt arity expr _)
= do { (expr', fvs) <- rnLExpr expr
; return (ExplicitSum alt arity expr' PlaceHolder, fvs) }
rnExpr (RecordCon { rcon_con_name = con_id
, rcon_flds = rec_binds@(HsRecFields { rec_dotdot = dd }) })
= do { con_lname@(L _ con_name) <- lookupLocatedOccRn con_id
; (flds, fvs) <- rnHsRecFields (HsRecFieldCon con_name) mk_hs_var rec_binds
; (flds', fvss) <- mapAndUnzipM rn_field flds
; let rec_binds' = HsRecFields { rec_flds = flds', rec_dotdot = dd }
; return (RecordCon { rcon_con_name = con_lname, rcon_flds = rec_binds'
, rcon_con_expr = noPostTcExpr, rcon_con_like = PlaceHolder }
, fvs `plusFV` plusFVs fvss `addOneFV` con_name) }
where
mk_hs_var l n = HsVar (L l n)
rn_field (L l fld) = do { (arg', fvs) <- rnLExpr (hsRecFieldArg fld)
; return (L l (fld { hsRecFieldArg = arg' }), fvs) }
rnExpr (RecordUpd { rupd_expr = expr, rupd_flds = rbinds })
= do { (expr', fvExpr) <- rnLExpr expr
; (rbinds', fvRbinds) <- rnHsRecUpdFields rbinds
; return (RecordUpd { rupd_expr = expr', rupd_flds = rbinds'
, rupd_cons = PlaceHolder, rupd_in_tys = PlaceHolder
, rupd_out_tys = PlaceHolder, rupd_wrap = PlaceHolder }
, fvExpr `plusFV` fvRbinds) }
rnExpr (ExprWithTySig expr pty)
= do { (pty', fvTy) <- rnHsSigWcType ExprWithTySigCtx pty
; (expr', fvExpr) <- bindSigTyVarsFV (hsWcScopedTvs pty') $
rnLExpr expr
; return (ExprWithTySig expr' pty', fvExpr `plusFV` fvTy) }
rnExpr (HsIf _ p b1 b2)
= do { (p', fvP) <- rnLExpr p
; (b1', fvB1) <- rnLExpr b1
; (b2', fvB2) <- rnLExpr b2
; (mb_ite, fvITE) <- lookupIfThenElse
; return (HsIf mb_ite p' b1' b2', plusFVs [fvITE, fvP, fvB1, fvB2]) }
rnExpr (HsMultiIf _ty alts)
= do { (alts', fvs) <- mapFvRn (rnGRHS IfAlt rnLExpr) alts
-- ; return (HsMultiIf ty alts', fvs) }
; return (HsMultiIf placeHolderType alts', fvs) }
rnExpr (ArithSeq _ _ seq)
= do { opt_OverloadedLists <- xoptM LangExt.OverloadedLists
; (new_seq, fvs) <- rnArithSeq seq
; if opt_OverloadedLists
then do {
; (from_list_name, fvs') <- lookupSyntaxName fromListName
; return (ArithSeq noPostTcExpr (Just from_list_name) new_seq, fvs `plusFV` fvs') }
else
return (ArithSeq noPostTcExpr Nothing new_seq, fvs) }
rnExpr (PArrSeq _ seq)
= do { (new_seq, fvs) <- rnArithSeq seq
; return (PArrSeq noPostTcExpr new_seq, fvs) }
{-
These three are pattern syntax appearing in expressions.
Since all the symbols are reservedops we can simply reject them.
We return a (bogus) EWildPat in each case.
-}
rnExpr EWildPat = return (hsHoleExpr, emptyFVs) -- "_" is just a hole
rnExpr e@(EAsPat {}) =
patSynErr e (text "Did you mean to enable TypeApplications?")
rnExpr e@(EViewPat {}) = patSynErr e empty
rnExpr e@(ELazyPat {}) = patSynErr e empty
{-
************************************************************************
* *
Static values
* *
************************************************************************
For the static form we check that the free variables are all top-level
value bindings. This is done by checking that the name is external or
wired-in. See the Notes about the NameSorts in Name.hs.
-}
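{-
A user-level illustration (a hedged sketch, not part of this module) of the
rule implemented just below: the body of a 'static' form may only mention
top-level value bindings.

    {-# LANGUAGE StaticPointers #-}
    import GHC.StaticPtr (StaticPtr)

    top :: Int
    top = 42

    okStatic :: StaticPtr Int
    okStatic = static top      -- accepted: 'top' is a top-level value binding

    -- badStatic x = static x  -- would be rejected: 'x' is a local binding
-}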
rnExpr e@(HsStatic _ expr) = do
target <- fmap hscTarget getDynFlags
case target of
-- SPT entries are expected to exist in object code so far, and this is
-- not the case in interpreted mode. See bug #9878.
HscInterpreted -> addErr $ sep
[ text "The static form is not supported in interpreted mode."
, text "Please use -fobject-code."
]
_ -> return ()
(expr',fvExpr) <- rnLExpr expr
stage <- getStage
case stage of
Splice _ -> addErr $ sep
[ text "static forms cannot be used in splices:"
, nest 2 $ ppr e
]
_ -> return ()
mod <- getModule
let fvExpr' = filterNameSet (nameIsLocalOrFrom mod) fvExpr
return (HsStatic fvExpr' expr', fvExpr)
{-
************************************************************************
* *
Arrow notation
* *
************************************************************************
-}
rnExpr (HsProc pat body)
= newArrowScope $
rnPat ProcExpr pat $ \ pat' -> do
{ (body',fvBody) <- rnCmdTop body
; return (HsProc pat' body', fvBody) }
-- Ideally, these would be done in parsing, but to keep parsing simple, we do it here.
rnExpr e@(HsArrApp {}) = arrowFail e
rnExpr e@(HsArrForm {}) = arrowFail e
rnExpr other = pprPanic "rnExpr: unexpected expression" (ppr other)
-- HsWrap
hsHoleExpr :: HsExpr id
hsHoleExpr = HsUnboundVar (TrueExprHole (mkVarOcc "_"))
arrowFail :: HsExpr RdrName -> RnM (HsExpr Name, FreeVars)
arrowFail e
= do { addErr (vcat [ text "Arrow command found where an expression was expected:"
, nest 2 (ppr e) ])
-- Return a place-holder hole, so that we can carry on
-- to report other errors
; return (hsHoleExpr, emptyFVs) }
----------------------
-- See Note [Parsing sections] in Parser.y
rnSection :: HsExpr RdrName -> RnM (HsExpr Name, FreeVars)
rnSection section@(SectionR op expr)
= do { (op', fvs_op) <- rnLExpr op
; (expr', fvs_expr) <- rnLExpr expr
; checkSectionPrec InfixR section op' expr'
; return (SectionR op' expr', fvs_op `plusFV` fvs_expr) }
rnSection section@(SectionL expr op)
= do { (expr', fvs_expr) <- rnLExpr expr
; (op', fvs_op) <- rnLExpr op
; checkSectionPrec InfixL section op' expr'
; return (SectionL expr' op', fvs_op `plusFV` fvs_expr) }
rnSection other = pprPanic "rnSection" (ppr other)
{-
************************************************************************
* *
Arrow commands
* *
************************************************************************
-}
rnCmdArgs :: [LHsCmdTop RdrName] -> RnM ([LHsCmdTop Name], FreeVars)
rnCmdArgs [] = return ([], emptyFVs)
rnCmdArgs (arg:args)
= do { (arg',fvArg) <- rnCmdTop arg
; (args',fvArgs) <- rnCmdArgs args
; return (arg':args', fvArg `plusFV` fvArgs) }
rnCmdTop :: LHsCmdTop RdrName -> RnM (LHsCmdTop Name, FreeVars)
rnCmdTop = wrapLocFstM rnCmdTop'
where
rnCmdTop' (HsCmdTop cmd _ _ _)
= do { (cmd', fvCmd) <- rnLCmd cmd
; let cmd_names = [arrAName, composeAName, firstAName] ++
nameSetElemsStable (methodNamesCmd (unLoc cmd'))
-- Generate the rebindable syntax for the monad
; (cmd_names', cmd_fvs) <- lookupSyntaxNames cmd_names
; return (HsCmdTop cmd' placeHolderType placeHolderType
(cmd_names `zip` cmd_names'),
fvCmd `plusFV` cmd_fvs) }
rnLCmd :: LHsCmd RdrName -> RnM (LHsCmd Name, FreeVars)
rnLCmd = wrapLocFstM rnCmd
rnCmd :: HsCmd RdrName -> RnM (HsCmd Name, FreeVars)
rnCmd (HsCmdArrApp arrow arg _ ho rtl)
= do { (arrow',fvArrow) <- select_arrow_scope (rnLExpr arrow)
; (arg',fvArg) <- rnLExpr arg
; return (HsCmdArrApp arrow' arg' placeHolderType ho rtl,
fvArrow `plusFV` fvArg) }
where
select_arrow_scope tc = case ho of
HsHigherOrderApp -> tc
HsFirstOrderApp -> escapeArrowScope tc
-- See Note [Escaping the arrow scope] in TcRnTypes
-- Before renaming 'arrow', use the environment of the enclosing
-- proc for the (-<) case.
-- Local bindings, inside the enclosing proc, are not in scope
-- inside 'arrow'. In the higher-order case (-<<), they are.
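-- For example (hypothetical user code, under -XArrows):
--   proc x -> (arr (+ x)) -<  1   -- rejected: the arrow expression mentions
--                                 -- the proc-bound 'x'
--   proc x -> (arr (+ x)) -<< 1   -- accepted (given ArrowApply)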
-- infix form
rnCmd (HsCmdArrForm op (Just _) [arg1, arg2])
= do { (op',fv_op) <- escapeArrowScope (rnLExpr op)
; let L _ (HsVar (L _ op_name)) = op'
; (arg1',fv_arg1) <- rnCmdTop arg1
; (arg2',fv_arg2) <- rnCmdTop arg2
-- Deal with fixity
; fixity <- lookupFixityRn op_name
; final_e <- mkOpFormRn arg1' op' fixity arg2'
; return (final_e, fv_arg1 `plusFV` fv_op `plusFV` fv_arg2) }
rnCmd (HsCmdArrForm op fixity cmds)
= do { (op',fvOp) <- escapeArrowScope (rnLExpr op)
; (cmds',fvCmds) <- rnCmdArgs cmds
; return (HsCmdArrForm op' fixity cmds', fvOp `plusFV` fvCmds) }
rnCmd (HsCmdApp fun arg)
= do { (fun',fvFun) <- rnLCmd fun
; (arg',fvArg) <- rnLExpr arg
; return (HsCmdApp fun' arg', fvFun `plusFV` fvArg) }
rnCmd (HsCmdLam matches)
= do { (matches', fvMatch) <- rnMatchGroup LambdaExpr rnLCmd matches
; return (HsCmdLam matches', fvMatch) }
rnCmd (HsCmdPar e)
= do { (e', fvs_e) <- rnLCmd e
; return (HsCmdPar e', fvs_e) }
rnCmd (HsCmdCase expr matches)
= do { (new_expr, e_fvs) <- rnLExpr expr
; (new_matches, ms_fvs) <- rnMatchGroup CaseAlt rnLCmd matches
; return (HsCmdCase new_expr new_matches, e_fvs `plusFV` ms_fvs) }
rnCmd (HsCmdIf _ p b1 b2)
= do { (p', fvP) <- rnLExpr p
; (b1', fvB1) <- rnLCmd b1
; (b2', fvB2) <- rnLCmd b2
; (mb_ite, fvITE) <- lookupIfThenElse
; return (HsCmdIf mb_ite p' b1' b2', plusFVs [fvITE, fvP, fvB1, fvB2]) }
rnCmd (HsCmdLet (L l binds) cmd)
= rnLocalBindsAndThen binds $ \ binds' _ -> do
{ (cmd',fvExpr) <- rnLCmd cmd
; return (HsCmdLet (L l binds') cmd', fvExpr) }
rnCmd (HsCmdDo (L l stmts) _)
= do { ((stmts', _), fvs) <-
rnStmts ArrowExpr rnLCmd stmts (\ _ -> return ((), emptyFVs))
; return ( HsCmdDo (L l stmts') placeHolderType, fvs ) }
rnCmd cmd@(HsCmdWrap {}) = pprPanic "rnCmd" (ppr cmd)
---------------------------------------------------
type CmdNeeds = FreeVars -- Only inhabitants are
-- appAName, choiceAName, loopAName
-- find what methods the Cmd needs (loop, choice, apply)
methodNamesLCmd :: LHsCmd Name -> CmdNeeds
methodNamesLCmd = methodNamesCmd . unLoc
methodNamesCmd :: HsCmd Name -> CmdNeeds
methodNamesCmd (HsCmdArrApp _arrow _arg _ HsFirstOrderApp _rtl)
= emptyFVs
methodNamesCmd (HsCmdArrApp _arrow _arg _ HsHigherOrderApp _rtl)
= unitFV appAName
methodNamesCmd (HsCmdArrForm {}) = emptyFVs
methodNamesCmd (HsCmdWrap _ cmd) = methodNamesCmd cmd
methodNamesCmd (HsCmdPar c) = methodNamesLCmd c
methodNamesCmd (HsCmdIf _ _ c1 c2)
= methodNamesLCmd c1 `plusFV` methodNamesLCmd c2 `addOneFV` choiceAName
methodNamesCmd (HsCmdLet _ c) = methodNamesLCmd c
methodNamesCmd (HsCmdDo (L _ stmts) _) = methodNamesStmts stmts
methodNamesCmd (HsCmdApp c _) = methodNamesLCmd c
methodNamesCmd (HsCmdLam match) = methodNamesMatch match
methodNamesCmd (HsCmdCase _ matches)
= methodNamesMatch matches `addOneFV` choiceAName
--methodNamesCmd _ = emptyFVs
-- Other forms can't occur in commands, but it's not convenient
-- to error here so we just do what's convenient.
-- The type checker will complain later
---------------------------------------------------
methodNamesMatch :: MatchGroup Name (LHsCmd Name) -> FreeVars
methodNamesMatch (MG { mg_alts = L _ ms })
= plusFVs (map do_one ms)
where
do_one (L _ (Match _ _ _ grhss)) = methodNamesGRHSs grhss
-------------------------------------------------
-- gaw 2004
methodNamesGRHSs :: GRHSs Name (LHsCmd Name) -> FreeVars
methodNamesGRHSs (GRHSs grhss _) = plusFVs (map methodNamesGRHS grhss)
-------------------------------------------------
methodNamesGRHS :: Located (GRHS Name (LHsCmd Name)) -> CmdNeeds
methodNamesGRHS (L _ (GRHS _ rhs)) = methodNamesLCmd rhs
---------------------------------------------------
methodNamesStmts :: [Located (StmtLR Name Name (LHsCmd Name))] -> FreeVars
methodNamesStmts stmts = plusFVs (map methodNamesLStmt stmts)
---------------------------------------------------
methodNamesLStmt :: Located (StmtLR Name Name (LHsCmd Name)) -> FreeVars
methodNamesLStmt = methodNamesStmt . unLoc
methodNamesStmt :: StmtLR Name Name (LHsCmd Name) -> FreeVars
methodNamesStmt (LastStmt cmd _ _) = methodNamesLCmd cmd
methodNamesStmt (BodyStmt cmd _ _ _) = methodNamesLCmd cmd
methodNamesStmt (BindStmt _ cmd _ _ _) = methodNamesLCmd cmd
methodNamesStmt (RecStmt { recS_stmts = stmts }) =
methodNamesStmts stmts `addOneFV` loopAName
methodNamesStmt (LetStmt {}) = emptyFVs
methodNamesStmt (ParStmt {}) = emptyFVs
methodNamesStmt (TransStmt {}) = emptyFVs
methodNamesStmt ApplicativeStmt{} = emptyFVs
-- ParStmt and TransStmt can't occur in commands, but it's not
-- convenient to error here so we just do what's convenient
{-
************************************************************************
* *
Arithmetic sequences
* *
************************************************************************
-}
rnArithSeq :: ArithSeqInfo RdrName -> RnM (ArithSeqInfo Name, FreeVars)
rnArithSeq (From expr)
= do { (expr', fvExpr) <- rnLExpr expr
; return (From expr', fvExpr) }
rnArithSeq (FromThen expr1 expr2)
= do { (expr1', fvExpr1) <- rnLExpr expr1
; (expr2', fvExpr2) <- rnLExpr expr2
; return (FromThen expr1' expr2', fvExpr1 `plusFV` fvExpr2) }
rnArithSeq (FromTo expr1 expr2)
= do { (expr1', fvExpr1) <- rnLExpr expr1
; (expr2', fvExpr2) <- rnLExpr expr2
; return (FromTo expr1' expr2', fvExpr1 `plusFV` fvExpr2) }
rnArithSeq (FromThenTo expr1 expr2 expr3)
= do { (expr1', fvExpr1) <- rnLExpr expr1
; (expr2', fvExpr2) <- rnLExpr expr2
; (expr3', fvExpr3) <- rnLExpr expr3
; return (FromThenTo expr1' expr2' expr3',
plusFVs [fvExpr1, fvExpr2, fvExpr3]) }
{-
************************************************************************
* *
\subsubsection{@Stmt@s: in @do@ expressions}
* *
************************************************************************
-}
{-
Note [Deterministic ApplicativeDo and RecursiveDo desugaring]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Both ApplicativeDo and RecursiveDo need to create tuples not
present in the source text.
For ApplicativeDo we create:
(a,b,c) <- (\c b a -> (a,b,c)) <$>
For RecursiveDo we create:
mfix (\ ~(a,b,c) -> do ...; return (a',b',c'))
The order of the components in those tuples needs to be stable
across recompilations, otherwise they can get optimized differently
and we end up with incompatible binaries.
To get a stable order we use nameSetElemsStable.
See Note [Deterministic UniqFM] to learn more about nondeterminism.
-}
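{-
A minimal, self-contained sketch (plain Haskell, not the GHC API) of the
stable-ordering idea above: binders keyed by machine-generated uniques can
come out of a set in a run-dependent order, so we sort on a stable key
before building the tuple, analogously to nameSetElemsStable.

    import Data.List (sortOn)

    -- (unique, occurrence name); the unique is nondeterministic
    stableElems :: [(Int, String)] -> [String]
    stableElems = map snd . sortOn snd
-}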
-- | Rename some Stmts
rnStmts :: Outputable (body RdrName)
=> HsStmtContext Name
-> (Located (body RdrName) -> RnM (Located (body Name), FreeVars))
-- ^ How to rename the body of each statement (e.g. rnLExpr)
-> [LStmt RdrName (Located (body RdrName))]
-- ^ Statements
-> ([Name] -> RnM (thing, FreeVars))
-- ^ if these statements scope over something, this renames it
-- and returns the result.
-> RnM (([LStmt Name (Located (body Name))], thing), FreeVars)
rnStmts ctxt rnBody = rnStmtsWithPostProcessing ctxt rnBody noPostProcessStmts
-- | like 'rnStmts' but applies a post-processing step to the renamed Stmts
rnStmtsWithPostProcessing
:: Outputable (body RdrName)
=> HsStmtContext Name
-> (Located (body RdrName) -> RnM (Located (body Name), FreeVars))
-- ^ How to rename the body of each statement (e.g. rnLExpr)
-> (HsStmtContext Name
-> [(LStmt Name (Located (body Name)), FreeVars)]
-> RnM ([LStmt Name (Located (body Name))], FreeVars))
-- ^ postprocess the statements
-> [LStmt RdrName (Located (body RdrName))]
-- ^ Statements
-> ([Name] -> RnM (thing, FreeVars))
-- ^ if these statements scope over something, this renames it
-- and returns the result.
-> RnM (([LStmt Name (Located (body Name))], thing), FreeVars)
rnStmtsWithPostProcessing ctxt rnBody ppStmts stmts thing_inside
= do { ((stmts', thing), fvs) <-
rnStmtsWithFreeVars ctxt rnBody stmts thing_inside
; (pp_stmts, fvs') <- ppStmts ctxt stmts'
; return ((pp_stmts, thing), fvs `plusFV` fvs')
}
-- | maybe rearrange statements according to the ApplicativeDo transformation
postProcessStmtsForApplicativeDo
:: HsStmtContext Name
-> [(ExprLStmt Name, FreeVars)]
-> RnM ([ExprLStmt Name], FreeVars)
postProcessStmtsForApplicativeDo ctxt stmts
= do {
-- rearrange the statements using ApplicativeStmt if
-- -XApplicativeDo is on. Also strip out the FreeVars attached
-- to each Stmt body.
ado_is_on <- xoptM LangExt.ApplicativeDo
; let is_do_expr | DoExpr <- ctxt = True
| otherwise = False
; if ado_is_on && is_do_expr
then rearrangeForApplicativeDo ctxt stmts
else noPostProcessStmts ctxt stmts }
-- | strip the FreeVars annotations from statements
noPostProcessStmts
:: HsStmtContext Name
-> [(LStmt Name (Located (body Name)), FreeVars)]
-> RnM ([LStmt Name (Located (body Name))], FreeVars)
noPostProcessStmts _ stmts = return (map fst stmts, emptyNameSet)
rnStmtsWithFreeVars :: Outputable (body RdrName)
=> HsStmtContext Name
-> (Located (body RdrName) -> RnM (Located (body Name), FreeVars))
-> [LStmt RdrName (Located (body RdrName))]
-> ([Name] -> RnM (thing, FreeVars))
-> RnM ( ([(LStmt Name (Located (body Name)), FreeVars)], thing)
, FreeVars)
-- Each Stmt body is annotated with its FreeVars, so that
-- we can rearrange statements for ApplicativeDo.
--
-- Variables bound by the Stmts, and mentioned in thing_inside,
-- do not appear in the result FreeVars
rnStmtsWithFreeVars ctxt _ [] thing_inside
= do { checkEmptyStmts ctxt
; (thing, fvs) <- thing_inside []
; return (([], thing), fvs) }
rnStmtsWithFreeVars MDoExpr rnBody stmts thing_inside -- Deal with mdo
= -- Behave like do { rec { ...all but last... }; last }
do { ((stmts1, (stmts2, thing)), fvs)
<- rnStmt MDoExpr rnBody (noLoc $ mkRecStmt all_but_last) $ \ _ ->
do { last_stmt' <- checkLastStmt MDoExpr last_stmt
; rnStmt MDoExpr rnBody last_stmt' thing_inside }
; return (((stmts1 ++ stmts2), thing), fvs) }
where
Just (all_but_last, last_stmt) = snocView stmts
rnStmtsWithFreeVars ctxt rnBody (lstmt@(L loc _) : lstmts) thing_inside
| null lstmts
= setSrcSpan loc $
do { lstmt' <- checkLastStmt ctxt lstmt
; rnStmt ctxt rnBody lstmt' thing_inside }
| otherwise
= do { ((stmts1, (stmts2, thing)), fvs)
<- setSrcSpan loc $
do { checkStmt ctxt lstmt
; rnStmt ctxt rnBody lstmt $ \ bndrs1 ->
rnStmtsWithFreeVars ctxt rnBody lstmts $ \ bndrs2 ->
thing_inside (bndrs1 ++ bndrs2) }
; return (((stmts1 ++ stmts2), thing), fvs) }
----------------------
rnStmt :: Outputable (body RdrName)
=> HsStmtContext Name
-> (Located (body RdrName) -> RnM (Located (body Name), FreeVars))
-- ^ How to rename the body of the statement
-> LStmt RdrName (Located (body RdrName))
-- ^ The statement
-> ([Name] -> RnM (thing, FreeVars))
-- ^ Rename the stuff that this statement scopes over
-> RnM ( ([(LStmt Name (Located (body Name)), FreeVars)], thing)
, FreeVars)
-- Variables bound by the Stmt, and mentioned in thing_inside,
-- do not appear in the result FreeVars
rnStmt ctxt rnBody (L loc (LastStmt body noret _)) thing_inside
= do { (body', fv_expr) <- rnBody body
; (ret_op, fvs1) <- lookupStmtName ctxt returnMName
; (thing, fvs3) <- thing_inside []
; return (([(L loc (LastStmt body' noret ret_op), fv_expr)], thing),
fv_expr `plusFV` fvs1 `plusFV` fvs3) }
rnStmt ctxt rnBody (L loc (BodyStmt body _ _ _)) thing_inside
= do { (body', fv_expr) <- rnBody body
; (then_op, fvs1) <- lookupStmtName ctxt thenMName
; (guard_op, fvs2) <- if isListCompExpr ctxt
then lookupStmtName ctxt guardMName
else return (noSyntaxExpr, emptyFVs)
-- Only list/parr/monad comprehensions use 'guard'
-- Also for sub-stmts of same eg [ e | x<-xs, gd | blah ]
-- Here "gd" is a guard
; (thing, fvs3) <- thing_inside []
; return (([(L loc (BodyStmt body'
then_op guard_op placeHolderType), fv_expr)], thing),
fv_expr `plusFV` fvs1 `plusFV` fvs2 `plusFV` fvs3) }
rnStmt ctxt rnBody (L loc (BindStmt pat body _ _ _)) thing_inside
= do { (body', fv_expr) <- rnBody body
-- The binders do not scope over the expression
; (bind_op, fvs1) <- lookupStmtName ctxt bindMName
; xMonadFailEnabled <- fmap (xopt LangExt.MonadFailDesugaring) getDynFlags
; let failFunction | xMonadFailEnabled = failMName
| otherwise = failMName_preMFP
; (fail_op, fvs2) <- lookupSyntaxName failFunction
; rnPat (StmtCtxt ctxt) pat $ \ pat' -> do
{ (thing, fvs3) <- thing_inside (collectPatBinders pat')
; return (( [( L loc (BindStmt pat' body' bind_op fail_op PlaceHolder)
, fv_expr )]
, thing),
fv_expr `plusFV` fvs1 `plusFV` fvs2 `plusFV` fvs3) }}
-- fv_expr shouldn't really be filtered by the rnPatsAndThen
-- but it does not matter because the names are unique
rnStmt _ _ (L loc (LetStmt (L l binds))) thing_inside
= do { rnLocalBindsAndThen binds $ \binds' bind_fvs -> do
{ (thing, fvs) <- thing_inside (collectLocalBinders binds')
; return (([(L loc (LetStmt (L l binds')), bind_fvs)], thing), fvs) } }
rnStmt ctxt rnBody (L loc (RecStmt { recS_stmts = rec_stmts })) thing_inside
= do { (return_op, fvs1) <- lookupStmtName ctxt returnMName
; (mfix_op, fvs2) <- lookupStmtName ctxt mfixName
; (bind_op, fvs3) <- lookupStmtName ctxt bindMName
; let empty_rec_stmt = emptyRecStmtName { recS_ret_fn = return_op
, recS_mfix_fn = mfix_op
, recS_bind_fn = bind_op }
-- Step 1: Bring all the binders of the mdo into scope
-- (Remember that this also removes the binders from the
-- finally-returned free-vars.)
-- And rename each individual stmt, making a
-- singleton segment. At this stage the FwdRefs field
-- isn't finished: it's empty for all except a BindStmt
-- for which it's the fwd refs within the bind itself
-- (This set may not be empty, because we're in a recursive
-- context.)
; rnRecStmtsAndThen rnBody rec_stmts $ \ segs -> do
{ let bndrs = nameSetElemsStable $
foldr (unionNameSet . (\(ds,_,_,_) -> ds))
emptyNameSet
segs
-- See Note [Deterministic ApplicativeDo and RecursiveDo desugaring]
; (thing, fvs_later) <- thing_inside bndrs
; let (rec_stmts', fvs) = segmentRecStmts loc ctxt empty_rec_stmt segs fvs_later
-- We aren't going to try to group RecStmts with
-- ApplicativeDo, so attaching empty FVs is fine.
; return ( ((zip rec_stmts' (repeat emptyNameSet)), thing)
, fvs `plusFV` fvs1 `plusFV` fvs2 `plusFV` fvs3) } }
rnStmt ctxt _ (L loc (ParStmt segs _ _ _)) thing_inside
= do { (mzip_op, fvs1) <- lookupStmtNamePoly ctxt mzipName
; (bind_op, fvs2) <- lookupStmtName ctxt bindMName
; (return_op, fvs3) <- lookupStmtName ctxt returnMName
; ((segs', thing), fvs4) <- rnParallelStmts (ParStmtCtxt ctxt) return_op segs thing_inside
; return ( ([(L loc (ParStmt segs' mzip_op bind_op placeHolderType), fvs4)], thing)
, fvs1 `plusFV` fvs2 `plusFV` fvs3 `plusFV` fvs4) }
rnStmt ctxt _ (L loc (TransStmt { trS_stmts = stmts, trS_by = by, trS_form = form
, trS_using = using })) thing_inside
= do { -- Rename the 'using' expression in the context before the transform is begun
(using', fvs1) <- rnLExpr using
-- Rename the stmts and the 'by' expression
-- Keep track of the variables mentioned in the 'by' expression
; ((stmts', (by', used_bndrs, thing)), fvs2)
<- rnStmts (TransStmtCtxt ctxt) rnLExpr stmts $ \ bndrs ->
do { (by', fvs_by) <- mapMaybeFvRn rnLExpr by
; (thing, fvs_thing) <- thing_inside bndrs
; let fvs = fvs_by `plusFV` fvs_thing
used_bndrs = filter (`elemNameSet` fvs) bndrs
-- The paper (Fig 5) has a bug here; we must treat any free variable
-- of the "thing inside", **or of the by-expression**, as used
; return ((by', used_bndrs, thing), fvs) }
-- Lookup `return`, `(>>=)` and `liftM` for monad comprehensions
; (return_op, fvs3) <- lookupStmtName ctxt returnMName
; (bind_op, fvs4) <- lookupStmtName ctxt bindMName
; (fmap_op, fvs5) <- case form of
ThenForm -> return (noExpr, emptyFVs)
_ -> lookupStmtNamePoly ctxt fmapName
; let all_fvs = fvs1 `plusFV` fvs2 `plusFV` fvs3
`plusFV` fvs4 `plusFV` fvs5
bndr_map = used_bndrs `zip` used_bndrs
-- See Note [TransStmt binder map] in HsExpr
; traceRn (text "rnStmt: implicitly rebound these used binders:" <+> ppr bndr_map)
; return (([(L loc (TransStmt { trS_stmts = stmts', trS_bndrs = bndr_map
, trS_by = by', trS_using = using', trS_form = form
, trS_ret = return_op, trS_bind = bind_op
, trS_bind_arg_ty = PlaceHolder
, trS_fmap = fmap_op }), fvs2)], thing), all_fvs) }
rnStmt _ _ (L _ ApplicativeStmt{}) _ =
panic "rnStmt: ApplicativeStmt"
rnParallelStmts :: forall thing. HsStmtContext Name
-> SyntaxExpr Name
-> [ParStmtBlock RdrName RdrName]
-> ([Name] -> RnM (thing, FreeVars))
-> RnM (([ParStmtBlock Name Name], thing), FreeVars)
-- Note [Renaming parallel Stmts]
rnParallelStmts ctxt return_op segs thing_inside
= do { orig_lcl_env <- getLocalRdrEnv
; rn_segs orig_lcl_env [] segs }
where
rn_segs :: LocalRdrEnv
-> [Name] -> [ParStmtBlock RdrName RdrName]
-> RnM (([ParStmtBlock Name Name], thing), FreeVars)
rn_segs _ bndrs_so_far []
= do { let (bndrs', dups) = removeDups cmpByOcc bndrs_so_far
; mapM_ dupErr dups
; (thing, fvs) <- bindLocalNames bndrs' (thing_inside bndrs')
; return (([], thing), fvs) }
rn_segs env bndrs_so_far (ParStmtBlock stmts _ _ : segs)
= do { ((stmts', (used_bndrs, segs', thing)), fvs)
<- rnStmts ctxt rnLExpr stmts $ \ bndrs ->
setLocalRdrEnv env $ do
{ ((segs', thing), fvs) <- rn_segs env (bndrs ++ bndrs_so_far) segs
; let used_bndrs = filter (`elemNameSet` fvs) bndrs
; return ((used_bndrs, segs', thing), fvs) }
; let seg' = ParStmtBlock stmts' used_bndrs return_op
; return ((seg':segs', thing), fvs) }
cmpByOcc n1 n2 = nameOccName n1 `compare` nameOccName n2
dupErr vs = addErr (text "Duplicate binding in parallel list comprehension for:"
<+> quotes (ppr (head vs)))
lookupStmtName :: HsStmtContext Name -> Name -> RnM (SyntaxExpr Name, FreeVars)
-- Like lookupSyntaxName, but respects contexts
lookupStmtName ctxt n
| rebindableContext ctxt
= lookupSyntaxName n
| otherwise
= return (mkRnSyntaxExpr n, emptyFVs)
lookupStmtNamePoly :: HsStmtContext Name -> Name -> RnM (HsExpr Name, FreeVars)
lookupStmtNamePoly ctxt name
| rebindableContext ctxt
= do { rebindable_on <- xoptM LangExt.RebindableSyntax
; if rebindable_on
then do { fm <- lookupOccRn (nameRdrName name)
; return (HsVar (noLoc fm), unitFV fm) }
else not_rebindable }
| otherwise
= not_rebindable
where
not_rebindable = return (HsVar (noLoc name), emptyFVs)
-- | Is this a context where we respect RebindableSyntax?
-- ListComp/PArrComp are never rebindable.
-- Neither is ArrowExpr, which has its own desugarer in DsArrows.
rebindableContext :: HsStmtContext Name -> Bool
rebindableContext ctxt = case ctxt of
ListComp -> False
PArrComp -> False
ArrowExpr -> False
PatGuard {} -> False
DoExpr -> True
MDoExpr -> True
MonadComp -> True
GhciStmtCtxt -> True -- I suppose?
ParStmtCtxt c -> rebindableContext c -- Look inside to
TransStmtCtxt c -> rebindableContext c -- the parent context
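{-
A small user-level example (a sketch, not part of this module, using a
hypothetical logging bind) of what "respecting RebindableSyntax" means for
the DoExpr context above: with the extension on, do-notation picks up
whatever (>>=) and return are in scope.

    {-# LANGUAGE RebindableSyntax #-}
    import Prelude hiding ((>>=), (>>), return)
    import qualified Prelude as P

    (>>=) :: IO a -> (a -> IO b) -> IO b
    m >>= k = m P.>>= \a -> putStrLn "bind!" P.>> k a

    return :: a -> IO a
    return = P.return

    main :: IO ()
    main = do            -- desugars using the (>>=) defined above
      x <- P.readLn
      print (x + 1 :: Int)
-}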
{-
Note [Renaming parallel Stmts]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Renaming parallel statements is painful. Given, say
[ a+c | a <- as, bs <- bss
| c <- bs, a <- ds ]
Note that
(a) In order to report "Defined but not used" about 'bs', we must
rename each group of Stmts with a thing_inside whose FreeVars
include at least {a,c}
(b) We want to report that 'a' is illegally bound in both branches
(c) The 'bs' in the second group must obviously not be captured by
the binding in the first group
To satisfy (a) we nest the segments.
To satisfy (b) we check for duplicates just before thing_inside.
To satisfy (c) we reset the LocalRdrEnv each time.
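(As a user-level aside, not about the renamer itself: a well-formed parallel
comprehension simply zips its branches, e.g. under -XParallelListComp

    [ x + y | x <- [1,2,3] | y <- [10,20,30] ]  ==  [11,22,33]

so the work here is purely about scoping and error reporting.)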
************************************************************************
* *
\subsubsection{mdo expressions}
* *
************************************************************************
-}
type FwdRefs = NameSet
type Segment stmts = (Defs,
Uses, -- May include defs
FwdRefs, -- A subset of uses that are
-- (a) used before they are bound in this segment, or
-- (b) used here, and bound in subsequent segments
stmts) -- Either Stmt or [Stmt]
-- wrapper that does both the left- and right-hand sides
rnRecStmtsAndThen :: Outputable (body RdrName) =>
(Located (body RdrName)
-> RnM (Located (body Name), FreeVars))
-> [LStmt RdrName (Located (body RdrName))]
-- assumes that the FreeVars returned includes
-- the FreeVars of the Segments
-> ([Segment (LStmt Name (Located (body Name)))]
-> RnM (a, FreeVars))
-> RnM (a, FreeVars)
rnRecStmtsAndThen rnBody s cont
= do { -- (A) Make the mini fixity env for all of the stmts
fix_env <- makeMiniFixityEnv (collectRecStmtsFixities s)
-- (B) Do the LHSes
; new_lhs_and_fv <- rn_rec_stmts_lhs fix_env s
-- ...bring them and their fixities into scope
; let bound_names = collectLStmtsBinders (map fst new_lhs_and_fv)
-- Fake uses of variables introduced implicitly (warning suppression, see #4404)
implicit_uses = lStmtsImplicits (map fst new_lhs_and_fv)
; bindLocalNamesFV bound_names $
addLocalFixities fix_env bound_names $ do
-- (C) do the right-hand-sides and thing-inside
{ segs <- rn_rec_stmts rnBody bound_names new_lhs_and_fv
; (res, fvs) <- cont segs
; warnUnusedLocalBinds bound_names (fvs `unionNameSet` implicit_uses)
; return (res, fvs) }}
-- get all the fixity decls in any Let stmt
collectRecStmtsFixities :: [LStmtLR RdrName RdrName body] -> [LFixitySig RdrName]
collectRecStmtsFixities l =
foldr (\ s -> \acc -> case s of
(L _ (LetStmt (L _ (HsValBinds (ValBindsIn _ sigs))))) ->
foldr (\ sig -> \ acc -> case sig of
(L loc (FixSig s)) -> (L loc s) : acc
_ -> acc) acc sigs
_ -> acc) [] l
-- left-hand sides
rn_rec_stmt_lhs :: Outputable body => MiniFixityEnv
-> LStmt RdrName body
-- rename LHS, and return its FVs
-- Warning: we will only need the FreeVars below in the case of a BindStmt,
-- so we don't bother to compute it accurately in the other cases
-> RnM [(LStmtLR Name RdrName body, FreeVars)]
rn_rec_stmt_lhs _ (L loc (BodyStmt body a b c))
= return [(L loc (BodyStmt body a b c), emptyFVs)]
rn_rec_stmt_lhs _ (L loc (LastStmt body noret a))
= return [(L loc (LastStmt body noret a), emptyFVs)]
rn_rec_stmt_lhs fix_env (L loc (BindStmt pat body a b t))
= do
-- should the ctxt be MDo instead?
(pat', fv_pat) <- rnBindPat (localRecNameMaker fix_env) pat
return [(L loc (BindStmt pat' body a b t),
fv_pat)]
rn_rec_stmt_lhs _ (L _ (LetStmt (L _ binds@(HsIPBinds _))))
= failWith (badIpBinds (text "an mdo expression") binds)
rn_rec_stmt_lhs fix_env (L loc (LetStmt (L l(HsValBinds binds))))
= do (_bound_names, binds') <- rnLocalValBindsLHS fix_env binds
return [(L loc (LetStmt (L l (HsValBinds binds'))),
-- Warning: this is bogus; see function invariant
emptyFVs
)]
-- XXX Do we need to do something with the return and mfix names?
rn_rec_stmt_lhs fix_env (L _ (RecStmt { recS_stmts = stmts })) -- Flatten Rec inside Rec
= rn_rec_stmts_lhs fix_env stmts
rn_rec_stmt_lhs _ stmt@(L _ (ParStmt {})) -- Syntactically illegal in mdo
= pprPanic "rn_rec_stmt" (ppr stmt)
rn_rec_stmt_lhs _ stmt@(L _ (TransStmt {})) -- Syntactically illegal in mdo
= pprPanic "rn_rec_stmt" (ppr stmt)
rn_rec_stmt_lhs _ stmt@(L _ (ApplicativeStmt {})) -- Shouldn't appear yet
= pprPanic "rn_rec_stmt" (ppr stmt)
rn_rec_stmt_lhs _ (L _ (LetStmt (L _ EmptyLocalBinds)))
= panic "rn_rec_stmt LetStmt EmptyLocalBinds"
rn_rec_stmts_lhs :: Outputable body => MiniFixityEnv
-> [LStmt RdrName body]
-> RnM [(LStmtLR Name RdrName body, FreeVars)]
rn_rec_stmts_lhs fix_env stmts
= do { ls <- concatMapM (rn_rec_stmt_lhs fix_env) stmts
; let boundNames = collectLStmtsBinders (map fst ls)
-- First do error checking: we need to check for dups here because we
-- don't bind all of the variables from the Stmt at once
-- with bindLocatedLocals.
; checkDupNames boundNames
; return ls }
-- right-hand-sides
rn_rec_stmt :: (Outputable (body RdrName)) =>
(Located (body RdrName) -> RnM (Located (body Name), FreeVars))
-> [Name]
-> (LStmtLR Name RdrName (Located (body RdrName)), FreeVars)
-> RnM [Segment (LStmt Name (Located (body Name)))]
-- Rename a Stmt that is inside a RecStmt (or mdo)
-- Assumes all binders are already in scope
-- Turns each stmt into a singleton Stmt
rn_rec_stmt rnBody _ (L loc (LastStmt body noret _), _)
= do { (body', fv_expr) <- rnBody body
; (ret_op, fvs1) <- lookupSyntaxName returnMName
; return [(emptyNameSet, fv_expr `plusFV` fvs1, emptyNameSet,
L loc (LastStmt body' noret ret_op))] }
rn_rec_stmt rnBody _ (L loc (BodyStmt body _ _ _), _)
= do { (body', fvs) <- rnBody body
; (then_op, fvs1) <- lookupSyntaxName thenMName
; return [(emptyNameSet, fvs `plusFV` fvs1, emptyNameSet,
L loc (BodyStmt body' then_op noSyntaxExpr placeHolderType))] }
rn_rec_stmt rnBody _ (L loc (BindStmt pat' body _ _ _), fv_pat)
= do { (body', fv_expr) <- rnBody body
; (bind_op, fvs1) <- lookupSyntaxName bindMName
; xMonadFailEnabled <- fmap (xopt LangExt.MonadFailDesugaring) getDynFlags
; let failFunction | xMonadFailEnabled = failMName
| otherwise = failMName_preMFP
; (fail_op, fvs2) <- lookupSyntaxName failFunction
; let bndrs = mkNameSet (collectPatBinders pat')
fvs = fv_expr `plusFV` fv_pat `plusFV` fvs1 `plusFV` fvs2
; return [(bndrs, fvs, bndrs `intersectNameSet` fvs,
L loc (BindStmt pat' body' bind_op fail_op PlaceHolder))] }
rn_rec_stmt _ _ (L _ (LetStmt (L _ binds@(HsIPBinds _))), _)
= failWith (badIpBinds (text "an mdo expression") binds)
rn_rec_stmt _ all_bndrs (L loc (LetStmt (L l (HsValBinds binds'))), _)
= do { (binds', du_binds) <- rnLocalValBindsRHS (mkNameSet all_bndrs) binds'
-- fixities and unused are handled above in rnRecStmtsAndThen
; let fvs = allUses du_binds
; return [(duDefs du_binds, fvs, emptyNameSet,
L loc (LetStmt (L l (HsValBinds binds'))))] }
-- no RecStmt case because they get flattened above when doing the LHSes
rn_rec_stmt _ _ stmt@(L _ (RecStmt {}), _)
= pprPanic "rn_rec_stmt: RecStmt" (ppr stmt)
rn_rec_stmt _ _ stmt@(L _ (ParStmt {}), _) -- Syntactically illegal in mdo
= pprPanic "rn_rec_stmt: ParStmt" (ppr stmt)
rn_rec_stmt _ _ stmt@(L _ (TransStmt {}), _) -- Syntactically illegal in mdo
= pprPanic "rn_rec_stmt: TransStmt" (ppr stmt)
rn_rec_stmt _ _ (L _ (LetStmt (L _ EmptyLocalBinds)), _)
= panic "rn_rec_stmt: LetStmt EmptyLocalBinds"
rn_rec_stmt _ _ stmt@(L _ (ApplicativeStmt {}), _)
= pprPanic "rn_rec_stmt: ApplicativeStmt" (ppr stmt)
rn_rec_stmts :: Outputable (body RdrName) =>
(Located (body RdrName) -> RnM (Located (body Name), FreeVars))
-> [Name]
-> [(LStmtLR Name RdrName (Located (body RdrName)), FreeVars)]
-> RnM [Segment (LStmt Name (Located (body Name)))]
rn_rec_stmts rnBody bndrs stmts
= do { segs_s <- mapM (rn_rec_stmt rnBody bndrs) stmts
; return (concat segs_s) }
---------------------------------------------
segmentRecStmts :: SrcSpan -> HsStmtContext Name
-> Stmt Name body
-> [Segment (LStmt Name body)] -> FreeVars
-> ([LStmt Name body], FreeVars)
segmentRecStmts loc ctxt empty_rec_stmt segs fvs_later
| null segs
= ([], fvs_later)
| MDoExpr <- ctxt
= segsToStmts empty_rec_stmt grouped_segs fvs_later
-- Step 4: Turn the segments into Stmts
-- Use RecStmt when and only when there are fwd refs
-- Also gather up the uses from the end towards the
-- start, so we can tell the RecStmt which things are
-- used 'after' the RecStmt
| otherwise
= ([ L loc $
empty_rec_stmt { recS_stmts = ss
, recS_later_ids = nameSetElemsStable
(defs `intersectNameSet` fvs_later)
, recS_rec_ids = nameSetElemsStable
(defs `intersectNameSet` uses) }]
-- See Note [Deterministic ApplicativeDo and RecursiveDo desugaring]
, uses `plusFV` fvs_later)
where
(defs_s, uses_s, _, ss) = unzip4 segs
defs = plusFVs defs_s
uses = plusFVs uses_s
-- Step 2: Fill in the fwd refs.
-- The segments are all singletons, but their fwd-ref
-- field mentions all the things used by the segment
-- that are bound after their use
segs_w_fwd_refs = addFwdRefs segs
-- Step 3: Group together the segments to make bigger segments
-- Invariant: in the result, no segment uses a variable
-- bound in a later segment
grouped_segs = glomSegments ctxt segs_w_fwd_refs
----------------------------
addFwdRefs :: [Segment a] -> [Segment a]
-- So far the segments only have forward refs *within* the Stmt
-- (which happens for bind: x <- ...x...)
-- This function adds the cross-seg fwd ref info
addFwdRefs segs
= fst (foldr mk_seg ([], emptyNameSet) segs)
where
mk_seg (defs, uses, fwds, stmts) (segs, later_defs)
= (new_seg : segs, all_defs)
where
new_seg = (defs, uses, new_fwds, stmts)
all_defs = later_defs `unionNameSet` defs
new_fwds = fwds `unionNameSet` (uses `intersectNameSet` later_defs)
-- Add the downstream fwd refs here
{-
Note [Segmenting mdo]
~~~~~~~~~~~~~~~~~~~~~
NB. June 7 2012: We only glom segments that appear in an explicit mdo;
and leave those found in "do rec"'s intact. See
http://ghc.haskell.org/trac/ghc/ticket/4148 for the discussion
leading to this design choice. Hence the test in segmentRecStmts.
Note [Glomming segments]
~~~~~~~~~~~~~~~~~~~~~~~~
Glomming the singleton segments of an mdo into minimal recursive groups.
At first I thought this was just strongly connected components, but
there's an important constraint: the order of the stmts must not change.
Consider
mdo { x <- ...y...
p <- z
y <- ...x...
q <- x
z <- y
r <- x }
Here, the first stmt mentions 'y', which is bound in the third.
But that means that the innocent second stmt (p <- z) gets caught
up in the recursion. And that in turn means that the binding for
'z' has to be included... and so on.
Start at the tail { r <- x }
Now add the next one { z <- y ; r <- x }
Now add one more { q <- x ; z <- y ; r <- x }
Now one more... but this time we have to group a bunch into rec
{ rec { y <- ...x... ; q <- x ; z <- y } ; r <- x }
Now one more, which we can add on without a rec
{ p <- z ;
rec { y <- ...x... ; q <- x ; z <- y } ;
r <- x }
Finally we add the last one; since it mentions y we have to
glom it together with the first two groups
{ rec { x <- ...y...; p <- z ; y <- ...x... ;
q <- x ; z <- y } ;
r <- x }
-}
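{-
A runnable user-level illustration (ordinary RecursiveDo code, not renamer
output) of the recursive groups built above: statements with mutual or
forward references are glommed into one 'rec' group, which mdo implements
via mfix.

    {-# LANGUAGE RecursiveDo #-}

    knot :: IO [Int]
    knot = mdo
      xs <- return (1 : ys)        -- forward reference to ys ...
      ys <- return (map (*2) xs)   -- ... so both stmts form one rec group
      return (take 5 xs)           -- yields [1,2,4,8,16]
-}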
glomSegments :: HsStmtContext Name
-> [Segment (LStmt Name body)]
-> [Segment [LStmt Name body]] -- Each segment has a non-empty list of Stmts
-- See Note [Glomming segments]
glomSegments _ [] = []
glomSegments ctxt ((defs,uses,fwds,stmt) : segs)
-- Actually stmts will always be a singleton
= (seg_defs, seg_uses, seg_fwds, seg_stmts) : others
where
segs' = glomSegments ctxt segs
(extras, others) = grab uses segs'
(ds, us, fs, ss) = unzip4 extras
seg_defs = plusFVs ds `plusFV` defs
seg_uses = plusFVs us `plusFV` uses
seg_fwds = plusFVs fs `plusFV` fwds
seg_stmts = stmt : concat ss
grab :: NameSet -- The client
-> [Segment a]
-> ([Segment a], -- Needed by the 'client'
[Segment a]) -- Not needed by the client
-- The result is simply a split of the input
grab uses dus
= (reverse yeses, reverse noes)
where
(noes, yeses) = span not_needed (reverse dus)
not_needed (defs,_,_,_) = not (intersectsNameSet defs uses)
----------------------------------------------------
segsToStmts :: Stmt Name body -- A RecStmt with the SyntaxOps filled in
-> [Segment [LStmt Name body]] -- Each Segment has a non-empty list of Stmts
-> FreeVars -- Free vars used 'later'
-> ([LStmt Name body], FreeVars)
segsToStmts _ [] fvs_later = ([], fvs_later)
segsToStmts empty_rec_stmt ((defs, uses, fwds, ss) : segs) fvs_later
= ASSERT( not (null ss) )
(new_stmt : later_stmts, later_uses `plusFV` uses)
where
(later_stmts, later_uses) = segsToStmts empty_rec_stmt segs fvs_later
new_stmt | non_rec = head ss
| otherwise = L (getLoc (head ss)) rec_stmt
rec_stmt = empty_rec_stmt { recS_stmts = ss
, recS_later_ids = nameSetElemsStable used_later
, recS_rec_ids = nameSetElemsStable fwds }
-- See Note [Deterministic ApplicativeDo and RecursiveDo desugaring]
non_rec = isSingleton ss && isEmptyNameSet fwds
used_later = defs `intersectNameSet` later_uses
-- The ones needed after the RecStmt
{-
************************************************************************
* *
ApplicativeDo
* *
************************************************************************
Note [ApplicativeDo]
~~~~~~~~~~~~~~~~~~~~
= Example =
For a sequence of statements
do
x <- A
y <- B x
z <- C
return (f x y z)
We want to transform this to
(\(x,y) z -> f x y z) <$> (do x <- A; y <- B x; return (x,y)) <*> C
It would be easy to notice that "y <- B x" and "z <- C" are
independent and do something like this:
do
x <- A
(y,z) <- (,) <$> B x <*> C
return (f x y z)
But this isn't enough! A and C were also independent, and this
transformation loses the ability to do A and C in parallel.
The algorithm works by first splitting the sequence of statements into
independent "segments", and a separate "tail" (the final statement). In
our example above, the segments would be
[ x <- A
, y <- B x ]
[ z <- C ]
and the tail is:
return (f x y z)
Then we take these segments and make an Applicative expression from them:
(\(x,y) z -> return (f x y z))
<$> do { x <- A; y <- B x; return (x,y) }
<*> C
Finally, we recursively apply the transformation to each segment, to
discover any nested parallelism.
= Syntax & spec =
expr ::= ... | do {stmt_1; ..; stmt_n} expr | ...
stmt ::= pat <- expr
| (arg_1 | ... | arg_n) -- applicative composition, n>=1
| ... -- other kinds of statement (e.g. let)
arg ::= pat <- expr
| {stmt_1; ..; stmt_n} {var_1..var_n}
(Note that in the actual implementation, the expr in a do statement is
represented by a LastStmt as the final stmt; this is just a
representational issue and may change later.)
== Transformation to introduce applicative stmts ==
ado {} tail = tail
ado {pat <- expr} {return expr'} = (mkArg(pat <- expr)); return expr'
ado {one} tail = one : tail
ado stmts tail
| n == 1 = ado before (ado after tail)
where (before,after) = split(stmts_1)
| n > 1 = (mkArg(stmts_1) | ... | mkArg(stmts_n)); tail
where
{stmts_1 .. stmts_n} = segments(stmts)
segments(stmts) =
-- divide stmts into segments with no interdependencies
mkArg({pat <- expr}) = (pat <- expr)
mkArg({stmt_1; ...; stmt_n}) =
{stmt_1; ...; stmt_n} {vars(stmt_1) u .. u vars(stmt_n)}
split({stmt_1; ..; stmt_n) =
({stmt_1; ..; stmt_i}, {stmt_i+1; ..; stmt_n})
-- 1 <= i <= n
-- i is a good place to insert a bind
== Desugaring for do ==
dsDo {} expr = expr
dsDo {pat <- rhs; stmts} expr =
rhs >>= \pat -> dsDo stmts expr
dsDo {(arg_1 | ... | arg_n)} (return expr) =
(\argpat (arg_1) .. argpat(arg_n) -> expr)
<$> argexpr(arg_1)
<*> ...
<*> argexpr(arg_n)
dsDo {(arg_1 | ... | arg_n); stmts} expr =
join (\argpat (arg_1) .. argpat(arg_n) -> dsDo stmts expr)
<$> argexpr(arg_1)
<*> ...
<*> argexpr(arg_n)
-}
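{-
A hand-written, runnable version of the example in the Note above
(hypothetical actions a, b, c and a combining function f; user code, not
renamer output), showing the target of the rewrite:

    a :: IO Int
    a = return 1

    b :: Int -> IO Int
    b x = return (x + 1)

    c :: IO Int
    c = return 10

    f :: Int -> Int -> Int -> Int
    f x y z = x + y + z

    -- ApplicativeDo rewrite of
    --   do { x <- a; y <- b x; z <- c; return (f x y z) }
    transformed :: IO Int
    transformed =
      (\(x, y) z -> f x y z)
        <$> (do { x <- a; y <- b x; return (x, y) })
        <*> c
-}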
-- | rearrange a list of statements using ApplicativeDoStmt. See
-- Note [ApplicativeDo].
rearrangeForApplicativeDo
:: HsStmtContext Name
-> [(ExprLStmt Name, FreeVars)]
-> RnM ([ExprLStmt Name], FreeVars)
rearrangeForApplicativeDo _ [] = return ([], emptyNameSet)
rearrangeForApplicativeDo _ [(one,_)] = return ([one], emptyNameSet)
rearrangeForApplicativeDo ctxt stmts0 = do
optimal_ado <- goptM Opt_OptimalApplicativeDo
let stmt_tree | optimal_ado = mkStmtTreeOptimal stmts
| otherwise = mkStmtTreeHeuristic stmts
stmtTreeToStmts ctxt stmt_tree [last] last_fvs
where
(stmts,(last,last_fvs)) = findLast stmts0
findLast [] = error "findLast"
findLast [last] = ([],last)
findLast (x:xs) = (x:rest,last) where (rest,last) = findLast xs
-- | A tree of statements using a mixture of applicative and bind constructs.
data StmtTree a
= StmtTreeOne a
| StmtTreeBind (StmtTree a) (StmtTree a)
| StmtTreeApplicative [StmtTree a]
flattenStmtTree :: StmtTree a -> [a]
flattenStmtTree t = go t []
where
go (StmtTreeOne a) as = a : as
go (StmtTreeBind l r) as = go l (go r as)
go (StmtTreeApplicative ts) as = foldr go as ts
type ExprStmtTree = StmtTree (ExprLStmt Name, FreeVars)
type Cost = Int
-- | Turn a sequence of statements into an ExprStmtTree using a
-- heuristic algorithm. /O(n^2)/
mkStmtTreeHeuristic :: [(ExprLStmt Name, FreeVars)] -> ExprStmtTree
mkStmtTreeHeuristic [one] = StmtTreeOne one
mkStmtTreeHeuristic stmts =
case segments stmts of
[one] -> split one
segs -> StmtTreeApplicative (map split segs)
where
split [one] = StmtTreeOne one
split stmts =
StmtTreeBind (mkStmtTreeHeuristic before) (mkStmtTreeHeuristic after)
where (before, after) = splitSegment stmts
-- | Turn a sequence of statements into an ExprStmtTree optimally,
-- using dynamic programming. /O(n^3)/
mkStmtTreeOptimal :: [(ExprLStmt Name, FreeVars)] -> ExprStmtTree
mkStmtTreeOptimal stmts =
ASSERT(not (null stmts)) -- the empty case is handled by the caller;
-- we don't support empty StmtTrees.
fst (arr ! (0,n))
where
n = length stmts - 1
stmt_arr = listArray (0,n) stmts
-- lazy cache of optimal trees for subsequences of the input
arr :: Array (Int,Int) (ExprStmtTree, Cost)
arr = array ((0,0),(n,n))
[ ((lo,hi), tree lo hi)
| lo <- [0..n]
, hi <- [lo..n] ]
-- compute the optimal tree for the sequence [lo..hi]
tree lo hi
| hi == lo = (StmtTreeOne (stmt_arr ! lo), 1)
| otherwise =
case segments [ stmt_arr ! i | i <- [lo..hi] ] of
[] -> panic "mkStmtTree"
[_one] -> split lo hi
segs -> (StmtTreeApplicative trees, maximum costs)
where
bounds = scanl (\(_,hi) a -> (hi+1, hi + length a)) (0,lo-1) segs
(trees,costs) = unzip (map (uncurry split) (tail bounds))
-- find the best place to split the segment [lo..hi]
split :: Int -> Int -> (ExprStmtTree, Cost)
split lo hi
| hi == lo = (StmtTreeOne (stmt_arr ! lo), 1)
| otherwise = (StmtTreeBind before after, c1+c2)
where
-- As per the paper, for a sequence s1...sn, we want to find
-- the split with the minimum cost, where the cost is the
-- sum of the cost of the left and right subsequences.
--
-- As an optimisation (also in the paper) if the cost of
-- s1..s(n-1) is different from the cost of s2..sn, we know
-- that the optimal solution is the lower of the two. Only
-- in the case that these two have the same cost do we need
-- to do the exhaustive search.
--
((before,c1),(after,c2))
| hi - lo == 1
= ((StmtTreeOne (stmt_arr ! lo), 1),
(StmtTreeOne (stmt_arr ! hi), 1))
| left_cost < right_cost
= ((left,left_cost), (StmtTreeOne (stmt_arr ! hi), 1))
| left_cost > right_cost
= ((StmtTreeOne (stmt_arr ! lo), 1), (right,right_cost))
| otherwise = minimumBy (comparing cost) alternatives
where
(left, left_cost) = arr ! (lo,hi-1)
(right, right_cost) = arr ! (lo+1,hi)
cost ((_,c1),(_,c2)) = c1 + c2
alternatives = [ (arr ! (lo,k), arr ! (k+1,hi))
| k <- [lo .. hi-1] ]
-- | Turn the ExprStmtTree back into a sequence of statements, using
-- ApplicativeStmt where necessary.
stmtTreeToStmts
:: HsStmtContext Name
-> ExprStmtTree
-> [ExprLStmt Name] -- ^ the "tail"
-> FreeVars -- ^ free variables of the tail
-> RnM ( [ExprLStmt Name] -- ( output statements,
, FreeVars ) -- , things we needed
-- If we have a single bind, and we can do it without a join, transform
-- to an ApplicativeStmt. This corresponds to the rule
-- dsBlock [pat <- rhs] (return expr) = expr <$> rhs
-- In the spec, but we do it here rather than in the desugarer,
-- because we need the typechecker to typecheck the <$> form rather than
-- the bind form, which would give rise to a Monad constraint.
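-- For instance (an illustration, not renamer output):
--   do { x <- rhs; return (f x) }   ~~>   f <$> rhs
-- which needs only a Functor constraint rather than Monad.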
stmtTreeToStmts ctxt (StmtTreeOne (L _ (BindStmt pat rhs _ _ _),_))
tail _tail_fvs
| isIrrefutableHsPat pat, (False,tail') <- needJoin tail
-- WARNING: isIrrefutableHsPat on (HsPat Name) doesn't have enough info
-- to know which types have only one constructor. So only
-- tuples come out as irrefutable; other single-constructor
-- types, and newtypes, will not. See the code for
-- isIrrefutableHsPat
= mkApplicativeStmt ctxt [ApplicativeArgOne pat rhs] False tail'
stmtTreeToStmts _ctxt (StmtTreeOne (s,_)) tail _tail_fvs =
return (s : tail, emptyNameSet)
stmtTreeToStmts ctxt (StmtTreeBind before after) tail tail_fvs = do
(stmts1, fvs1) <- stmtTreeToStmts ctxt after tail tail_fvs
let tail1_fvs = unionNameSets (tail_fvs : map snd (flattenStmtTree after))
(stmts2, fvs2) <- stmtTreeToStmts ctxt before stmts1 tail1_fvs
return (stmts2, fvs1 `plusFV` fvs2)
stmtTreeToStmts ctxt (StmtTreeApplicative trees) tail tail_fvs = do
pairs <- mapM (stmtTreeArg ctxt tail_fvs) trees
let (stmts', fvss) = unzip pairs
let (need_join, tail') = needJoin tail
(stmts, fvs) <- mkApplicativeStmt ctxt stmts' need_join tail'
return (stmts, unionNameSets (fvs:fvss))
where
stmtTreeArg _ctxt _tail_fvs (StmtTreeOne (L _ (BindStmt pat exp _ _ _), _)) =
return (ApplicativeArgOne pat exp, emptyFVs)
stmtTreeArg ctxt tail_fvs tree = do
let stmts = flattenStmtTree tree
pvarset = mkNameSet (concatMap (collectStmtBinders.unLoc.fst) stmts)
`intersectNameSet` tail_fvs
pvars = nameSetElemsStable pvarset
-- See Note [Deterministic ApplicativeDo and RecursiveDo desugaring]
pat = mkBigLHsVarPatTup pvars
tup = mkBigLHsVarTup pvars
(stmts',fvs2) <- stmtTreeToStmts ctxt tree [] pvarset
(mb_ret, fvs1) <-
if | L _ ApplicativeStmt{} <- last stmts' ->
return (unLoc tup, emptyNameSet)
| otherwise -> do
(ret,fvs) <- lookupStmtNamePoly ctxt returnMName
return (HsApp (noLoc ret) tup, fvs)
return ( ApplicativeArgMany stmts' mb_ret pat
, fvs1 `plusFV` fvs2)
-- | Divide a sequence of statements into segments, where no segment
-- depends on any variables defined by a statement in another segment.
segments
:: [(ExprLStmt Name, FreeVars)]
-> [[(ExprLStmt Name, FreeVars)]]
segments stmts = map fst $ merge $ reverse $ map reverse $ walk (reverse stmts)
where
allvars = mkNameSet (concatMap (collectStmtBinders.unLoc.fst) stmts)
-- We would rather not have a segment that just has LetStmts in
-- it, so combine those with an adjacent segment where possible.
merge [] = []
merge (seg : segs)
= case rest of
[] -> [(seg,all_lets)]
((s,s_lets):ss) | all_lets || s_lets
-> (seg ++ s, all_lets && s_lets) : ss
_otherwise -> (seg,all_lets) : rest
where
rest = merge segs
all_lets = all (isLetStmt . fst) seg
-- walk splits the statement sequence into segments, traversing
-- the sequence from the back to the front, and keeping track of
-- the set of free variables of the current segment. Whenever
-- this set of free variables is empty, we have a complete segment.
walk :: [(ExprLStmt Name, FreeVars)] -> [[(ExprLStmt Name, FreeVars)]]
walk [] = []
walk ((stmt,fvs) : stmts) = ((stmt,fvs) : seg) : walk rest
where (seg,rest) = chunter fvs' stmts
(_, fvs') = stmtRefs stmt fvs
chunter _ [] = ([], [])
chunter vars ((stmt,fvs) : rest)
| not (isEmptyNameSet vars)
= ((stmt,fvs) : chunk, rest')
where (chunk,rest') = chunter vars' rest
(pvars, evars) = stmtRefs stmt fvs
vars' = (vars `minusNameSet` pvars) `unionNameSet` evars
chunter _ rest = ([], rest)
stmtRefs stmt fvs
| isLetStmt stmt = (pvars, fvs' `minusNameSet` pvars)
| otherwise = (pvars, fvs')
where fvs' = fvs `intersectNameSet` allvars
pvars = mkNameSet (collectStmtBinders (unLoc stmt))
isLetStmt :: LStmt a b -> Bool
isLetStmt (L _ LetStmt{}) = True
isLetStmt _ = False
-- | Find a "good" place to insert a bind in an indivisible segment.
-- This is the only place where we use heuristics. The current
-- heuristic is to peel off the first group of independent statements
-- and put the bind after those.
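-- For example (illustrative): given the segment
--     [x <- a, y <- b, z <- f x y]
-- the binds  x <- a  and  y <- b  are independent of each other
-- (assuming a and b mention neither x nor y), so the split is made
-- just before  z <- f x y.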
splitSegment
:: [(ExprLStmt Name, FreeVars)]
-> ( [(ExprLStmt Name, FreeVars)]
, [(ExprLStmt Name, FreeVars)] )
splitSegment [one,two] = ([one],[two])
-- there is no choice when there are only two statements; this just saves
-- some work in a common case.
splitSegment stmts
| Just (lets,binds,rest) <- slurpIndependentStmts stmts
= if not (null lets)
then (lets, binds++rest)
else (lets++binds, rest)
| otherwise
= case stmts of
(x:xs) -> ([x],xs)
_other -> (stmts,[])
slurpIndependentStmts
:: [(LStmt Name (Located (body Name)), FreeVars)]
-> Maybe ( [(LStmt Name (Located (body Name)), FreeVars)] -- LetStmts
, [(LStmt Name (Located (body Name)), FreeVars)] -- BindStmts
, [(LStmt Name (Located (body Name)), FreeVars)] )
slurpIndependentStmts stmts = go [] [] emptyNameSet stmts
where
-- If we encounter a BindStmt that doesn't depend on a previous BindStmt
-- in this group, then add it to the group.
go lets indep bndrs ((L loc (BindStmt pat body bind_op fail_op ty), fvs) : rest)
| isEmptyNameSet (bndrs `intersectNameSet` fvs)
= go lets ((L loc (BindStmt pat body bind_op fail_op ty), fvs) : indep)
bndrs' rest
where bndrs' = bndrs `unionNameSet` mkNameSet (collectPatBinders pat)
-- If we encounter a LetStmt that doesn't depend on a BindStmt in this
-- group, then move it to the beginning, so that it doesn't interfere with
-- grouping more BindStmts.
-- TODO: perhaps we shouldn't do this if there are any strict bindings,
-- because we might be moving evaluation earlier.
go lets indep bndrs ((L loc (LetStmt binds), fvs) : rest)
| isEmptyNameSet (bndrs `intersectNameSet` fvs)
= go ((L loc (LetStmt binds), fvs) : lets) indep bndrs rest
go _ [] _ _ = Nothing
go _ [_] _ _ = Nothing
go lets indep _ stmts = Just (reverse lets, reverse indep, stmts)
-- | Build an ApplicativeStmt, and strip the "return" from the tail
-- if necessary.
--
-- For example, if we start with
-- do x <- E1; y <- E2; return (f x y)
-- then we get
-- do (E1[x] | E2[y]); f x y
--
-- the LastStmt in this case has the return removed, but we set the
-- flag on the LastStmt to indicate this, so that we can print out the
-- original statement correctly in error messages. It is easier to do
-- it this way rather than try to ignore the return later in both the
-- typechecker and the desugarer (I tried it that way first!).
mkApplicativeStmt
:: HsStmtContext Name
-> [ApplicativeArg Name Name] -- ^ The args
-> Bool -- ^ True <=> need a join
-> [ExprLStmt Name] -- ^ The body statements
-> RnM ([ExprLStmt Name], FreeVars)
mkApplicativeStmt ctxt args need_join body_stmts
= do { (fmap_op, fvs1) <- lookupStmtName ctxt fmapName
; (ap_op, fvs2) <- lookupStmtName ctxt apAName
; (mb_join, fvs3) <-
if need_join then
do { (join_op, fvs) <- lookupStmtName ctxt joinMName
; return (Just join_op, fvs) }
else
return (Nothing, emptyNameSet)
; let applicative_stmt = noLoc $ ApplicativeStmt
(zip (fmap_op : repeat ap_op) args)
mb_join
placeHolderType
; return ( applicative_stmt : body_stmts
, fvs1 `plusFV` fvs2 `plusFV` fvs3) }
-- | Given the statements following an ApplicativeStmt, determine whether
-- we need a @join@ or not, and remove the @return@ if necessary.
needJoin :: [ExprLStmt Name] -> (Bool, [ExprLStmt Name])
needJoin [] = (False, []) -- we're in an ApplicativeArg
needJoin [L loc (LastStmt e _ t)]
| Just arg <- isReturnApp e = (False, [L loc (LastStmt arg True t)])
needJoin stmts = (True, stmts)
-- | @Just e@, if the expression is @return e@ or @return $ e@,
-- otherwise @Nothing@
isReturnApp :: LHsExpr Name -> Maybe (LHsExpr Name)
isReturnApp (L _ (HsPar expr)) = isReturnApp expr
isReturnApp (L _ e) = case e of
OpApp l op _ r | is_return l, is_dollar op -> Just r
HsApp f arg | is_return f -> Just arg
_otherwise -> Nothing
where
is_var f (L _ (HsPar e)) = is_var f e
is_var f (L _ (HsAppType e _)) = is_var f e
is_var f (L _ (HsVar (L _ r))) = f r
-- TODO: I don't know how to get this right for rebindable syntax
is_var _ _ = False
is_return = is_var (\n -> n == returnMName || n == pureAName)
is_dollar = is_var (`hasKey` dollarIdKey)
{-
************************************************************************
* *
\subsubsection{Errors}
* *
************************************************************************
-}
checkEmptyStmts :: HsStmtContext Name -> RnM ()
-- We've seen an empty sequence of Stmts... is that ok?
checkEmptyStmts ctxt
= unless (okEmpty ctxt) (addErr (emptyErr ctxt))
okEmpty :: HsStmtContext a -> Bool
okEmpty (PatGuard {}) = True
okEmpty _ = False
emptyErr :: HsStmtContext Name -> SDoc
emptyErr (ParStmtCtxt {}) = text "Empty statement group in parallel comprehension"
emptyErr (TransStmtCtxt {}) = text "Empty statement group preceding 'group' or 'then'"
emptyErr ctxt = text "Empty" <+> pprStmtContext ctxt
----------------------
checkLastStmt :: Outputable (body RdrName) => HsStmtContext Name
-> LStmt RdrName (Located (body RdrName))
-> RnM (LStmt RdrName (Located (body RdrName)))
checkLastStmt ctxt lstmt@(L loc stmt)
= case ctxt of
ListComp -> check_comp
MonadComp -> check_comp
PArrComp -> check_comp
ArrowExpr -> check_do
DoExpr -> check_do
MDoExpr -> check_do
_ -> check_other
where
check_do -- Expect BodyStmt, and change it to LastStmt
= case stmt of
BodyStmt e _ _ _ -> return (L loc (mkLastStmt e))
LastStmt {} -> return lstmt -- "Deriving" clauses may generate a
-- LastStmt directly (unlike the parser)
_ -> do { addErr (hang last_error 2 (ppr stmt)); return lstmt }
last_error = (text "The last statement in" <+> pprAStmtContext ctxt
<+> text "must be an expression")
check_comp -- Expect LastStmt; this should be enforced by the parser!
= case stmt of
LastStmt {} -> return lstmt
_ -> pprPanic "checkLastStmt" (ppr lstmt)
check_other -- Behave just as if this wasn't the last stmt
= do { checkStmt ctxt lstmt; return lstmt }
-- Checking when a particular Stmt is ok
checkStmt :: HsStmtContext Name
-> LStmt RdrName (Located (body RdrName))
-> RnM ()
checkStmt ctxt (L _ stmt)
= do { dflags <- getDynFlags
; case okStmt dflags ctxt stmt of
IsValid -> return ()
NotValid extra -> addErr (msg $$ extra) }
where
msg = sep [ text "Unexpected" <+> pprStmtCat stmt <+> ptext (sLit "statement")
, text "in" <+> pprAStmtContext ctxt ]
pprStmtCat :: Stmt a body -> SDoc
pprStmtCat (TransStmt {}) = text "transform"
pprStmtCat (LastStmt {}) = text "return expression"
pprStmtCat (BodyStmt {}) = text "body"
pprStmtCat (BindStmt {}) = text "binding"
pprStmtCat (LetStmt {}) = text "let"
pprStmtCat (RecStmt {}) = text "rec"
pprStmtCat (ParStmt {}) = text "parallel"
pprStmtCat (ApplicativeStmt {}) = panic "pprStmtCat: ApplicativeStmt"
------------
emptyInvalid :: Validity -- Payload is the empty document
emptyInvalid = NotValid Outputable.empty
okStmt, okDoStmt, okCompStmt, okParStmt, okPArrStmt
:: DynFlags -> HsStmtContext Name
-> Stmt RdrName (Located (body RdrName)) -> Validity
-- Return Nothing if OK, (Just extra) if not ok
-- The "extra" is an SDoc that is appended to a generic error message
okStmt dflags ctxt stmt
= case ctxt of
PatGuard {} -> okPatGuardStmt stmt
ParStmtCtxt ctxt -> okParStmt dflags ctxt stmt
DoExpr -> okDoStmt dflags ctxt stmt
MDoExpr -> okDoStmt dflags ctxt stmt
ArrowExpr -> okDoStmt dflags ctxt stmt
GhciStmtCtxt -> okDoStmt dflags ctxt stmt
ListComp -> okCompStmt dflags ctxt stmt
MonadComp -> okCompStmt dflags ctxt stmt
PArrComp -> okPArrStmt dflags ctxt stmt
TransStmtCtxt ctxt -> okStmt dflags ctxt stmt
-------------
okPatGuardStmt :: Stmt RdrName (Located (body RdrName)) -> Validity
okPatGuardStmt stmt
= case stmt of
BodyStmt {} -> IsValid
BindStmt {} -> IsValid
LetStmt {} -> IsValid
_ -> emptyInvalid
-------------
okParStmt dflags ctxt stmt
= case stmt of
LetStmt (L _ (HsIPBinds {})) -> emptyInvalid
_ -> okStmt dflags ctxt stmt
----------------
okDoStmt dflags ctxt stmt
= case stmt of
RecStmt {}
| LangExt.RecursiveDo `xopt` dflags -> IsValid
| ArrowExpr <- ctxt -> IsValid -- Arrows allows 'rec'
| otherwise -> NotValid (text "Use RecursiveDo")
BindStmt {} -> IsValid
LetStmt {} -> IsValid
BodyStmt {} -> IsValid
_ -> emptyInvalid
----------------
okCompStmt dflags _ stmt
= case stmt of
BindStmt {} -> IsValid
LetStmt {} -> IsValid
BodyStmt {} -> IsValid
ParStmt {}
| LangExt.ParallelListComp `xopt` dflags -> IsValid
| otherwise -> NotValid (text "Use ParallelListComp")
TransStmt {}
| LangExt.TransformListComp `xopt` dflags -> IsValid
| otherwise -> NotValid (text "Use TransformListComp")
RecStmt {} -> emptyInvalid
LastStmt {} -> emptyInvalid -- Should not happen (dealt with by checkLastStmt)
ApplicativeStmt {} -> emptyInvalid
----------------
okPArrStmt dflags _ stmt
= case stmt of
BindStmt {} -> IsValid
LetStmt {} -> IsValid
BodyStmt {} -> IsValid
ParStmt {}
| LangExt.ParallelListComp `xopt` dflags -> IsValid
| otherwise -> NotValid (text "Use ParallelListComp")
TransStmt {} -> emptyInvalid
RecStmt {} -> emptyInvalid
LastStmt {} -> emptyInvalid -- Should not happen (dealt with by checkLastStmt)
ApplicativeStmt {} -> emptyInvalid
---------
checkTupleSection :: [LHsTupArg RdrName] -> RnM ()
checkTupleSection args
= do { tuple_section <- xoptM LangExt.TupleSections
; checkErr (all tupArgPresent args || tuple_section) msg }
where
msg = text "Illegal tuple section: use TupleSections"
---------
sectionErr :: HsExpr RdrName -> SDoc
sectionErr expr
= hang (text "A section must be enclosed in parentheses")
2 (text "thus:" <+> (parens (ppr expr)))
patSynErr :: HsExpr RdrName -> SDoc -> RnM (HsExpr Name, FreeVars)
patSynErr e explanation = do { addErr (sep [text "Pattern syntax in expression context:",
nest 4 (ppr e)] $$
explanation)
; return (EWildPat, emptyFVs) }
badIpBinds :: Outputable a => SDoc -> a -> SDoc
badIpBinds what binds
= hang (text "Implicit-parameter bindings illegal in" <+> what)
2 (ppr binds)
| sgillespie/ghc | compiler/rename/RnExpr.hs | bsd-3-clause | 79,391 | 107 | 22 | 22,755 | 19,984 | 10,616 | 9,368 | 1,164 | 13 |
-- Copyright (c) 2016-present, Facebook, Inc.
-- All rights reserved.
--
-- This source code is licensed under the BSD-style license found in the
-- LICENSE file in the root directory of this source tree.
module Duckling.Time.ES.Tests
( tests ) where
import Data.String
import Prelude
import Test.Tasty
import Duckling.Dimensions.Types
import Duckling.Testing.Asserts
import Duckling.Time.ES.Corpus
tests :: TestTree
tests = testGroup "ES Tests"
[ makeCorpusTest [Seal Time] corpus
, makeCorpusTest [Seal Time] latentCorpus
]
| facebookincubator/duckling | tests/Duckling/Time/ES/Tests.hs | bsd-3-clause | 541 | 0 | 9 | 87 | 93 | 57 | 36 | 12 | 1 |
{-# LANGUAGE TemplateHaskell #-}
-- | The @bounds@ element of an OSM file.
module Data.Geo.OSM.Bounds
(
Bounds
, bounds
) where
import Text.XML.HXT.Arrow.Pickle
import Data.Geo.OSM.Lens.MinlatL
import Data.Geo.OSM.Lens.MaxlatL
import Data.Geo.OSM.Lens.MinlonL
import Data.Geo.OSM.Lens.MaxlonL
import Data.Geo.OSM.Lens.OriginL
import Control.Lens.TH
-- | The @bounds@ element of an OSM file.
data Bounds = Bounds {
_boundsMinLat :: String,
_boundsMinLon :: String,
_boundsMaxLat :: String,
_boundsMaxLon :: String,
_boundsOrigin :: Maybe String
} deriving Eq
makeLenses ''Bounds
instance XmlPickler Bounds where
xpickle =
xpElem "bounds" (xpWrap (\(minlat', minlon', maxlat', maxlon', origin') -> bounds minlat' minlon' maxlat' maxlon' origin', \(Bounds minlat' minlon' maxlat' maxlon' origin') -> (minlat', minlon', maxlat', maxlon', origin'))
(xp5Tuple (xpAttr "minlat" xpText) (xpAttr "minlon" xpText) (xpAttr "maxlat" xpText) (xpAttr "maxlon" xpText) (xpOption (xpAttr "origin" xpText))))
instance Show Bounds where
show =
showPickled []
instance MinlatL Bounds where
minlatL = boundsMinLat
instance MinlonL Bounds where
minlonL = boundsMinLon
instance MaxlatL Bounds where
maxlatL = boundsMaxLat
instance MaxlonL Bounds where
maxlonL = boundsMaxLon
instance OriginL Bounds where
originL = boundsOrigin
-- | Constructs a bounds with a minlat, minlon, maxlat, maxlon and origin attributes.
bounds ::
String -- ^ The @minlat@ attribute.
-> String -- ^ The @minlon@ attribute.
-> String -- ^ The @maxlat@ attribute.
-> String -- ^ The @maxlon@ attribute.
-> Maybe String -- ^ The @origin@ attribute.
-> Bounds
bounds =
Bounds
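-- For example (illustrative):
--
-- > bounds "49.0" "8.3" "49.1" "8.5" Nothing
--
-- constructs a 'Bounds' value for that bounding box with no @origin@.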
| tonymorris/geo-osm | src/Data/Geo/OSM/Bounds.hs | bsd-3-clause | 1,715 | 0 | 14 | 310 | 405 | 237 | 168 | 46 | 1 |
-- | Test the custom typeclasses (LimitedInteger, Counter) that back Random123 generators.
module TestTypeclasses (test_typeclasses) where
import Data.Word
import Test.Framework (testGroup)
import Test.Framework.Providers.QuickCheck2 (testProperty)
import System.Random.Random123.Types
test_li1x32_bijection initial = initial == mapped initial where
types = initial :: Word32
mapped = liFromInteger . liToInteger
test_li2x32_bijection initial = initial == mapped initial where
types = initial :: Array2 Word32
mapped = liFromInteger . liToInteger
test_li4x32_bijection initial = initial == mapped initial where
types = initial :: Array4 Word32
mapped = liFromInteger . liToInteger
test_li1x64_bijection initial = initial == mapped initial where
types = initial :: Word64
mapped = liFromInteger . liToInteger
test_li2x64_bijection initial = initial == mapped initial where
types = initial :: Array2 Word64
mapped = liFromInteger . liToInteger
test_li4x64_bijection initial = initial == mapped initial where
types = initial :: Array4 Word64
mapped = liFromInteger . liToInteger
test_li1x32_bitsize li = liToInteger li < 2 ^ liBitSize li where
types = li :: Word32
test_li2x32_bitsize li = liToInteger li < 2 ^ liBitSize li where
types = li :: Array2 Word32
test_li4x32_bitsize li = liToInteger li < 2 ^ liBitSize li where
types = li :: Array4 Word32
test_li1x64_bitsize li = liToInteger li < 2 ^ liBitSize li where
types = li :: Word64
test_li2x64_bitsize li = liToInteger li < 2 ^ liBitSize li where
types = li :: Array2 Word64
test_li4x64_bitsize li = liToInteger li < 2 ^ liBitSize li where
types = li :: Array4 Word64
test_ctr2x32_skip ctr i = liFromInteger (liToInteger ctr + i) == skip i ctr where
types = (ctr :: Array2 Word32, i :: Integer)
test_ctr4x32_skip ctr i = liFromInteger (liToInteger ctr + i) == skip i ctr where
types = (ctr :: Array4 Word32, i :: Integer)
test_ctr2x64_skip ctr i = liFromInteger (liToInteger ctr + i) == skip i ctr where
types = (ctr :: Array2 Word64, i :: Integer)
test_ctr4x64_skip ctr i = liFromInteger (liToInteger ctr + i) == skip i ctr where
types = (ctr :: Array4 Word64, i :: Integer)
test_ctr2x32_increment ctr = increment ctr == skip 1 ctr where
types = ctr :: Array2 Word32
test_ctr4x32_increment ctr = increment ctr == skip 1 ctr where
types = ctr :: Array4 Word32
test_ctr2x64_increment ctr = increment ctr == skip 1 ctr where
types = ctr :: Array2 Word64
test_ctr4x64_increment ctr = increment ctr == skip 1 ctr where
types = ctr :: Array4 Word64
test_typeclasses = testGroup "Typeclasses" [
testGroup "LimitedInteger" [
testGroup "liFromInteger . liToInteger == id" [
testProperty "1x32" test_li1x32_bijection,
testProperty "2x32" test_li2x32_bijection,
testProperty "4x32" test_li4x32_bijection,
testProperty "1x64" test_li1x64_bijection,
testProperty "2x64" test_li2x64_bijection,
testProperty "4x64" test_li4x64_bijection
],
testGroup "liToInteger < 2 ^ liBitSize" [
testProperty "1x32" test_li1x32_bitsize,
testProperty "2x32" test_li2x32_bitsize,
testProperty "4x32" test_li4x32_bitsize,
testProperty "1x64" test_li1x64_bitsize,
testProperty "2x64" test_li2x64_bitsize,
testProperty "4x64" test_li4x64_bitsize
]
],
testGroup "Counter" [
testGroup "skip behaves like the default implementation" [
testProperty "2x32" test_ctr2x32_skip,
testProperty "4x32" test_ctr4x32_skip,
testProperty "2x64" test_ctr2x64_skip,
testProperty "4x64" test_ctr4x64_skip
],
testGroup "increment == skip 1" [
testProperty "2x32" test_ctr2x32_increment,
testProperty "4x32" test_ctr4x32_increment,
testProperty "2x64" test_ctr2x64_increment,
testProperty "4x64" test_ctr4x64_increment
]
]
]
| fjarri/haskell-random123 | test/TestTypeclasses.hs | bsd-3-clause | 4,086 | 0 | 11 | 979 | 1,030 | 526 | 504 | 78 | 1 |
{-# LANGUAGE QuasiQuotes #-}
import LiquidHaskell
import Language.Haskell.Liquid.Prelude
[lq| data Pair a b <p :: x0:a -> x1:b -> Prop> = P (x :: a) (y :: b<p x>) |]
data Pair a b = P a b
[lq| mkP :: forall a <q :: y0:a -> y1:a -> Prop>. x: a -> y: a<q x> -> Pair <q> a a |]
mkP :: a -> a -> Pair a a
mkP x y = P x y
incr :: Int -> Int
incr x = x + 1
baz x = mkP x (incr x)
chk :: Pair Int Int -> Bool
chk (P x y) = liquidAssertB (x < y)
prop = chk $ baz n
where n = choose 100
| spinda/liquidhaskell | tests/gsoc15/unknown/pos/deptupW.hs | bsd-3-clause | 493 | 0 | 7 | 138 | 174 | 93 | 81 | 15 | 1 |
{-# OPTIONS -fno-cse #-}
{-
From happstack-server
License BSD3
Author Happstack team, HAppS LLC
-}
module Happstack.Server.Internal.Clock
( getApproximateTime
, getApproximatePOSIXTime
, getApproximateUTCTime
, formatHttpDate
) where
import Control.Applicative ((<$>))
import Control.Concurrent
import Data.IORef
import Data.Time.Clock (UTCTime)
import Data.Time.Clock.POSIX (POSIXTime, getPOSIXTime, posixSecondsToUTCTime)
import Data.Time.Format (formatTime)
import System.IO.Unsafe
import System.Locale
import qualified Data.ByteString.Char8 as B
data DateCache = DateCache {
cachedPOSIXTime :: !(IORef POSIXTime)
, cachedHttpDate :: !(IORef B.ByteString)
}
formatHttpDate :: UTCTime -> String
formatHttpDate = formatTime defaultTimeLocale "%a, %d %b %Y %X GMT"
{-# INLINE formatHttpDate #-}
mkTime :: IO (POSIXTime, B.ByteString)
mkTime =
do now <- getPOSIXTime
return (now, B.pack $ formatHttpDate (posixSecondsToUTCTime now))
{-# NOINLINE clock #-}
clock :: DateCache
clock = unsafePerformIO $ do
(now, httpDate) <- mkTime
nowRef <- newIORef now
httpDateRef <- newIORef httpDate
let dateCache = (DateCache nowRef httpDateRef)
forkIO $ updater dateCache
return dateCache
updater :: DateCache -> IO ()
updater dateCache =
do threadDelay (10^(6 :: Int)) -- Every second
(now, httpDate) <- mkTime
writeIORef (cachedPOSIXTime dateCache) now
writeIORef (cachedHttpDate dateCache) httpDate
updater dateCache
getApproximateTime :: IO B.ByteString
getApproximateTime = readIORef (cachedHttpDate clock)
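-- Usage sketch (illustrative): a handler can stamp outgoing responses with
-- the cached value instead of formatting the clock on every request:
--
-- > do date <- getApproximateTime  -- e.g. "Thu, 08 May 2014 12:00:00 GMT"
-- >    ...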
getApproximatePOSIXTime :: IO POSIXTime
getApproximatePOSIXTime = readIORef (cachedPOSIXTime clock)
getApproximateUTCTime :: IO UTCTime
getApproximateUTCTime = posixSecondsToUTCTime <$> getApproximatePOSIXTime
| aslatter/happstack-wai | src/Happstack/Server/Internal/Clock.hs | bsd-3-clause | 1,835 | 0 | 12 | 326 | 451 | 243 | 208 | 51 | 1 |
{-# LANGUAGE OverloadedStrings #-}
-- | A bunch of type classes representing common values shared between multiple
-- CSS properties, like `Auto`, `Inherit`, `None`, `Normal` and several more.
--
-- All the common value type classes have an instance for the Value type,
-- making them easily derivable for custom value types.
module Clay.Common where
import Clay.Property
-------------------------------------------------------------------------------
class Auto a where auto :: a
class Inherit a where inherit :: a
class None a where none :: a
class Normal a where normal :: a
class Visible a where visible :: a
class Hidden a where hidden :: a
-- | The other type class is used to escape from the type safety introduced by
-- embedding CSS properties into the typed world of Clay. `Other` allows you to
-- cast any `Value` to a specific value type.
class Other a where other :: Value -> a
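-- For example (an illustrative sketch; relies on the 'IsString' instance
-- for 'Value' and OverloadedStrings):
--
-- > other "-webkit-some-experimental-value"
--
-- can be used wherever a property expects a value type that has an
-- 'Other' instance.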
instance Auto Value where auto = "auto"
instance Inherit Value where inherit = "inherit"
instance Normal Value where normal = "normal"
instance None Value where none = "none"
instance Visible Value where visible = "visible"
instance Hidden Value where hidden = "hidden"
instance Other Value where other = id
-------------------------------------------------------------------------------
-- | Common list of browser prefixes to make experimental properties work in
-- different browsers.
browsers :: Prefixed
browsers = Prefixed
[ ( "-webkit-", "" )
, ( "-moz-", "" )
, ( "-ms-", "" )
, ( "-o-", "" )
, ( "", "" )
]
| bergmark/clay | src/Clay/Common.hs | bsd-3-clause | 1,643 | 0 | 7 | 384 | 275 | 157 | 118 | 24 | 1 |
{-# LANGUAGE OverloadedStrings #-}
module Dalvik.AccessFlags
( AccessFlag(..)
, AccessFlags
, AccessType(..)
, flagCode
, codeFlag
, flagString
, flagsString
, hasAccessFlag
) where
import Data.Bits
import Data.List
import Data.Monoid
import Data.String
import Data.Word
import Text.Printf
import Prelude
type AccessFlags = Word32
data AccessFlag
= ACC_PUBLIC
| ACC_PRIVATE
| ACC_PROTECTED
| ACC_STATIC
| ACC_FINAL
| ACC_SYNCHRONIZED
| ACC_VOLATILE
| ACC_BRIDGE
| ACC_TRANSIENT
| ACC_VARARGS
| ACC_NATIVE
| ACC_INTERFACE
| ACC_ABSTRACT
| ACC_STRICT
| ACC_SYNTHETIC
| ACC_ANNOTATION
| ACC_ENUM
| ACC_CONSTRUCTOR
| ACC_DECLARED_SYNCHRONIZED
deriving (Eq, Enum)
flagCode :: AccessFlag -> Word32
flagCode ACC_PUBLIC = 0x1
flagCode ACC_PRIVATE = 0x2
flagCode ACC_PROTECTED = 0x4
flagCode ACC_STATIC = 0x8
flagCode ACC_FINAL = 0x10
flagCode ACC_SYNCHRONIZED = 0x20
flagCode ACC_VOLATILE = 0x40
flagCode ACC_BRIDGE = 0x40
flagCode ACC_TRANSIENT = 0x80
flagCode ACC_VARARGS = 0x80
flagCode ACC_NATIVE = 0x100
flagCode ACC_INTERFACE = 0x200
flagCode ACC_ABSTRACT = 0x400
flagCode ACC_STRICT = 0x800
flagCode ACC_SYNTHETIC = 0x1000
flagCode ACC_ANNOTATION = 0x2000
flagCode ACC_ENUM = 0x4000
flagCode ACC_CONSTRUCTOR = 0x10000
flagCode ACC_DECLARED_SYNCHRONIZED = 0x20000
data AccessType = AClass | AField | AMethod deriving Eq
{-
tyString :: (IsString s) => AccessType -> s
tyString AClass = "class"
tyString AField = "field"
tyString AMethod = "method"
-}
codeFlag :: AccessType -> Word32 -> AccessFlag
codeFlag _ 0x00001 = ACC_PUBLIC
codeFlag _ 0x00002 = ACC_PRIVATE
codeFlag _ 0x00004 = ACC_PROTECTED
codeFlag _ 0x00008 = ACC_STATIC
codeFlag _ 0x00010 = ACC_FINAL
codeFlag AMethod 0x00020 = ACC_SYNCHRONIZED
codeFlag AField 0x00040 = ACC_VOLATILE
codeFlag AMethod 0x00040 = ACC_BRIDGE
codeFlag AField 0x00080 = ACC_TRANSIENT
codeFlag AMethod 0x00080 = ACC_VARARGS
codeFlag AMethod 0x00100 = ACC_NATIVE
codeFlag AClass 0x00200 = ACC_INTERFACE
codeFlag AClass 0x00400 = ACC_ABSTRACT
codeFlag AMethod 0x00400 = ACC_ABSTRACT
codeFlag AMethod 0x00800 = ACC_STRICT
codeFlag _ 0x01000 = ACC_SYNTHETIC
codeFlag AClass 0x02000 = ACC_ANNOTATION
codeFlag AClass 0x04000 = ACC_ENUM
codeFlag AField 0x04000 = ACC_ENUM
codeFlag AMethod 0x10000 = ACC_CONSTRUCTOR
codeFlag AMethod 0x20000 = ACC_DECLARED_SYNCHRONIZED
codeFlag _ bits = error $ printf "(unknown access flag %08x)" bits
flagString :: (IsString s) => AccessFlag -> s
flagString ACC_PUBLIC = "PUBLIC"
flagString ACC_PRIVATE = "PRIVATE"
flagString ACC_PROTECTED = "PROTECTED"
flagString ACC_STATIC = "STATIC"
flagString ACC_FINAL = "FINAL"
flagString ACC_SYNCHRONIZED = "SYNCHRONIZED"
flagString ACC_VOLATILE = "VOLATILE"
flagString ACC_BRIDGE = "BRIDGE"
flagString ACC_TRANSIENT = "TRANSIENT"
flagString ACC_VARARGS = "VARARGS"
flagString ACC_NATIVE = "NATIVE"
flagString ACC_INTERFACE = "INTERFACE"
flagString ACC_ABSTRACT = "ABSTRACT"
flagString ACC_STRICT = "STRICT"
flagString ACC_SYNTHETIC = "SYNTHETIC"
flagString ACC_ANNOTATION = "ANNOTATION"
flagString ACC_ENUM = "ENUM"
flagString ACC_CONSTRUCTOR = "CONSTRUCTOR"
flagString ACC_DECLARED_SYNCHRONIZED = "DECLARED_SYNCHRONIZED"
flagsString :: (IsString s, Monoid s) => AccessType -> Word32 -> s
flagsString ty w = mconcat $ intersperse " "
[ flagString (codeFlag ty c) | c <- allCodes, w .&. c /= 0 ]
where allCodes = [ 0x00001, 0x00002, 0x00004, 0x00008
, 0x00010, 0x00020, 0x00040, 0x00080
, 0x00100, 0x00200, 0x00400, 0x00800
, 0x01000, 0x02000, 0x04000
, 0x10000, 0x20000
]
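-- For example (illustrative):
--
-- > flagsString AMethod (flagCode ACC_PUBLIC .|. flagCode ACC_STATIC)
--
-- yields "PUBLIC STATIC".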
andTrue :: Word32 -> Word32 -> Bool
andTrue w1 w2 = (w1 .&. w2) /= 0
hasAccessFlag :: AccessFlag -> Word32 -> Bool
hasAccessFlag f = andTrue (flagCode f)
| travitch/dalvik | src/Dalvik/AccessFlags.hs | bsd-3-clause | 3,856 | 0 | 10 | 724 | 945 | 509 | 436 | 115 | 1 |
{-# LANGUAGE Arrows #-}
module HN.MainPage where
import HN.Parsing
data SParse = Title String String | Info String deriving Show
doc' = readDocument opts "../newest.html"
nn = doc' /> hasName "html" /> hasName "body" /> hasName "center" /> hasName "table"
/> hasName "tr" /> hasName "td" /> hasName "table" /> hasName "tr" >>> title <+> info
where td = getChildren >>> hasName "td"
title = proc x ->
do a <- td >>> hasAttrValue "class" (=="title") /> hasName "a" -< x
link <- getAttrValue "href" -< a
title <- getChildren >>> getText -< a
returnA -< Title link title
info = proc x ->
do sub <- td >>> hasAttrValue "class" (=="subtext") -< x
user <- getChildren >>> aUser -< sub
returnA -< Info user
| llelf/hn | HN/MainPage.hs | bsd-3-clause | 879 | 2 | 15 | 305 | 269 | 130 | 139 | 17 | 1 |
-- | Some text utility functions.
module Text where
import Text.Regex.Posix
import Data.Char
nameOkay :: String -> Bool
nameOkay = (=~ "^[-'a-zA-Z]{3,16}$")
notEmpty :: String -> Bool
notEmpty = not . all isSpace
trim :: String -> String
trim = f . f where f = reverse . dropWhile isSpace
type MatchRes = (String, String, String, [String])
-- | Sanitizes and splits a command into verb and arguments.
-- For example: say hello there --> ("say", "hello there")
splitCommand :: String -> Maybe (String, String)
splitCommand s = case sanitizeInput s =~ "^([a-zA-Z]*[^a-zA-Z]?)" :: MatchRes of
(_, _, _, [""]) -> Nothing
(_, _, args, [verb]) -> Just (trim verb, args)
_ -> error "unexpected regex result"
sanitizeInput :: String -> String
sanitizeInput = filter charOk . trim
where charOk c = ' ' <= c && c <= '~'
replaceAll :: String -> String -> String -> String
replaceAll pat sub input = case input =~ pat :: MatchRes of
(before, _, "", _) -> before
(before, _, after, _) -> before ++ sub ++ replaceAll pat sub after
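-- For example (illustrative): replaceAll "o" "0" "foo bar" == "f00 bar"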
-- | listify ["a", "b", "c"] yields "a, b and c"
listify :: [String] -> String
listify [] = ""
listify [x] = x
listify [x, y] = x ++ " and " ++ y
listify (x:xs) = x ++ ", " ++ listify xs
equalsIgnoreCase :: String -> String -> Bool
equalsIgnoreCase xs ys = map toLower xs == map toLower ys
| MedeaMelana/custard | Text.hs | bsd-3-clause | 1,361 | 0 | 10 | 297 | 466 | 257 | 209 | 29 | 3 |
module Spec.Enum where
import Data.Int(Int32)
-- TODO: Parse the XML comments into here
data Enum = Enum { eName :: String
, eNamespace :: Maybe String
, eExpand :: Maybe String
, eComment :: Maybe String
, eElements :: [EnumElement]
}
deriving (Show)
data EnumElement = EnumElement { eeName :: String
, eeValue :: !Int32
, eeComment :: Maybe String
}
deriving (Show)
| oldmanmike/vulkan | generate/src/Spec/Enum.hs | bsd-3-clause | 554 | 0 | 9 | 249 | 113 | 67 | 46 | 14 | 0 |
module Crawl where
import Network.Browser( browse,
setCookies,
request)
import Network.HTTP( rspBody,
getRequest,
rspBody,
mkRequest,
RequestMethod(..),
simpleHTTP)
import Network.URI(parseURI)
import Data.Maybe(fromJust)
import Network.HTTP.Conduit(simpleHttp)
import Text.HTML.TagSoup(fromAttrib,
isTagOpenName,
parseTags,
Tag(..))
import Data.List(isPrefixOf,
isSuffixOf)
import Control.Arrow((&&&))
import Control.Monad(replicateM,
when)
import Data.Tuple(swap)
import Text.Regex.TDFA((=~))
import qualified Data.ByteString.Lazy as L
import Control.Concurrent.MVar(newEmptyMVar,
takeMVar,
putMVar)
import Control.Concurrent(forkIO,
killThread)
import System.FilePath(splitExtension)
import System.Posix.Files(fileExist)
import System.Process(system)
import System.Exit(ExitCode(..))
-- |Filter prev/next links.
prevNext :: [ Tag String ] -> (Maybe String, Maybe String)
prevNext = locatePrevNext (Nothing, Nothing)
where
locatePrevNext res [] = res
locatePrevNext (p,n) (tag@(TagOpen "a" _): TagText t: rest) | isSuffixOf "Prev" t = locatePrevNext (Just $ fromAttrib "href" tag, n) rest
| isPrefixOf "Next" t = locatePrevNext (p, Just $ fromAttrib "href" tag) rest
locatePrevNext res (t:ts) = locatePrevNext res ts
-- |Filter all paper links (hrefs starting with "paper.jsp") in a page.
papers :: [ Tag String ] -> [ String ]
papers = filter (isPrefixOf "paper.jsp").map (fromAttrib "href").filter (isTagOpenName "a")
-- | Collect all paper links on a page and the next/prev links, if they exist
collectPage :: [ Tag String ] -> ([ String ], (Maybe String,Maybe String))
collectPage = papers &&& prevNext
-- | Generate paper ID from paper link.
--
-- >>> paperId "paper.jsp?r=cs/9605101&qid=13871620873749a_nCnN_-288443966&qs=%22big+data%22+OR+cloud+OR+%22machine+learning%22+OR+%22artificial+intelligence%22+OR+%22distributed+computing%22"
-- "cs/9605101"
paperId :: String -> String
paperId link = case link =~ "paper.jsp.r=([^&]+)&.*" :: (String,String,String,[String]) of
(_,_,_,x:xs) -> x
_ -> ""
-- |Get first page of query.
firstPage = body "http://search.arxiv.org:8081/?query=%22big+data%22+OR+cloud+OR+%22machine+learning%22+OR+%22artificial+intelligence%22+OR+%22distributed+computing%22&qid=13871620873749a_nCnN_-288443966&startat=40"
-- |Follow all `Prev` links till beginning of search and collect paper links.
previousPages :: ([ String ], (Maybe String,Maybe String)) -> IO [ String ]
previousPages (uris, (Nothing, _)) = return uris
previousPages (uris, (Just p, _)) = do
b <- body $ "http://search.arxiv.org:8081/" ++ p
let (uris', np) = collectPage $ parseTags b
previousPages (uris ++ uris', np)
-- |Follow all `Next` links till end of search and collect paper links.
nextPages :: ([ String ], (Maybe String,Maybe String)) -> IO [ String ]
nextPages (uris, (_, Nothing)) = return uris
nextPages (uris, (_, Just p)) = do
b <- body $ "http://search.arxiv.org:8081/" ++ p
let (uris', np) = collectPage $ parseTags b
nextPages (uris ++ uris', np)
-- | Get body of request, ignoring all cookies but following redirections.
body uri = browse $ setCookies [] >> (request (getRequest uri)) >>= (return.rspBody.snd)
-- |Collect all paper ids
allPaperIds :: IO [ String ]
allPaperIds = do
f <- firstPage
let page = collectPage $ parseTags f
prev <- previousPages page
next <- nextPages ([], snd page)
return $ map paperId $ prev ++ next
-- |Construct a URI to a paper's PDF from an id
pdfURI id = fromJust $ parseURI $ "http://arxiv.org/pdf/" ++ id ++ "v1.pdf"
-- |Download single PDF
--
-- Throws an error if the download fails, returns the filename otherwise.
downloadPDF id = do
resp <- simpleHTTP (mkRequest GET $ pdfURI id)
let body = rspBody $ (\ (Right r) -> r) resp
let f = filename id
e <- fileExist f
when (not e) $ L.writeFile f body
return f
where
filename id = map replaceChars id ++ ".pdf"
replaceChars '/' = '_'
replaceChars c= c
-- |Run a thread pool for executing concurrent computations
runInThreadPool :: Int -- number of threads to run concurrently
-> [ String ] -- list of ids to download
-> IO [ String ]
runInThreadPool numThreads ids = do
inChan <- newEmptyMVar
outChan <- newEmptyMVar
tids <- replicateM numThreads (forkIO $ compute inChan outChan)
forkIO $ mapM_ (putMVar inChan) ids
files <- mapM (const $ takeMVar outChan) ids
mapM_ killThread tids
return files
where
compute inChan outChan = do
id <- takeMVar inChan
f <- downloadPDF id
putMVar outChan f
compute inChan outChan
-- |Download all PDFs of papers
downloadPDFs :: IO [ String ]
downloadPDFs = do
ids <- allPaperIds
runInThreadPool 10 ids
-- | Convert a PDF file to text.
--
-- This assumes `pdftotext` is available in the PATH.
convertToText :: String -- file path
-> IO String -- Converted file
convertToText pdf = do
let txt = fst (splitExtension pdf) ++ ".txt"
exit <- system $ "pdftotext " ++ pdf
case exit of
ExitSuccess -> return txt
ExitFailure _ -> return ""
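-- Usage sketch (illustrative): fetch every paper and convert it to text.
--
-- > main = downloadPDFs >>= mapM_ convertToText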
| RayRacine/hs-word2vec | Crawl.hs | bsd-3-clause | 5,561 | 0 | 15 | 1,425 | 1,496 | 791 | 705 | 108 | 3 |
{-# LANGUAGE TemplateHaskell #-}
module Main where
import Test.Framework.Providers.HUnit
import Test.Framework.Providers.QuickCheck2
import Test.Framework.TH
import Test.HUnit
--import Test.QuickCheck.Modifiers
main :: IO ()
main = $(defaultMainGenerator)
type Prop a = a -> Bool
prop_true :: Prop ()
prop_true _ = True
case_tcase :: Assertion
case_tcase = assertBool "Hi" True
| j-rock/4hire | tests/Tests.hs | mit | 436 | 0 | 6 | 105 | 94 | 56 | 38 | 13 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE NamedFieldPuns #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE TypeOperators #-}
module Qi.Program.Lambda.Lang where
import Control.Monad.Freer
import Data.Aeson (ToJSON)
import Protolude
import Qi.Config.AWS.S3 (S3Object)
import Qi.Config.Identifier (LambdaId)
import Qi.Core.Curry
data LambdaEff r where
Invoke
:: ToJSON a
=> LambdaId
-> a
-> LambdaEff ()
Update
:: LambdaId
-> S3Object
-> LambdaEff ()
invoke
:: (Member LambdaEff effs, ToJSON a)
=> LambdaId
-> a
-> Eff effs ()
invoke =
send .: Invoke
update
:: (Member LambdaEff effs)
=> LambdaId
-> S3Object
-> Eff effs ()
update =
send .: Update
| qmuli/qmuli | library/Qi/Program/Lambda/Lang.hs | mit | 973 | 0 | 9 | 294 | 199 | 116 | 83 | 40 | 1 |
module Code.Huffman.Boiler where
import Code.Type
import Code.Huffman.Partial
import Code.Huffman.Throw
import Code.Huffman.Config
import Inter.Quiz
import Inter.Types
import Autolib.Reader
import Autolib.ToDoc
make_fixed :: Make
make_fixed = direct Huffman
$ Frequency $ listToFM
$ zip [ 'a' .. ] [ 14 :: Int, 23, 94, 87, 15, 90, 18, 35, 71 ]
make_quiz :: Make
make_quiz = quiz Huffman Code.Huffman.Config.example
instance ( Show a, ToDoc a, Reader a, Reader [a], ToDoc [a], Ord a )
=> Generator Huffman ( Config a ) ( Frequency a ) where
generator _ conf key = throw conf
instance Project Huffman ( Frequency a ) ( Frequency a ) where
project _ f = f
instance OrderScore Huffman where
scoringOrder _ = None
| florianpilz/autotool | src/Code/Huffman/Boiler.hs | gpl-2.0 | 769 | 0 | 8 | 176 | 267 | 149 | 118 | -1 | -1 |
{-# LANGUAGE CPP #-}
{-# OPTIONS_GHC -fno-warn-missing-methods #-}
{-# OPTIONS_GHC -fno-warn-incomplete-patterns #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
module SubHask.Algebra.Array
( BArray (..)
, UArray (..)
, Unboxable
)
where
import Control.Monad
import Control.Monad.Primitive
import Data.Primitive as Prim
import qualified Data.Vector as V
import qualified Data.Vector.Unboxed as VU
import qualified Data.Vector.Unboxed.Mutable as VUM
import qualified Data.Vector.Generic as VG
import qualified Data.Vector.Generic.Mutable as VGM
import qualified Prelude as P
import SubHask.Algebra
import SubHask.Algebra.Parallel
import SubHask.Algebra.Vector
import SubHask.Category
import SubHask.Internal.Prelude
-------------------------------------------------------------------------------
-- boxed arrays
newtype BArray e = BArray (V.Vector e)
type instance Index (BArray e) = Int
type instance Logic (BArray e) = Logic e
type instance Scalar (BArray e) = Int
type instance Elem (BArray e) = e
type instance SetElem (BArray e) e' = BArray e'
----------------------------------------
-- mutability
mkMutable [t| forall e. BArray e |]
----------------------------------------
-- misc instances
instance Arbitrary e => Arbitrary (BArray e) where
arbitrary = fmap fromList arbitrary
instance NFData e => NFData (BArray e) where
rnf (BArray v) = rnf v
instance Show e => Show (BArray e) where
show (BArray v) = "BArray " ++ show (VG.toList v)
----------------------------------------
-- algebra
instance Semigroup (BArray e) where
(BArray v1)+(BArray v2) = fromList $ VG.toList v1 ++ VG.toList v2
instance Monoid (BArray e) where
zero = BArray VG.empty
instance Normed (BArray e) where
size (BArray v) = VG.length v
----------------------------------------
-- comparison
instance (ValidLogic e, Eq_ e) => Eq_ (BArray e) where
a1==a2 = toList a1==toList a2
instance (ClassicalLogic e, POrd_ e) => POrd_ (BArray e) where
inf a1 a2 = fromList $ inf (toList a1) (toList a2)
instance (ClassicalLogic e, POrd_ e) => MinBound_ (BArray e) where
minBound = zero
----------------------------------------
-- container
instance Constructible (BArray e) where
fromList1 x xs = BArray $ VG.fromList (x:xs)
instance (ValidLogic e, Eq_ e) => Container (BArray e) where
elem e arr = elem e $ toList arr
instance Foldable (BArray e) where
{-# INLINE toList #-}
toList (BArray v) = VG.toList v
{-# INLINE uncons #-}
uncons (BArray v) = if VG.null v
then Nothing
else Just (VG.head v, BArray $ VG.tail v)
{-# INLINE unsnoc #-}
unsnoc (BArray v) = if VG.null v
then Nothing
else Just (BArray $ VG.init v, VG.last v)
{-# INLINE foldMap #-}
foldMap f (BArray v) = VG.foldl' (\a e -> a + f e) zero v
{-# INLINE foldr #-}
{-# INLINE foldr' #-}
{-# INLINE foldr1 #-}
{-# INLINE foldr1' #-}
{-# INLINE foldl #-}
{-# INLINE foldl' #-}
{-# INLINE foldl1 #-}
{-# INLINE foldl1' #-}
foldr f x (BArray v) = VG.foldr f x v
foldr' f x (BArray v) = VG.foldr' f x v
foldr1 f (BArray v) = VG.foldr1 f v
foldr1' f (BArray v) = VG.foldr1' f v
foldl f x (BArray v) = VG.foldl f x v
foldl' f x (BArray v) = VG.foldl' f x v
foldl1 f (BArray v) = VG.foldl1 f v
foldl1' f (BArray v) = VG.foldl1' f v
instance ValidLogic e => Sliceable (BArray e) where
slice i n (BArray v) = BArray $ VG.slice i n v
instance ValidLogic e => IxContainer (BArray e) where
lookup i (BArray v) = v VG.!? i
(!) (BArray v) = VG.unsafeIndex v
indices (BArray v) = [0..VG.length v-1]
values (BArray v) = VG.toList v
imap f (BArray v) = BArray $ VG.imap f v
instance ValidLogic e => Partitionable (BArray e) where
partition n arr = go 0
where
go i = if i>=length arr
then []
else (slice i len arr):(go $ i+lenmax)
where
len = if i+lenmax >= length arr
then (length arr)-i
else lenmax
lenmax = length arr `quot` n
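-- For example (an illustrative sketch of the 'partition' above): for a
-- 5-element array,
--
-- > partition 2 (fromList [1..5] :: BArray Int)
--
-- yields chunks of sizes 2, 2 and 1, since lenmax = 5 `quot` 2 = 2.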
-------------------------------------------------------------------------------
-- unboxed arrays
data UArray e
= UArray !(VU.Vector e)
-- | UArray_Zero
type instance Index (UArray e) = Int
type instance Logic (UArray e) = Logic e
type instance Scalar (UArray e) = Int
type instance Elem (UArray e) = e
type instance SetElem (UArray e) e' = UArray e'
----------------------------------------
-- mutability
mkMutable [t| forall e. UArray e |]
----------------------------------------
-- misc instances
instance (Unboxable e, Arbitrary e) => Arbitrary (UArray e) where
arbitrary = fmap fromList arbitrary
instance (NFData e) => NFData (UArray e) where
rnf (UArray v) = rnf v
-- rnf UArray_Zero = ()
instance (Unboxable e, Show e) => Show (UArray e) where
show arr = "UArray " ++ show (toList arr)
----------------------------------------
-- algebra
instance Unboxable e => Semigroup (UArray e) where
-- UArray_Zero + a = a
-- a + UArray_Zero = a
(UArray v1) + (UArray v2) = fromList $ VG.toList v1 ++ VG.toList v2
instance Unboxable e => Monoid (UArray e) where
zero = UArray VG.empty
-- zero = UArray_Zero
instance Unbox e => Normed (UArray e) where
size (UArray v) = VG.length v
-- size UArray_Zero = 0
----------------------------------------
-- comparison
instance (Unboxable e, Eq_ e) => Eq_ (UArray e) where
a1==a2 = toList a1==toList a2
instance (Unboxable e, POrd_ e) => POrd_ (UArray e) where
inf a1 a2 = fromList $ inf (toList a1) (toList a2)
instance (Unboxable e, POrd_ e) => MinBound_ (UArray e) where
minBound = zero
----------------------------------------
-- container
type Unboxable e = (Constructible (UArray e), Eq e, Unbox e)
#define mkConstructible(e) \
instance Constructible (UArray e) where\
{ fromList1 x xs = UArray $ VG.fromList (x:xs) } ;
mkConstructible(Int)
mkConstructible(Float)
mkConstructible(Double)
mkConstructible(Char)
mkConstructible(Bool)
instance
( ClassicalLogic r
, Eq_ r
, Unbox r
, Prim r
, FreeModule r
, IsScalar r
, ValidUVector s r
) => Constructible (UArray (UVector (s::Symbol) r))
where
{-# INLINABLE fromList1 #-}
fromList1 x xs = fromList1N (length $ x:xs) x xs
{-# INLINABLE fromList1N #-}
fromList1N n x xs = unsafeInlineIO $ do
marr <- safeNewByteArray (n*size'*rbytes) 16
let mv = UArray_MUVector marr 0 n size'
let go [] (-1) = return ()
go (x':xs') i = do
VGM.unsafeWrite mv i x'
go xs' (i-1)
go (P.reverse $ x:xs) (n-1)
v <- VG.basicUnsafeFreeze mv
return $ UArray v
where
rbytes=Prim.sizeOf (undefined::r)
size'=roundUpToNearest 4 $ dim x
instance Unboxable e => Container (UArray e) where
elem e (UArray v) = elem e $ VG.toList v
instance Unboxable e => Foldable (UArray e) where
{-# INLINE toList #-}
toList (UArray v) = VG.toList v
{-# INLINE uncons #-}
uncons (UArray v) = if VG.null v
then Nothing
else Just (VG.head v, UArray $ VG.tail v)
{-# INLINE unsnoc #-}
unsnoc (UArray v) = if VG.null v
then Nothing
else Just (UArray $ VG.init v, VG.last v)
{-# INLINE foldMap #-}
foldMap f (UArray v) = VG.foldl' (\a e -> a + f e) zero v
{-# INLINE foldr #-}
{-# INLINE foldr' #-}
{-# INLINE foldr1 #-}
{-# INLINE foldr1' #-}
{-# INLINE foldl #-}
{-# INLINE foldl' #-}
{-# INLINE foldl1 #-}
{-# INLINE foldl1' #-}
foldr f x (UArray v) = VG.foldr f x v
foldr' f x (UArray v) = VG.foldr' f x v
foldr1 f (UArray v) = VG.foldr1 f v
foldr1' f (UArray v) = VG.foldr1' f v
foldl f x (UArray v) = VG.foldl f x v
foldl' f x (UArray v) = VG.foldl' f x v
foldl1 f (UArray v) = VG.foldl1 f v
foldl1' f (UArray v) = VG.foldl1' f v
instance Unboxable e => Sliceable (UArray e) where
slice i n (UArray v) = UArray $ VG.slice i n v
instance Unboxable e => IxContainer (UArray e) where
type ValidElem (UArray e) e = Unboxable e
lookup i (UArray v) = v VG.!? i
(!) (UArray v) = VG.unsafeIndex v
indices (UArray v) = [0..VG.length v-1]
values (UArray v) = VG.toList v
instance Unboxable e => Partitionable (UArray e) where
partition n arr = go 0
where
go i = if i>=length arr
then []
else (slice i len arr):(go $ i+lenmax)
where
len = if i+lenmax >= length arr
then (length arr)-i
else lenmax
lenmax = length arr `quot` n
-------------------------------------------------------------------------------
-- UVector
instance
( IsScalar elem
, ClassicalLogic elem
, Unbox elem
, Prim elem
) => Unbox (UVector (n::Symbol) elem)
data instance VU.Vector (UVector (n::Symbol) elem) = UArray_UVector
{-#UNPACK#-}!ByteArray
{-#UNPACK#-}!Int -- offset
{-#UNPACK#-}!Int -- length of container
{-#UNPACK#-}!Int -- length of element vectors
instance
( IsScalar elem
, Unbox elem
, Prim elem
) => VG.Vector VU.Vector (UVector (n::Symbol) elem)
where
{-# INLINABLE basicLength #-}
basicLength (UArray_UVector _ _ n _) = n
{-# INLINABLE basicUnsafeSlice #-}
basicUnsafeSlice i len' (UArray_UVector arr off _ size') = UArray_UVector arr (off+i*size') len' size'
{-# INLINABLE basicUnsafeFreeze #-}
basicUnsafeFreeze (UArray_MUVector marr off n size') = do
arr <- unsafeFreezeByteArray marr
return $ UArray_UVector arr off n size'
{-# INLINABLE basicUnsafeThaw #-}
basicUnsafeThaw (UArray_UVector arr off n size')= do
marr <- unsafeThawByteArray arr
return $ UArray_MUVector marr off n size'
{-# INLINABLE basicUnsafeIndexM #-}
basicUnsafeIndexM (UArray_UVector arr off _ size') i =
return $ UVector_Dynamic arr (off+i*size') size'
data instance VUM.MVector s (UVector (n::Symbol) elem) = UArray_MUVector
{-#UNPACK#-}!(MutableByteArray s)
{-#UNPACK#-}!Int -- offset in number of elem
{-#UNPACK#-}!Int -- length of container
{-#UNPACK#-}!Int -- length of element vectors
instance
( ClassicalLogic elem
, IsScalar elem
, Unbox elem
, Prim elem
) => VGM.MVector VUM.MVector (UVector (n::Symbol) elem)
where
{-# INLINABLE basicLength #-}
basicLength (UArray_MUVector _ _ n _) = n
{-# INLINABLE basicUnsafeSlice #-}
basicUnsafeSlice i lenM' (UArray_MUVector marr off _ size')
= UArray_MUVector marr (off+i*size') lenM' size'
{-# INLINABLE basicOverlaps #-}
basicOverlaps (UArray_MUVector marr1 _ _ _) (UArray_MUVector marr2 _ _ _)
= sameMutableByteArray marr1 marr2
{-# INLINABLE basicUnsafeNew #-}
basicUnsafeNew 0 = do
marr <- newByteArray 0
return $ UArray_MUVector marr 0 0 0
basicUnsafeNew _ = error "basicUnsafeNew not supported on UArray_MUVector with nonzero size"
{-# INLINABLE basicUnsafeRead #-}
basicUnsafeRead (UArray_MUVector marr off _ size') i = do
let b=Prim.sizeOf (undefined::elem)
marr' <- safeNewByteArray (size'*b) 16
copyMutableByteArray marr' 0 marr ((off+i*size')*b) (size'*b)
arr <- unsafeFreezeByteArray marr'
return $ UVector_Dynamic arr 0 size'
{-# INLINABLE basicUnsafeWrite #-}
basicUnsafeWrite (UArray_MUVector marr1 off1 _ size1) loc (UVector_Dynamic arr2 off2 _) =
copyByteArray marr1 ((off1+size1*loc)*b) arr2 (off2*b) (size1*b)
where
b=Prim.sizeOf (undefined::elem)
{-# INLINABLE basicUnsafeCopy #-}
basicUnsafeCopy (UArray_MUVector marr1 off1 _ size1) (UArray_MUVector marr2 off2 n2 _) =
copyMutableByteArray marr1 (off1*b) marr2 (off2*b) (n2*b)
where
b = size1*Prim.sizeOf (undefined::elem)
{-# INLINABLE basicUnsafeMove #-}
basicUnsafeMove (UArray_MUVector marr1 off1 _ size1) (UArray_MUVector marr2 off2 n2 _) =
moveByteArray marr1 (off1*b) marr2 (off2*b) (n2*b)
where
b = size1*Prim.sizeOf (undefined::elem)
--------------------------------------------------------------------------------
-- Labeled'
instance
( Unbox y
, Prim y
, ClassicalLogic a
, IsScalar a
, Unbox a
, Prim a
) => Unbox (Labeled' (UVector (s::Symbol) a) y)
data instance VUM.MVector s (Labeled' (UVector (n::Symbol) elem) y) = UArray_Labeled'_MUVector
{-#UNPACK#-}!(MutableByteArray s)
{-#UNPACK#-}!Int -- offset in number of elem
{-#UNPACK#-}!Int -- length of container
{-#UNPACK#-}!Int -- length of element vectors
instance
( ClassicalLogic elem
, IsScalar elem
, Unbox elem
, Prim elem
, Prim y
) => VGM.MVector VUM.MVector (Labeled' (UVector (n::Symbol) elem) y)
where
{-# INLINABLE basicLength #-}
basicLength (UArray_Labeled'_MUVector _ _ n _) = n
{-# INLINABLE basicUnsafeSlice #-}
basicUnsafeSlice i lenM' (UArray_Labeled'_MUVector marr off _ size')
= UArray_Labeled'_MUVector marr (off+i*(size'+ysize)) lenM' size'
where
ysize=roundUpToNearest 4 $ Prim.sizeOf (undefined::y) `quot` Prim.sizeOf (undefined::elem)
{-# INLINABLE basicOverlaps #-}
basicOverlaps (UArray_Labeled'_MUVector marr1 _ _ _) (UArray_Labeled'_MUVector marr2 _ _ _)
= sameMutableByteArray marr1 marr2
{-# INLINABLE basicUnsafeNew #-}
basicUnsafeNew 0 = do
marr <- newByteArray 0
return $ UArray_Labeled'_MUVector marr 0 0 0
basicUnsafeNew _ = error "basicUnsafeNew not supported on UArray_MUVector with nonzero size"
{-# INLINABLE basicUnsafeRead #-}
basicUnsafeRead (UArray_Labeled'_MUVector marr off _ size') i = do
marr' <- safeNewByteArray (size'*b) 16
copyMutableByteArray marr' 0 marr ((off+i*(size'+ysize))*b) (size'*b)
arr <- unsafeFreezeByteArray marr'
let x=UVector_Dynamic arr 0 size'
y <- readByteArray marr $ (off+i*(size'+ysize)+size') `quot` ysizereal
return $ Labeled' x y
where
b=Prim.sizeOf (undefined::elem)
ysizereal = Prim.sizeOf (undefined::y) `quot` Prim.sizeOf (undefined::elem)
ysize=roundUpToNearest 4 $ ysizereal
{-# INLINABLE basicUnsafeWrite #-}
basicUnsafeWrite
(UArray_Labeled'_MUVector marr1 off1 _ size')
i
(Labeled' (UVector_Dynamic arr2 off2 _) y)
= do
copyByteArray marr1 ((off1+i*(size'+ysize))*b) arr2 (off2*b) (size'*b)
writeByteArray marr1 ((off1+i*(size'+ysize)+size') `quot` ysizereal) y
where
b=Prim.sizeOf (undefined::elem)
ysizereal = Prim.sizeOf (undefined::y) `quot` Prim.sizeOf (undefined::elem)
ysize=roundUpToNearest 4 $ ysizereal
{-# INLINABLE basicUnsafeCopy #-}
basicUnsafeCopy
(UArray_Labeled'_MUVector marr1 off1 _ size1)
(UArray_Labeled'_MUVector marr2 off2 n2 _)
= copyMutableByteArray marr1 (off1*b) marr2 (off2*b) (n2*b)
where
b = (size1+ysize)*Prim.sizeOf (undefined::elem)
ysize=roundUpToNearest 4 $ Prim.sizeOf (undefined::y) `quot` Prim.sizeOf (undefined::elem)
{-# INLINABLE basicUnsafeMove #-}
basicUnsafeMove
(UArray_Labeled'_MUVector marr1 off1 _ size1)
(UArray_Labeled'_MUVector marr2 off2 n2 _)
= moveByteArray marr1 (off1*b) marr2 (off2*b) (n2*b)
where
b = (size1+ysize)*Prim.sizeOf (undefined::elem)
ysize=roundUpToNearest 4 $ Prim.sizeOf (undefined::y) `quot` Prim.sizeOf (undefined::elem)
data instance VU.Vector (Labeled' (UVector (n::Symbol) elem) y) = UArray_Labeled'_UVector
{-#UNPACK#-}!ByteArray
{-#UNPACK#-}!Int -- offset
{-#UNPACK#-}!Int -- length of container
{-#UNPACK#-}!Int -- length of element vectors
instance
( IsScalar elem
, Unbox elem
, Prim elem
, Prim y
) => VG.Vector VU.Vector (Labeled' (UVector (n::Symbol) elem) y)
where
{-# INLINABLE basicLength #-}
basicLength (UArray_Labeled'_UVector _ _ n _) = n
{-# INLINABLE basicUnsafeSlice #-}
basicUnsafeSlice i len' (UArray_Labeled'_UVector arr off _ size')
= UArray_Labeled'_UVector arr (off+i*(size'+ysize)) len' size'
where
ysize=roundUpToNearest 4 $ Prim.sizeOf (undefined::y) `quot` Prim.sizeOf (undefined::elem)
{-# INLINABLE basicUnsafeFreeze #-}
basicUnsafeFreeze (UArray_Labeled'_MUVector marr off n size') = do
arr <- unsafeFreezeByteArray marr
return $ UArray_Labeled'_UVector arr off n size'
{-# INLINABLE basicUnsafeThaw #-}
basicUnsafeThaw (UArray_Labeled'_UVector arr off n size')= do
marr <- unsafeThawByteArray arr
return $ UArray_Labeled'_MUVector marr off n size'
{-# INLINE basicUnsafeIndexM #-}
basicUnsafeIndexM (UArray_Labeled'_UVector arr off _ size') i =
-- trace ("off'="+show off') $
return $ Labeled' x y
where
off' = off+i*(size'+ysize)
x = UVector_Dynamic arr off' size'
y = indexByteArray arr $ (off'+size') `quot` ysizereal
ysizereal = Prim.sizeOf (undefined::y) `quot` Prim.sizeOf (undefined::elem)
ysize=roundUpToNearest 4 $ ysizereal
instance
( ClassicalLogic r
, Eq_ r
, Unbox r
, Prim r
, FreeModule r
, IsScalar r
, Prim y
, Unbox y
, ValidUVector s r
) => Constructible (UArray (Labeled' (UVector (s::Symbol) r) y))
where
{-# INLINABLE fromList1 #-}
fromList1 x xs = fromList1N (length $ x:xs) x xs
{-# INLINABLE fromList1N #-}
fromList1N n x xs = unsafeInlineIO $ do
let arrlen = n*(xsize+ysize)
marr <- safeNewByteArray (arrlen*rbytes) 16
setByteArray marr 0 arrlen (0::r)
let mv = UArray_Labeled'_MUVector marr 0 n xsize
let go [] (-1) = return ()
go (x':xs') i = do
VGM.unsafeWrite mv i x'
go xs' (i-1)
go (P.reverse $ x:xs) (n-1)
v <- VG.basicUnsafeFreeze mv
return $ UArray v
where
rbytes=Prim.sizeOf (undefined::r)
xsize=roundUpToNearest 4 $ dim $ xLabeled' x
ysize=roundUpToNearest 4 $ Prim.sizeOf (undefined::y) `quot` rbytes
| Drezil/subhask | src/SubHask/Algebra/Array.hs | bsd-3-clause | 18,638 | 72 | 19 | 4,856 | 5,781 | 3,038 | 2,743 | -1 | -1 |
import Control.Blub
import Control.Monad
f :: Int -> Int
f = (+ 3)
| jystic/hsimport | tests/goldenFiles/ModuleTest11.hs | bsd-3-clause | 67 | 0 | 5 | 13 | 29 | 17 | 12 | 4 | 1 |
{-# LANGUAGE Haskell2010 #-}
{-# LINE 1 "Control/Concurrent/STM/TSem.hs" #-}
-----------------------------------------------------------------------------
-- |
-- Module : Control.Concurrent.STM.TSem
-- Copyright : (c) The University of Glasgow 2012
-- License : BSD-style (see the file libraries/base/LICENSE)
--
-- Maintainer : [email protected]
-- Stability : experimental
-- Portability : non-portable (requires STM)
--
-- 'TSem': transactional semaphores.
--
-- @since 2.4.2
-----------------------------------------------------------------------------
{-# LANGUAGE DeriveDataTypeable #-}
module Control.Concurrent.STM.TSem (
TSem, newTSem, waitTSem, signalTSem
) where
import Control.Concurrent.STM
import Control.Monad
import Data.Typeable
-- | 'TSem' is a transactional semaphore. It holds a certain number
-- of units, and units may be acquired or released by 'waitTSem' and
-- 'signalTSem' respectively. When the 'TSem' is empty, 'waitTSem'
-- blocks.
--
-- Note that 'TSem' has no concept of fairness, and there is no
-- guarantee that threads blocked in `waitTSem` will be unblocked in
-- the same order; in fact they will all be unblocked at the same time
-- and will fight over the 'TSem'. Hence 'TSem' is not suitable if
-- you expect there to be a high number of threads contending for the
-- resource. However, like other STM abstractions, 'TSem' is
-- composable.
--
-- @since 2.4.2
newtype TSem = TSem (TVar Int)
deriving (Eq, Typeable)
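-- A minimal usage sketch (illustrative):
--
-- > do sem <- atomically (newTSem 2)  -- two units available
-- >    atomically (waitTSem sem)      -- acquire a unit, retrying if empty
-- >    -- ... critical section ...
-- >    atomically (signalTSem sem)    -- give the unit back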
newTSem :: Int -> STM TSem
newTSem i = fmap TSem (newTVar i)
waitTSem :: TSem -> STM ()
waitTSem (TSem t) = do
i <- readTVar t
when (i <= 0) retry
writeTVar t $! (i-1)
signalTSem :: TSem -> STM ()
signalTSem (TSem t) = do
i <- readTVar t
writeTVar t $! i+1
| phischu/fragnix | tests/packages/scotty/Control.Concurrent.STM.TSem.hs | bsd-3-clause | 1,769 | 0 | 9 | 312 | 247 | 142 | 105 | 21 | 1 |
{-# LANGUAGE Haskell98 #-}
{-# LINE 1 "Network/Wai/Handler/Warp/Date.hs" #-}
{-# LANGUAGE CPP #-}
module Network.Wai.Handler.Warp.Date (
withDateCache
, GMTDate
) where
import Control.AutoUpdate (defaultUpdateSettings, updateAction, mkAutoUpdate)
import Data.ByteString.Char8
import Network.HTTP.Date
import System.Posix (epochTime)
-- | The type of the Date header value.
type GMTDate = ByteString
-- | Creating 'DateCache' and executing the action.
withDateCache :: (IO GMTDate -> IO a) -> IO a
withDateCache action = initialize >>= action
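-- A usage sketch (illustrative): the action receives a cheap accessor for
-- the current HTTP-formatted date:
--
-- > withDateCache $ \getDate -> do
-- >   date <- getDate  -- e.g. "Thu, 08 May 2014 12:00:00 GMT"
-- >   ...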
initialize :: IO (IO GMTDate)
initialize = mkAutoUpdate defaultUpdateSettings {
updateAction = formatHTTPDate <$> getCurrentHTTPDate
}
getCurrentHTTPDate :: IO HTTPDate
getCurrentHTTPDate = epochTimeToHTTPDate <$> epochTime
| phischu/fragnix | tests/packages/scotty/Network.Wai.Handler.Warp.Date.hs | bsd-3-clause | 953 | 0 | 8 | 282 | 154 | 90 | 64 | 18 | 1 |
{-# LANGUAGE CPP #-}
{-# LANGUAGE TypeSynonymInstances #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE DeriveDataTypeable #-}
-- | Provides functionality for runtime Hamlet templates. Please use
-- "Text.Hamlet.Runtime" instead.
module Text.Hamlet.RT
( -- * Public API
HamletRT (..)
, HamletData (..)
, HamletMap
, HamletException (..)
, parseHamletRT
, renderHamletRT
, renderHamletRT'
, SimpleDoc (..)
) where
import Text.Shakespeare.Base
import Data.Monoid (mconcat)
import Control.Monad (liftM, forM)
import Control.Exception (Exception)
import Data.Typeable (Typeable)
import Text.Hamlet.Parse
import Data.List (intercalate)
#if MIN_VERSION_blaze_html(0,5,0)
import Text.Blaze.Html (Html)
import Text.Blaze.Internal (preEscapedString, preEscapedText)
#else
import Text.Blaze (preEscapedString, preEscapedText, Html)
#endif
import Data.Text (Text)
#if MIN_VERSION_exceptions(0,4,0)
import Control.Monad.Catch (MonadThrow, throwM)
#else
import Control.Monad.Catch (MonadCatch, throwM)
#define MonadThrow MonadCatch
#endif
type HamletMap url = [([String], HamletData url)]
type UrlRenderer url = (url -> [(Text, Text)] -> Text)
data HamletData url
= HDHtml Html
| HDUrl url
| HDUrlParams url [(Text, Text)]
| HDTemplate HamletRT
| HDBool Bool
| HDMaybe (Maybe (HamletMap url))
| HDList [HamletMap url]
-- FIXME switch to Text?
data SimpleDoc = SDRaw String
| SDVar [String]
| SDUrl Bool [String]
| SDTemplate [String]
| SDForall [String] String [SimpleDoc]
| SDMaybe [String] String [SimpleDoc] [SimpleDoc]
| SDCond [([String], [SimpleDoc])] [SimpleDoc]
newtype HamletRT = HamletRT [SimpleDoc]
data HamletException = HamletParseException String
| HamletUnsupportedDocException Doc
| HamletRenderException String
deriving (Show, Typeable)
instance Exception HamletException
parseHamletRT :: MonadThrow m
=> HamletSettings -> String -> m HamletRT
parseHamletRT set s =
case parseDoc set s of
Error s' -> throwM $ HamletParseException s'
Ok (_, x) -> liftM HamletRT $ mapM convert x
where
convert x@(DocForall deref (BindAs _ _) docs) =
error "Runtime Hamlet does not currently support 'as' patterns"
convert x@(DocForall deref (BindVar (Ident ident)) docs) = do
deref' <- flattenDeref' x deref
docs' <- mapM convert docs
return $ SDForall deref' ident docs'
convert DocForall{} = error "Runtime Hamlet does not currently support tuple patterns"
convert x@(DocMaybe deref (BindAs _ _) jdocs ndocs) =
error "Runtime Hamlet does not currently support 'as' patterns"
convert x@(DocMaybe deref (BindVar (Ident ident)) jdocs ndocs) = do
deref' <- flattenDeref' x deref
jdocs' <- mapM convert jdocs
ndocs' <- maybe (return []) (mapM convert) ndocs
return $ SDMaybe deref' ident jdocs' ndocs'
convert DocMaybe{} = error "Runtime Hamlet does not currently support tuple patterns"
convert (DocContent (ContentRaw s')) = return $ SDRaw s'
convert x@(DocContent (ContentVar deref)) = do
y <- flattenDeref' x deref
return $ SDVar y
convert x@(DocContent (ContentUrl p deref)) = do
y <- flattenDeref' x deref
return $ SDUrl p y
convert x@(DocContent (ContentEmbed deref)) = do
y <- flattenDeref' x deref
return $ SDTemplate y
convert (DocContent ContentMsg{}) =
error "Runtime hamlet does not currently support message interpolation"
convert (DocContent ContentAttrs{}) =
error "Runtime hamlet does not currently support attrs interpolation"
convert x@(DocCond conds els) = do
conds' <- mapM go conds
els' <- maybe (return []) (mapM convert) els
return $ SDCond conds' els'
where
-- | See the comments in Text.Hamlet.Parse.testIncludeClazzes. The conditional
-- added there doesn't work for runtime Hamlet, so we remove it here.
go (DerefBranch (DerefIdent x) _, docs') | x == specialOrIdent = do
docs'' <- mapM convert docs'
return (["True"], docs'')
go (deref, docs') = do
deref' <- flattenDeref' x deref
docs'' <- mapM convert docs'
return (deref', docs'')
convert DocWith{} = error "Runtime hamlet does not currently support $with"
convert DocCase{} = error "Runtime hamlet does not currently support $case"
renderHamletRT :: MonadThrow m
=> HamletRT
-> HamletMap url
-> UrlRenderer url
-> m Html
renderHamletRT = renderHamletRT' False
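-- A minimal usage sketch: parse a template once, then render it against a
-- HamletMap whose keys are the flattened variable paths.  The settings value
-- ('defaultHamletSettings' from Text.Hamlet.Parse) and the URL renderer
-- 'render' are illustrative:
--
-- > rt   <- parseHamletRT defaultHamletSettings "<p>#{name}"
-- > html <- renderHamletRT rt [(["name"], HDHtml (preEscapedString "world"))] render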
#if MIN_VERSION_exceptions(0,4,0)
renderHamletRT' :: MonadThrow m
#else
renderHamletRT' :: MonadCatch m
#endif
                => Bool -- ^ should embedded templates (via ^{..}) be plain Html or actual templates?
-> HamletRT
-> HamletMap url
-> (url -> [(Text, Text)] -> Text)
-> m Html
renderHamletRT' tempAsHtml (HamletRT docs) scope0 renderUrl =
liftM mconcat $ mapM (go scope0) docs
where
go _ (SDRaw s) = return $ preEscapedString s
go scope (SDVar n) = do
v <- lookup' n n scope
case v of
HDHtml h -> return h
_ -> fa $ showName n ++ ": expected HDHtml"
go scope (SDUrl p n) = do
v <- lookup' n n scope
case (p, v) of
(False, HDUrl u) -> return $ preEscapedText $ renderUrl u []
(True, HDUrlParams u q) ->
return $ preEscapedText $ renderUrl u q
(False, _) -> fa $ showName n ++ ": expected HDUrl"
(True, _) -> fa $ showName n ++ ": expected HDUrlParams"
go scope (SDTemplate n) = do
v <- lookup' n n scope
case (tempAsHtml, v) of
(False, HDTemplate h) -> renderHamletRT' tempAsHtml h scope renderUrl
(False, _) -> fa $ showName n ++ ": expected HDTemplate"
(True, HDHtml h) -> return h
(True, _) -> fa $ showName n ++ ": expected HDHtml"
go scope (SDForall n ident docs') = do
v <- lookup' n n scope
case v of
HDList os ->
liftM mconcat $ forM os $ \o -> do
let scope' = map (\(x, y) -> (ident : x, y)) o ++ scope
renderHamletRT' tempAsHtml (HamletRT docs') scope' renderUrl
_ -> fa $ showName n ++ ": expected HDList"
go scope (SDMaybe n ident jdocs ndocs) = do
v <- lookup' n n scope
(scope', docs') <-
case v of
HDMaybe Nothing -> return (scope, ndocs)
HDMaybe (Just o) -> do
let scope' = map (\(x, y) -> (ident : x, y)) o ++ scope
return (scope', jdocs)
_ -> fa $ showName n ++ ": expected HDMaybe"
renderHamletRT' tempAsHtml (HamletRT docs') scope' renderUrl
go scope (SDCond [] docs') =
renderHamletRT' tempAsHtml (HamletRT docs') scope renderUrl
go scope (SDCond ((b, docs'):cs) els) = do
v <- lookup' b b scope
case v of
HDBool True ->
renderHamletRT' tempAsHtml (HamletRT docs') scope renderUrl
HDBool False -> go scope (SDCond cs els)
_ -> fa $ showName b ++ ": expected HDBool"
#if MIN_VERSION_exceptions(0,4,0)
lookup' :: MonadThrow m
#else
lookup' :: MonadCatch m
#endif
=> [String] -> [String] -> HamletMap url -> m (HamletData url)
lookup' orig k m =
case lookup k m of
Nothing | k == ["True"] -> return $ HDBool True
Nothing -> fa $ showName orig ++ ": not found"
Just x -> return x
fa :: MonadThrow m => String -> m a
fa = throwM . HamletRenderException
showName :: [String] -> String
showName = intercalate "." . reverse
#if MIN_VERSION_exceptions(0,4,0)
flattenDeref' :: MonadThrow f => Doc -> Deref -> f [String]
#else
flattenDeref' :: MonadCatch f => Doc -> Deref -> f [String]
#endif
flattenDeref' orig deref =
case flattenDeref deref of
Nothing -> throwM $ HamletUnsupportedDocException orig
Just x -> return x
| fgaray/shakespeare | Text/Hamlet/RT.hs | mit | 8,307 | 0 | 24 | 2,423 | 2,446 | 1,251 | 1,195 | 174 | 22 |
{- |
Module : $Header$
Description : Gtk Module to enable disproving Theorems
Copyright : (c) Simon Ulbricht, Uni Bremen 2010
License : GPLv2 or higher, see LICENSE.txt
Maintainer : [email protected]
Stability : provisional
Portability : portable
This module provides disproving support by checking the consistency of
inverted theorems.
-}
module GUI.GtkDisprove (disproveAtNode) where
import Graphics.UI.Gtk
import Graphics.UI.Gtk.Glade
import GUI.GtkUtils
import qualified GUI.Glade.NodeChecker as ConsistencyChecker
import GUI.GraphTypes
import GUI.GraphLogic hiding (openProofStatus)
import GUI.GtkConsistencyChecker
import Proofs.ConsistencyCheck
import Interfaces.GenericATPState (guiDefaultTimeLimit)
import Interfaces.DataTypes
import Interfaces.Utils (updateNodeProof)
import Logic.Logic
import Logic.Prover
import Static.DevGraph
import Static.GTheory
import Static.ComputeTheory
import qualified Common.OrderedMap as OMap
import Common.AS_Annotation
import Common.LibName (LibName)
import Common.Result
import Common.ExtSign
import Control.Concurrent (forkIO, killThread)
import Control.Concurrent.MVar
import Control.Monad (unless)
import Data.Graph.Inductive.Graph (LNode)
import Data.IORef
import qualified Data.Map as Map
import Data.List
import Data.Maybe
{- | This function converts the node's goals to the FNode datatype from
GUI.GtkConsistencyChecker. Each goal is negated by negate_th, and the
resulting theory is stored in the FNode's DGNodeLab local and global
theory. -}
showDisproveGUI :: GInfo -> LibEnv -> DGraph -> LNode DGNodeLab -> IO ()
showDisproveGUI gi le dg (i, lbl) = case globalTheory lbl of
Nothing -> error "GtkDisprove.showDisproveGUI(no global theory found)"
Just gt@(G_theory _ _ _ _ sens _) -> let
fg g th = let
l = lbl { dgn_theory = th }
l' = l { globalTheory = computeLabelTheory le dg (i, l) }
no_cs = ConsistencyStatus CSUnchecked ""
stat = case OMap.lookup g sens of
Nothing -> no_cs
Just tm -> case thmStatus tm of
[] -> no_cs
ts -> basicProofToConStatus $ maximum $ map snd ts
in FNode { name = g, node = (i, l'), sublogic = sublogicOfTh th,
cStatus = stat }
fgoals = foldr (\ (g, _) t -> case negate_th gt g of
Nothing -> t
Just nt -> fg g nt : t) []
$ getThGoals gt
in if null fgoals
then
errorDialogExt "Error (disprove)" "found no goals suitable for disprove function"
else do
wait <- newEmptyMVar
showDisproveWindow wait (libName gi) le dg gt fgoals
res <- takeMVar wait
runDisproveAtNode gi (i, lbl) res
{- | negates a single sentence within a G_theory and returns a theory
containing all axioms in addition to the one negated sentence. -}
negate_th :: G_theory -> String -> Maybe G_theory
negate_th g_th goal = case g_th of
G_theory lid1 syn (ExtSign sign symb) i1 sens _ ->
case OMap.lookup goal sens of
Nothing -> Nothing
Just sen ->
case negation lid1 $ sentence sen of
Nothing -> Nothing
Just sen' -> let
negSen = sen { sentence = sen', isAxiom = True }
sens' = OMap.insert goal negSen $ OMap.filter isAxiom sens
in Just $ G_theory lid1 syn (ExtSign sign symb) i1 sens' startThId
{- | This function is called from outside and manages the locking
mechanism of the node it is invoked on. -}
disproveAtNode :: GInfo -> Int -> DGraph -> IO ()
disproveAtNode gInfo descr dgraph = do
lockedEnv <- ensureLockAtNode gInfo descr dgraph
case lockedEnv of
Nothing -> return ()
Just (dg, lbl, le) -> do
acquired <- tryLockLocal lbl
if acquired then do
showDisproveGUI gInfo le dg (descr, lbl)
unlockLocal lbl
else errorDialogExt "Error" "Proof or disproof window already open"
{- | After results have been collected, this function is called to store
the results for this node within the dgraph's history. -}
runDisproveAtNode :: GInfo -> LNode DGNodeLab -> Result G_theory -> IO ()
runDisproveAtNode gInfo (v, dgnode) (Result ds mres) = case mres of
Just rTh ->
let oldTh = dgn_theory dgnode in
unless (rTh == oldTh) $ do
showDiagMessAux 2 ds
lockGlobal gInfo
let ln = libName gInfo
iSt = intState gInfo
ost <- readIORef iSt
let (ost', hist) = updateNodeProof ln ost (v, dgnode) rTh
case i_state ost' of
Nothing -> return ()
Just _ -> do
writeIORef iSt ost'
runAndLock gInfo $ updateGraph gInfo hist
unlockGlobal gInfo
_ -> return ()
{- | Displays a GUI to set the timeout limit and select the
ConsistencyChecker, which is then called on the (previously negated)
selected theorems. -}
showDisproveWindow :: MVar (Result G_theory) -> LibName -> LibEnv
-> DGraph -> G_theory -> [FNode] -> IO ()
showDisproveWindow res ln le dg g_th fgoals = postGUIAsync $ do
xml <- getGladeXML ConsistencyChecker.get
-- get objects
window <- xmlGetWidget xml castToWindow "NodeChecker"
btnClose <- xmlGetWidget xml castToButton "btnClose"
btnResults <- xmlGetWidget xml castToButton "btnResults"
-- get goals view and buttons
trvGoals <- xmlGetWidget xml castToTreeView "trvNodes"
btnNodesAll <- xmlGetWidget xml castToButton "btnNodesAll"
btnNodesNone <- xmlGetWidget xml castToButton "btnNodesNone"
btnNodesInvert <- xmlGetWidget xml castToButton "btnNodesInvert"
btnNodesUnchecked <- xmlGetWidget xml castToButton "btnNodesUnchecked"
btnNodesTimeout <- xmlGetWidget xml castToButton "btnNodesTimeout"
cbInclThms <- xmlGetWidget xml castToCheckButton "cbInclThms"
-- get checker view and buttons
cbComorphism <- xmlGetWidget xml castToComboBox "cbComorphism"
lblSublogic <- xmlGetWidget xml castToLabel "lblSublogic"
sbTimeout <- xmlGetWidget xml castToSpinButton "sbTimeout"
btnCheck <- xmlGetWidget xml castToButton "btnCheck"
btnStop <- xmlGetWidget xml castToButton "btnStop"
trvFinder <- xmlGetWidget xml castToTreeView "trvFinder"
toolLabel <- xmlGetWidget xml castToLabel "label1"
labelSetLabel toolLabel "Pick disprover"
windowSetTitle window "Disprove"
spinButtonSetValue sbTimeout $ fromIntegral guiDefaultTimeLimit
let widgets = [ toWidget sbTimeout
, toWidget cbComorphism
, toWidget lblSublogic ]
checkWidgets = widgets ++ [ toWidget btnClose
, toWidget btnNodesAll
, toWidget btnNodesNone
, toWidget btnNodesInvert
, toWidget btnNodesUnchecked
, toWidget btnNodesTimeout
, toWidget btnResults ]
switch b = do
widgetSetSensitive btnStop $ not b
widgetSetSensitive btnCheck b
widgetSetSensitive btnStop False
widgetSetSensitive btnCheck False
threadId <- newEmptyMVar
wait <- newEmptyMVar
mView <- newEmptyMVar
-- setup data
listGoals <- setListData trvGoals show $ sort fgoals
listFinder <- setListData trvFinder fName []
-- setup comorphism combobox
comboBoxSetModelText cbComorphism
shC <- after cbComorphism changed
$ setSelectedComorphism trvFinder listFinder cbComorphism
-- setup view selection actions
let update = do
mf <- getSelectedSingle trvFinder listFinder
updateComorphism trvFinder listFinder cbComorphism shC
widgetSetSensitive btnCheck $ isJust mf
setListSelectorSingle trvFinder update
let upd = updateNodes trvGoals listGoals
(\ b s -> do
labelSetLabel lblSublogic $ show s
updateFinder trvFinder listFinder b s)
(do
labelSetLabel lblSublogic "No sublogic"
listStoreClear listFinder
activate widgets False
widgetSetSensitive btnCheck False)
(activate widgets True >> widgetSetSensitive btnCheck True)
shN <- setListSelectorMultiple trvGoals btnNodesAll btnNodesNone
btnNodesInvert upd
-- bindings
let selectWithAux f u = do
signalBlock shN
sel <- treeViewGetSelection trvGoals
treeSelectionSelectAll sel
rs <- treeSelectionGetSelectedRows sel
mapM_ ( \ ~p@(row : []) -> do
fn <- listStoreGetValue listGoals row
(if f fn then treeSelectionSelectPath else treeSelectionUnselectPath)
sel p) rs
signalUnblock shN
u
selectWith f = selectWithAux $ f . cStatus
onClicked btnNodesUnchecked
$ selectWith (== ConsistencyStatus CSUnchecked "") upd
onClicked btnNodesTimeout $ selectWith (== ConsistencyStatus CSTimeout "") upd
onClicked btnResults $ showModelView mView "Models" listGoals []
onClicked btnClose $ widgetDestroy window
onClicked btnStop $ takeMVar threadId >>= killThread >>= putMVar wait
onClicked btnCheck $ do
activate checkWidgets False
timeout <- spinButtonGetValueAsInt sbTimeout
inclThms <- toggleButtonGetActive cbInclThms
(updat, pexit) <- progressBar "Checking consistency" "please wait..."
goals' <- getSelectedMultiple trvGoals listGoals
mf <- getSelectedSingle trvFinder listFinder
f <- case mf of
Nothing -> error "Disprove: internal error"
Just (_, f) -> return f
switch False
tid <- forkIO $ do
{- call the check function from GUI.GtkConsistencyChecker.
first argument means disprove-mode and leads the ConsistencyChecker
to mark consistent sentences as disproved (since consistent with
negated sentence) -}
check True inclThms ln le dg f timeout listGoals updat goals'
putMVar wait ()
putMVar threadId tid
forkIO_ $ do
takeMVar wait
postGUIAsync $ do
switch True
tryTakeMVar threadId
showModelView mView "Results of disproving" listGoals []
signalBlock shN
sortNodes trvGoals listGoals
signalUnblock shN
upd
activate checkWidgets True
pexit
{- after window closes a new G_theory is created containing the results.
only successful disprove attempts are returned; for each one, a new
BasicProof is created and set to disproved. -}
onDestroy window $ do
fnodes' <- listStoreToList listGoals
maybe_F <- getSelectedSingle trvFinder listFinder
case maybe_F of
Just (_, f) -> case g_th of
G_theory lid syn sig i1 sens _ -> let
sens' = foldr (\ fg t -> if (sType . cStatus) fg == CSInconsistent
then let
n' = name fg
es = Map.findWithDefault (error
"GtkDisprove.showDisproveWindow") n' t
s = OMap.ele es
ps = openProofStatus n' (fName f) (empty_proof_tree lid)
bp = BasicProof lid ps { goalStatus = Disproved }
c = comorphism f !! selected f
s' = s { senAttr = ThmStatus $ (c, bp) : thmStatus s } in
Map.insert n' es { OMap.ele = s' } t
else t ) sens fnodes'
in putMVar res $ return (G_theory lid syn sig i1 sens' startThId)
_ -> putMVar res $ return g_th
selectWith (== ConsistencyStatus CSUnchecked "") upd
widgetShow window
| mariefarrell/Hets | GUI/GtkDisprove.hs | gpl-2.0 | 11,301 | 4 | 24 | 2,849 | 2,744 | 1,315 | 1,429 | -1 | -1 |
{-# LANGUAGE PatternSignatures #-}
module Test10255 where
import Data.Maybe
fob (f :: (Maybe t -> Int)) =
undefined
| christiaanb/ghc | testsuite/tests/ghc-api/annotations/Test10255.hs | bsd-3-clause | 120 | 0 | 10 | 22 | 35 | 20 | 15 | 5 | 1 |
{-# LANGUAGE OverloadedStrings #-}
{-
Copyright (C) 2006-2015 John MacFarlane <[email protected]>
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
-}
{- |
Module : Text.Pandoc.Writers.Native
Copyright : Copyright (C) 2006-2015 John MacFarlane
License : GNU GPL, version 2 or above
Maintainer : John MacFarlane <[email protected]>
Stability : alpha
Portability : portable
Conversion of a 'Pandoc' document to a string representation.
Note: If @writerStandalone@ is @False@, only the document body
is represented; otherwise, the full 'Pandoc' document, including the
metadata.
-}
module Text.Pandoc.Writers.Native ( writeNative )
where
import Text.Pandoc.Options ( WriterOptions(..), WrapOption(..) )
import Data.List ( intersperse )
import Text.Pandoc.Definition
import Text.Pandoc.Pretty
prettyList :: [Doc] -> Doc
prettyList ds =
"[" <> (cat $ intersperse (cr <> ",") $ map (nest 1) ds) <> "]"
-- | Prettyprint Pandoc block element.
prettyBlock :: Block -> Doc
prettyBlock (BlockQuote blocks) =
"BlockQuote" $$ prettyList (map prettyBlock blocks)
prettyBlock (OrderedList attribs blockLists) =
"OrderedList" <> space <> text (show attribs) $$
(prettyList $ map (prettyList . map prettyBlock) blockLists)
prettyBlock (BulletList blockLists) =
"BulletList" $$
(prettyList $ map (prettyList . map prettyBlock) blockLists)
prettyBlock (DefinitionList items) = "DefinitionList" $$
(prettyList $ map deflistitem items)
where deflistitem (term, defs) = "(" <> text (show term) <> "," <> cr <>
nest 1 (prettyList $ map (prettyList . map prettyBlock) defs) <> ")"
prettyBlock (Table caption aligns widths header rows) =
"Table " <> text (show caption) <> " " <> text (show aligns) <> " " <>
text (show widths) $$
prettyRow header $$
prettyList (map prettyRow rows)
where prettyRow cols = prettyList (map (prettyList . map prettyBlock) cols)
prettyBlock (Div attr blocks) =
text ("Div " <> show attr) $$ prettyList (map prettyBlock blocks)
prettyBlock block = text $ show block
-- | Prettyprint Pandoc document.
writeNative :: WriterOptions -> Pandoc -> String
writeNative opts (Pandoc meta blocks) =
let colwidth = if writerWrapText opts == WrapAuto
then Just $ writerColumns opts
else Nothing
withHead = if writerStandalone opts
then \bs -> text ("Pandoc (" ++ show meta ++ ")") $$
bs $$ cr
else id
in render colwidth $ withHead $ prettyList $ map prettyBlock blocks
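-- A small illustrative call, assuming a default 'WriterOptions' value named
-- 'opts' (writerStandalone False, so only the body is shown):
--
-- > writeNative opts (Pandoc nullMeta [Para [Str "hello"]])
--
-- renders the block list in Haskell syntax, e.g. [Para [Str "hello"]].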
| janschulz/pandoc | src/Text/Pandoc/Writers/Native.hs | gpl-2.0 | 3,194 | 0 | 17 | 673 | 672 | 344 | 328 | 41 | 3 |
{-# LANGUAGE StandaloneKindSignatures #-}
{-# LANGUAGE TypeFamilies, GADTs, PolyKinds, DataKinds, ExplicitForAll #-}
-- See also: saks007_fail.hs
module SAKS_007 where
import Data.Kind (Type, Constraint)
type family F a where { F Type = True; F _ = False }
type family G a where { G Type = False; G _ = True }
type X :: forall k1 k2. (F k1 ~ G k2) => k1 -> k2 -> Type
data X a b where
MkX :: X Integer Maybe -- OK: F Type ~ G (Type -> Type)
-- True ~ True
| sdiehl/ghc | testsuite/tests/saks/should_compile/saks007.hs | bsd-3-clause | 495 | 0 | 9 | 132 | 127 | 78 | 49 | -1 | -1 |
<?xml version="1.0" encoding="UTF-8"?><!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<helpset version="2.0" xml:lang="ko-KR">
<title>Server-Sent Events Add-on</title>
<maps>
<homeID>sse.introduction</homeID>
<mapref location="map.jhm"/>
</maps>
<view>
<name>TOC</name>
<label>Contents</label>
<type>org.zaproxy.zap.extension.help.ZapTocView</type>
<data>toc.xml</data>
</view>
<view>
<name>Index</name>
<label>Index</label>
<type>javax.help.IndexView</type>
<data>index.xml</data>
</view>
<view>
<name>Search</name>
<label>Search</label>
<type>javax.help.SearchView</type>
<data engine="com.sun.java.help.search.DefaultSearchEngine">
JavaHelpSearch
</data>
</view>
<view>
<name>Favorites</name>
<label>Favorites</label>
<type>javax.help.FavoritesView</type>
</view>
</helpset> | kingthorin/zap-extensions | addOns/sse/src/main/javahelp/org/zaproxy/zap/extension/sse/resources/help_ko_KR/helpset_ko_KR.hs | apache-2.0 | 983 | 77 | 68 | 157 | 419 | 212 | 207 | -1 | -1 |
import Test.Cabal.Prelude
main = cabalTest $ do
-- Don't run this test unless the GHC is sufficiently recent
-- to not ship boot old-time/old-locale
skipUnless =<< ghcVersionIs (>= mkVersion [7,11])
-- Don't run this test on GHC 8.2, which ships with Cabal 2.0,
-- which is not eligible for old-style Custom setup (if
-- we had the full Hackage index, we'd try it.)
skipUnless =<< ghcVersionIs (< mkVersion [8,1])
withRepo "repo" $ do
cabal "new-build" ["a"]
| mydaum/cabal | cabal-testsuite/PackageTests/NewBuild/T4375/cabal.test.hs | bsd-3-clause | 499 | 0 | 12 | 116 | 88 | 47 | 41 | 6 | 1 |
module Interp (runInterp) where
import GenUtils
import DataTypes
import InterpUtils
import Parser (pgnLexer)
runInterp :: AbsGame -> RealGame
runInterp (Game tags toks) = Game tags (pgnInterp toks initParState)
initParState = (FirstBoard startBoard)
type Par a = StoreBoard -> a
thenP :: Par a -> (a -> Par b) -> Par b
returnP :: a -> Par a
returnP a = \s -> a
thenP m k s = case m s of
r -> k r s
failP a = \s -> error a
consP q rest = \s -> q : pgnInterp rest s
thenP' :: Par StoreBoard -> Par a -> Par a
thenP' m k s = case m s of
r -> k r
newGameP :: Par a -> Par a
newGameP m = \ _ -> m initParState
getCurrColour :: Par Colour
getCurrColour =
getBoard `thenP` \ (Board _ (MoveNumber _ col) _) ->
returnP col
checkColour :: MoveNumber -> Par ()
checkColour (MoveNumber i col) =
getBoard `thenP` \ (Board _ (MoveNumber i' col') _) ->
if i == i' && col == col'
then returnP ()
else failP ("number mis-match: "
++ userFormat (MoveNumber i col)
++ " (looking for "
++ userFormat (MoveNumber i' col')
++ ")\n")
data StoreBoard
= FirstBoard Board
| UndoableBoard Board {- new -} Board {- back one -}
updateBoard :: Board -> Par StoreBoard
updateBoard brd (FirstBoard old_brd)
= UndoableBoard brd old_brd
updateBoard brd (UndoableBoard old_brd _)
= UndoableBoard brd old_brd
getBoard :: Par Board
getBoard s@(FirstBoard brd)
= brd
getBoard s@(UndoableBoard brd _)
= brd
undoBoard :: Par StoreBoard
undoBoard (FirstBoard _)
= error "Incorrect start to some analysis"
undoBoard (UndoableBoard _ old_brd)
= FirstBoard old_brd
pgnInterp :: [Token] -> Par [Quantum]
pgnInterp (IntToken n:PeriodToken:PeriodToken:PeriodToken:rest) =
checkColour (MoveNumber n Black) `thenP` \ () ->
pgnInterp rest
pgnInterp (IntToken n:PeriodToken:rest) =
checkColour (MoveNumber n White) `thenP` \ () ->
pgnInterp rest
pgnInterp (SymbolToken str:CommentToken (ann:rs):r)
| all (flip elem "!?") ann =
pgnInterp (SymbolToken str:pgnLexer ann ++ (CommentToken rs:r))
pgnInterp (CommentToken (n:tag:rest):r)
| head tag == '(' && take 2 (reverse tag) == ":)" && length rest > 1 =
getCurrColour `thenP` \ col ->
let
        invert Black r = r -- because the move has *already* happened
        invert _ "0.00" = "0.00" -- don't negate 0
        invert _ ('-':r) = r
        invert _ r = '-':r
in
pgnInterp (LeftRBToken:map SymbolToken (take (length rest-1) rest)
++ [CommentToken ["Score:",invert col n],RightRBToken] ++ r)
pgnInterp (CommentToken []:rest) = pgnInterp rest
pgnInterp (CommentToken comm:rest) =
consP (QuantumComment comm) rest
pgnInterp (NAGToken nag:rest) =
consP (QuantumNAG nag) rest
pgnInterp (NAGAnnToken nag _:rest) =
consP (QuantumNAG nag) rest
pgnInterp (SymbolToken "0-1":rest) =
consP (QuantumResult "0-1") rest
pgnInterp (SymbolToken "1-0":rest) =
consP (QuantumResult "1-0") rest
pgnInterp (SymbolToken "1/2-1/2":rest) =
consP (QuantumResult "1/2-1/2") rest
pgnInterp (AsterixToken:rest) =
consP (QuantumResult "*") rest
pgnInterp (SymbolToken move:rest@(NAGAnnToken _ str:_)) =
getBoard `thenP` \ brd ->
parseMove move brd `thenP` \ (mv,ch,corrMv,new_brd) ->
updateBoard new_brd `thenP'`
consP (QuantumMove mv ch str new_brd) rest
pgnInterp (SymbolToken move:rest) =
getBoard `thenP` \ brd ->
parseMove move brd `thenP` \ (mv,ch,corrMv,new_brd) ->
updateBoard new_brd `thenP'`
consP (QuantumMove mv ch "" new_brd) rest
pgnInterp (LeftRBToken:rest) =
getAnalysis rest 0 [] `thenP` \ (anal,rest) ->
(undoBoard `thenP'`
pgnInterp anal) `thenP` \ anal' ->
consP (QuantumAnalysis anal') rest
pgnInterp [] = returnP []
pgnInterp toks = failP ("when reading: "
++ unwords (map userFormat (take 10 toks)))
getAnalysis (t@LeftRBToken:r) n anal = getAnalysis r (n+1) (t:anal)
getAnalysis (t@RightRBToken:r) n anal
| n == (0 :: Int) = returnP (reverse anal,r)
| otherwise = getAnalysis r (n-1) (t:anal)
getAnalysis (t:r) n anal = getAnalysis r n (t:anal)
getAnalysis [] n anal = failP "no closing ')'"
parseMove :: String -> Board -> Par (String,String,String,Board)
parseMove move brd@(Board _ (MoveNumber _ col) _) =
case mapMaybeFail charToMoveTok move of
Nothing -> failP ("strange move:" ++ move)
Just mv_toks ->
let
(chs,mv_toks') = getChecks (reverse mv_toks)
(queen,mv_toks'') = getQueen mv_toks'
in
case parseAlgMove mv_toks'' queen brd of
(the_mv,new_brd) -> returnP (the_mv,chs,"$$",new_brd)
parseAlgMove
:: [MoveTok]
-> Maybe Piece
-> Board
-> (String,Board)
parseAlgMove [PartCastleTok,MoveToTok,PartCastleTok] Nothing
= findCastleKMove
parseAlgMove [PartCastleTok,MoveToTok,PartCastleTok,
MoveToTok,PartCastleTok] Nothing
= findCastleQMove
parseAlgMove (PieceTok King:r) Nothing = parsePieceMove r King
parseAlgMove (PieceTok Queen:r) Nothing = parsePieceMove r Queen
parseAlgMove (PieceTok Rook:r) Nothing = parsePieceMove r Rook
parseAlgMove (PieceTok Knight:r) Nothing = parsePieceMove r Knight
parseAlgMove (PieceTok Bishop:r) Nothing = parsePieceMove r Bishop
parseAlgMove [FileTok sf,RankTok sr,MoveToTok,FileTok df,RankTok dr] q =
findAPawnMove (extendBP (sf,sr)) (extendBP (df,dr)) q
parseAlgMove [FileTok sf,RankTok sr,CaptureTok,FileTok df,RankTok dr] q =
findAPawnMove (extendBP (sf,sr)) (extendBP (df,dr)) q
parseAlgMove [FileTok sf,RankTok sr,FileTok df,RankTok dr] q = \ brd ->
case lookupBoardPiece brd (sf,sr) of
Nothing -> error ("cant find piece at: " ++ userFormatBoardPos (sf,sr))
Just Pawn -> findAPawnMove (extendBP (sf,sr)) (extendBP (df,dr)) q brd
Just King | sf == 5 && df == 7 -> findCastleKMove brd
Just King | sf == 5 && df == 3 -> findCastleQMove brd
Just p -> findAMove p (extendBP (sf,sr)) (extendBP (df,dr)) brd
-- later !
parseAlgMove [FileTok df,RankTok dr] q =
findAPawnMove (Nothing,Nothing) (extendBP (df,dr)) q
parseAlgMove [FileTok sf,CaptureTok,FileTok df,RankTok dr] q =
findAPawnMove (Just sf,Nothing) (extendBP (df,dr)) q
parseAlgMove [FileTok sf,FileTok df] q =
findAPawnMove (Just sf,Nothing) (Just df,Nothing) q
parseAlgMove [FileTok sf,CaptureTok,FileTok df] q =
findAPawnMove (Just sf,Nothing) (Just df,Nothing) q
parseAlgMove _ _ = error "!>!"
parsePieceMove [FileTok df,RankTok dr] p
= findAMove p (Nothing,Nothing) (extendBP (df,dr))
parsePieceMove [CaptureTok,FileTok df,RankTok dr] p
= findAMove p (Nothing,Nothing) (extendBP (df,dr))
parsePieceMove [RankTok sr,FileTok df,RankTok dr] p
= findAMove p (Nothing,Just sr) (extendBP (df,dr))
parsePieceMove [RankTok sr,CaptureTok,FileTok df,RankTok dr] p
= findAMove p (Nothing,Just sr) (extendBP (df,dr))
parsePieceMove [FileTok sf,FileTok df,RankTok dr] p
= findAMove p (Just sf,Nothing) (extendBP (df,dr))
parsePieceMove [FileTok sf,CaptureTok,FileTok df,RankTok dr] p
= findAMove p (Just sf,Nothing) (extendBP (df,dr))
parsePieceMove [FileTok sf,RankTok sr,MoveToTok,FileTok df,RankTok dr] p
= findAMove p (extendBP (sf,sr)) (extendBP (df,dr))
parsePieceMove [FileTok sf,RankTok sr,CaptureTok,FileTok df,RankTok dr] p
= findAMove p (extendBP (sf,sr)) (extendBP (df,dr))
parsePieceMove _ p = failP ("syntax error in move:")
getChecks (CheckTok:CheckTok:r) = ("#",r)
getChecks (CheckTok:r) = ("+",r)
getChecks (MateTok:r) = ("#",r)
getChecks r = ("",r)
getQueen (PieceTok p:QueensWith:r) = (Just p,reverse r)
getQueen r = (Nothing,reverse r)
| shlevy/ghc | testsuite/tests/programs/andy_cherry/Interp.hs | bsd-3-clause | 8,294 | 0 | 20 | 2,203 | 3,298 | 1,700 | 1,598 | 180 | 5 |
{-# LANGUAGE DeriveDataTypeable #-}
module CostCentre (
CostCentre(..), CcName, IsCafCC(..),
-- All abstract except to friend: ParseIface.y
CostCentreStack,
CollectedCCs,
noCCS, currentCCS, dontCareCCS,
noCCSAttached, isCurrentCCS,
maybeSingletonCCS,
mkUserCC, mkAutoCC, mkAllCafsCC,
mkSingletonCCS,
isCafCCS, isCafCC, isSccCountCC, sccAbleCC, ccFromThisModule,
pprCostCentreCore,
costCentreUserName, costCentreUserNameFS,
costCentreSrcSpan,
cmpCostCentre -- used for removing dups in a list
) where
import Binary
import Var
import Name
import Module
import Unique
import Outputable
import SrcLoc
import FastString
import Util
import Data.Data
-----------------------------------------------------------------------------
-- Cost Centres
-- | A Cost Centre is a single @{-# SCC #-}@ annotation.
data CostCentre
= NormalCC {
cc_key :: {-# UNPACK #-} !Int,
-- ^ Two cost centres may have the same name and
-- module but different SrcSpans, so we need a way to
-- distinguish them easily and give them different
-- object-code labels. So every CostCentre has a
-- Unique that is distinct from every other
-- CostCentre in the same module.
--
-- XXX: should really be using Unique here, but we
-- need to derive Data below and there's no Data
-- instance for Unique.
cc_name :: CcName, -- ^ Name of the cost centre itself
cc_mod :: Module, -- ^ Name of module defining this CC.
cc_loc :: SrcSpan,
cc_is_caf :: IsCafCC -- see below
}
| AllCafsCC {
cc_mod :: Module, -- Name of module defining this CC.
cc_loc :: SrcSpan
}
deriving (Data, Typeable)
type CcName = FastString
data IsCafCC = NotCafCC | CafCC
deriving (Eq, Ord, Data, Typeable)
instance Eq CostCentre where
c1 == c2 = case c1 `cmpCostCentre` c2 of { EQ -> True; _ -> False }
instance Ord CostCentre where
compare = cmpCostCentre
cmpCostCentre :: CostCentre -> CostCentre -> Ordering
cmpCostCentre (AllCafsCC {cc_mod = m1}) (AllCafsCC {cc_mod = m2})
= m1 `compare` m2
cmpCostCentre NormalCC {cc_key = n1, cc_mod = m1}
NormalCC {cc_key = n2, cc_mod = m2}
-- first key is module name, then the integer key
= (m1 `compare` m2) `thenCmp` (n1 `compare` n2)
cmpCostCentre other_1 other_2
= let
tag1 = tag_CC other_1
tag2 = tag_CC other_2
in
if tag1 < tag2 then LT else GT
where
tag_CC :: CostCentre -> Int
tag_CC (NormalCC {}) = 0
tag_CC (AllCafsCC {}) = 1
-----------------------------------------------------------------------------
-- Predicates on CostCentre
isCafCC :: CostCentre -> Bool
isCafCC (AllCafsCC {}) = True
isCafCC (NormalCC {cc_is_caf = CafCC}) = True
isCafCC _ = False
-- | Is this a cost-centre which records scc counts
isSccCountCC :: CostCentre -> Bool
isSccCountCC cc | isCafCC cc = False
| otherwise = True
-- | Is this a cost-centre which can be sccd ?
sccAbleCC :: CostCentre -> Bool
sccAbleCC cc | isCafCC cc = False
| otherwise = True
ccFromThisModule :: CostCentre -> Module -> Bool
ccFromThisModule cc m = cc_mod cc == m
-----------------------------------------------------------------------------
-- Building cost centres
mkUserCC :: FastString -> Module -> SrcSpan -> Unique -> CostCentre
mkUserCC cc_name mod loc key
= NormalCC { cc_key = getKey key, cc_name = cc_name, cc_mod = mod, cc_loc = loc,
cc_is_caf = NotCafCC {-might be changed-}
}
mkAutoCC :: Id -> Module -> IsCafCC -> CostCentre
mkAutoCC id mod is_caf
= NormalCC { cc_key = getKey (getUnique id),
cc_name = str, cc_mod = mod,
cc_loc = nameSrcSpan (getName id),
cc_is_caf = is_caf
}
where
name = getName id
-- beware: only external names are guaranteed to have unique
-- Occnames. If the name is not external, we must append its
-- Unique.
-- See bug #249, tests prof001, prof002, also #2411
str | isExternalName name = occNameFS (getOccName id)
| otherwise = occNameFS (getOccName id)
`appendFS`
mkFastString ('_' : show (getUnique name))
mkAllCafsCC :: Module -> SrcSpan -> CostCentre
mkAllCafsCC m loc = AllCafsCC { cc_mod = m, cc_loc = loc }
-----------------------------------------------------------------------------
-- Cost Centre Stacks
-- | A Cost Centre Stack is something that can be attached to a closure.
-- This is either:
--
-- * the current cost centre stack (CCCS)
-- * a pre-defined cost centre stack (there are several
-- pre-defined CCSs, see below).
data CostCentreStack
= NoCCS
| CurrentCCS -- Pinned on a let(rec)-bound
-- thunk/function/constructor, this says that the
-- cost centre to be attached to the object, when it
-- is allocated, is whatever is in the
-- current-cost-centre-stack register.
| DontCareCCS -- We need a CCS to stick in static closures
-- (for data), but we *don't* expect them to
-- accumulate any costs. But we still need
-- the placeholder. This CCS is it.
| SingletonCCS CostCentre
deriving (Eq, Ord) -- needed for Ord on CLabel
-- synonym for triple which describes the cost centre info in the generated
-- code for a module.
type CollectedCCs
= ( [CostCentre] -- local cost-centres that need to be decl'd
, [CostCentre] -- "extern" cost-centres
, [CostCentreStack] -- pre-defined "singleton" cost centre stacks
)
noCCS, currentCCS, dontCareCCS :: CostCentreStack
noCCS = NoCCS
currentCCS = CurrentCCS
dontCareCCS = DontCareCCS
-----------------------------------------------------------------------------
-- Predicates on Cost-Centre Stacks
noCCSAttached :: CostCentreStack -> Bool
noCCSAttached NoCCS = True
noCCSAttached _ = False
isCurrentCCS :: CostCentreStack -> Bool
isCurrentCCS CurrentCCS = True
isCurrentCCS _ = False
isCafCCS :: CostCentreStack -> Bool
isCafCCS (SingletonCCS cc) = isCafCC cc
isCafCCS _ = False
maybeSingletonCCS :: CostCentreStack -> Maybe CostCentre
maybeSingletonCCS (SingletonCCS cc) = Just cc
maybeSingletonCCS _ = Nothing
mkSingletonCCS :: CostCentre -> CostCentreStack
mkSingletonCCS cc = SingletonCCS cc
-----------------------------------------------------------------------------
-- Printing Cost Centre Stacks.
-- The outputable instance for CostCentreStack prints the CCS as a C
-- expression.
instance Outputable CostCentreStack where
ppr NoCCS = ptext (sLit "NO_CCS")
ppr CurrentCCS = ptext (sLit "CCCS")
ppr DontCareCCS = ptext (sLit "CCS_DONT_CARE")
ppr (SingletonCCS cc) = ppr cc <> ptext (sLit "_ccs")
-----------------------------------------------------------------------------
-- Printing Cost Centres
--
-- There are several different ways in which we might want to print a
-- cost centre:
--
-- - the name of the cost centre, for profiling output (a C string)
-- - the label, i.e. C label for cost centre in .hc file.
-- - the debugging name, for output in -ddump things
-- - the interface name, for printing in _scc_ exprs in iface files.
--
-- The last 3 are derived from costCentreStr below. The first is given
-- by costCentreName.
instance Outputable CostCentre where
ppr cc = getPprStyle $ \ sty ->
if codeStyle sty
then ppCostCentreLbl cc
else text (costCentreUserName cc)
-- Printing in Core
pprCostCentreCore :: CostCentre -> SDoc
pprCostCentreCore (AllCafsCC {cc_mod = m})
= text "__sccC" <+> braces (ppr m)
pprCostCentreCore (NormalCC {cc_key = key, cc_name = n, cc_mod = m, cc_loc = loc,
cc_is_caf = caf})
= text "__scc" <+> braces (hsep [
ppr m <> char '.' <> ftext n,
ifPprDebug (ppr key),
pp_caf caf,
ifPprDebug (ppr loc)
])
pp_caf :: IsCafCC -> SDoc
pp_caf CafCC = text "__C"
pp_caf _ = empty
-- Printing as a C label
ppCostCentreLbl :: CostCentre -> SDoc
ppCostCentreLbl (AllCafsCC {cc_mod = m}) = ppr m <> text "_CAFs_cc"
ppCostCentreLbl (NormalCC {cc_key = k, cc_name = n, cc_mod = m,
cc_is_caf = is_caf})
= ppr m <> char '_' <> ztext (zEncodeFS n) <> char '_' <>
case is_caf of { CafCC -> ptext (sLit "CAF"); _ -> ppr (mkUniqueGrimily k)} <> text "_cc"
-- This is the name to go in the user-displayed string,
-- recorded in the cost centre declaration
costCentreUserName :: CostCentre -> String
costCentreUserName = unpackFS . costCentreUserNameFS
costCentreUserNameFS :: CostCentre -> FastString
costCentreUserNameFS (AllCafsCC {}) = mkFastString "CAF"
costCentreUserNameFS (NormalCC {cc_name = name, cc_is_caf = is_caf})
= case is_caf of
CafCC -> mkFastString "CAF:" `appendFS` name
_ -> name
costCentreSrcSpan :: CostCentre -> SrcSpan
costCentreSrcSpan = cc_loc
instance Binary IsCafCC where
put_ bh CafCC = do
putByte bh 0
put_ bh NotCafCC = do
putByte bh 1
get bh = do
h <- getByte bh
case h of
0 -> do return CafCC
_ -> do return NotCafCC
instance Binary CostCentre where
put_ bh (NormalCC aa ab ac _ad ae) = do
putByte bh 0
put_ bh aa
put_ bh ab
put_ bh ac
put_ bh ae
put_ bh (AllCafsCC ae _af) = do
putByte bh 1
put_ bh ae
get bh = do
h <- getByte bh
case h of
0 -> do aa <- get bh
ab <- get bh
ac <- get bh
ae <- get bh
return (NormalCC aa ab ac noSrcSpan ae)
_ -> do ae <- get bh
return (AllCafsCC ae noSrcSpan)
-- We ignore the SrcSpans in CostCentres when we serialise them,
-- and set the SrcSpans to noSrcSpan when deserialising. This is
-- ok, because we only need the SrcSpan when declaring the
-- CostCentre in the original module, it is not used by importing
-- modules.
| wxwxwwxxx/ghc | compiler/profiling/CostCentre.hs | bsd-3-clause | 10,877 | 7 | 17 | 3,352 | 2,107 | 1,147 | 960 | 183 | 3 |
module Trit (Trit, rationalToTrit, getIntegral, getFraction, getFraction',
neg, addTrits, subTrits, shiftLeft, shiftRight, multiply
) where
import Stream
import Utilities
import Data.Ratio
type Mantissa = Stream
type Fraction = Stream
type Trit = (Mantissa, Fraction)
-- Convert from a Rational number to its Trit representation (Integral, Fraction)
rationalToTrit :: Rational -> Trit
rationalToTrit x
|x<1 = ([0], rationalToStream x)
|otherwise = (u', rationalToStream v)
where u = n `div` d
u' = toBinary u
v = x - (toRational u)
n = numerator x
d = denominator x
-- Get the integral part of Trit
getIntegral :: Trit -> Mantissa
getIntegral = fst
-- Get the fraction part of a Trit, with n digits of the stream
getFraction :: Int -> Trit -> Stream
getFraction n = take n. snd
-- Get the fraction part of Trit
getFraction' :: Trit -> Stream
getFraction' = snd
-- Negate a Trit
neg :: Trit -> Trit
neg (a, b) = (negate' a, negate' b)
-- Add two Trits
addTrits :: Trit -> Trit -> Trit
addTrits (m1, (x1:x2:xs)) (m2, (y1:y2:ys)) = (u,addStream (x1:x2:xs) (y1:y2:ys))
where u' = addFiniteStream m1 m2
c = [carry x1 x2 y1 y2]
u = addFiniteStream u' c
-- Subtraction of two Trits
subTrits :: Trit -> Trit -> Trit
subTrits x y = addTrits x (neg y)
-- Shift left = *2 operation with Trit
shiftLeft :: Trit -> Trit
shiftLeft (x, (y:ys)) = (x++ [y], ys)
-- Shift right = /2 operation with Trit
shiftRight :: Trit -> Integer -> Trit
shiftRight (x, xs) 1 = (init x, (u:xs))
where u = last x
shiftRight (x, xs) n = shiftRight (init x, (u:xs)) (n-1)
where u = last x
-- Multiply a Trit stream by 1, 0 or -1; simply return the corresponding stream
mulOneDigit :: Integer -> Stream -> Stream
mulOneDigit x xs
|x==1 = xs
|x==0 = zero'
|otherwise = negate' xs
where zero' = (0:zero')
-- Multiplication of two streams
multiply :: Stream -> Stream -> Stream
multiply (a0:a1:x) (b0:b1:y) = average p q
where p = average (a1*b0: (average (mulOneDigit b1 x)
(mulOneDigit a1 y)))
(average (mulOneDigit b0 x)
(mulOneDigit a0 y))
q = (a0*b0:a0*b1:a1*b1:(multiply x y))
start0 = take 30 (multiply (rationalToStream (1%2)) zo)
zo :: Stream
zo = 1:(-1):zero
where zero = 0:zero
start1 = take 30 (average (rationalToStream (1%2)) (negate' (rationalToStream (1%4))))
| ghc-android/ghc | testsuite/tests/concurrent/prog001/Trit.hs | bsd-3-clause | 2,483 | 21 | 14 | 654 | 924 | 508 | 416 | 57 | 1 |
module HAD.Y2014.M03.D03.Exercise where
import Data.List
import Data.Function
-- | Sort a list of list of elements by the maximum of each list,
-- in ascending order
--
-- Point-free: easy and readable
-- Level: EASY
--
-- Examples:
-- >>> sortByMax [[1,10],[5,5]]
-- [[5,5],[1,10]]
-- >>> sortByMax []
-- []
--
-- sortByMax [[], [1,2]]
-- should throw an execption: no max for empty list
sortByMax :: Ord a => [[a]] -> [[a]]
sortByMax = sortBy (compare `on` maximum)
| smwhr/1HAD | exercises/HAD/Y2014/M03/D03/Exercise.hs | mit | 472 | 0 | 8 | 83 | 78 | 53 | 25 | 5 | 1 |
{- |
Module : MainWindow.hs
  Description : Main window of the editor.
Maintainer : Kelvin Glaß, Chritoph Graebnitz, Kristin Knorr, Nicolas Lehmann (c)
License : MIT
Stability : experimental
The MainWindow-module depicts the main window of the editor.
-}
module MainWindow (
create
)
where
-- imports --
import qualified Graphics.UI.Gtk as Gtk
import qualified MenuBar as MB
import qualified ToolBar as TB
import qualified FooterBar as FB
import qualified TextArea as TA
import TextAreaContent as TAC
import qualified InteractionField as IAF
import Data.IORef
import qualified Interpreter as IN
import qualified Paths_rail_compiler_editor as Path
import Control.Monad
-- functions --
afterEvent evt ta footer =
evt (TA.drawingArea ta) $ \event -> do
let posRef = TA.currentPosition ta
readIORef posRef >>= FB.setPosition footer
return True
-- | creates a mainWindow
create :: IO ()
create = do
Gtk.initGUI
-- create and configure main window
window <- Gtk.windowNew
iconpath <- Path.getDataFileName "data/icon.png"
pb <- Gtk.pixbufNewFromFile iconpath
Gtk.windowSetIcon window (Just pb)
Gtk.onDestroy window Gtk.mainQuit
interDT <- IAF.create
let boxView = IAF.getContainer interDT
footer <- FB.create
let hboxInfoLine = FB.getContainer footer
-- create TextArea with TextAreaContent
tac <- TAC.init 100 100 (IAF.getInputBuffer interDT) (IAF.getOutputBuffer interDT)
ta <- TA.initTextAreaWithContent tac
lwin <- TA.getTextAreaContainer ta
-- reset label with current position
afterEvent Gtk.afterKeyPress ta footer
afterEvent Gtk.afterButtonPress ta footer
-- pack TextArea and InteractionField
boxLay <- Gtk.hBoxNew False 0
Gtk.boxPackStart boxLay lwin Gtk.PackGrow 1
vSep <- Gtk.vSeparatorNew
Gtk.boxPackStart boxLay vSep Gtk.PackNatural 2
Gtk.boxPackEnd boxLay boxView Gtk.PackNatural 1
table <- Gtk.tableNew 5 1 False
-- avoid setting focus through key-events
Gtk.containerSetFocusChain table [Gtk.toWidget $ TA.drawingArea ta]
-- buffer for plug 'n' play
let bufferOut = IAF.getOutputBuffer interDT
let bufferIn = IAF.getInputBuffer interDT
Gtk.on bufferIn Gtk.bufferInsertText $ \iter string -> do
putStrLn "In"
tac <- readIORef (TA.textAreaContent ta)
cnt <- readIORef (TAC.context tac)
let flags = TAC.railFlags cnt
if TAC.Interpret `elem` flags
then IN.interpret tac
else when (TAC.Step `elem` flags) $ IN.step tac
menuBar <- MB.create window ta bufferOut bufferIn
extraBar <- TB.create ta footer interDT
vSepa <- Gtk.hSeparatorNew
-- fill table layout
Gtk.tableAttach table menuBar 0 1 0 1 [Gtk.Fill] [Gtk.Fill] 0 0
Gtk.tableAttach table extraBar 0 1 1 2 [Gtk.Fill] [Gtk.Fill] 0 0
Gtk.tableAttach table boxLay 0 1 2 3 [Gtk.Expand,Gtk.Fill] [Gtk.Expand,Gtk.Fill] 0 0
Gtk.tableAttach table vSepa 0 1 3 4 [Gtk.Fill] [Gtk.Fill] 0 0
Gtk.tableAttach table hboxInfoLine 0 1 4 5 [Gtk.Fill] [Gtk.Fill] 2 2
Gtk.set window [Gtk.containerChild Gtk.:= table,
Gtk.windowDefaultHeight Gtk.:= 550,
Gtk.windowDefaultWidth Gtk.:= 850,
Gtk.windowWindowPosition Gtk.:= Gtk.WinPosCenter]
Gtk.widgetShowAll window
Gtk.mainGUI
| SWP-Ubau-SoSe2014-Haskell/SWPSoSe14 | src/RailEditor/MainWindow.hs | mit | 3,260 | 0 | 16 | 674 | 929 | 461 | 468 | 66 | 2 |
module Main where
size :: [t] -> Integer
size [] = 0
size (h:t) = 1 + size t
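-- A minimal entry point so this Main module builds and runs; the sample
-- list is illustrative.
main :: IO ()
main = print (size [1 :: Int, 2, 3])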
| henryaddison/7-lang-7-weeks | week7-haskell/1/lists.hs | mit | 83 | 0 | 7 | 25 | 48 | 26 | 22 | 4 | 1 |
-- The information used in this module was pulled from the @Wikipedia article
-- about ISO_3166-2:US@: <https://en.wikipedia.org/wiki/ISO_3166-2:US>.
{-# LANGUAGE CPP #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE PatternGuards #-}
module Data.StateCodes.ISO31662US
( StateCode(..)
, stateList
, districtList
, outlyingAreasList
, fromMName
, fromMText
, fromName
, fromText
, toName
, toText
) where
import Control.Arrow ((&&&))
import Data.Aeson
import Data.Text (Text)
import qualified Data.Text as T
import Data.Typeable
import Text.Shakespeare.I18N
#if !MIN_VERSION_base(4,8,0)
import Control.Applicative (pure)
#endif
data StateCode = AL -- ^ Alabama
| AK -- ^ Alaska
| AZ -- ^ Arizona
| AR -- ^ Arkansas
| CA -- ^ California
| CO -- ^ Colorado
| CT -- ^ Connecticut
| DE -- ^ Delaware
| FL -- ^ Florida
| GA -- ^ Georgia
| HI -- ^ Hawaii
| ID -- ^ Idaho
| IL -- ^ Illinois
| IN -- ^ Indiana
| IA -- ^ Iowa
| KS -- ^ Kansas
| KY -- ^ Kentucky
| LA -- ^ Louisiana
| ME -- ^ Maine
| MD -- ^ Maryland
| MA -- ^ Massachusetts
| MI -- ^ Michigan
| MN -- ^ Minnesota
| MS -- ^ Mississippi
| MO -- ^ Missouri
| MT -- ^ Montana
| NE -- ^ Nebraska
| NV -- ^ Nevada
| NH -- ^ New Hampshire
| NJ -- ^ New Jersey
| NM -- ^ New Mexico
| NY -- ^ New York
| NC -- ^ North Carolina
| ND -- ^ North Dakota
| OH -- ^ Ohio
| OK -- ^ Oklahoma
| OR -- ^ Oregon
| PA -- ^ Pennsylvania
| RI -- ^ Rhode Island
| SC -- ^ South Carolina
| SD -- ^ South Dakota
| TN -- ^ Tennessee
| TX -- ^ Texas
| UT -- ^ Utah
| VT -- ^ Vermont
| VA -- ^ Virginia
| WA -- ^ Washington
| WV -- ^ West Virginia
| WI -- ^ Wisconsin
| WY -- ^ Wyoming
| DC -- ^ District of Columbia
| AS -- ^ American Samoa
| GU -- ^ Guam
| MP -- ^ Northern Mariana Islands
| PR -- ^ Puerto Rico
| UM -- ^ United States Minor Outlying Islands
| VI -- ^ Virgin Islands, U.S.
deriving (Bounded, Eq, Enum, Show, Read, Ord, Typeable)
-- | Maybe get the state code from the text code
fromMText :: Text -> Maybe StateCode
fromMText "AL" = Just AL
fromMText "AK" = Just AK
fromMText "AZ" = Just AZ
fromMText "AR" = Just AR
fromMText "CA" = Just CA
fromMText "CO" = Just CO
fromMText "CT" = Just CT
fromMText "DE" = Just DE
fromMText "FL" = Just FL
fromMText "GA" = Just GA
fromMText "HI" = Just HI
fromMText "ID" = Just ID
fromMText "IL" = Just IL
fromMText "IN" = Just IN
fromMText "IA" = Just IA
fromMText "KS" = Just KS
fromMText "KY" = Just KY
fromMText "LA" = Just LA
fromMText "ME" = Just ME
fromMText "MD" = Just MD
fromMText "MA" = Just MA
fromMText "MI" = Just MI
fromMText "MN" = Just MN
fromMText "MS" = Just MS
fromMText "MO" = Just MO
fromMText "MT" = Just MT
fromMText "NE" = Just NE
fromMText "NV" = Just NV
fromMText "NH" = Just NH
fromMText "NJ" = Just NJ
fromMText "NM" = Just NM
fromMText "NY" = Just NY
fromMText "NC" = Just NC
fromMText "ND" = Just ND
fromMText "OH" = Just OH
fromMText "OK" = Just OK
fromMText "OR" = Just OR
fromMText "PA" = Just PA
fromMText "RI" = Just RI
fromMText "SC" = Just SC
fromMText "SD" = Just SD
fromMText "TN" = Just TN
fromMText "TX" = Just TX
fromMText "UT" = Just UT
fromMText "VT" = Just VT
fromMText "VA" = Just VA
fromMText "WA" = Just WA
fromMText "WV" = Just WV
fromMText "WI" = Just WI
fromMText "WY" = Just WY
fromMText "DC" = Just DC
fromMText "AS" = Just AS
fromMText "GU" = Just GU
fromMText "MP" = Just MP
fromMText "PR" = Just PR
fromMText "UM" = Just UM
fromMText "VI" = Just VI
fromMText _ = Nothing
-- | Get the state code from the text code. Errors if the code is unknown
fromText :: Text -> StateCode
fromText c = case fromMText c of
Just sc -> sc
_ -> error $ "fromText: Unknown state code:" ++ T.unpack c
-- | Get the state code as text
toText :: StateCode -> Text
toText AL = "AL"
toText AK = "AK"
toText AZ = "AZ"
toText AR = "AR"
toText CA = "CA"
toText CO = "CO"
toText CT = "CT"
toText DE = "DE"
toText FL = "FL"
toText GA = "GA"
toText HI = "HI"
toText ID = "ID"
toText IL = "IL"
toText IN = "IN"
toText IA = "IA"
toText KS = "KS"
toText KY = "KY"
toText LA = "LA"
toText ME = "ME"
toText MD = "MD"
toText MA = "MA"
toText MI = "MI"
toText MN = "MN"
toText MS = "MS"
toText MO = "MO"
toText MT = "MT"
toText NE = "NE"
toText NV = "NV"
toText NH = "NH"
toText NJ = "NJ"
toText NM = "NM"
toText NY = "NY"
toText NC = "NC"
toText ND = "ND"
toText OH = "OH"
toText OK = "OK"
toText OR = "OR"
toText PA = "PA"
toText RI = "RI"
toText SC = "SC"
toText SD = "SD"
toText TN = "TN"
toText TX = "TX"
toText UT = "UT"
toText VT = "VT"
toText VA = "VA"
toText WA = "WA"
toText WV = "WV"
toText WI = "WI"
toText WY = "WY"
toText DC = "DC"
toText AS = "AS"
toText GU = "GU"
toText MP = "MP"
toText PR = "PR"
toText UM = "UM"
toText VI = "VI"
-- | Maybe get the state code from the state readable name
fromMName :: Text -> Maybe StateCode
fromMName "Alabama" = Just AL
fromMName "Alaska" = Just AK
fromMName "Arizona" = Just AZ
fromMName "Arkansas" = Just AR
fromMName "California" = Just CA
fromMName "Colorado" = Just CO
fromMName "Connecticut" = Just CT
fromMName "Delaware" = Just DE
fromMName "Florida" = Just FL
fromMName "Georgia" = Just GA
fromMName "Hawaii" = Just HI
fromMName "Idaho" = Just ID
fromMName "Illinois" = Just IL
fromMName "Indiana" = Just IN
fromMName "Iowa" = Just IA
fromMName "Kansas" = Just KS
fromMName "Kentucky" = Just KY
fromMName "Louisiana" = Just LA
fromMName "Maine" = Just ME
fromMName "Maryland" = Just MD
fromMName "Massachusetts" = Just MA
fromMName "Michigan" = Just MI
fromMName "Minnesota" = Just MN
fromMName "Mississippi" = Just MS
fromMName "Missouri" = Just MO
fromMName "Montana" = Just MT
fromMName "Nebraska" = Just NE
fromMName "Nevada" = Just NV
fromMName "New Hampshire" = Just NH
fromMName "New Jersey" = Just NJ
fromMName "New Mexico" = Just NM
fromMName "New York" = Just NY
fromMName "North Carolina" = Just NC
fromMName "North Dakota" = Just ND
fromMName "Ohio" = Just OH
fromMName "Oklahoma" = Just OK
fromMName "Oregon" = Just OR
fromMName "Pennsylvania" = Just PA
fromMName "Rhode Island" = Just RI
fromMName "South Carolina" = Just SC
fromMName "South Dakota" = Just SD
fromMName "Tennessee" = Just TN
fromMName "Texas" = Just TX
fromMName "Utah" = Just UT
fromMName "Vermont" = Just VT
fromMName "Virginia" = Just VA
fromMName "Washington" = Just WA
fromMName "West Virginia" = Just WV
fromMName "Wisconsin" = Just WI
fromMName "Wyoming" = Just WY
fromMName "District of Columbia" = Just DC
fromMName "American Samoa" = Just AS
fromMName "Guam" = Just GU
fromMName "Northern Mariana Islands" = Just MP
fromMName "Puerto Rico" = Just PR
fromMName "United States Minor Outlying Islands" = Just UM
fromMName "Virgin Islands, U.S." = Just VI
fromMName _ = Nothing
-- | Get the state code from the state readable name. Errors if the name is unknown
fromName:: Text -> StateCode
fromName s = case fromMName s of
Just sc -> sc
_ -> error $ "fromName: Unknown state code:" ++ T.unpack s
-- | Get the state readable name
toName :: StateCode -> Text
toName AL = "Alabama"
toName AK = "Alaska"
toName AZ = "Arizona"
toName AR = "Arkansas"
toName CA = "California"
toName CO = "Colorado"
toName CT = "Connecticut"
toName DE = "Delaware"
toName FL = "Florida"
toName GA = "Georgia"
toName HI = "Hawaii"
toName ID = "Idaho"
toName IL = "Illinois"
toName IN = "Indiana"
toName IA = "Iowa"
toName KS = "Kansas"
toName KY = "Kentucky"
toName LA = "Louisiana"
toName ME = "Maine"
toName MD = "Maryland"
toName MA = "Massachusetts"
toName MI = "Michigan"
toName MN = "Minnesota"
toName MS = "Mississippi"
toName MO = "Missouri"
toName MT = "Montana"
toName NE = "Nebraska"
toName NV = "Nevada"
toName NH = "New Hampshire"
toName NJ = "New Jersey"
toName NM = "New Mexico"
toName NY = "New York"
toName NC = "North Carolina"
toName ND = "North Dakota"
toName OH = "Ohio"
toName OK = "Oklahoma"
toName OR = "Oregon"
toName PA = "Pennsylvania"
toName RI = "Rhode Island"
toName SC = "South Carolina"
toName SD = "South Dakota"
toName TN = "Tennessee"
toName TX = "Texas"
toName UT = "Utah"
toName VT = "Vermont"
toName VA = "Virginia"
toName WA = "Washington"
toName WV = "West Virginia"
toName WI = "Wisconsin"
toName WY = "Wyoming"
toName DC = "District of Columbia"
toName AS = "American Samoa"
toName GU = "Guam"
toName MP = "Northern Mariana Islands"
toName PR = "Puerto Rico"
toName UM = "United States Minor Outlying Islands"
toName VI = "Virgin Islands, U.S."
-- | List of states sorted by alphabetical order, with state code
-- this is ready to be used in a yesod selectField, for example
stateList :: [(Text, StateCode)]
stateList = map (toName &&& id) $ enumFromTo minBound WY
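-- For instance, in a yesod-form field (illustrative; assumes yesod-form's
-- selectFieldList and areq are in scope):
--
-- > areq (selectFieldList stateList) "State" Nothing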
-- | List of districts sorted by alphabetical order, with state code
-- this is ready to be used in a yesod selectField, for example
districtList :: [(Text, StateCode)]
districtList = [("District of Columbia", DC)]
-- | List of outlying areas sorted by alphabetical order, with state code
-- this is ready to be used in a yesod selectField, for example
outlyingAreasList :: [(Text, StateCode)]
outlyingAreasList = map (toName &&& id) $ enumFromTo AS maxBound
-- | To JSON: as a simple string
instance ToJSON StateCode where
toJSON = toJSON . toText
-- | From JSON: as a simple string
instance FromJSON StateCode where
parseJSON (String s)
| Just a <- fromMText s = pure a
parseJSON _ = fail "StateCode"
-- | Show state readable name, in English (ignoring locale for now)
instance RenderMessage master StateCode where
renderMessage _ _ = toName
| acamino/state-codes | src/Data/StateCodes/ISO31662US.hs | mit | 12,401 | 0 | 10 | 4,722 | 2,908 | 1,487 | 1,421 | 338 | 2 |
module Handler.Home where
import Init
getHomeR :: Handler Html
getHomeR = defaultLayout $ do
setTitle "SomeBlog"
-- change src URIs in production
addStylesheetRemote "//rawgithub.com/mkrull/yesod-ember-skel/master/static/css/normalize.css"
addStylesheetRemote "//rawgithub.com/mkrull/yesod-ember-skel/master/static/css/bs-3.0/bootstrap.min.css"
addStylesheetRemote "//rawgithub.com/mkrull/yesod-ember-skel/master/static/css/style.css"
addScriptRemote "//rawgithub.com/mkrull/yesod-ember-skel/master/static/js/libs/jquery-1.9.1.js"
addScriptRemote "//rawgithub.com/mkrull/yesod-ember-skel/master/static/js/libs/handlebars-1.0.0.js"
addScriptRemote "//rawgithub.com/mkrull/yesod-ember-skel/master/static/js/libs/ember-1.0.0.js"
addScriptRemote "//rawgithub.com/mkrull/yesod-ember-skel/master/static/js/App.js"
addScriptRemote "//rawgithub.com/mkrull/yesod-ember-skel/master/static/js/libs/bs-3.0/bootstrap.min.js"
$(widgetFile "home")
| mkrull/yesod-ember-skel | src/Handler/Home.hs | mit | 984 | 0 | 10 | 91 | 91 | 38 | 53 | 14 | 1 |
import Test.DocTest
main = doctest ["-isrc", "src/Data/ByteString/Read.hs"]
| philopon/bytestring-read | tests/doctest.hs | mit | 76 | 0 | 6 | 7 | 20 | 11 | 9 | 2 | 1 |
{-# LANGUAGE CPP #-}
-- | Various utilities pertaining to searching for files & directories.
module Language.Haskell.GhcOpts.Utils where
import Control.Exception
import System.Process
import Data.Char (isSpace)
import System.FilePath
import System.Directory
#if __GLASGOW_HASKELL__ < 709
import Control.Applicative ((<$>))
import System.IO (Handle, hGetContents, hClose)
#endif
getDirectoryContentsIfExists :: FilePath -> IO [FilePath]
getDirectoryContentsIfExists dir = do
  b <- doesDirectoryExist dir -- check for a directory (not a plain file), since we list its contents
  if b then getDirectoryContents dir
       else return []
absoluteFilePath :: FilePath -> IO FilePath
absoluteFilePath p = if isAbsolute p then return p else do
dir <- getCurrentDirectory
return $ dir </> p
pathsToRoot :: FilePath -> [FilePath]
pathsToRoot p
| p == parent = [p]
| otherwise = p : pathsToRoot parent
where
parent = takeDirectory p
splitBy :: Char -> String -> [String]
splitBy c str
| null str' = [x]
| otherwise = x : splitBy c (tail str')
where
(x, str') = span (c /=) str
trim :: String -> String
trim = f . f
where
f = reverse . dropWhile isSpace
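-- Worked examples for the helpers above (POSIX-style paths assumed for
-- pathsToRoot):
--
-- > splitBy ':' "a:b:c" == ["a","b","c"]
-- > trim "  ghc-opts  " == "ghc-opts"
-- > pathsToRoot "/a/b"  == ["/a/b", "/a", "/"]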
#if __GLASGOW_HASKELL__ < 709
execInPath :: String -> FilePath -> IO (Maybe String)
execInPath cmd p = do
eIOEstr <- try $ createProcess prc :: IO (Either IOError ProcH)
case eIOEstr of
Right (_, Just h, _, _) -> Just <$> getClose h
Right (_, Nothing, _, _) -> return Nothing
-- This error is most likely "/bin/sh: stack: command not found"
-- which is caused by the package containing a stack.yaml file but
-- no stack command is in the PATH.
Left _ -> return Nothing
where
prc = (shell cmd) { cwd = Just $ takeDirectory p }
getClose :: Handle -> IO String
getClose h = do
str <- hGetContents h
hClose h
return str
type ProcH = (Maybe Handle, Maybe Handle, Maybe Handle, ProcessHandle)
-- Not deleting this because this is likely more robust than the above! (but
-- only works on process-1.2.3.0 onwards
#else
execInPath :: String -> FilePath -> IO (Maybe String)
execInPath cmd p = do
eIOEstr <- try $ readCreateProcess prc "" :: IO (Either IOError String)
return $ case eIOEstr of
Right s -> Just s
-- This error is most likely "/bin/sh: stack: command not found"
-- which is caused by the package containing a stack.yaml file but
-- no stack command is in the PATH.
Left _ -> Nothing
where
prc = (shell cmd) { cwd = Just $ takeDirectory p }
#endif
| ranjitjhala/ghc-options | src/Language/Haskell/GhcOpts/Utils.hs | mit | 2,400 | 0 | 12 | 596 | 559 | 287 | 272 | 43 | 3 |
module Handler.ExampleXML where
import Assets (getAllExams)
import Import
import Widgets (titleWidget, iconWidget, publicExamWidget, privateExamWidget)
-- | Displays valid example XML
getExampleXMLR :: Handler Html
getExampleXMLR = do
setUltDestCurrent
memail <- lookupSession "_ID"
(publicExams, privateExams) <- runDB $ getAllExams memail
let spaces = [whamlet| |]
middleWidget = [whamlet|
<div style="margin:10px;">
<span class=simpleOrange style="font-weight:bold;">_{MsgShouldLookLike}
<p class=plainWhite style="font-weight:bold; margin-top:10px;"> _{MsgObeyRules}
<ul class=plainWhite>
<li> <span style="font-weight:bold; color:#FFA500;"><!DOCTYPE...></span> _{MsgDoctype}
<li> <span style="font-weight:bold; color:#FFA500;"><quiz></span> _{MsgWithAttribute 2} "title" _{MsgAnd} "passpercentage"
<li> <span style="font-weight:bold; color:#FFA500;"><question></span> _{MsgWithAttribute 1} "content"
<li> <span style="font-weight:bold; color:#FFA500;"><answer></span> _{MsgWithAttribute 1} "correct"
<li> <span style="font-weight:bold; color:#FFA500;">"title"</span> _{MsgAnd} <span style="font-weight:bold; color:#FFA500;">"content"</span> _{MsgContainsChars}
<li> <span style="font-weight:bold; color:#FFA500;">"correct"</span> _{MsgContainsBool}
<li> <span style="font-weight:bold; color:#FFA500;">"passpercentage"</span> _{MsgContainsNumber}
<li> _{MsgOneQuestion}
<li> _{MsgFourAnswers}
<li> _{MsgContentTags}
<br>
<span class=plainWhite style="margin-top:10px; font-weight: bold;"> _{MsgValidXML}
<div class=xml>
<<span class=xmlRed>!DOCTYPE</span> quiz SYSTEM <span class=xmlRed>"http://localhost:3000/static/dtd/examValidation.dtd"</span>><br>
<<span class=xmlRed>quiz</span> title="My Exam" passpercentage="77.44"><br>
^{spaces}<<span class=xmlRed>question</span> content="Which one of these is not an animal?"> <br>
^{spaces}^{spaces}<<span class=xmlRed>answer</span>
correct="false">Dog<<span class=xmlRed>/answer</span>><br>
^{spaces}^{spaces}<<span class=xmlRed>answer</span>
correct="false">Cat<<span class=xmlRed>/answer</span>><br>
^{spaces}^{spaces}<<span class=xmlRed>answer</span>
correct="true">Car<<span class=xmlRed>/answer</span>><br>
^{spaces}^{spaces}<<span class=xmlRed>answer</span>
correct="false">Giraffe<<span class=xmlRed>/answer</span>><br>
^{spaces}<<span class=xmlRed>/question</span>><br>
^{spaces}<<span class=xmlRed>question</span> content="What's 2 + 4?"> <br>
^{spaces}^{spaces}<<span class=xmlRed>answer</span>
correct="true">5 + 1<<span class=xmlRed>/answer</span>><br>
^{spaces}^{spaces}<<span class=xmlRed>answer</span>
correct="false">4<<span class=xmlRed>/answer</span>><br>
^{spaces}^{spaces}<<span class=xmlRed>answer</span>
correct="false">5<<span class=xmlRed>/answer</span>><br>
^{spaces}^{spaces}<<span class=xmlRed>answer</span>
correct="true">6<<span class=xmlRed>/answer</span>><br>
^{spaces}<<span class=xmlRed>/question</span>><br>
<<span class=xmlRed>/quiz</span>>
<a href=@{UploadR} style="font-size: 16px; font-weight: bold; color:white; float:right; margin:10px;"> _{MsgGotIt}
|]
defaultLayout $(widgetFile "exampleXml")
| cirquit/quizlearner | quizlearner/Handler/ExampleXML.hs | mit | 4,316 | 0 | 10 | 1,248 | 116 | 65 | 51 | -1 | -1 |
-- Rock Paper Scissors!
-- http://www.codewars.com/kata/5672a98bdbdd995fad00000f/
module Codewars.RockPaperScissors where
rps :: String -> String -> String
rps p1 p2 | p1 == p2 = "Draw!"
| (p1, p2) `elem` [("rock", "scissors"), ("paper", "rock"), ("scissors", "paper")] = "Player 1 won!"
| otherwise = "Player 2 won!"
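-- Illustrative cases, following directly from the definition above:
--
-- >>> rps "rock" "scissors"
-- "Player 1 won!"
-- >>> rps "paper" "scissors"
-- "Player 2 won!"
-- >>> rps "paper" "paper"
-- "Draw!"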
| gafiatulin/codewars | src/8 kyu/RockPaperScissors.hs | mit | 340 | 0 | 10 | 67 | 98 | 56 | 42 | 5 | 1 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE StrictData #-}
{-# LANGUAGE TupleSections #-}
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-opsworks-app-datasource.html
module Stratosphere.ResourceProperties.OpsWorksAppDataSource where
import Stratosphere.ResourceImports
-- | Full data type definition for OpsWorksAppDataSource. See
-- 'opsWorksAppDataSource' for a more convenient constructor.
data OpsWorksAppDataSource =
OpsWorksAppDataSource
{ _opsWorksAppDataSourceArn :: Maybe (Val Text)
, _opsWorksAppDataSourceDatabaseName :: Maybe (Val Text)
, _opsWorksAppDataSourceType :: Maybe (Val Text)
} deriving (Show, Eq)
instance ToJSON OpsWorksAppDataSource where
toJSON OpsWorksAppDataSource{..} =
object $
catMaybes
[ fmap (("Arn",) . toJSON) _opsWorksAppDataSourceArn
, fmap (("DatabaseName",) . toJSON) _opsWorksAppDataSourceDatabaseName
, fmap (("Type",) . toJSON) _opsWorksAppDataSourceType
]
-- | Constructor for 'OpsWorksAppDataSource' containing required fields as
-- arguments.
opsWorksAppDataSource
:: OpsWorksAppDataSource
opsWorksAppDataSource =
OpsWorksAppDataSource
{ _opsWorksAppDataSourceArn = Nothing
, _opsWorksAppDataSourceDatabaseName = Nothing
, _opsWorksAppDataSourceType = Nothing
}
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-opsworks-app-datasource.html#cfn-opsworks-app-datasource-arn
owadsArn :: Lens' OpsWorksAppDataSource (Maybe (Val Text))
owadsArn = lens _opsWorksAppDataSourceArn (\s a -> s { _opsWorksAppDataSourceArn = a })
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-opsworks-app-datasource.html#cfn-opsworks-app-datasource-databasename
owadsDatabaseName :: Lens' OpsWorksAppDataSource (Maybe (Val Text))
owadsDatabaseName = lens _opsWorksAppDataSourceDatabaseName (\s a -> s { _opsWorksAppDataSourceDatabaseName = a })
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-opsworks-app-datasource.html#cfn-opsworks-app-datasource-type
owadsType :: Lens' OpsWorksAppDataSource (Maybe (Val Text))
owadsType = lens _opsWorksAppDataSourceType (\s a -> s { _opsWorksAppDataSourceType = a })
| frontrowed/stratosphere | library-gen/Stratosphere/ResourceProperties/OpsWorksAppDataSource.hs | mit | 2,258 | 0 | 12 | 253 | 355 | 202 | 153 | 32 | 1 |
maximum' :: (Ord a) => [a] -> a
maximum' [] = error "maximum of empty list"
maximum' [x] = x
maximum' (x:xs)
| x > maxTail = x
| otherwise = maxTail
where maxTail = maximum' xs
replicate' :: (Num i, Ord i) => i -> a -> [a]
replicate' n x
| n <= 0 = []
| otherwise = x:replicate' (n-1) x
take' :: (Num i, Ord i) => i -> [a] -> [a]
take' n _
| n <= 0 = []
take' _ [] = []
take' n (x:xs) = x : take' (n-1) xs
reverse' :: [a] -> [a]
reverse' [] = []
reverse' (x:xs) = reverse' xs ++ [x]
repeat' :: a -> [a]
repeat' x = x:repeat' x
zip' :: [a] -> [b] -> [(a,b)]
zip' _ [] = []
zip' [] _ = []
zip' (x:xs) (y:ys) = (x,y):zip' xs ys
elem' :: (Eq a) => a -> [a] -> Bool
elem' a [] = False
elem' a (x:xs)
| a == x = True
| otherwise = a `elem'` xs
quicksort :: (Ord a) => [a] -> [a]
quicksort [] = []
quicksort (x:xs) =
let smallerSorted = quicksort [a | a <- xs, a <= x]
biggerSorted = quicksort [a | a <- xs, a > x]
in smallerSorted ++ [x] ++ biggerSorted | axnion/playground | random/haskell/recursion.hs | mit | 1,017 | 0 | 12 | 292 | 646 | 336 | 310 | 36 | 1 |
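-- A few illustrative GHCi checks for the definitions above:
--
-- >>> quicksort [3,1,2]
-- [1,2,3]
-- >>> zip' [1,2,3] "ab"
-- [(1,'a'),(2,'b')]
-- >>> take' 2 (repeat' 'x')
-- "xx"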
module Main where
import System.IO
import System.Process
import Data.List
main :: IO ()
main = do
files <- readProcess "ls" ["Player"] []
let players = (takeWhile (/='.')) <$> lines files
  putStrLn ("Players registered:\n\n" ++ unlines players)
writeFile "Players.hs" (preamble ++ unlines (imported <$> players) ++ allPlayers (makePlayerName <$> players))
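-- Sketch of the generated file, assuming the directory contains
-- Player/Alice.hs and Player/Bob.hs (names chosen purely for illustration):
--
-- > module Players (players) where
-- > import Types
-- > import Player.Alice (playerAlice)
-- > import Player.Bob (playerBob)
-- > players :: [(String, Player)]
-- > players = [
-- >     ("Alice", playerAlice),("Bob", playerBob)
-- >   ]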
imported :: String -> String
imported name = "import Player." ++ name ++ " (player" ++ name ++ ")"
makePlayerName :: String -> String
makePlayerName name = "(\"" ++ name ++ "\", player" ++ name ++ ")"
allPlayers :: [String] -> String
allPlayers xs = unlines (
["players :: [(String, Player)]"
, "players = ["
, " " ++ concat (intersperse "," xs)
, " ]"] )
preamble :: String
preamble = unlines
[ "module Players (players) where"
, "import Types"
]
| 8Gitbrix/gomoku | src/MakePlayers.hs | mit | 841 | 0 | 13 | 170 | 272 | 142 | 130 | 24 | 1 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE StrictData #-}
{-# LANGUAGE TupleSections #-}
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-wafregional-xssmatchset-fieldtomatch.html
module Stratosphere.ResourceProperties.WAFRegionalXssMatchSetFieldToMatch where
import Stratosphere.ResourceImports
-- | Full data type definition for WAFRegionalXssMatchSetFieldToMatch. See
-- 'wafRegionalXssMatchSetFieldToMatch' for a more convenient constructor.
data WAFRegionalXssMatchSetFieldToMatch =
WAFRegionalXssMatchSetFieldToMatch
{ _wAFRegionalXssMatchSetFieldToMatchData :: Maybe (Val Text)
, _wAFRegionalXssMatchSetFieldToMatchType :: Val Text
} deriving (Show, Eq)
instance ToJSON WAFRegionalXssMatchSetFieldToMatch where
toJSON WAFRegionalXssMatchSetFieldToMatch{..} =
object $
catMaybes
[ fmap (("Data",) . toJSON) _wAFRegionalXssMatchSetFieldToMatchData
, (Just . ("Type",) . toJSON) _wAFRegionalXssMatchSetFieldToMatchType
]
-- | Constructor for 'WAFRegionalXssMatchSetFieldToMatch' containing required
-- fields as arguments.
wafRegionalXssMatchSetFieldToMatch
:: Val Text -- ^ 'wafrxmsftmType'
-> WAFRegionalXssMatchSetFieldToMatch
wafRegionalXssMatchSetFieldToMatch typearg =
WAFRegionalXssMatchSetFieldToMatch
{ _wAFRegionalXssMatchSetFieldToMatchData = Nothing
, _wAFRegionalXssMatchSetFieldToMatchType = typearg
}
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-wafregional-xssmatchset-fieldtomatch.html#cfn-wafregional-xssmatchset-fieldtomatch-data
wafrxmsftmData :: Lens' WAFRegionalXssMatchSetFieldToMatch (Maybe (Val Text))
wafrxmsftmData = lens _wAFRegionalXssMatchSetFieldToMatchData (\s a -> s { _wAFRegionalXssMatchSetFieldToMatchData = a })
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-wafregional-xssmatchset-fieldtomatch.html#cfn-wafregional-xssmatchset-fieldtomatch-type
wafrxmsftmType :: Lens' WAFRegionalXssMatchSetFieldToMatch (Val Text)
wafrxmsftmType = lens _wAFRegionalXssMatchSetFieldToMatchType (\s a -> s { _wAFRegionalXssMatchSetFieldToMatchType = a })
| frontrowed/stratosphere | library-gen/Stratosphere/ResourceProperties/WAFRegionalXssMatchSetFieldToMatch.hs | mit | 2,174 | 0 | 13 | 212 | 265 | 151 | 114 | 28 | 1 |
{-# LANGUAGE CPP #-}
{-# OPTIONS_GHC -fno-warn-missing-import-lists #-}
{-# OPTIONS_GHC -fno-warn-implicit-prelude #-}
module Paths_isogram (
version,
getBinDir, getLibDir, getDynLibDir, getDataDir, getLibexecDir,
getDataFileName, getSysconfDir
) where
import qualified Control.Exception as Exception
import Data.Version (Version(..))
import System.Environment (getEnv)
import Prelude
#if defined(VERSION_base)
#if MIN_VERSION_base(4,0,0)
catchIO :: IO a -> (Exception.IOException -> IO a) -> IO a
#else
catchIO :: IO a -> (Exception.Exception -> IO a) -> IO a
#endif
#else
catchIO :: IO a -> (Exception.IOException -> IO a) -> IO a
#endif
catchIO = Exception.catch
version :: Version
version = Version [1,1,0,1] []
bindir, libdir, dynlibdir, datadir, libexecdir, sysconfdir :: FilePath
bindir = "/Users/c19/Documents/projects/exercism/haskell/haskell/isogram/.stack-work/install/x86_64-osx/lts-8.21/8.0.2/bin"
libdir = "/Users/c19/Documents/projects/exercism/haskell/haskell/isogram/.stack-work/install/x86_64-osx/lts-8.21/8.0.2/lib/x86_64-osx-ghc-8.0.2/isogram-1.1.0.1-5wJn8p3oLSz4PfC8o8w8XS"
dynlibdir = "/Users/c19/Documents/projects/exercism/haskell/haskell/isogram/.stack-work/install/x86_64-osx/lts-8.21/8.0.2/lib/x86_64-osx-ghc-8.0.2"
datadir = "/Users/c19/Documents/projects/exercism/haskell/haskell/isogram/.stack-work/install/x86_64-osx/lts-8.21/8.0.2/share/x86_64-osx-ghc-8.0.2/isogram-1.1.0.1"
libexecdir = "/Users/c19/Documents/projects/exercism/haskell/haskell/isogram/.stack-work/install/x86_64-osx/lts-8.21/8.0.2/libexec"
sysconfdir = "/Users/c19/Documents/projects/exercism/haskell/haskell/isogram/.stack-work/install/x86_64-osx/lts-8.21/8.0.2/etc"
getBinDir, getLibDir, getDynLibDir, getDataDir, getLibexecDir, getSysconfDir :: IO FilePath
getBinDir = catchIO (getEnv "isogram_bindir") (\_ -> return bindir)
getLibDir = catchIO (getEnv "isogram_libdir") (\_ -> return libdir)
getDynLibDir = catchIO (getEnv "isogram_dynlibdir") (\_ -> return dynlibdir)
getDataDir = catchIO (getEnv "isogram_datadir") (\_ -> return datadir)
getLibexecDir = catchIO (getEnv "isogram_libexecdir") (\_ -> return libexecdir)
getSysconfDir = catchIO (getEnv "isogram_sysconfdir") (\_ -> return sysconfdir)
getDataFileName :: FilePath -> IO FilePath
getDataFileName name = do
dir <- getDataDir
return (dir ++ "/" ++ name)
| c19/Exercism-Haskell | isogram/.stack-work/dist/x86_64-osx/Cabal-1.24.2.0/build/autogen/Paths_isogram.hs | mit | 2,356 | 0 | 10 | 239 | 410 | 238 | 172 | 33 | 1 |
{-
Copyright (C) 2012 Kacper Bak <http://gsd.uwaterloo.ca>
Permission is hereby granted, free of charge, to any person obtaining a copy of
this software and associated documentation files (the "Software"), to deal in
the Software without restriction, including without limitation the rights to
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
of the Software, and to permit persons to whom the Software is furnished to do
so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
-}
module Language.Clafer.Version where
version :: String
version = "v0.3.2.11-4-2013"
double x = x + x
quadruple x = double x + double x
factorial n = product [1..n]
average ns = sum ns `div` length ns
| codingSteve/fp101x | 20151019/test.hs | cc0-1.0 | 131 | 0 | 6 | 42 | 67 | 32 | 35 | 4 | 1 |
{-# LANGUAGE DeriveGeneric #-} -- needed for json parsing
module Main where
import Data.Aeson (FromJSON, ToJSON, decodeStrict, decode, encode)
import Data.Text (Text)
import Data.Maybe (fromMaybe)
import Data.List (foldl')
import qualified Data.Text as T
import qualified Data.Text.Encoding as T
import GHC.Generics (Generic)
import Data.ByteString.Lazy (ByteString)
import Network.TextServer
import Types
import Web
import Interpreter
instance FromJSON Ref
instance ToJSON Ref
instance FromJSON Sym
instance ToJSON Sym
instance FromJSON Lit
instance ToJSON Lit
instance FromJSON VE
instance ToJSON VE
instance FromJSON Arrow
instance ToJSON Arrow
data ServerCommand
= SCGet
| SCNew [Arrow]
deriving (Show, Generic)
instance FromJSON ServerCommand
instance ToJSON ServerCommand
type Data = ByteString
parseComm :: Data -> Maybe ServerCommand
parseComm = decode
handler :: Network.TextServer.Handler Web
handler msg web = fromMaybe (return (Nothing, web)) command
where
command = do
c <- parseComm msg
return $ case c of
SCGet -> do
putStrLn "get command"
return (Just $ encode $ toRows web, web)
SCNew arrows ->
let (_, web') = foldl' ne ([], web) arrows
-- TODO send increment in web (change newEdge to return added edge)
in do
putStrLn "put command"
return (Nothing, web')
ne (c, w) a = newEdge a c w
main :: IO ()
main = do
putStrLn "running"
mweb <- loadWeb "elements.web"
case mweb of
Right (web, "") -> do
putStrLn "loaded web file"
runServer web handler
_ -> do
putStrLn "failed to load web file"
runServer emptyWeb handler
| kovach/web | server-src/Main.hs | gpl-2.0 | 1,683 | 0 | 19 | 394 | 505 | 260 | 245 | 57 | 2 |
module Type.Poly.Roll where
import Type.Poly.Data
import Autolib.Util.Zufall
import Autolib.TES.Identifier
import Autolib.Util.Size
import Autolib.ToDoc
import qualified Data.Map as M
import qualified Data.Set as S
import Control.Monad ( forM, guard )
import Data.List ( nub , minimumBy)
import Data.Maybe (isJust)
import Data.Ord ( comparing )
import Data.Ix ( inRange )
roller conf = do
let handle k =
if k <= 0
then error "Type.Poly.Roll: cannot generate problem instance"
else do
txss <- roll conf
case concat txss of
[] -> handle (k-1)
txs -> return $ minimumBy
( comparing $ size . target . fst )
txs
handle $ generator_retries conf
roll conf = forM [ 1 .. generator_iterations conf ] $ \ i -> do
sig <- Type.Poly.Roll.signature conf
let (lo, hi) = solution_size_range conf
candidates = concat $ take hi $ typed_terms sig
census = M.fromListWith min $ do
(t,x) <- candidates
return ( t
, ( size x
, ( TI { target = t , Type.Poly.Data.signature = sig } , x )
)
)
let allfun = S.fromList $ function_names conf
return $ do
(s, out) <- M.elems census
guard $ inRange ( lo,hi ) s
guard $ S.isSubsetOf allfun $ expression_signature ( snd out )
return out
-- | lazy infinite list,
-- on index k: terms of size k (with their type)
typed_terms :: Signature -> [[ ( Type, Expression ) ]]
typed_terms sig = output where
output = do
total <- [ 0 .. ]
return $ do
f <- Type.Poly.Data.functions sig
parts <-
distributions1 (total - 1) (length $ arguments f)
( sub, args ) <-
matches $ zip ( arguments f )
$ map ( \ p -> output !! p ) parts
let sub_in_order = do
v <- tyvars f
return $ sub M.! v
return ( apply sub $ result f
, Apply (classname sig) sub_in_order ( fname f ) args
)
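-- | All ways of writing @total@ as an ordered sum of exactly @slots@
-- parts, each at least 1; used above to split a term's size among the
-- arguments of a function symbol.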
distributions1 total slots | total <= 0 || slots <= 0 = do
guard $ total == 0 && slots == 0
return [ ]
distributions1 total 1 = return [ total ]
distributions1 total slots = do
x <- [ 1 .. total - slots + 1 ]
xs <- distributions1 ( total - x ) ( slots - 1 )
return $ x : xs
matches :: [(Type, [(Type, Expression)])]
-> [ (M.Map Identifier Type, [Expression]) ]
matches [] = return ( M.empty, [] )
matches (( s, txs) : rest) = do
(t,x) <- txs
m <- match s t
let rest' = map ( \ (s,tys) -> ( apply m s, tys)) rest
( m', xs ) <- matches rest'
return ( M.unionWith (error "matches") m m'
, x : xs
)
-- | variables in left arg are bound.
-- second arg shall not have variables
match :: Type -> Type -> [ M.Map Identifier Type ]
match s t = case (s, t) of
(TyVar v, t) -> return $ M.fromList [ (v, t) ]
(TyCon f xs, TyCon g ys) | f == g -> match_list xs ys
_ -> []
match_list [] [] = return M.empty
match_list (x:xs) (y:ys) = do
m <- match x y
m' <- match_list ( map ( apply m ) xs ) ys
return $ M.unionWith
(error $ "match_list: " ++ show ((x:xs, y:ys), (m, m') )) m m'
apply :: M.Map Identifier Type -> Type -> Type
apply m t = case t of
TyVar v -> case M.lookup v m of
Nothing -> t
Just s -> s
TyCon f args ->
TyCon f ( map ( apply m ) args )
--------------------------------------------------------
signature :: Conf -> IO Signature
signature conf = do
fs <- Type.Poly.Roll.functions conf
return $ Signature { classname = read "S"
, Type.Poly.Data.functions = fs
}
functions :: Conf -> IO [ Function ]
functions conf = forM ( function_names conf ) $ \ n -> do
a <- randomRIO $ arity_range conf
res : args <- forM [ 0, 1 .. a ] $ \ k -> do
s <- randomRIO $ type_expression_size_range conf
type_scheme conf s
let vars_in_args = nub $ do
t <- args ; TyVar v <- subtypes t ; return v
let nullaries = do
( t, 0) <- types_with_arities conf ; return t
repair t = case t of
TyVar v ->
if v `elem` vars_in_args
then return t
else do
f <- eins nullaries
return $ TyCon f []
TyCon f args -> do
xs <- forM args repair
return $ TyCon f xs
res' <- repair res
return $ Function { fname = n
, tyvars = vars_in_args
, arguments = args
, result = res'
}
type_scheme :: Conf
-> Int -- ^ size
-> IO ( Type )
type_scheme conf total | total >= 0 = do
let couldbe =
filter ( \ (t,a) -> ( total > a )
&& ( a > 0 || total == 1 ) )
$ types_with_arities conf
mkvar = do
v <- eins $ type_variables conf
return $ TyVar v
if null couldbe
then mkvar
else do
(t,a) <- eins couldbe
var_flag <- if a > 0 then return False
else randomRIO ( False, True )
if var_flag
then mkvar
else do
subs <- distribute1 (total - 1) a
ts <- forM subs $ type_scheme conf
return $ TyCon t ts
-- | sum is total, each number is >= 0
distribute0 :: Int -> Int -> IO [ Int ]
distribute0 total slots | total >= 0 = do
xs <- forM [ 1 .. total ] $ \ k -> randomRIO ( 1, slots )
let m = M.fromListWith (+) $ zip xs $ repeat 1
return $ do k <- [ 1 .. slots ] ; return $ M.findWithDefault 0 k m
-- | PRE: sum >= slots, sum is total, each number is >= 1
distribute1 :: Int -> Int -> IO [ Int ]
distribute1 total slots = do
xs <- distribute0 (total - slots) slots
return $ map succ xs
| marcellussiegburg/autotool | collection/src/Type/Poly/Roll.hs | gpl-2.0 | 6,240 | 2 | 21 | 2,453 | 2,223 | 1,124 | 1,099 | 154 | 4 |
{-# LANGUAGE TemplateHaskell #-}
import Data.Map (Map)
import qualified Data.Map as Map
import Data.Maybe
import Data.List
import Control.Lens
data Loc =
Albania
| Ankara
| Apulia
| Armenia
| Belgium
| Berlin
| Bohemia
| Brest
| Budapest
| Bulgaria
| Burgundy
| Clyde
| Constantinople
| Denmark
| Edinburgh
| Finland
| Galicia
| Gascony
| Greece
| Holland
| Kiel
| Liverpool
| Livonia
| London
| Marseilles
| Moscow
| Munich
| Naples
| NorthAfrica
| Norway
| Paris
| Picardy
| Piedmont
| Portugal
| Prussia
| Rome
| Ruhr
| Rumania
| Serbia
| Sevastopol
| Silesia
| Smyrna
| Spain
| StPetersburg
| Sweden
| Switzerland
| Syria
| Trieste
| Tunis
| Tuscany
| Tyrolia
| Ukraine
| Venice
| Vienna
| Wales
| Warsaw
| Yorkshire
| AdriaticSea
| AegeanSea
| BalticSea
| BarentsSea
| BlackSea
| EasternMediterranean
| EnglishChannel
| GulfOfLyon
| GulfOfBothnia
| HelgolandBight
| IonianSea
| IrishSea
| MidAtlanticOcean
| NorthAtlanticOcean
| NorthSea
| NorwegianSea
| Skagerrak
| TyrrhenianSea
| WesternMediterranean deriving (Eq, Ord, Show, Enum, Bounded)
waterProvinces = [
AdriaticSea,
AegeanSea,
BalticSea,
BarentsSea,
BlackSea,
EasternMediterranean,
EnglishChannel,
GulfOfLyon,
GulfOfBothnia,
HelgolandBight,
IonianSea,
IrishSea,
MidAtlanticOcean,
NorthAtlanticOcean,
NorthSea,
NorwegianSea,
Skagerrak,
TyrrhenianSea,
WesternMediterranean
]
coastalProvinces = [
Wales
]
adjMap = Map.fromList [
(Wales, [London, Yorkshire, Liverpool, EnglishChannel]),
(Switzerland, [])
]
data UnitType =
Army
| Fleet deriving (Eq, Show, Ord)
data Power =
Austria
| England
| France
| Germany
| Italy
| Russia
| Turkey deriving (Eq, Show, Ord)
data Season =
Spring
| Summer
| Fall
| Winter deriving (Eq, Show)
data Unit = Unit {
_power :: Power,
_utype :: UnitType
} deriving (Eq, Show, Ord)
data Occ = Occ {
_present :: Maybe Unit,
_displaced :: Maybe Unit
} deriving (Show)
data GameState = GameState {
_date :: (Int, Season),
_board :: Map Loc Occ,
_depots :: [(Loc, Maybe Power)]
} deriving (Show)
makeLenses ''GameState
makeLenses ''Unit
makeLenses ''Occ
data Order =
Hold Unit
| Move Unit Loc
| Support Unit Order
| Convoy Unit Loc Loc deriving (Eq, Show)
type Result = (Order, Either String ())
--getUnits b x = view (board . ix x) $ b0
--
--addUnit :: GameState -> Loc -> Unit -> GameState
--addUnit i l u = over (board . ix l . present) $ i
--
--removeUnit :: GameState -> Loc -> Unit -> GameState
--removeUnit i l u = over (board . ix l) (delete u) $ i
--
--resolveOrders :: GameState -> [Order] -> [Result]
--resolveOrders = undefined
--changeStatus :: GameState -> Loc -> Unit -> Status -> GameState
newGame = GameState (1901, Spring) initialUnits initialDepots
where
initialUnits = Map.fromList [(x, Occ Nothing Nothing) | x <- [minBound :: Loc ..]]
initialDepots = [
(Vienna, Just Austria), (Budapest, Just Austria), (Trieste, Just Austria)]
--advanceGame :: GameState -> Result -> GameState
--advanceGame :: GameState -> Order -> GameState
--advanceGame s o (Move u l) =
--evaluateOrders :: GameState -> [Orders] -> Result -> Result
--filterOrders :: ([Order], [Order]) -> [Order]
--filterOrders (xs, []) = xs
--filterOrders (xs, y:ys) = case (y) of
-- Hold _ -> filterOrders (y:xs, ys)
-- Move u t -> case (adjacent (location u) t) of
-- True -> filterOrders (y:xs, ys)
-- False -> filterOrders (xs, ys)
--
--legalOrder :: Board -> [Order] -> Order -> Bool
--legalOrder b as a = case (a) of
-- Hold u -> unitExists u
-- Move u t -> unitExists u && (adjacent (location u) t) && case (utype u) of
-- Army -> t `notElem` waterProvinces
-- Fleet -> t `elem` waterProvinces || t `elem` coastalProvinces
-- where
-- unitExists u = u `elem` b
--
--adjacent f t = fromMaybe False $ do
-- l <- M.lookup f adjMap
-- return (t `elem` l)
--
--
--
| bdmagnuson/haskell-diplomacy | src/Diplomacy.hs | gpl-2.0 | 4,221 | 0 | 11 | 1,123 | 845 | 527 | 318 | -1 | -1 |
module TemporalCache where
import qualified Data.Map as M
import Control.Concurrent.STM
-- | Map insertion which inserts a value and drops previous values that
-- are older than the given age. The key must be a timestamp or another
-- kind of increasing value.
ensureAgeInsert
:: (Num k, Ord k) => k -> k -> a -> M.Map k a -> M.Map k a
ensureAgeInsert age k a m = M.insert k a $ M.filterWithKey cond m
where cond this _ = this > k-age
-- | Create an insert action backed by a map which keeps only elements of
-- the given age. After each insertion it returns the oldest event still
-- in the map.
newEmptyAgingMap :: (Num k, Ord k) => k -> IO (k -> a -> IO a)
newEmptyAgingMap age = do
var <- newTVarIO M.empty
return $ f var
where f var k a = atomically $ do
oldMap <- readTVar var
let newMap = ensureAgeInsert age k a oldMap
writeTVar var newMap
return $ snd $ M.findMin newMap
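-- Illustrative sketch (timestamps and payloads chosen arbitrarily): keep a
-- 300-unit window of String events keyed by Int timestamps and observe the
-- oldest retained event after each insertion.
_exampleAgingMap :: IO ()
_exampleAgingMap = do
    insertAged <- newEmptyAgingMap (300 :: Int)
    oldest  <- insertAged 1000 "first event"
    oldest' <- insertAged 1200 "second event"  -- 1000 > 1200 - 300, so it survives
    print (oldest, oldest')                    -- ("first event","first event")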
| zouppen/irc-markets | src/TemporalCache.hs | gpl-3.0 | 877 | 0 | 13 | 216 | 266 | 133 | 133 | 16 | 1 |
module Lamdu.CodeEdit.Sugar.Internal
( BodyU, ExpressionU
) where
import Lamdu.CodeEdit.Sugar.Types
type BodyU m a = Body MStoredName m (ExpressionU m a)
type ExpressionU m a = Expression MStoredName m a
| Mathnerd314/lamdu | src/Lamdu/CodeEdit/Sugar/Internal.hs | gpl-3.0 | 210 | 0 | 7 | 35 | 61 | 38 | 23 | 5 | 0 |
{-# LANGUAGE ForeignFunctionInterface, MagicHash, FlexibleInstances, OverloadedStrings, StandaloneDeriving, GeneralizedNewtypeDeriving, MultiParamTypeClasses, TypeFamilies #-}
module Util where
import Foreign.C
import Foreign hiding (unsafePerformIO)
import Data.String
import Data.IORef
import GHC.Base
import GHC.Ptr
import Control.Monad
import Debug.Trace
import qualified Data.Vector.Mutable as V
import System.IO.Unsafe
import qualified System.IO as IO
import qualified Data.Text as T
import qualified Data.Text.Internal as T
import qualified Data.Text.Array as T
warning :: String -> a -> a
warning s a = unsafePerformIO $ do
IO.hPutStr IO.stderr ("warning: " ++ s)
return a
tr :: String -> a -> a
tr = trace
tri :: (Functor m, Show a) => String -> m a -> m a
tri l = fmap (\x -> tr (l ++ show x) x)
foreign import ccall "stdio.h puts"
puts :: CString -> IO ()
foreign import ccall "stdio.h fputs"
fputs :: Ptr CFile -> CString -> IO ()
foreign import ccall "stdio.h stderr"
stderr :: Ptr CFile
putError :: CString -> IO ()
putError = fputs stderr
orFail :: IO Bool -> String -> IO ()
orFail x err = do
b <- x
when b (error err)
infixl 0 `orFail`
{-# INLINE while #-}
while :: IORef Bool -> IO a -> IO ()
while predicate block = loop
where
loop = do
ok <- readIORef predicate
when ok (block >> loop)
{-# INLINE while' #-}
while' :: a -> IORef Bool -> (a -> IO a) -> IO ()
while' z predicate f = loop z
where
loop s = do
ok <- readIORef predicate
when ok
(f s >>= loop)
{-# INLINE for #-}
for :: (Ord b, Num b) => b -> b -> b -> (b -> IO a) -> IO ()
for z s lim f = go z
where
go i | i < lim = f i >> go (i+s)
| otherwise = return ()
{-# NOINLINE unsafePackCString #-}
unsafePackCString :: String -> CString
unsafePackCString = unsafePerformIO . newCString
instance IsString CString where
{-# INLINE fromString #-}
fromString = unsafePackCString
-- Don't allocate a String to arrive at a CString; GHC internally uses
-- CStrings to allocate Strings.
{-# RULES
"fromString/CString"
forall s. unsafePackCString (unpackCString# s) = Ptr s
#-}
-- | A very unsafe but handy Num instance for pointers, to use them
-- like C pointers. The implementation coerces pointers, to and from
-- 'WordPtr', which has a Num instance.
instance Num (Ptr a) where
fromInteger = wordPtrToPtr . fromIntegral
x + y = wordPtrToPtr (ptrToWordPtr x + ptrToWordPtr y)
x * y = wordPtrToPtr (ptrToWordPtr x * ptrToWordPtr y)
x - y = wordPtrToPtr (ptrToWordPtr x - ptrToWordPtr y)
abs = wordPtrToPtr . abs . ptrToWordPtr
signum = wordPtrToPtr . signum . ptrToWordPtr
negate = wordPtrToPtr . negate . ptrToWordPtr
alloca' :: Storable a => (Ptr a -> IO b) -> IO a
alloca' f = alloca (\ptr -> f ptr >> peek ptr)
forIOV :: V.IOVector a -> (a -> IO b) -> IO ()
forIOV v f = go 0
where
len = V.length v
go i = when (i < len) $ do
f =<< V.unsafeRead v i
go (i+1)
updateAllIOV' :: V.IOVector a -> (a -> IO a) -> IO ()
updateAllIOV' v f = go 0
where
len = V.length v
go i = when (i < len) $ do
V.unsafeWrite v i
=<< f
=<< V.unsafeRead v i
go (i + 1)
updateAllIOV :: V.IOVector a -> (a -> a) -> IO ()
updateAllIOV v f = go 0
where
len = V.length v
go i = when (i < len) $ do
V.unsafeWrite v i . f
=<< V.unsafeRead v i
go (i + 1)
sign :: (Integral b, Num c) => b -> c
sign = fromIntegral . signum
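-- Illustrative sketch (values chosen arbitrarily): double every element of
-- a small mutable vector in place with 'updateAllIOV', then print it with
-- 'forIOV'.
_exampleVectorUpdate :: IO ()
_exampleVectorUpdate = do
    v <- V.replicate 4 (3 :: Int)
    updateAllIOV v (* 2)   -- every slot now holds 6
    forIOV v print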
| mikeplus64/plissken | src/Util.hs | gpl-3.0 | 3,628 | 0 | 13 | 953 | 1,282 | 652 | 630 | 98 | 1 |
{-# LANGUAGE OverloadedStrings #-}
module Handler.Posts where
import qualified Data.Text as T
import Database.Persist
import Database.Persist.Class
import Database.Persist.Sql
import Model.Post
import Model.User
import Text.Blaze.Html
import Util
import qualified View.Posts
import Web.Spock
index :: RequestedFormat () -> User -> WebAction ()
index (HtmlRequested ()) user =
loadPosts user >>= myBlaze user "index" . View.Posts.index user
index (JsonRequested ()) user =
loadPosts user >>= json
index _ user = show404 user
show :: RequestedFormat T.Text -> User -> WebAction ()
show (HtmlRequested slug) user =
-- TYPE Slug…
runSQL (loadPost user slug) >>=
maybe (show404 user) (showFound)
where showFound (Entity _ post) = do
-- TODO Error handling
Just author <- loadUser (postDomain $ post)
myBlaze user "Post" $ View.Posts.show author post
show (JsonRequested slug) user =
runSQL (loadPost user slug) >>=
--How about a JSON error?
maybe (show404 user) (json . entityVal)
show _ user = show404 user
-- TODO Multiple Tags
tagged tag = undefined
loadPost :: User -> T.Text -> SqlPersistM (Maybe (Entity Post))
loadPost user slug = selectFirst[PostSlug ==. slug,
PostDomain ==. userDomain user] []
loadPosts :: User -> WebAction [Post]
loadPosts user = fmap (map entityVal) $ runSQL $
selectList [PostDomain ==. userDomain user] []
| bitraten/hands | src/Handler/Posts.hs | gpl-3.0 | 1,471 | 0 | 12 | 334 | 484 | 247 | 237 | 36 | 1 |
-- grid is a game written in Haskell
-- Copyright (C) 2018 [email protected]
--
-- This file is part of grid.
--
-- grid is free software: you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation, either version 3 of the License, or
-- (at your option) any later version.
--
-- grid is distributed in the hope that it will be useful,
-- but WITHOUT ANY WARRANTY; without even the implied warranty of
-- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
--
-- You should have received a copy of the GNU General Public License
-- along with grid. If not, see <http://www.gnu.org/licenses/>.
--
module Game.LevelPuzzleMode.LevelPuzzleWorld.Content
(
Content (..),
makeContentEmpty,
makeContent,
makeContentCamera,
destroyContent,
RoomArray,
roomarrayList,
roomarrayAt,
roomarrayModifyAt,
roomarrayUpdate,
roomarrayUpdateIO,
module Game.Grid.GridWorld,
module Game.LevelPuzzleMode.LevelPuzzleWorld.Room,
module Game.LevelPuzzleMode.LevelPuzzleWorld.RoomIx,
) where
import MyPrelude
import Game.MEnv
import Data.Array.IArray hiding (range)
import Data.Array.MArray hiding (range)
import Data.Array.IO hiding (range)
import Data.Array.Base
import Game.Grid.GridWorld
import Game.Grid.GridWorld.Make
import Game.LevelPuzzleMode.LevelPuzzleWorld.Room
import Game.LevelPuzzleMode.LevelPuzzleWorld.RoomIx
data Content =
Content
{
contentGrid :: !GridWorld,
contentRooms :: !RoomArray,
contentRoomsSize :: !UInt,
-- current
contentRoom :: !RoomIx,
-- previous
contentEatRoom :: !RoomIx,
contentEatTick :: !TickT,
contentEatPathBegin :: !UInt
}
--------------------------------------------------------------------------------
--
makeContentEmpty :: MEnv' Content
makeContentEmpty = do
grid <- makeGridWorldEmpty
rooms <- io $ roomarrayList 0 []
return Content
{
contentGrid = grid,
contentRooms = rooms,
contentRoomsSize = 0,
contentRoom = 0,
contentEatPathBegin = 0,
contentEatRoom = 0,
contentEatTick = 0
}
makeContent :: UInt -> [(RoomIx, Room)] -> MEnv' Content
makeContent size rooms = do
makeContentCamera size rooms makeCamera
makeContentCamera :: UInt -> [(RoomIx, Room)] -> Camera -> MEnv' Content
makeContentCamera size ixrooms camera = do
roomslist <- renameIxs ixrooms
let roomssize = length' roomslist
grid <- makeGridWorldWithCamera size camera
rooms <- io $ roomarrayList roomssize roomslist
return Content
{
contentGrid = grid,
contentRooms = rooms,
contentRoomsSize = roomssize,
contentRoom = 0,
contentEatPathBegin = 0,
contentEatRoom = 0,
contentEatTick = 0.0
}
destroyContent :: Content -> MEnv' ()
destroyContent cnt = do
destroyGridWorld $ contentGrid cnt
--------------------------------------------------------------------------------
--
-- |
renameIxs :: [(RoomIx, Room)] -> MEnv' [Room]
renameIxs ixrooms = io $ do
let (ixs, rooms) = unzip ixrooms
mapM (mapRoomIx ixs) rooms
mapRoomIx :: [RoomIx] -> Room -> IO Room
mapRoomIx ixs = \room -> do
-- DotPlain is the only one with reference to a Room
dotplainarrayUpdate (roomDotPlainSize room)
(roomDotPlain room) $ \dot ->
dot { dotplainRoom = mapIx (dotplainRoom dot) }
return room
where
mapIx ix =
mapIx' ixs 0 ix
mapIx' (ix:ixs) jx ix' =
if ix == ix' then jx else mapIx' ixs (jx + 1) ix'
mapIx' [] jx ix' =
error "mapRoomIx: logic error"
--jx
--------------------------------------------------------------------------------
-- RoomArray
type RoomArray =
IOArray Int Room
roomarrayList :: UInt -> [Room] -> IO RoomArray
roomarrayList size rooms =
newListArray (0, fI size - 1) rooms
roomarrayAt :: RoomArray -> UInt -> IO Room
roomarrayAt array ix =
unsafeRead array (fI ix)
roomarrayWrite :: RoomArray -> UInt -> Room -> IO ()
roomarrayWrite array ix room' =
unsafeWrite array (fI ix) room'
roomarrayModifyAt :: RoomArray -> UInt -> (Room -> Room) -> IO ()
roomarrayModifyAt array ix f =
case fI ix of
ix -> unsafeRead array ix >>= \room -> unsafeWrite array ix (f room)
roomarrayUpdate :: UInt -> RoomArray -> (Room -> Room) -> IO ()
roomarrayUpdate size array f =
forM_ (range 0 size) $ \ix -> case fI ix of
ix -> unsafeRead array ix >>= \room -> unsafeWrite array ix(f room)
roomarrayUpdateIO :: UInt -> RoomArray -> (Room -> IO Room) -> IO ()
roomarrayUpdateIO size array f =
forM_ (range 0 size) $ \ix -> case fI ix of
ix -> unsafeRead array ix >>= \room -> (f room >>= unsafeWrite array ix)
| karamellpelle/grid | designer/source/Game/LevelPuzzleMode/LevelPuzzleWorld/Content.hs | gpl-3.0 | 5,132 | 0 | 14 | 1,355 | 1,203 | 655 | 548 | 120 | 3 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.IAM.Organizations.Roles.Undelete
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Undeletes a custom Role.
--
-- /See:/ <https://cloud.google.com/iam/ Identity and Access Management (IAM) API Reference> for @iam.organizations.roles.undelete@.
module Network.Google.Resource.IAM.Organizations.Roles.Undelete
(
-- * REST Resource
OrganizationsRolesUndeleteResource
-- * Creating a Request
, organizationsRolesUndelete
, OrganizationsRolesUndelete
-- * Request Lenses
, oruXgafv
, oruUploadProtocol
, oruAccessToken
, oruUploadType
, oruPayload
, oruName
, oruCallback
) where
import Network.Google.IAM.Types
import Network.Google.Prelude
-- | A resource alias for @iam.organizations.roles.undelete@ method which the
-- 'OrganizationsRolesUndelete' request conforms to.
type OrganizationsRolesUndeleteResource =
"v1" :>
CaptureMode "name" "undelete" Text :>
QueryParam "$.xgafv" Xgafv :>
QueryParam "upload_protocol" Text :>
QueryParam "access_token" Text :>
QueryParam "uploadType" Text :>
QueryParam "callback" Text :>
QueryParam "alt" AltJSON :>
ReqBody '[JSON] UndeleteRoleRequest :>
Post '[JSON] Role
-- | Undeletes a custom Role.
--
-- /See:/ 'organizationsRolesUndelete' smart constructor.
data OrganizationsRolesUndelete =
OrganizationsRolesUndelete'
{ _oruXgafv :: !(Maybe Xgafv)
, _oruUploadProtocol :: !(Maybe Text)
, _oruAccessToken :: !(Maybe Text)
, _oruUploadType :: !(Maybe Text)
, _oruPayload :: !UndeleteRoleRequest
, _oruName :: !Text
, _oruCallback :: !(Maybe Text)
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'OrganizationsRolesUndelete' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'oruXgafv'
--
-- * 'oruUploadProtocol'
--
-- * 'oruAccessToken'
--
-- * 'oruUploadType'
--
-- * 'oruPayload'
--
-- * 'oruName'
--
-- * 'oruCallback'
organizationsRolesUndelete
:: UndeleteRoleRequest -- ^ 'oruPayload'
-> Text -- ^ 'oruName'
-> OrganizationsRolesUndelete
organizationsRolesUndelete pOruPayload_ pOruName_ =
OrganizationsRolesUndelete'
{ _oruXgafv = Nothing
, _oruUploadProtocol = Nothing
, _oruAccessToken = Nothing
, _oruUploadType = Nothing
, _oruPayload = pOruPayload_
, _oruName = pOruName_
, _oruCallback = Nothing
}
-- | V1 error format.
oruXgafv :: Lens' OrganizationsRolesUndelete (Maybe Xgafv)
oruXgafv = lens _oruXgafv (\ s a -> s{_oruXgafv = a})
-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
oruUploadProtocol :: Lens' OrganizationsRolesUndelete (Maybe Text)
oruUploadProtocol
= lens _oruUploadProtocol
(\ s a -> s{_oruUploadProtocol = a})
-- | OAuth access token.
oruAccessToken :: Lens' OrganizationsRolesUndelete (Maybe Text)
oruAccessToken
= lens _oruAccessToken
(\ s a -> s{_oruAccessToken = a})
-- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
oruUploadType :: Lens' OrganizationsRolesUndelete (Maybe Text)
oruUploadType
= lens _oruUploadType
(\ s a -> s{_oruUploadType = a})
-- | Multipart request metadata.
oruPayload :: Lens' OrganizationsRolesUndelete UndeleteRoleRequest
oruPayload
= lens _oruPayload (\ s a -> s{_oruPayload = a})
-- | The \`name\` parameter\'s value depends on the target resource for the
-- request, namely
-- [\`projects\`](\/iam\/reference\/rest\/v1\/projects.roles) or
-- [\`organizations\`](\/iam\/reference\/rest\/v1\/organizations.roles).
-- Each resource type\'s \`name\` value format is described below: *
-- [\`projects.roles.undelete()\`](\/iam\/reference\/rest\/v1\/projects.roles\/undelete):
-- \`projects\/{PROJECT_ID}\/roles\/{CUSTOM_ROLE_ID}\`. This method
-- undeletes only [custom roles](\/iam\/docs\/understanding-custom-roles)
-- that have been created at the project level. Example request URL:
-- \`https:\/\/iam.googleapis.com\/v1\/projects\/{PROJECT_ID}\/roles\/{CUSTOM_ROLE_ID}\`
-- *
-- [\`organizations.roles.undelete()\`](\/iam\/reference\/rest\/v1\/organizations.roles\/undelete):
-- \`organizations\/{ORGANIZATION_ID}\/roles\/{CUSTOM_ROLE_ID}\`. This
-- method undeletes only [custom
-- roles](\/iam\/docs\/understanding-custom-roles) that have been created
-- at the organization level. Example request URL:
-- \`https:\/\/iam.googleapis.com\/v1\/organizations\/{ORGANIZATION_ID}\/roles\/{CUSTOM_ROLE_ID}\`
-- Note: Wildcard (*) values are invalid; you must specify a complete
-- project ID or organization ID.
oruName :: Lens' OrganizationsRolesUndelete Text
oruName = lens _oruName (\ s a -> s{_oruName = a})
-- | JSONP
oruCallback :: Lens' OrganizationsRolesUndelete (Maybe Text)
oruCallback
= lens _oruCallback (\ s a -> s{_oruCallback = a})
instance GoogleRequest OrganizationsRolesUndelete
where
type Rs OrganizationsRolesUndelete = Role
type Scopes OrganizationsRolesUndelete =
'["https://www.googleapis.com/auth/cloud-platform"]
requestClient OrganizationsRolesUndelete'{..}
= go _oruName _oruXgafv _oruUploadProtocol
_oruAccessToken
_oruUploadType
_oruCallback
(Just AltJSON)
_oruPayload
iAMService
where go
= buildClient
(Proxy :: Proxy OrganizationsRolesUndeleteResource)
mempty
| brendanhay/gogol | gogol-iam/gen/Network/Google/Resource/IAM/Organizations/Roles/Undelete.hs | mpl-2.0 | 6,269 | 0 | 16 | 1,251 | 795 | 471 | 324 | 112 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.AdExchangeBuyer2.Accounts.Clients.Create
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Creates a new client buyer.
--
-- /See:/ <https://developers.google.com/authorized-buyers/apis/reference/rest/ Ad Exchange Buyer API II Reference> for @adexchangebuyer2.accounts.clients.create@.
module Network.Google.Resource.AdExchangeBuyer2.Accounts.Clients.Create
(
-- * REST Resource
AccountsClientsCreateResource
-- * Creating a Request
, accountsClientsCreate
, AccountsClientsCreate
-- * Request Lenses
, accXgafv
, accUploadProtocol
, accAccessToken
, accUploadType
, accPayload
, accAccountId
, accCallback
) where
import Network.Google.AdExchangeBuyer2.Types
import Network.Google.Prelude
-- | A resource alias for @adexchangebuyer2.accounts.clients.create@ method which the
-- 'AccountsClientsCreate' request conforms to.
type AccountsClientsCreateResource =
"v2beta1" :>
"accounts" :>
Capture "accountId" (Textual Int64) :>
"clients" :>
QueryParam "$.xgafv" Xgafv :>
QueryParam "upload_protocol" Text :>
QueryParam "access_token" Text :>
QueryParam "uploadType" Text :>
QueryParam "callback" Text :>
QueryParam "alt" AltJSON :>
ReqBody '[JSON] Client :> Post '[JSON] Client
-- | Creates a new client buyer.
--
-- /See:/ 'accountsClientsCreate' smart constructor.
data AccountsClientsCreate =
AccountsClientsCreate'
{ _accXgafv :: !(Maybe Xgafv)
, _accUploadProtocol :: !(Maybe Text)
, _accAccessToken :: !(Maybe Text)
, _accUploadType :: !(Maybe Text)
, _accPayload :: !Client
, _accAccountId :: !(Textual Int64)
, _accCallback :: !(Maybe Text)
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'AccountsClientsCreate' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'accXgafv'
--
-- * 'accUploadProtocol'
--
-- * 'accAccessToken'
--
-- * 'accUploadType'
--
-- * 'accPayload'
--
-- * 'accAccountId'
--
-- * 'accCallback'
accountsClientsCreate
:: Client -- ^ 'accPayload'
-> Int64 -- ^ 'accAccountId'
-> AccountsClientsCreate
accountsClientsCreate pAccPayload_ pAccAccountId_ =
AccountsClientsCreate'
{ _accXgafv = Nothing
, _accUploadProtocol = Nothing
, _accAccessToken = Nothing
, _accUploadType = Nothing
, _accPayload = pAccPayload_
, _accAccountId = _Coerce # pAccAccountId_
, _accCallback = Nothing
}
-- | V1 error format.
accXgafv :: Lens' AccountsClientsCreate (Maybe Xgafv)
accXgafv = lens _accXgafv (\ s a -> s{_accXgafv = a})
-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
accUploadProtocol :: Lens' AccountsClientsCreate (Maybe Text)
accUploadProtocol
= lens _accUploadProtocol
(\ s a -> s{_accUploadProtocol = a})
-- | OAuth access token.
accAccessToken :: Lens' AccountsClientsCreate (Maybe Text)
accAccessToken
= lens _accAccessToken
(\ s a -> s{_accAccessToken = a})
-- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
accUploadType :: Lens' AccountsClientsCreate (Maybe Text)
accUploadType
= lens _accUploadType
(\ s a -> s{_accUploadType = a})
-- | Multipart request metadata.
accPayload :: Lens' AccountsClientsCreate Client
accPayload
= lens _accPayload (\ s a -> s{_accPayload = a})
-- | Unique numerical account ID for the buyer of which the client buyer is a
-- customer; the sponsor buyer to create a client for. (required)
accAccountId :: Lens' AccountsClientsCreate Int64
accAccountId
= lens _accAccountId (\ s a -> s{_accAccountId = a})
. _Coerce
-- | JSONP
accCallback :: Lens' AccountsClientsCreate (Maybe Text)
accCallback
= lens _accCallback (\ s a -> s{_accCallback = a})
instance GoogleRequest AccountsClientsCreate where
type Rs AccountsClientsCreate = Client
type Scopes AccountsClientsCreate =
'["https://www.googleapis.com/auth/adexchange.buyer"]
requestClient AccountsClientsCreate'{..}
= go _accAccountId _accXgafv _accUploadProtocol
_accAccessToken
_accUploadType
_accCallback
(Just AltJSON)
_accPayload
adExchangeBuyer2Service
where go
= buildClient
(Proxy :: Proxy AccountsClientsCreateResource)
mempty
| brendanhay/gogol | gogol-adexchangebuyer2/gen/Network/Google/Resource/AdExchangeBuyer2/Accounts/Clients/Create.hs | mpl-2.0 | 5,250 | 0 | 18 | 1,209 | 804 | 467 | 337 | 115 | 1 |
{- |
Module : Bio.Motions.Common
Description : Common utility functions for working with common types.
License : Apache
Stability : experimental
Portability : unportable
-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE FlexibleContexts #-}
module Bio.Motions.Common where
import Bio.Motions.Types
import Control.Lens
import qualified Data.Vector.Unboxed as U
laminType :: BinderType
laminType = BinderType 0
doesNotBind :: EnergyVector -> Bool
doesNotBind = U.all (== 0) . getEnergyVector
bindsWithLamins :: EnergyVector -> Bool
bindsWithLamins = (/= 0) . (U.! getBinderType laminType) . getEnergyVector
-- |Represents the energy between two objects, e.g. atoms
class HaveEnergyBetween x y where
-- |Returns the energy between the two objects
energyBetween :: x -> y -> Energy
instance HaveEnergyBetween EnergyVector BinderType where
energyBetween (EnergyVector vec) (BinderType idx) = vec U.! idx
{-# INLINE energyBetween #-}
instance HaveEnergyBetween BeadSignature BinderSignature where
energyBetween x y = energyBetween (x ^. beadEV) (y ^. binderType)
{-# INLINE energyBetween #-}
instance HaveEnergyBetween BinderSignature BeadSignature where
energyBetween = flip energyBetween
{-# INLINE energyBetween #-}
instance HaveEnergyBetween AtomSignature AtomSignature where
energyBetween (BeadSig beadInfo) (BinderSig binderInfo) = energyBetween beadInfo binderInfo
energyBetween (BinderSig binderInfo) (BeadSig beadInfo) = energyBetween beadInfo binderInfo
energyBetween _ _ = 0
{-# INLINE energyBetween #-}
instance {-# INCOHERENT #-} HaveEnergyBetween x y => HaveEnergyBetween (Located x) y where
energyBetween x = energyBetween (x ^. located)
{-# INLINE energyBetween #-}
instance {-# INCOHERENT #-} HaveEnergyBetween x y => HaveEnergyBetween x (Located y) where
energyBetween x y = energyBetween x (y ^. located)
{-# INLINE energyBetween #-}
instance {-# INCOHERENT #-} HaveEnergyBetween x y => HaveEnergyBetween (Maybe x) y where
energyBetween (Just x) y = energyBetween x y
energyBetween _ _ = 0
{-# INLINE energyBetween #-}
instance {-# INCOHERENT #-} HaveEnergyBetween x y => HaveEnergyBetween x (Maybe y) where
energyBetween x (Just y) = energyBetween x y
energyBetween _ _ = 0
{-# INLINE energyBetween #-}
-- |A convenient unwrapper of 'wrappedPosition'.
position :: Lens' (Located a) Vec3
position = wrappedPosition . _Wrapping Identity
{-# INLINE position #-}
class AsAtom' f a where
asAtom' :: a -> Atom' f
instance AsAtom' f (Atom' f) where
asAtom' = id
{-# INLINE asAtom' #-}
instance AsAtom' f (BinderInfo' f) where
asAtom' = fmap BinderSig
{-# INLINE asAtom' #-}
instance AsAtom' f (BeadInfo' f) where
asAtom' = fmap BeadSig
{-# INLINE asAtom' #-}
-- |A type-constrained version of 'asAtom''.
asAtom :: AsAtom' Identity a => a -> Atom
asAtom = asAtom'
{-# INLINE asAtom #-}
type AsAtom a = AsAtom' Identity a
| Motions/motions | src/Bio/Motions/Common.hs | apache-2.0 | 3,047 | 0 | 8 | 559 | 678 | 360 | 318 | 62 | 1 |
<?xml version="1.0" encoding="UTF-8"?><!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<helpset version="2.0" xml:lang="sl-SI">
<title>HTTPS Info Add-on</title>
<maps>
<homeID>httpsinfo</homeID>
<mapref location="map.jhm"/>
</maps>
<view>
<name>TOC</name>
<label>Contents</label>
<type>org.zaproxy.zap.extension.help.ZapTocView</type>
<data>toc.xml</data>
</view>
<view>
<name>Index</name>
<label>Index</label>
<type>javax.help.IndexView</type>
<data>index.xml</data>
</view>
<view>
<name>Search</name>
<label>Search</label>
<type>javax.help.SearchView</type>
<data engine="com.sun.java.help.search.DefaultSearchEngine">
JavaHelpSearch
</data>
</view>
<view>
<name>Favorites</name>
<label>Favorites</label>
<type>javax.help.FavoritesView</type>
</view>
</helpset> | secdec/zap-extensions | addOns/httpsInfo/src/main/javahelp/org/zaproxy/zap/extension/httpsinfo/resources/help_sl_SI/helpset_sl_SI.hs | apache-2.0 | 968 | 77 | 67 | 157 | 413 | 209 | 204 | -1 | -1 |
{- Scene.hs
- Heirarchical Scene Description
-
- Timothy A. Chagnon
- CS 636 - Spring 2009
-}
module Scene where
import Color
import Math
import Ray
import Primitive
import Mesh
data Scene =
Scene {
outfile :: String,
width :: Int,
height :: Int,
camera :: Camera,
background :: Color,
objects :: !ObjectTree
}
deriving Show
data Camera =
Camera {
dist :: RealT,
fovAngle :: RealT,
location :: Vec3f,
direction :: Vec3f,
up :: Vec3f
}
deriving Show
data ObjectTree = Group ![ObjectTree]
| Transform !Mat4f !ObjectTree
| Primitive !Primitive
| LoadMesh !String
| Mesh !Mesh
deriving Show
-- Load scene
loadScene :: Scene -> IO Scene
loadScene scene = do
objs1 <- loadObjs (objects scene)
return scene{objects = objs1}
-- Load object tree
loadObjs :: ObjectTree -> IO ObjectTree
loadObjs (Group objs) = do
objs' <- mapM loadObjs objs
return (Group objs')
loadObjs (Transform t obj) = do
obj' <- loadObjs obj
return (Transform t obj')
loadObjs (Primitive p) = return (Primitive p)
loadObjs (LoadMesh file) = do
mesh <- loadSMF file
return (Mesh mesh)
loadObjs (Mesh _) = error "loadObjs: unexpected Mesh"
-- Precompute, flatten and invert transforms
prepScene :: Scene -> Scene
prepScene scene =
let objs2 = prepObjs id4f (objects scene) in
scene{objects = objs2}
-- Push transforms down to primitives
prepObjs :: Mat4f -> ObjectTree -> ObjectTree
prepObjs t (Group objs) = Group (map (prepObjs t) objs)
prepObjs t (Transform t2 objs) = prepObjs (t `mmMul` t2) objs
prepObjs t (Primitive p) = Transform (inverse4f t) (Primitive p)
prepObjs t (Mesh m) = Transform (inverse4f t) (Mesh m)
-- Traverse the Object tree looking for intersections
intersect :: Ray -> ObjectTree -> [Intersection]
intersect r (Group objs) = concatMap (intersect r) objs
intersect r (Transform m obj) = intersect (transformR m r) obj
intersect r (Primitive p) = intersectP r p
intersect r (Mesh m) = intersectM r m
intersect _ _ = []
-- Generate a translation transformation matrix
translate :: Vec3f -> Mat4f
translate v =
let (x, y, z) = vec3fElts v in
mat4f (vec4f 1 0 0 x)
(vec4f 0 1 0 y)
(vec4f 0 0 1 z)
(vec4f 0 0 0 1)
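-- Illustrative note: @translate (vec3f 1 2 3)@ is the homogeneous matrix
--
-- > [ 1 0 0 1 ]
-- > [ 0 1 0 2 ]
-- > [ 0 0 1 3 ]
-- > [ 0 0 0 1 ]
--
-- i.e. the translation vector sits in the last column.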
-- Generate a scaling transformation matrix
scale :: Vec3f -> Mat4f
scale v =
let (x, y, z) = vec3fElts v in
mat4f (vec4f x 0 0 0)
(vec4f 0 y 0 0)
(vec4f 0 0 z 0)
(vec4f 0 0 0 1)
-- Generate a rotation transformation matrix
-- First arg is angle in degrees
-- Second arg is a vector around which to rotate
rotate :: RealT -> Vec3f -> Mat4f
rotate theta v =
let (x, y, z) = vec3fElts (norm v) in
let c = cos (deg2rad theta) in
let s = sin (deg2rad theta) in
mat4f (vec4f (x^2+(1-x^2)*c) (x*y*(1-c)-z*s) (x*z*(1-c)+y*s) 0)
(vec4f (x*y*(1-c)+z*s) (y^2+(1-y^2)*c) (y*z*(1-c)-x*s) 0)
(vec4f (x*z*(1-c)-y*s) (y*z*(1-c)+x*s) (z^2+(1-z^2)*c) 0)
(vec4f 0 0 0 1)
| tchagnon/cs636-raytracer | a1/Scene.hs | apache-2.0 | 3,310 | 0 | 19 | 1,046 | 1,279 | 660 | 619 | 97 | 1 |
-- {-# OPTIONS_GHC -package=ghc-7.10.1 #-}
{-# LANGUAGE TypeFamilies, DataKinds, PolyKinds, StandaloneDeriving, TypeOperators, FlexibleInstances, ScopedTypeVariables #-}
module TySet(plugin, Set, Union) where
import TypeRep
import Type
import Kind
import TcEvidence
import CoAxiom
import Name
import OccName
import Var
import TyCon
import BasicTypes
-- friends:
import Var
import VarEnv
import VarSet
import Name
import BasicTypes
import TyCon
import Class
import CoAxiom
-- others
import PrelNames
import Outputable
import FastString
import Util
import DynFlags
import TcPluginM ( TcPluginM, tcPluginIO, tcPluginTrace )
import TcRnMonad ( TcPlugin(..), TcPluginResult(..)
, Ct(..), CtEvidence(..), CtLoc, ctLoc, ctPred
, mkNonCanonical, isTouchableTcM, unsafeTcPluginTcM)
import Plugins ( CommandLineOption, defaultPlugin, Plugin(..) )
import Debug.Trace
import Outputable
import DynFlags
import Control.Monad
import Control.Applicative
import qualified Data.Typeable as DT
import Data.List
import Test.QuickCheck
import Test.QuickCheck.Property
{- Set constructor -}
data Set (a :: [k])
{- Union operation -}
type family Union (a :: [k]) (b :: [k]) :: [k]
type family Member (a :: k) (b :: [k]) :: Bool where
Member x '[] = False
Member x (x ': xs) = True
Member x (y ': xs) = Member x xs
{- Plugin setup -}
plugin :: Plugin
plugin = defaultPlugin { tcPlugin = Just . thePlugin }
thePlugin :: [CommandLineOption] -> TcPlugin
thePlugin opts = TcPlugin
{ tcPluginInit = pluginInit opts
, tcPluginSolve = pluginSolve
, tcPluginStop = pluginStop
}
pluginInit :: [CommandLineOption] -> TcPluginM ()
pluginInit _ = return ()
pluginSolve :: () -> [Ct] -> [Ct] -> [Ct] -> TcPluginM TcPluginResult
pluginSolve () given derived wanted =
do -- For debugging, show the constraints
tcPluginTrace "SET" $ ppCts wanted
tcPluginTrace "SET" $ ppr $ head $ wanted
solved <- findSetEquality wanted
-- For the sake of debugging
tcPluginIO $ putStrLn "Running Set equality plugin"
return $ TcPluginOk solved []
-- Possible equations for the solver
-- Union a b ~ a => b ~ '[]
-- Union a b ~ b => a ~ '[]
-- Union (Union a b) c ~ d => Union a (Union b c) ~ d
-- Union '[] a ~ b => a ~ b
-- Union a '[] ~ b => a ~ b
-- Union a a ~ b => a ~ b (can be done in a reduce phase that looks through a tree of union terms..)
-- Requires an `ordering'
-- Union a b ~ c => Union b a ~ c [for the purpose of normalisation]
-- Pretty print constraints
ppCts [] = text "(empty)"
ppCts cs = vcat (map ppr cs)
{- Some debugging Show instances to help me understand what is coming in and out of GHC -}
deriving instance Show TyLit
deriving instance Show Type
instance Show Var where
show v = showSDocUnsafe $ ppr v
instance Show TyCon where
show tycon = showSDocUnsafe $ ppr tycon
-- Prediacate one whether a type is = Set '[]
isEmptySetType t = do (x, tcs) <- splitTyConApp_maybe t
guard $ length tcs == 2
guard $ getOccString (tyConName x) == "Set"
case tcs of [TyVarTy k, TyConApp con [TyVarTy k']] ->
do guard $ k == k'
guard $ (getOccString . tyConName $ con) == "[]"
_ -> Nothing
getNameStr = getOccString . tyConName
isUnion :: Type -> Bool
isUnion t = case splitTyConApp_maybe t of
Just (x, tcs) -> (getOccString . tyConName $ x) == "Union"
_ -> False
-- splitUnion :: Type -> ([Type], [Type])
splitUnion t = do (x, tcs) <- splitTyConApp_maybe t
guard $ (getOccString . tyConName $ x) == "Union"
splitList t = do (tcon, args) <- splitTyConApp_maybe t
case getNameStr tcon of
":" -> do guard $ (length args >= 2)
(t1 : (t2 : ts)) <- return args
(kind, emptyArgs) <- splitTyConApp_maybe $ t1
guard $ emptyArgs == []
-- maybe be unnecessary and even restrictive
guard $ getNameStr kind == "*"
ts' <- splitList $ head ts -- possible bug here
return $ (t2 : ts')
"[]" -> do guard $ (length args == 1)
[t] <- return args
(kind, emptyArgs) <- splitTyConApp_maybe $ t
                                  -- may be unnecessary and even restrictive
guard $ getNameStr kind == "*"
guard $ emptyArgs == []
return $ []
_ -> return $ []
unionSingle t = do (x, tcs) <- splitTyConApp_maybe t
guard $ length tcs == 2
guard $ getNameStr x == "Set"
return ()
{- Experimenting, finds Set '[] ~ Set '[] and returns a refl coercion
This was set up before when 'Set' was a type family, but is now redundant
-}
findSetEquality :: [Ct] -> TcPluginM [(EvTerm, Ct)]
findSetEquality [] = return []
findSetEquality (ct : xs) = let x = (do (Nominal,t1,t2) <- getEqPredTys_maybe (ctPred ct)
isEmptySetType t1
isEmptySetType t2
return ((EvCoercion $ TcRefl Nominal t1, ct), t1))
y = do (Nominal,t1,t2) <- getEqPredTys_maybe (ctPred ct)
return t1
in
case x of
Just (ct, tcsA) -> do xs' <- (findSetEquality xs)
tcPluginTrace "SET" $ ppr $ tcsA
tcPluginTrace "SET" $ text $ show tcsA
return $ ct : xs'
Nothing -> case y of
Just t -> do tcPluginTrace "SET" $ text $ show t
return []
Nothing -> findSetEquality xs
pluginStop :: () -> TcPluginM ()
pluginStop _ = return ()
data TermTree a = Union (TermTree a) (TermTree a) | Empty | Var String | Data [a] deriving Show
equal :: Eq a => TermTree a -> TermTree a -> Bool
equal t1 t2 = let (vs1, ds1) = normalisedRep t1
(vs2, ds2) = normalisedRep t2
in (vs1 == vs2) && (all (flip elem $ ds2) ds1) && (all (flip elem $ ds1) ds2)
where
normalisedRep :: Eq a => TermTree a -> ([String], [a])
normalisedRep t = let (vs, ds) = separate t
in (nub . sort $ vs, nub ds)
separate :: TermTree a -> ([String], [a])
separate Empty = ([], [])
separate (Var s) = ([s], [])
separate (Data xs) = ([], xs)
separate (Union a b) = let (vs1, ds1) = separate a
(vs2, ds2) = separate b
in (vs1++vs2, ds1++ds2)
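-- Illustration (not used by the plugin itself): 'equal' treats Union as
-- associative, commutative, and idempotent over variables, and ignores order
-- and duplication of data, so for example
--   equal (Union (Var "a") (Data [1,2]))
--         (Union (Data [2,1]) (Union Empty (Var "a")))
-- evaluates to True.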
{-- Unit testing of normaliser and set equality --}
testSetTermEquality :: IO ()
testSetTermEquality = quickCheck (\((n, m) :: (TermTree Int, TermTree Int)) -> equal n m)
instance Arbitrary (TermTree Int, TermTree Int) where
arbitrary = sized $ \vars ->
sized $ \datums ->
do v <- (vector vars)::(Gen [String])
dat <- (vector datums)::(Gen [Int])
v' <- shuffle v
dat' <- shuffle dat
g1 <- gen v dat
g2 <- gen v' dat'
-- Soundness check on generated tree
let (v0, dat0) = separate g1
(v1, dat1) = separate g2
norm :: (Ord a) => [a] -> [a]
norm = nub . sort
if and [(norm v0) == (norm v), (norm v1) == (norm v),
(norm dat) == (norm dat0), (norm dat) == (norm dat1)]
then return ()
else error $ "Generated trees failed soundness check: " ++ (show g1) ++ " " ++ (show g2)
return (g1, g2)
-- Arbitrarily permute a union of two generators
unionPerm x y = do x' <- x
y' <- y
elements [Union x' y', Union y' x']
-- Generates arbitrary terms from a list of variables and a list of Int datums
gen :: [String] -> [Int] -> Gen (TermTree Int)
gen [] [] = return $ Empty
gen vs ds = do -- Choose between 0 and 1 if 'vs' is empty, or 0-2 if 'vs' has elements
choose <- suchThat arbitrary (\x -> x<=(2 - if vs == [] then 1 else 0) && x>=0)
case choose::Int of
-- Union with Empty
0 -> unionPerm (gen vs ds) (return $ Empty)
-- Pick some number of elements (maybe none) and create a data leaf
1 -> do i <- suchThat arbitrary (<= (length ds))
unionPerm (gen vs (drop i ds)) (return $ Data (take i ds))
-- Case where vs is non-empty, create a variable node, and either remove that
-- variable, or keep it around as a possibility again to test idempotency
2 -> oneof [unionPerm (gen (tail vs) ds) (return $ Var (head vs)),
unionPerm (gen vs ds) (return $ Var (head vs))]
_ -> error "unpossible"
| k-bx/type-level-sets | TySet.hs | bsd-2-clause | 10,398 | 0 | 18 | 4,433 | 2,706 | 1,416 | 1,290 | -1 | -1 |
{-# LANGUAGE GeneralizedNewtypeDeriving, TypeSynonymInstances, FlexibleInstances #-}
-- | Properties in this module ensure that things are currently mounted,
-- but without making the mount persistent. Use `Propellor.Property.Fstab`
-- to configure persistent mounts.
module Propellor.Property.Mount where
import Propellor.Base
import Utility.Path
import Data.List
-- | Type of filesystem to mount ("auto" to autodetect)
type FsType = String
-- | A device or other thing to be mounted.
type Source = String
-- | A mount point for a filesystem.
type MountPoint = FilePath
-- | Filesystem mount options. E.g., MountOpts ["errors=remount-ro"]
--
-- For default mount options, use `mempty`.
newtype MountOpts = MountOpts [String]
deriving Monoid
class ToMountOpts a where
toMountOpts :: a -> MountOpts
instance ToMountOpts MountOpts where
toMountOpts = id
instance ToMountOpts String where
toMountOpts s = MountOpts [s]
formatMountOpts :: MountOpts -> String
formatMountOpts (MountOpts []) = "defaults"
formatMountOpts (MountOpts l) = intercalate "," l
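-- For example, formatMountOpts (MountOpts ["noatime", "errors=remount-ro"])
-- is "noatime,errors=remount-ro", and formatMountOpts mempty is "defaults".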
-- | Mounts a device, without listing it in </etc/fstab>.
--
-- Note that this property will fail if the device is already mounted
-- at the MountPoint.
mounted :: FsType -> Source -> MountPoint -> MountOpts -> Property UnixLike
mounted fs src mnt opts = property (mnt ++ " mounted") $
toResult <$> liftIO (mount fs src mnt opts)
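-- A hedged usage sketch (the host/props syntax is assumed from Propellor at
-- large, not defined in this module):
--
-- > & mounted "tmpfs" "tmpfs" "/tmp" (toMountOpts "size=1G")
--
-- Per the caveat above, this fails if something is already mounted at /tmp.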
-- | Bind mounts the first directory so its contents also appear
-- in the second directory.
bindMount :: FilePath -> FilePath -> Property Linux
bindMount src dest = tightenTargets $
cmdProperty "mount" ["--bind", src, dest]
`assume` MadeChange
`describe` ("bind mounted " ++ src ++ " to " ++ dest)
-- | Enables swapping to a device, which must be formatted already as a swap
-- partition.
swapOn :: Source -> RevertableProperty Linux Linux
swapOn mnt = tightenTargets doswapon <!> tightenTargets doswapoff
where
swaps = lines <$> readProcess "swapon" ["--show=NAME"]
doswapon = check (notElem mnt <$> swaps) $
cmdProperty "swapon" [mnt]
doswapoff = check (elem mnt <$> swaps) $
cmdProperty "swapoff" [mnt]
mount :: FsType -> Source -> MountPoint -> MountOpts -> IO Bool
mount fs src mnt opts = boolSystem "mount" $
[ Param "-t", Param fs
, Param "-o", Param (formatMountOpts opts)
, Param src
, Param mnt
]
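-- For example, mount "ext4" "/dev/sdb1" "/mnt/data" (toMountOpts "noatime")
-- runs: mount -t ext4 -o noatime /dev/sdb1 /mnt/data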
-- | Lists all mount points of the system.
mountPoints :: IO [MountPoint]
mountPoints = lines <$> readProcess "findmnt" ["-rn", "--output", "target"]
-- | Finds all filesystems mounted inside the specified directory.
mountPointsBelow :: FilePath -> IO [MountPoint]
mountPointsBelow target = filter (\p -> simplifyPath p /= simplifyPath target)
. filter (dirContains target)
<$> mountPoints
-- | Filesystem type mounted at a given location.
getFsType :: MountPoint -> IO (Maybe FsType)
getFsType = findmntField "fstype"
-- | Mount options for the filesystem mounted at a given location.
getFsMountOpts :: MountPoint -> IO MountOpts
getFsMountOpts p = maybe mempty toMountOpts
<$> findmntField "fs-options" p
type UUID = String
-- | UUID of filesystem mounted at a given location.
getMountUUID :: MountPoint -> IO (Maybe UUID)
getMountUUID = findmntField "uuid"
-- | UUID of a device
getSourceUUID :: Source -> IO (Maybe UUID)
getSourceUUID = blkidTag "UUID"
type Label = String
-- | Label of filesystem mounted at a given location.
getMountLabel :: MountPoint -> IO (Maybe Label)
getMountLabel = findmntField "label"
-- | Label of a device
getSourceLabel :: Source -> IO (Maybe UUID)
getSourceLabel = blkidTag "LABEL"
-- | Device mounted at a given location.
getMountSource :: MountPoint -> IO (Maybe Source)
getMountSource = findmntField "source"
findmntField :: String -> FilePath -> IO (Maybe String)
findmntField field mnt = catchDefaultIO Nothing $
headMaybe . filter (not . null) . lines
<$> readProcess "findmnt" ["-n", mnt, "--output", field]
blkidTag :: String -> Source -> IO (Maybe String)
blkidTag tag dev = catchDefaultIO Nothing $
headMaybe . filter (not . null) . lines
<$> readProcess "blkid" [dev, "-s", tag, "-o", "value"]
-- | Unmounts a device or mountpoint,
-- lazily so any running processes don't block it.
umountLazy :: FilePath -> IO ()
umountLazy mnt =
unlessM (boolSystem "umount" [ Param "-l", Param mnt ]) $
stopPropellorMessage $ "failed unmounting " ++ mnt
-- | Unmounts anything mounted inside the specified directory.
unmountBelow :: FilePath -> IO ()
unmountBelow d = do
submnts <- mountPointsBelow d
forM_ submnts umountLazy
| ArchiveTeam/glowing-computing-machine | src/Propellor/Property/Mount.hs | bsd-2-clause | 4,517 | 28 | 12 | 767 | 1,102 | 579 | 523 | 79 | 1 |
{-# LANGUAGE TypeFamilies #-}
module Text.Grampa.Combinators (moptional, concatMany, concatSome,
flag, count, upto,
delimiter, operator, keyword) where
import Control.Applicative(Applicative(..), Alternative(..))
import Data.List.NonEmpty (fromList)
import Data.Monoid (Monoid, (<>))
import Data.Monoid.Factorial (FactorialMonoid)
import Data.Semigroup (Semigroup(sconcat))
import Data.Semigroup.Cancellative (LeftReductive)
import Text.Grampa.Class (InputParsing(ParserInput, string), LexicalParsing(lexicalToken, keyword))
import Text.Parser.Combinators (Parsing((<?>)), count)
-- | Attempts to parse a monoidal value; if the argument parser fails, returns 'mempty'.
moptional :: (Alternative p, Monoid a) => p a -> p a
moptional p = p <|> pure mempty
-- | Zero or more argument occurrences like 'many', with concatenated monoidal results.
concatMany :: (Alternative p, Monoid a) => p a -> p a
concatMany p = mconcat <$> many p
-- | One or more argument occurrences like 'some', with concatenated monoidal results.
concatSome :: (Alternative p, Semigroup a) => p a -> p a
concatSome p = sconcat . fromList <$> some p
-- | Returns 'True' if the argument parser succeeds and 'False' otherwise.
flag :: Alternative p => p a -> p Bool
flag p = True <$ p <|> pure False
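-- For example, @flag (string "!")@ (illustrative, assuming a String input
-- stream) yields 'True' when a bang is present and 'False' otherwise.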
-- | Parses between 0 and N occurrences of the argument parser in sequence and returns the list of results.
upto :: Alternative p => Int -> p a -> p [a]
upto n p
| n > 0 = (:) <$> p <*> upto (pred n) p
<|> pure []
| otherwise = pure []
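-- For example, @upto 2 p@ accepts zero, one, or two occurrences of @p@ and
-- returns the corresponding list; unlike 'count' it does not demand all @n@.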
-- | Parses the given delimiter, such as a comma or a brace
delimiter :: (Show s, FactorialMonoid s, LeftReductive s, s ~ ParserInput m, LexicalParsing m) => s -> m s
delimiter s = lexicalToken (string s) <?> ("delimiter " <> show s)
-- | Parses the given operator symbol
operator :: (Show s, FactorialMonoid s, LeftReductive s, s ~ ParserInput m, LexicalParsing m) => s -> m s
operator s = lexicalToken (string s) <?> ("operator " <> show s)
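-- For example, @delimiter ","@ consumes a comma token and labels a failure as
-- delimiter ","; 'keyword' (re-exported from 'LexicalParsing') behaves
-- analogously for alphanumeric keywords.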
| blamario/grampa | grammatical-parsers/src/Text/Grampa/Combinators.hs | bsd-2-clause | 2,039 | 0 | 10 | 408 | 608 | 330 | 278 | 29 | 1 |
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE QuasiQuotes #-}
{-# LANGUAGE ViewPatterns #-}
{-| This library exports two utilities for compiling Dhall expressions to Bash:
* `dhallToExpression`, which emits a Bash expression (i.e. a valid
right-hand side for an assignment)
* `dhallToStatement`, which emits a Bash @declare@ or @unset@ statement
suitable for use with `eval`
`dhallToExpression` only supports the conversion of primitive values, such
as:
* @Bool@ - which translates to a string that is either @"true"@ or @"false"@
* @Natural@ - which translates to a Bash integer
* @Integer@ - which translates to a Bash integer
* @Text@ - which translates to a Bash string (properly escaped if necessary)
The @dhall-to-bash@ executable by default tries to compile Dhall expressions
to Bash expressions using the `dhallToExpression` function. For example:
> $ dhall-to-bash <<< 'True'
> true
> $ dhall-to-bash <<< 'False'
> false
> $ dhall-to-bash <<< '1'
> 1
> $ dhall-to-bash <<< '+1'
> 1
> $ dhall-to-bash <<< '"ABC"'
> ABC
> $ dhall-to-bash <<< '" X "'
> $' X '
> $ dhall-to-bash <<< 'Natural/even +100'
> true
The output of `dhallToExpression` is a valid Bash expression that can be
embedded anywhere Bash expressions are valid, such as the right-hand side of
an assignment statement:
> $ FOO=$(dhall-to-bash <<< 'List/length Natural [1, 2, 3]')
> $ echo "${FOO}"
> 3
`dhallToStatement` supports a wider range of expressions by also adding
support for:
* @Optional@ - which translates to a variable which is either set or unset
* @List@ - which translates to a Bash array
* records - which translate to Bash associative arrays
The @dhall-to-bash@ executable can emit a statement instead of an expression
if you add the @--declare@ flag specifying which variable to set or unset.
For example:
> $ dhall-to-bash --declare FOO <<< 'None Natural'
> unset FOO
> $ dhall-to-bash --declare FOO <<< 'Some 1'
> declare -r -i FOO=1
> $ dhall-to-bash --declare FOO <<< 'Some (Some 1)'
> declare -r -i FOO=1
> $ dhall-to-bash --declare FOO <<< 'Some (None Natural)'
> unset FOO
> $ dhall-to-bash --declare FOO <<< '[1, 2, 3]'
> declare -r -a FOO=(1 2 3)
> $ dhall-to-bash --declare FOO <<< '{ bar = 1, baz = True }'
> declare -r -A FOO=([bar]=1 [baz]=true)
The output of `dhallToExpression` is either a @declare@ or @unset@ Bash
statement that you can pass to @eval@:
> $ eval $(dhall-to-bash --declare FOO <<< '{ bar = 1, baz = True }')
> $ echo "${FOO[bar]}"
> 1
> $ echo "${FOO[baz]}"
> true
@dhall-to-bash@ declares variables read-only (i.e. @-r@) to prevent you from
accidentally overwriting, deleting or mutating variables:
> $ eval $(dist/build/dhall-to-bash/dhall-to-bash --declare BAR <<< '1')
> $ echo "${BAR}"
> 1
> $ unset BAR
> bash: unset: BAR: cannot unset: readonly variable
> $ eval $(dist/build/dhall-to-bash/dhall-to-bash --declare BAR <<< '2')
> bash: declare: BAR: readonly variable
-}
module Dhall.Bash (
-- * Dhall to Bash
dhallToExpression
, dhallToStatement
-- * Exceptions
, ExpressionError(..)
, StatementError(..)
) where
import Control.Exception (Exception)
import Data.Bifunctor (first)
import Data.ByteString
import Data.Typeable (Typeable)
import Data.Void (Void, absurd)
import Dhall.Core (Chunks (..), Expr (..))
import qualified Data.Foldable
import qualified Data.Text
import qualified Data.Text.Encoding
import qualified Dhall.Core
import qualified Dhall.Map
import qualified Dhall.Pretty
import qualified NeatInterpolation
import qualified Text.ShellEscape
_ERROR :: Data.Text.Text
_ERROR = "\ESC[1;31mError\ESC[0m"
{-| This is the exception type for errors that might arise when translating
Dhall expressions to Bash statements
Because the majority of Dhall language features do not easily translate to
Bash this just returns the expression that failed
-}
data StatementError
= UnsupportedStatement (Expr Void Void)
| UnsupportedSubexpression (Expr Void Void)
deriving (Typeable)
instance Show StatementError where
show (UnsupportedStatement e) =
Data.Text.unpack [NeatInterpolation.text|
$_ERROR: Cannot translate to a Bash statement
Explanation: Only primitive values, records, ❰List❱s, and ❰Optional❱ values can
be translated from Dhall to a Bash statement
The following Dhall expression could not be translated to a Bash statement:
↳ $txt
|]
where
txt = Dhall.Core.pretty e
show (UnsupportedSubexpression e) =
-- Carefully note: No tip suggesting `--declare` since it won't work
-- here (and the user is already using `--declare`)
Data.Text.unpack [NeatInterpolation.text|
$_ERROR: Cannot translate to a Bash expression
Explanation: Only primitive values can be translated from Dhall to a Bash
expression
The following Dhall expression could not be translated to a Bash expression:
↳ $txt
|]
where
txt = Dhall.Core.pretty e
instance Exception StatementError
{-| This is the exception type for errors that might arise when translating
Dhall expressions to Bash expressions
Because the majority of Dhall language features do not easily translate to
Bash this just returns the expression that failed
-}
data ExpressionError = UnsupportedExpression (Expr Void Void) deriving (Typeable)
instance Show ExpressionError where
show (UnsupportedExpression e) =
Data.Text.unpack [NeatInterpolation.text|
$_ERROR: Cannot translate to a Bash expression
Explanation: Only primitive values can be translated from Dhall to a Bash
expression
The following Dhall expression could not be translated to a Bash expression:
↳ $txt$tip
|]
where
txt = Dhall.Core.pretty e
tip = case e of
Some _ -> "\n\n" <> [NeatInterpolation.text|
Tip: You can convert an ❰Optional❱ value to a Bash statement using the --declare
flag
|]
ListLit _ _ -> "\n\n" <> [NeatInterpolation.text|
Tip: You can convert a ❰List❱ to a Bash statement using the --declare flag
|]
RecordLit _ -> "\n\n" <> [NeatInterpolation.text|
Tip: You can convert a record to a Bash statement using the --declare flag
|]
_ -> ""
instance Exception ExpressionError
{-| Compile a Dhall expression to a Bash statement that @declare@s or @unset@s
a variable of your choice
This only supports:
* @Bool@s
* @Natural@s
* @Integer@s
* @Text@s
* @Optional@s
* @List@s
* records
-}
dhallToStatement
:: Expr s Void
-- ^ Dhall expression to compile
-> ByteString
-- ^ Variable to @declare@ or @unset@
-> Either StatementError ByteString
-- ^ Bash statement or compile failure
dhallToStatement expr0 var0 = go (Dhall.Core.normalize expr0)
where
var = Text.ShellEscape.bytes (Text.ShellEscape.bash var0)
adapt (UnsupportedExpression e) = UnsupportedSubexpression e
go (BoolLit a) =
go (TextLit (if a then "true" else "false"))
go (NaturalLit a) =
go (IntegerLit (fromIntegral a))
go (IntegerLit a) = do
e <- first adapt (dhallToExpression (IntegerLit a))
let bytes = "declare -r -i " <> var <> "=" <> e
return bytes
go (TextLit a) = do
e <- first adapt (dhallToExpression (TextLit a))
let bytes = "declare -r " <> var <> "=" <> e
return bytes
go (ListLit _ bs) = do
bs' <- first adapt (mapM dhallToExpression bs)
let bytes
= "declare -r -a "
<> var
<> "=("
<> Data.ByteString.intercalate " " (Data.Foldable.toList bs')
<> ")"
return bytes
go (Some b) = go b
go (App None _) = return ("unset " <> var)
go e
| Just text <- Dhall.Pretty.temporalToText e =
go (TextLit (Chunks [] text))
go (RecordLit a) = do
let process (k, v) = do
v' <- dhallToExpression v
let bytes = Data.Text.Encoding.encodeUtf8 k
let k' = Text.ShellEscape.bytes (Text.ShellEscape.bash bytes)
return ("[" <> k' <> "]=" <> v')
kvs' <- first adapt (traverse process (Dhall.Map.toList $ Dhall.Core.recordFieldValue <$> a))
let bytes
= "declare -r -A "
<> var
<> "=("
<> Data.ByteString.intercalate " " kvs'
<> ")"
return bytes
go (Field (Union m) k) = do
e <- first adapt (dhallToExpression (Field (Union m) k))
let bytes = "declare -r " <> var <> "=" <> e
return bytes
go (Embed x) =
absurd x
go (Note _ e) =
go e
-- Use an exhaustive pattern match here so that we don't forget to handle
-- new constructors added to the API
go e@(Const {}) = Left (UnsupportedStatement e)
go e@(Var {}) = Left (UnsupportedStatement e)
go e@(Lam {}) = Left (UnsupportedStatement e)
go e@(Pi {}) = Left (UnsupportedStatement e)
go e@(App {}) = Left (UnsupportedStatement e)
go e@(Let {}) = Left (UnsupportedStatement e)
go e@(Annot {}) = Left (UnsupportedStatement e)
go e@(Bool {}) = Left (UnsupportedStatement e)
go e@(BoolAnd {}) = Left (UnsupportedStatement e)
go e@(BoolOr {}) = Left (UnsupportedStatement e)
go e@(BoolEQ {}) = Left (UnsupportedStatement e)
go e@(BoolNE {}) = Left (UnsupportedStatement e)
go e@(BoolIf {}) = Left (UnsupportedStatement e)
go e@(Natural ) = Left (UnsupportedStatement e)
go e@(NaturalFold ) = Left (UnsupportedStatement e)
go e@(NaturalBuild ) = Left (UnsupportedStatement e)
go e@(NaturalIsZero ) = Left (UnsupportedStatement e)
go e@(NaturalEven ) = Left (UnsupportedStatement e)
go e@(NaturalOdd ) = Left (UnsupportedStatement e)
go e@(NaturalToInteger ) = Left (UnsupportedStatement e)
go e@(NaturalShow ) = Left (UnsupportedStatement e)
go e@(NaturalSubtract ) = Left (UnsupportedStatement e)
go e@(NaturalPlus {}) = Left (UnsupportedStatement e)
go e@(NaturalTimes {}) = Left (UnsupportedStatement e)
go e@(Integer ) = Left (UnsupportedStatement e)
go e@(IntegerClamp ) = Left (UnsupportedStatement e)
go e@(IntegerNegate ) = Left (UnsupportedStatement e)
go e@(IntegerShow ) = Left (UnsupportedStatement e)
go e@(IntegerToDouble ) = Left (UnsupportedStatement e)
go e@(Double ) = Left (UnsupportedStatement e)
go e@(DoubleLit {}) = Left (UnsupportedStatement e)
go e@(DoubleShow ) = Left (UnsupportedStatement e)
go e@(Text ) = Left (UnsupportedStatement e)
go e@(TextAppend {}) = Left (UnsupportedStatement e)
go e@(TextReplace {}) = Left (UnsupportedStatement e)
go e@(TextShow {}) = Left (UnsupportedStatement e)
go e@(Date ) = Left (UnsupportedStatement e)
go e@(DateLiteral {}) = Left (UnsupportedStatement e)
go e@(Time ) = Left (UnsupportedStatement e)
go e@(TimeLiteral {}) = Left (UnsupportedStatement e)
go e@(TimeZone ) = Left (UnsupportedStatement e)
go e@(TimeZoneLiteral {}) = Left (UnsupportedStatement e)
go e@(List ) = Left (UnsupportedStatement e)
go e@(ListAppend {}) = Left (UnsupportedStatement e)
go e@(ListBuild ) = Left (UnsupportedStatement e)
go e@(ListFold ) = Left (UnsupportedStatement e)
go e@(ListLength ) = Left (UnsupportedStatement e)
go e@(ListHead ) = Left (UnsupportedStatement e)
go e@(ListLast ) = Left (UnsupportedStatement e)
go e@(ListIndexed ) = Left (UnsupportedStatement e)
go e@(ListReverse ) = Left (UnsupportedStatement e)
go e@(Optional ) = Left (UnsupportedStatement e)
go e@(None ) = Left (UnsupportedStatement e)
go e@(Record {}) = Left (UnsupportedStatement e)
go e@(Union {}) = Left (UnsupportedStatement e)
go e@(Combine {}) = Left (UnsupportedStatement e)
go e@(CombineTypes {}) = Left (UnsupportedStatement e)
go e@(Prefer {}) = Left (UnsupportedStatement e)
go e@(RecordCompletion {}) = Left (UnsupportedStatement e)
go e@(Merge {}) = Left (UnsupportedStatement e)
go e@(ToMap {}) = Left (UnsupportedStatement e)
go e@(ShowConstructor {}) = Left (UnsupportedStatement e)
go e@(Field {}) = Left (UnsupportedStatement e)
go e@(Project {}) = Left (UnsupportedStatement e)
go e@(Assert {}) = Left (UnsupportedStatement e)
go e@(Equivalent {}) = Left (UnsupportedStatement e)
go e@(With {}) = Left (UnsupportedStatement e)
go e@(ImportAlt {}) = Left (UnsupportedStatement e)
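-- A hedged usage sketch (the parsing helper is assumed from the @dhall@
-- package, and the expected output follows the module examples above):
--
-- > do expr <- Dhall.inputExpr "{ bar = 1, baz = True }"
-- >    print (dhallToStatement expr "FOO")
-- >    -- Right "declare -r -A FOO=([bar]=1 [baz]=true)"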
{-| Compile a Dhall expression to a Bash expression
This only supports:
* @Bool@s
* @Natural@s
* @Integer@s
* @Text@s
-}
dhallToExpression
:: Expr s Void
-- ^ Dhall expression to compile
-> Either ExpressionError ByteString
-- ^ Bash expression or compile failure
dhallToExpression expr0 = go (Dhall.Core.normalize expr0)
where
go (BoolLit a) =
go (TextLit (if a then "true" else "false"))
go (NaturalLit a) =
go (IntegerLit (fromIntegral a))
go (IntegerLit a) =
go (TextLit (Chunks [] (Data.Text.pack (show a))))
go (TextLit (Chunks [] a)) = do
let bytes = Data.Text.Encoding.encodeUtf8 a
return (Text.ShellEscape.bytes (Text.ShellEscape.bash bytes))
go e@(Field (Union m) (Dhall.Core.fieldSelectionLabel -> k)) =
case Dhall.Map.lookup k m of
Just Nothing -> go (TextLit (Chunks [] k))
_ -> Left (UnsupportedExpression e)
go e
| Just text <- Dhall.Pretty.temporalToText e =
go (TextLit (Chunks [] text))
go e = Left (UnsupportedExpression e)
| Gabriel439/Haskell-Dhall-Library | dhall-bash/src/Dhall/Bash.hs | bsd-3-clause | 14,370 | 0 | 20 | 3,869 | 3,290 | 1,709 | 1,581 | 192 | 81 |
, testGroup "AU: standard typeclasses" [
testCase "show" $ "AU 15.5" @=? show (AU 15.5)
, testCase "showList" $ "[AU 15.3,AU 15.7]" @=? showList [AU 15.3, AU 15.7] ""
, testCase "showsPrec" $ "AU 15.5" @=? showsPrec 0 (AU 15.5) ""
, testCase "== (True)" $ True @=? (AU 15.5) == (AU 15.5)
, testCase "== (False)" $ False @=? (AU 15.3) == (AU 15.5)
, testCase "/= (True)" $ True @=? (AU 15.3) /= (AU 15.5)
, testCase "/= (False)" $ False @=? (AU 15.5) /= (AU 15.5)
, testCase "compare: LT" $ LT @=? (AU 15.3) `compare` (AU 15.5)
, testCase "compare: EQ" $ EQ @=? (AU 15.5) `compare` (AU 15.5)
, testCase "compare: GT" $ GT @=? (AU 15.7) `compare` (AU 15.5)
, testCase "<" $ True @=? (AU 15.3) < (AU 15.7)
, testCase "<=" $ True @=? (AU 15.3) <= (AU 15.7)
, testCase ">" $ False @=? (AU 15.3) > (AU 15.7)
, testCase ">=" $ False @=? (AU 15.3) >= (AU 15.7)
, testCase "max" $ (AU 15.7) @=? max (AU 15.3) (AU 15.7)
, testCase "min" $ (AU 15.3) @=? min (AU 15.3) (AU 15.7)
, testCase "abs" $ (AU 15.7) @=? abs (AU (-15.7))
, testCase "signum > 0" $ (AU 1.0) @=? signum (AU 15.5)
, testCase "signum = 0" $ (AU 0.0) @=? signum (AU 0.0)
, testCase "signum < 0" $ (AU $ -1.0) @=? signum (AU $ -15.5)
, testCase "toRational" $ (31 % 2) @=? toRational (AU 15.5)
, testCase "recip" $ (AU 0.01) @=? recip (AU 100)
, testCase "properFraction" $ (15, AU 0.5) @=? properFraction (AU 15.5)
]
| Alexander-Ignatyev/astro | test/Data/Astro/tmp.hs | bsd-3-clause | 1,720 | 2 | 12 | 610 | 720 | 358 | 362 | -1 | -1 |
{-# LANGUAGE OverloadedStrings, NegativeLiterals #-}
module ETA.CodeGen.Prim where
import ETA.Main.DynFlags
import ETA.Types.TyCon
import ETA.Types.Type
import ETA.StgSyn.StgSyn
import ETA.Prelude.PrimOp
import ETA.Utils.Panic
import ETA.Utils.FastString
import Data.Maybe
import Codec.JVM
import ETA.CodeGen.ArgRep
import ETA.CodeGen.Monad
import ETA.CodeGen.Foreign
import ETA.CodeGen.Env
import ETA.CodeGen.Layout
import ETA.CodeGen.Types
import ETA.CodeGen.Utils
import ETA.CodeGen.Rts
import ETA.CodeGen.Name
import ETA.Debug
import Data.Monoid ((<>))
import Data.Foldable (fold)
import Data.Text (Text)
cgOpApp :: StgOp
-> [StgArg]
-> Type
-> CodeGen ()
cgOpApp (StgFCallOp fcall _) args resType = cgForeignCall fcall args resType
-- TODO: Is this primop necessary like in GHC?
cgOpApp (StgPrimOp TagToEnumOp) args@[_arg] resType = do
dflags <- getDynFlags
codes <- getNonVoidArgCodes args
let code = case codes of
[code'] -> code'
_ -> panic "TagToEnumOp had void arg"
emitReturn [mkLocDirect True $ tagToClosure dflags tyCon code]
where tyCon = tyConAppTyCon resType
cgOpApp (StgPrimOp ObjectArrayNewOp) args resType = do
[nCode] <- getNonVoidArgCodes args
emitReturn [mkLocDirect False (arrayFt, nCode <> new arrayFt)]
where arrayFt
| arrayFt' == jobject = jarray jobject
| otherwise = arrayFt'
where arrayFt' = fromJust
. repFieldType_maybe
. head . tail . snd
$ splitTyConApp resType
cgOpApp (StgPrimOp primOp) args resType = do
dflags <- getDynFlags
argCodes <- getNonVoidArgFtCodes args
case shouldInlinePrimOp dflags primOp argCodes resType of
Left (rtsGroup, rtsFunName) -> do
loadArgs <- getNonVoidArgCodes args
let (_, argTypes, _, _, _) = primOpSig primOp
fts = repFieldTypes argTypes
withContinuation $ loadContext
<> fold loadArgs
<> invokestatic (mkMethodRef rtsGroup rtsFunName
(contextType:fts) (ret closureType))
-- TODO: Optimize: Remove the intermediate temp locations
-- and allow direct code locations
Right codes'
| ReturnsPrim VoidRep <- resultInfo
-> do
codes <- codes'
emit $ fold codes
emitReturn []
| ReturnsPrim rep <- resultInfo
-- res <- newTemp rep'
-- f [res]
-- emitReturn [res]
-> do -- Assumes Returns Prim is of Non-closure type
codes <- codes'
emitReturn [ mkLocDirect False (primRepFieldType rep, head codes) ]
| ReturnsAlg tyCon <- resultInfo, isUnboxedTupleTyCon tyCon
-> do -- locs <- newUnboxedTupleLocs resType
-- f locs
codes <- codes'
let reps = getUnboxedResultReps resType
emitReturn
. map (\(rep, code) ->
mkLocDirect (isGcPtrRep rep) (primRepFieldType rep, code))
$ zip reps codes
| otherwise -> panic "cgPrimOp"
where resultInfo = getPrimOpResultInfo primOp
cgOpApp (StgPrimCallOp (PrimCall label _)) args _resType = do
argsFtCodes <- getNonVoidArgFtCodes args
let (argFts, callArgs) = unzip argsFtCodes
withContinuation $ loadContext
<> fold callArgs
<> invokestatic (mkMethodRef clsName methodName
(contextType:argFts) (ret closureType))
where (clsName, methodName) = labelToMethod (unpackFS label)
inlinePrimCall :: String -> [(FieldType, Code)] -> Code
inlinePrimCall name = error $ "inlinePrimCall: unimplemented = " ++ name
-- TODO: This is a hack to get around the bytecode verifier for array-related
-- primops that are used generically.
arrayFtCast :: FieldType -> (FieldType, Code)
arrayFtCast ft
| ft == jobject = (objArray, gconv ft objArray)
| otherwise = (ft, mempty)
where objArray = jarray jobject
shouldInlinePrimOp :: DynFlags -> PrimOp -> [(FieldType, Code)] -> Type -> Either (Text, Text) (CodeGen [Code])
shouldInlinePrimOp _dflags ObjectArrayAtOp ((origFt, arrayObj):args) _ =
Right $ return [arrayObj <> maybeCast <> fold codes <> gaload elemFt]
where (arrayFt, maybeCast) = arrayFtCast origFt
(_, codes) = unzip args
elemFt = fromJust $ getArrayElemFt arrayFt
shouldInlinePrimOp _dflags ObjectArraySetOp ((origFt, arrayObj):args) _ =
Right $ return [arrayObj <> maybeCast <> fold codes <> gastore elemFt]
where (arrayFt, maybeCast) = arrayFtCast origFt
(_, codes) = unzip args
elemFt = fromJust $ getArrayElemFt arrayFt
shouldInlinePrimOp _ ArrayLengthOp [(origFt, arrayObj)] _ =
Right $ return [arrayObj <> maybeCast <> arraylength arrayFt]
where (arrayFt, maybeCast) = arrayFtCast origFt
shouldInlinePrimOp _ ClassCastOp args resType = Right $
let (_, codes) = unzip args
fromFt = fst (head args)
toFt = fromJust . repFieldType_maybe $ resType
in return [normalOp (gconv fromFt toFt) codes]
shouldInlinePrimOp dflags op args _ = shouldInlinePrimOp' dflags op $ snd (unzip args)
shouldInlinePrimOp' :: DynFlags -> PrimOp -> [Code] -> Either (Text, Text) (CodeGen [Code])
-- TODO: Inline array operations conditionally
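-- Note: the copy/clone clauses below all delegate to helpers on the runtime's
-- StgArray class; the (safe) freeze and thaw ops are compiled as clones, while
-- the unsafe variants (handled further down and in simpleOp) are the identity.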
shouldInlinePrimOp' _ CopyArrayOp args = Right $ return
[
fold args
<> invokestatic (mkMethodRef stgArray "copyArray"
[stgArrayType, jint, stgArrayType, jint, jint]
void)
]
shouldInlinePrimOp' _ CopyMutableArrayOp args = Right $ return
[
fold args
<> invokestatic (mkMethodRef stgArray "copyArray"
[stgArrayType, jint, stgArrayType, jint, jint]
void)
]
shouldInlinePrimOp' _ CloneArrayOp args = Right $ return
[
fold args
<> invokestatic (mkMethodRef stgArray "cloneArray" [stgArrayType, jint, jint]
(ret stgArrayType))
]
shouldInlinePrimOp' _ CloneMutableArrayOp args = Right $ return
[
fold args
<> invokestatic (mkMethodRef stgArray "cloneArray" [stgArrayType, jint, jint]
(ret stgArrayType))
]
shouldInlinePrimOp' _ FreezeArrayOp args = Right $ return
[
fold args
<> invokestatic (mkMethodRef stgArray "cloneArray" [stgArrayType, jint, jint]
(ret stgArrayType))
]
shouldInlinePrimOp' _ ThawArrayOp args = Right $ return
[
fold args
<> invokestatic (mkMethodRef stgArray "cloneArray" [stgArrayType, jint, jint]
(ret stgArrayType))
]
shouldInlinePrimOp' _ CopySmallArrayOp args = Right $ return
[
fold args
<> invokestatic (mkMethodRef stgArray "copyArray"
[stgArrayType, jint, stgArrayType, jint, jint]
void)
]
shouldInlinePrimOp' _ CopySmallMutableArrayOp args = Right $ return
[
fold args
<> invokestatic (mkMethodRef stgArray "copyArray"
[stgArrayType, jint, stgArrayType, jint, jint]
void)
]
shouldInlinePrimOp' _ CloneSmallArrayOp args = Right $ return
[
fold args
<> invokestatic (mkMethodRef stgArray "cloneArray" [stgArrayType, jint, jint]
(ret stgArrayType))
]
shouldInlinePrimOp' _ CloneSmallMutableArrayOp args = Right $ return
[
fold args
<> invokestatic (mkMethodRef stgArray "cloneArray" [stgArrayType, jint, jint]
(ret stgArrayType))
]
shouldInlinePrimOp' _ FreezeSmallArrayOp args = Right $ return
[
fold args
<> invokestatic (mkMethodRef stgArray "cloneArray" [stgArrayType, jint, jint]
(ret stgArrayType))
]
shouldInlinePrimOp' _ ThawSmallArrayOp args = Right $ return
[
fold args
<> invokestatic (mkMethodRef stgArray "cloneArray" [stgArrayType, jint, jint]
(ret stgArrayType))
]
shouldInlinePrimOp' _ CopyArrayArrayOp args = Right $ return
[
fold args
<> invokestatic (mkMethodRef stgArray "copyArray"
[stgArrayType, jint, stgArrayType, jint, jint]
void)
]
shouldInlinePrimOp' _ CopyMutableArrayArrayOp args = Right $ return
[
fold args
<> invokestatic (mkMethodRef stgArray "copyArray"
[stgArrayType, jint, stgArrayType, jint, jint]
void)
]
shouldInlinePrimOp' _ NewByteArrayOp_Char args = Right $ return
[
fold args
<> invokestatic (mkMethodRef stgByteArray "create" [jint] (ret stgByteArrayType))
]
shouldInlinePrimOp' _ NewPinnedByteArrayOp_Char args = Right $ return
[
fold args
<> iconst jbool 1
<> invokestatic (mkMethodRef stgByteArray "create" [jint, jbool] (ret stgByteArrayType))
]
shouldInlinePrimOp' _ NewAlignedPinnedByteArrayOp_Char args = Right $ return
[
fold args
<> iconst jbool 1
<> invokestatic (mkMethodRef stgByteArray "create" [jint, jint, jbool] (ret stgByteArrayType))
]
shouldInlinePrimOp' _ NewArrayOp args = Right $ return
[
fold args
<> invokestatic (mkMethodRef stgArray "create" [jint, closureType] (ret stgArrayType))
]
shouldInlinePrimOp' _ NewSmallArrayOp args = Right $ return
[
fold args
<> invokestatic (mkMethodRef stgArray "create" [jint, closureType] (ret stgArrayType))
]
shouldInlinePrimOp' _ NewArrayArrayOp args = Right $ return
[
fold args
<> invokestatic (mkMethodRef stgArray "create" [jint, closureType] (ret stgArrayType))
]
shouldInlinePrimOp' _ NewMutVarOp args = Right $ return
[
new stgMutVarType
<> dup stgMutVarType
<> fold args
<> invokespecial (mkMethodRef stgMutVar "<init>" [closureType] void)
]
shouldInlinePrimOp' _ NewTVarOp args = Right $ return
[
new stgTVarType
<> dup stgTVarType
<> fold args
<> invokespecial (mkMethodRef stgTVar "<init>" [closureType] void)
]
shouldInlinePrimOp' _ NewMVarOp _ = Right $ return
[
new stgMVarType
<> dup stgMVarType
<> aconst_null closureType
<> invokespecial (mkMethodRef stgMVar "<init>" [closureType] void)
]
shouldInlinePrimOp' _ IsEmptyMVarOp [mvar] = Right $ return
[ intCompOp ifnull [mvar <> mVarValue] ]
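-- DelayOp's argument is in microseconds; split it into whole milliseconds and
-- the remaining nanoseconds for Thread.sleep(long millis, int nanos).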
shouldInlinePrimOp' _ DelayOp [time] = Right $
let millis = time <> iconst jint 1000 <> idiv <> gconv jint jlong
nanos = time <> iconst jint 1000 <> irem
<> iconst jint 1000 <> imul
in return [ normalOp (invokestatic (mkMethodRef "java/lang/Thread" "sleep" [jlong, jint] void)) [millis, nanos] ]
shouldInlinePrimOp' _ MakeStableNameOp args = Right $ return
[ normalOp (invokestatic (mkMethodRef "java/lang/System" "identityHashCode" [jobject] (ret jint))) args ]
shouldInlinePrimOp' _ MakeStablePtrOp args = Right $ return
[ normalOp (invokestatic (mkMethodRef "eta/runtime/stg/StablePtrTable" "makeStablePtr" [closureType] (ret jint))) args ]
shouldInlinePrimOp' _ DeRefStablePtrOp args = Right $ return
[ normalOp (invokestatic (mkMethodRef "eta/runtime/stg/StablePtrTable" "getClosure" [jint] (ret closureType))) args ]
shouldInlinePrimOp' _ UnsafeThawArrayOp args = Right $ return [fold args]
shouldInlinePrimOp' _ UnsafeThawSmallArrayOp args = Right $ return [fold args]
shouldInlinePrimOp' _ primOp args
| primOpOutOfLine primOp = Left $ mkRtsPrimOp primOp
| otherwise = Right $ emitPrimOp primOp args
mkRtsPrimOp :: PrimOp -> (Text, Text)
mkRtsPrimOp RaiseOp = (stgExceptionGroup, "raise")
mkRtsPrimOp CatchOp = (stgExceptionGroup, "catch_")
mkRtsPrimOp RaiseIOOp = (stgExceptionGroup, "raise")
mkRtsPrimOp MaskAsyncExceptionsOp = (stgExceptionGroup, "maskAsyncExceptions")
mkRtsPrimOp MaskUninterruptibleOp = (stgExceptionGroup, "maskUninterruptible")
mkRtsPrimOp UnmaskAsyncExceptionsOp = (stgExceptionGroup, "unmaskAsyncExceptions")
mkRtsPrimOp MaskStatus = (stgExceptionGroup, "getMaskingState")
mkRtsPrimOp FloatDecode_IntOp = (ioGroup, "decodeFloat_Int")
mkRtsPrimOp AtomicallyOp = (stmGroup, "atomically")
mkRtsPrimOp RetryOp = (stmGroup, "retry")
mkRtsPrimOp CatchRetryOp = (stmGroup, "catchRetry")
mkRtsPrimOp CatchSTMOp = (stmGroup, "catchSTM")
mkRtsPrimOp Check = (stmGroup, "check")
mkRtsPrimOp ReadTVarOp = (stmGroup, "readTVar")
mkRtsPrimOp ReadTVarIOOp = (stmGroup, "readTVarIO")
mkRtsPrimOp WriteTVarOp = (stmGroup, "writeTVar")
mkRtsPrimOp TakeMVarOp = (concGroup, "takeMVar")
mkRtsPrimOp TryTakeMVarOp = (concGroup, "tryTakeMVar")
mkRtsPrimOp PutMVarOp = (concGroup, "putMVar")
mkRtsPrimOp TryPutMVarOp = (concGroup, "tryPutMVar")
mkRtsPrimOp ReadMVarOp = (concGroup, "readMVar")
mkRtsPrimOp TryReadMVarOp = (concGroup, "tryReadMVar")
mkRtsPrimOp ForkOp = (concGroup, "fork")
mkRtsPrimOp ForkOnOp = (concGroup, "forkOn")
mkRtsPrimOp KillThreadOp = (stgExceptionGroup, "killThread")
mkRtsPrimOp YieldOp = (concGroup, "yield")
mkRtsPrimOp LabelThreadOp = (concGroup, "labelThread")
mkRtsPrimOp IsCurrentThreadBoundOp = (concGroup, "isCurrentThreadBound")
mkRtsPrimOp NoDuplicateOp = (stgGroup, "noDuplicate")
mkRtsPrimOp ThreadStatusOp = (concGroup, "threadStatus")
mkRtsPrimOp MkWeakOp = (stgGroup, "mkWeak")
mkRtsPrimOp MkWeakNoFinalizerOp = (stgGroup, "mkWeakNoFinalizer")
mkRtsPrimOp AddCFinalizerToWeakOp = (stgGroup, "addJavaFinalizerToWeak")
mkRtsPrimOp DeRefWeakOp = (stgGroup, "deRefWeak")
mkRtsPrimOp FinalizeWeakOp = (stgGroup, "finalizeWeak")
mkRtsPrimOp AtomicModifyMutVarOp = (ioGroup, "atomicModifyMutVar")
mkRtsPrimOp CasMutVarOp = (ioGroup, "casMutVar")
mkRtsPrimOp GetSparkOp = (parGroup, "getSpark")
mkRtsPrimOp NumSparks = (parGroup, "numSparks")
mkRtsPrimOp NewBCOOp = (interpGroup, "newBCO")
mkRtsPrimOp TraceEventOp = (concGroup, "traceEvent")
mkRtsPrimOp primop = pprPanic "mkRtsPrimOp: unimplemented!" (ppr primop)
cgPrimOp :: PrimOp -- the op
-> [StgArg] -- arguments
-> CodeGen [Code]
cgPrimOp op args = do
argExprs <- getNonVoidArgCodes args
emitPrimOp op argExprs
-- emitPrimOp :: [CgLoc] -- where to put the results
-- -> PrimOp -- the op
-- -> [Code] -- arguments
-- -> CodeGen ()
emitPrimOp :: PrimOp -> [Code] -> CodeGen [Code]
emitPrimOp IndexOffAddrOp_Char [arg1, arg2]
= return [ arg1
<> arg2
<> invokevirtual (mkMethodRef jstringC "charAt"
[jint] (ret jchar))]
-- TODO: You may have to cast to int or do some extra stuff here
-- or maybe instead reference the direct byte array
emitPrimOp DataToTagOp [arg] = return [ getTagMethod arg <> iconst jint 1 <> isub ]
emitPrimOp IntQuotRemOp args = do
codes1 <- emitPrimOp IntQuotOp args
codes2 <- emitPrimOp IntRemOp args
return $ codes1 ++ codes2
emitPrimOp WordQuotRemOp args = do
codes1 <- emitPrimOp WordQuotOp args
codes2 <- emitPrimOp WordRemOp args
return $ codes1 ++ codes2
emitPrimOp IntAddCOp [arg1, arg2] = do
tmp <- newTemp False jint
emit $ storeLoc tmp (arg1 <> arg2 <> iadd)
let sum = loadLoc tmp
return $ [ sum
, (arg1 <> sum <> ixor)
<> (arg2 <> sum <> ixor)
<> iand
<> inot
]
emitPrimOp IntSubCOp [arg1, arg2] = do
tmp <- newTemp False jint
emit $ storeLoc tmp (arg1 <> arg2 <> isub)
let diff = loadLoc tmp
return $ [ diff
, (arg1 <> arg2 <> ixor)
<> (arg1 <> diff <> ixor)
<> iand
<> inot
]
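-- IntMulMayOfloOp: do the multiply in 64 bits, then compare the product with
-- its value truncated to int and sign-extended back; lcmp yields 0 exactly
-- when the product fits in 32 bits, and nonzero when it may overflow.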
emitPrimOp IntMulMayOfloOp [arg1, arg2] = do
tmp <- newTemp False jlong
emit $ storeLoc tmp ( (arg1 <> gconv jint jlong)
<> (arg2 <> gconv jint jlong)
<> lmul )
let mul = loadLoc tmp
return $ [ mul
<> gconv jlong jint
<> gconv jint jlong
<> mul
<> lcmp
]
-- Spark Ops
emitPrimOp SparkOp [arg] = do
tmp <- newTemp True closureType
emit $ storeLoc tmp arg
let loadArg = loadLoc tmp
return [ loadContext
<> contextMyCapability
<> loadArg
<> invokevirtual (mkMethodRef capability "newSpark" [closureType]
(ret jbool))
<> pop jbool
<> loadArg ]
emitPrimOp op [arg]
| nopOp op = return [arg]
emitPrimOp op args
| Just execute <- simpleOp op
= return [execute args]
emitPrimOp op _ = pprPanic "emitPrimOp: unimplemented" (ppr op)
nopOp :: PrimOp -> Bool
nopOp Int2WordOp = True
nopOp Word2IntOp = True
nopOp OrdOp = True
nopOp ChrOp = True
nopOp Int642Word64 = True
nopOp Word642Int64 = True
nopOp JBool2IntOp = True
nopOp _ = False
normalOp :: Code -> [Code] -> Code
normalOp code args = fold args <> code
idOp :: [Code] -> Code
idOp = normalOp mempty
intCompOp :: (Code -> Code -> Code) -> [Code] -> Code
intCompOp op args = flip normalOp args $ op (iconst jint 1) (iconst jint 0)
simpleOp :: PrimOp -> Maybe ([Code] -> Code)
simpleOp MyThreadIdOp = Just $ normalOp $ loadContext <> currentTSOField
-- Array# & MutableArray# ops
simpleOp UnsafeFreezeArrayOp = Just idOp
simpleOp SameMutableArrayOp = Just $ intCompOp if_acmpeq
simpleOp SizeofArrayOp = Just $
normalOp $ invokevirtual (mkMethodRef stgArray "size" [] (ret jint))
simpleOp SizeofMutableArrayOp = Just $
normalOp $ invokevirtual (mkMethodRef stgArray "size" [] (ret jint))
-- TODO: Inline the get/set's
simpleOp WriteArrayOp = Just $
normalOp $ invokevirtual
$ mkMethodRef stgArray "set" [jint, closureType] void
simpleOp ReadArrayOp = Just $
normalOp $ invokevirtual
$ mkMethodRef stgArray "get" [jint] (ret closureType)
simpleOp IndexArrayOp = Just $
normalOp $ invokevirtual
$ mkMethodRef stgArray "get" [jint] (ret closureType)
-- SmallArray# & SmallMutableArray# ops
simpleOp UnsafeFreezeSmallArrayOp = Just idOp
simpleOp SameSmallMutableArrayOp = Just $ intCompOp if_acmpeq
simpleOp SizeofSmallArrayOp = Just $
normalOp $ invokevirtual (mkMethodRef stgArray "size" [] (ret jint))
simpleOp SizeofSmallMutableArrayOp = Just $
normalOp $ invokevirtual (mkMethodRef stgArray "size" [] (ret jint))
-- TODO: Inline the get/set's
simpleOp WriteSmallArrayOp = Just $
normalOp $ invokevirtual
$ mkMethodRef stgArray "set" [jint, closureType] void
simpleOp ReadSmallArrayOp = Just $
normalOp $ invokevirtual
$ mkMethodRef stgArray "get" [jint] (ret closureType)
simpleOp IndexSmallArrayOp = Just $
normalOp $ invokevirtual
$ mkMethodRef stgArray "get" [jint] (ret closureType)
-- ArrayArray# & MutableArrayArray# ops
simpleOp UnsafeFreezeArrayArrayOp = Just idOp
simpleOp SameMutableArrayArrayOp = Just $ intCompOp if_acmpeq
simpleOp SizeofArrayArrayOp = Just $
normalOp $ invokevirtual (mkMethodRef stgArray "size" [] (ret jint))
simpleOp SizeofMutableArrayArrayOp = Just $
normalOp $ invokevirtual (mkMethodRef stgArray "size" [] (ret jint))
-- TODO: Inline the get/set's
simpleOp IndexArrayArrayOp_ByteArray = Just $
normalOp $ invokevirtual (mkMethodRef stgArray "get" [jint] (ret closureType))
<> gconv closureType stgByteArrayType
simpleOp IndexArrayArrayOp_ArrayArray = Just $
normalOp $ invokevirtual (mkMethodRef stgArray "get" [jint] (ret closureType))
<> gconv closureType stgArrayType
simpleOp ReadArrayArrayOp_ByteArray = Just $
normalOp $ invokevirtual (mkMethodRef stgArray "get" [jint] (ret closureType))
<> gconv closureType stgByteArrayType
simpleOp ReadArrayArrayOp_MutableByteArray = Just $
normalOp $ invokevirtual (mkMethodRef stgArray "get" [jint] (ret closureType))
<> gconv closureType stgByteArrayType
simpleOp ReadArrayArrayOp_ArrayArray = Just $
normalOp $ invokevirtual (mkMethodRef stgArray "get" [jint] (ret closureType))
<> gconv closureType stgArrayType
simpleOp ReadArrayArrayOp_MutableArrayArray = Just $
normalOp $ invokevirtual (mkMethodRef stgArray "get" [jint] (ret closureType))
<> gconv closureType stgArrayType
simpleOp WriteArrayArrayOp_ByteArray = Just $
normalOp $ invokevirtual
$ mkMethodRef stgArray "set" [jint, closureType] void
simpleOp WriteArrayArrayOp_MutableByteArray = Just $
normalOp $ invokevirtual
$ mkMethodRef stgArray "set" [jint, closureType] void
simpleOp WriteArrayArrayOp_ArrayArray = Just $
normalOp $ invokevirtual
$ mkMethodRef stgArray "set" [jint, closureType] void
simpleOp WriteArrayArrayOp_MutableArrayArray = Just $
normalOp $ invokevirtual
$ mkMethodRef stgArray "set" [jint, closureType] void
-- ByteArray# & MutableByteArray# ops
simpleOp ByteArrayContents_Char = Just $ normalOp byteArrayBuf
simpleOp UnsafeFreezeByteArrayOp = Just idOp
simpleOp IndexByteArrayOp_Char = Just $ byteArrayIndexOp jbyte preserveByte
simpleOp IndexByteArrayOp_WideChar = Just $ byteArrayIndexOp jint mempty
simpleOp IndexByteArrayOp_Int = Just $ byteArrayIndexOp jint mempty
simpleOp IndexByteArrayOp_Word = Just $ byteArrayIndexOp jint mempty
simpleOp IndexByteArrayOp_Addr = Just $ byteArrayIndexOp jlong mempty
simpleOp IndexByteArrayOp_Float = Just $ byteArrayIndexOp jfloat mempty
simpleOp IndexByteArrayOp_Double = Just $ byteArrayIndexOp jdouble mempty
simpleOp IndexByteArrayOp_StablePtr = Just $ byteArrayIndexOp jint mempty
simpleOp IndexByteArrayOp_Int8 = Just $ byteArrayIndexOp jbyte preserveByte
simpleOp IndexByteArrayOp_Int16 = Just $ byteArrayIndexOp jshort preserveShort
simpleOp IndexByteArrayOp_Int32 = Just $ byteArrayIndexOp jint mempty
simpleOp IndexByteArrayOp_Int64 = Just $ byteArrayIndexOp jlong mempty
simpleOp IndexByteArrayOp_Word8 = Just $ byteArrayIndexOp jbyte preserveByte
simpleOp IndexByteArrayOp_Word16 = Just $ byteArrayIndexOp jshort preserveShort
simpleOp IndexByteArrayOp_Word32 = Just $ byteArrayIndexOp jint mempty
simpleOp IndexByteArrayOp_Word64 = Just $ byteArrayIndexOp jlong mempty
simpleOp ReadByteArrayOp_Char = Just $ byteArrayIndexOp jbyte preserveByte
simpleOp ReadByteArrayOp_WideChar = Just $ byteArrayIndexOp jint mempty
simpleOp ReadByteArrayOp_Int = Just $ byteArrayIndexOp jint mempty
simpleOp ReadByteArrayOp_Word = Just $ byteArrayIndexOp jint mempty
simpleOp ReadByteArrayOp_Addr = Just $ byteArrayIndexOp jlong mempty
simpleOp ReadByteArrayOp_Float = Just $ byteArrayIndexOp jfloat mempty
simpleOp ReadByteArrayOp_Double = Just $ byteArrayIndexOp jdouble mempty
simpleOp ReadByteArrayOp_StablePtr = Just $ byteArrayIndexOp jint mempty
simpleOp ReadByteArrayOp_Int8 = Just $ byteArrayIndexOp jbyte preserveByte
simpleOp ReadByteArrayOp_Int16 = Just $ byteArrayIndexOp jshort preserveShort
simpleOp ReadByteArrayOp_Int32 = Just $ byteArrayIndexOp jint mempty
simpleOp ReadByteArrayOp_Int64 = Just $ byteArrayIndexOp jlong mempty
simpleOp ReadByteArrayOp_Word8 = Just $ byteArrayIndexOp jbyte preserveByte
simpleOp ReadByteArrayOp_Word16 = Just $ byteArrayIndexOp jshort preserveShort
simpleOp ReadByteArrayOp_Word32 = Just $ byteArrayIndexOp jint mempty
simpleOp ReadByteArrayOp_Word64 = Just $ byteArrayIndexOp jlong mempty
simpleOp WriteByteArrayOp_Char = Just $ byteArrayWriteOp jbyte mempty
simpleOp WriteByteArrayOp_WideChar = Just $ byteArrayWriteOp jint mempty
simpleOp WriteByteArrayOp_Int = Just $ byteArrayWriteOp jint mempty
simpleOp WriteByteArrayOp_Word = Just $ byteArrayWriteOp jint mempty
simpleOp WriteByteArrayOp_Addr = Just $ byteArrayWriteOp jlong mempty
simpleOp WriteByteArrayOp_Float = Just $ byteArrayWriteOp jfloat mempty
simpleOp WriteByteArrayOp_Double = Just $ byteArrayWriteOp jdouble mempty
-- TODO: Verify writes for Word/Int 8/16 - add additional casts?
simpleOp WriteByteArrayOp_StablePtr = Just $ byteArrayWriteOp jint mempty
simpleOp WriteByteArrayOp_Int8 = Just $ byteArrayWriteOp jbyte preserveByte
simpleOp WriteByteArrayOp_Int16 = Just $ byteArrayWriteOp jshort preserveShort
simpleOp WriteByteArrayOp_Int32 = Just $ byteArrayWriteOp jint mempty
simpleOp WriteByteArrayOp_Int64 = Just $ byteArrayWriteOp jlong mempty
simpleOp WriteByteArrayOp_Word8 = Just $ byteArrayWriteOp jbyte preserveByte
simpleOp WriteByteArrayOp_Word16 = Just $ byteArrayWriteOp jshort preserveShort
simpleOp WriteByteArrayOp_Word32 = Just $ byteArrayWriteOp jint mempty
simpleOp WriteByteArrayOp_Word64 = Just $ byteArrayWriteOp jlong mempty
-- Int# ops
simpleOp IntAddOp = Just $ normalOp iadd
simpleOp IntSubOp = Just $ normalOp isub
simpleOp IntMulOp = Just $ normalOp imul
simpleOp IntQuotOp = Just $ normalOp idiv
simpleOp IntRemOp = Just $ normalOp irem
simpleOp AndIOp = Just $ normalOp iand
simpleOp OrIOp = Just $ normalOp ior
simpleOp XorIOp = Just $ normalOp ixor
simpleOp NotIOp = Just $ normalOp inot
simpleOp ISllOp = Just $ normalOp ishl
simpleOp ISraOp = Just $ normalOp ishr
simpleOp ISrlOp = Just $ normalOp iushr
simpleOp IntNegOp = Just $ normalOp ineg
simpleOp IntEqOp = Just $ intCompOp if_icmpeq
simpleOp IntNeOp = Just $ intCompOp if_icmpne
simpleOp IntLeOp = Just $ intCompOp if_icmple
simpleOp IntLtOp = Just $ intCompOp if_icmplt
simpleOp IntGeOp = Just $ intCompOp if_icmpge
simpleOp IntGtOp = Just $ intCompOp if_icmpgt
-- Word# ops
-- TODO: Take a look at compareUnsigned in JDK 8
-- and see if that's more efficient
simpleOp WordEqOp = Just $ intCompOp if_icmpeq
simpleOp WordNeOp = Just $ intCompOp if_icmpne
simpleOp WordAddOp = Just $ normalOp iadd
simpleOp WordSubOp = Just $ normalOp isub
simpleOp WordMulOp = Just $ normalOp imul
simpleOp WordQuotOp = Just $ unsignedOp ldiv
simpleOp WordRemOp = Just $ unsignedOp lrem
simpleOp WordGtOp = Just $ unsignedCmp ifgt
simpleOp WordGeOp = Just $ unsignedCmp ifge
simpleOp WordLeOp = Just $ unsignedCmp ifle
simpleOp WordLtOp = Just $ unsignedCmp iflt
-- TODO: Verify these are correct for unsigned operations
simpleOp AndOp = Just $ normalOp iand
simpleOp OrOp = Just $ normalOp ior
simpleOp XorOp = Just $ normalOp ixor
simpleOp NotOp = Just $ normalOp inot
simpleOp SllOp = Just $ normalOp ishl
simpleOp SrlOp = Just $ normalOp iushr
-- Char# ops
simpleOp CharEqOp = Just $ intCompOp if_icmpeq
simpleOp CharNeOp = Just $ intCompOp if_icmpne
simpleOp CharGtOp = Just $ unsignedCmp ifgt
simpleOp CharGeOp = Just $ unsignedCmp ifge
simpleOp CharLeOp = Just $ unsignedCmp ifle
simpleOp CharLtOp = Just $ unsignedCmp iflt
-- Double# ops
simpleOp DoubleEqOp = Just $ typedCmp jdouble ifeq
simpleOp DoubleNeOp = Just $ typedCmp jdouble ifne
simpleOp DoubleGeOp = Just $ typedCmp jdouble ifge
simpleOp DoubleLeOp = Just $ typedCmp jdouble ifle
simpleOp DoubleGtOp = Just $ typedCmp jdouble ifgt
simpleOp DoubleLtOp = Just $ typedCmp jdouble iflt
simpleOp DoubleAddOp = Just $ normalOp dadd
simpleOp DoubleSubOp = Just $ normalOp dsub
simpleOp DoubleMulOp = Just $ normalOp dmul
simpleOp DoubleDivOp = Just $ normalOp ddiv
simpleOp DoubleNegOp = Just $ normalOp dneg
simpleOp DoubleExpOp = Just $ normalOp $ doubleMathEndoOp "exp"
simpleOp DoubleLogOp = Just $ normalOp $ doubleMathEndoOp "log"
simpleOp DoubleSqrtOp = Just $ normalOp $ doubleMathEndoOp "sqrt"
simpleOp DoubleSinOp = Just $ normalOp $ doubleMathEndoOp "sin"
simpleOp DoubleCosOp = Just $ normalOp $ doubleMathEndoOp "cos"
simpleOp DoubleTanOp = Just $ normalOp $ doubleMathEndoOp "tan"
simpleOp DoubleAsinOp = Just $ normalOp $ doubleMathEndoOp "asin"
simpleOp DoubleAcosOp = Just $ normalOp $ doubleMathEndoOp "acos"
simpleOp DoubleAtanOp = Just $ normalOp $ doubleMathEndoOp "atan"
simpleOp DoubleSinhOp = Just $ normalOp $ doubleMathEndoOp "sinh"
simpleOp DoubleCoshOp = Just $ normalOp $ doubleMathEndoOp "cosh"
simpleOp DoubleTanhOp = Just $ normalOp $ doubleMathEndoOp "tanh"
simpleOp DoublePowerOp = Just $ normalOp $ doubleMathOp "pow" [jdouble, jdouble] jdouble
-- Float# ops
simpleOp FloatEqOp = Just $ typedCmp jfloat ifeq
simpleOp FloatNeOp = Just $ typedCmp jfloat ifne
simpleOp FloatGeOp = Just $ typedCmp jfloat ifge
simpleOp FloatLeOp = Just $ typedCmp jfloat ifle
simpleOp FloatGtOp = Just $ typedCmp jfloat ifgt
simpleOp FloatLtOp = Just $ typedCmp jfloat iflt
simpleOp FloatAddOp = Just $ normalOp fadd
simpleOp FloatSubOp = Just $ normalOp fsub
simpleOp FloatMulOp = Just $ normalOp fmul
simpleOp FloatDivOp = Just $ normalOp fdiv
simpleOp FloatNegOp = Just $ normalOp fneg
simpleOp FloatExpOp = Just $ normalOp $ floatMathEndoOp "exp"
simpleOp FloatLogOp = Just $ normalOp $ floatMathEndoOp "log"
simpleOp FloatSqrtOp = Just $ normalOp $ floatMathEndoOp "sqrt"
simpleOp FloatSinOp = Just $ normalOp $ floatMathEndoOp "sin"
simpleOp FloatCosOp = Just $ normalOp $ floatMathEndoOp "cos"
simpleOp FloatTanOp = Just $ normalOp $ floatMathEndoOp "tan"
simpleOp FloatAsinOp = Just $ normalOp $ floatMathEndoOp "asin"
simpleOp FloatAcosOp = Just $ normalOp $ floatMathEndoOp "acos"
simpleOp FloatAtanOp = Just $ normalOp $ floatMathEndoOp "atan"
simpleOp FloatSinhOp = Just $ normalOp $ floatMathEndoOp "sinh"
simpleOp FloatCoshOp = Just $ normalOp $ floatMathEndoOp "cosh"
simpleOp FloatTanhOp = Just $ normalOp $ floatMathEndoOp "tanh"
simpleOp FloatPowerOp = Just $ \[arg1, arg2] ->
(arg1 <> gconv jfloat jdouble)
<> (arg2 <> floatMathOp "pow" [jdouble, jdouble] jdouble)
-- Conversions
simpleOp Int2DoubleOp = Just $ normalOp $ gconv jint jdouble
simpleOp Double2IntOp = Just $ normalOp $ gconv jdouble jint
simpleOp Int2FloatOp = Just $ normalOp $ gconv jint jfloat
simpleOp Float2IntOp = Just $ normalOp $ gconv jfloat jint
simpleOp Float2DoubleOp = Just $ normalOp $ gconv jfloat jdouble
simpleOp Double2FloatOp = Just $ normalOp $ gconv jdouble jfloat
simpleOp Word2FloatOp = Just $ normalOp $ unsignedExtend mempty <> gconv jlong jfloat
simpleOp Word2DoubleOp = Just $ normalOp $ unsignedExtend mempty <> gconv jlong jdouble
simpleOp Word64Eq = Just $ typedCmp jlong ifeq
simpleOp Word64Ne = Just $ typedCmp jlong ifne
simpleOp Word64Lt = Just $ unsignedLongCmp iflt
simpleOp Word64Le = Just $ unsignedLongCmp ifle
simpleOp Word64Gt = Just $ unsignedLongCmp ifgt
simpleOp Word64Ge = Just $ unsignedLongCmp ifge
simpleOp Word64Quot = Just $
normalOp $ invokestatic $ mkMethodRef rtsUnsigned "divideUnsigned" [jlong, jlong] (ret jlong)
simpleOp Word64Rem = Just $
normalOp $ invokestatic $ mkMethodRef rtsUnsigned "remainderUnsigned" [jlong, jlong] (ret jlong)
simpleOp Word64And = Just $ normalOp land
simpleOp Word64Or = Just $ normalOp lor
simpleOp Word64Xor = Just $ normalOp lxor
simpleOp Word64Not = Just $ normalOp lnot
simpleOp Word64SllOp = Just $ normalOp lshl
simpleOp Word64SrlOp = Just $ normalOp lushr
-- TODO: Check if these operations are optimal
simpleOp PopCntOp = Just $ normalOp $ popCntOp
simpleOp PopCnt8Op = Just $ normalOp $ preserveByte <> popCntOp
simpleOp PopCnt16Op = Just $ normalOp $ preserveShort <> popCntOp
simpleOp PopCnt32Op = Just $ normalOp $ popCntOp
simpleOp PopCnt64Op = Just $ normalOp $
invokestatic $ mkMethodRef "java/lang/Long" "bitCount" [jlong] (ret jint)
simpleOp ClzOp = Just $ normalOp $ clzOp
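-- The 8- and 16-bit leading-zero counts subtract the extra leading zeros
-- contributed by widening the value to a 32-bit int (24 and 16 respectively).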
simpleOp Clz8Op = Just $ normalOp $ preserveByte <> clzOp <> iconst jint 24 <> isub
simpleOp Clz16Op = Just $ normalOp $ preserveShort <> clzOp <> iconst jint 16 <> isub
simpleOp Clz32Op = Just $ normalOp $ clzOp
simpleOp Clz64Op = Just $ normalOp $
invokestatic $ mkMethodRef "java/lang/Long" "numberOfLeadingZeros" [jlong] (ret jint)
simpleOp CtzOp = Just $ normalOp $ ctzOp
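-- For the 8- and 16-bit trailing-zero counts below, OR in a sentinel bit just
-- above the value's width so an all-zero input yields 8 (or 16) instead of 32.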
simpleOp Ctz8Op = Just $ normalOp $ iconst jint 0x100 <> ior <> ctzOp
simpleOp Ctz16Op = Just $ normalOp $ iconst jint 0x10000 <> ior <> ctzOp
simpleOp Ctz32Op = Just $ normalOp $ ctzOp
simpleOp Ctz64Op = Just $ normalOp $
invokestatic $ mkMethodRef "java/lang/Long" "numberOfTrailingZeros" [jlong] (ret jint)
-- TODO: Verify all the BSwap operations
-- TODO: Is this correct?
simpleOp BSwap16Op = Just $ normalOp $
gconv jint jshort
<> invokestatic (mkMethodRef "java/lang/Short" "reverseBytes" [jshort] (ret jshort))
<> gconv jshort jint
simpleOp BSwap32Op = Just $ normalOp $
invokestatic (mkMethodRef "java/lang/Integer" "reverseBytes" [jint] (ret jint))
simpleOp BSwap64Op = Just $ normalOp $
invokestatic (mkMethodRef "java/lang/Long" "reverseBytes" [jlong] (ret jlong))
simpleOp BSwapOp = Just $ normalOp $
invokestatic (mkMethodRef "java/lang/Integer" "reverseBytes" [jint] (ret jint))
simpleOp Int64Eq = Just $ typedCmp jlong ifeq
simpleOp Int64Ne = Just $ typedCmp jlong ifne
simpleOp Int64Lt = Just $ typedCmp jlong iflt
simpleOp Int64Le = Just $ typedCmp jlong ifle
simpleOp Int64Gt = Just $ typedCmp jlong ifgt
simpleOp Int64Ge = Just $ typedCmp jlong ifge
simpleOp Int64Quot = Just $ normalOp ldiv
simpleOp Int64Rem = Just $ normalOp lrem
simpleOp Int64Add = Just $ normalOp ladd
simpleOp Int64Sub = Just $ normalOp lsub
simpleOp Int64Mul = Just $ normalOp lmul
simpleOp Int64Neg = Just $ normalOp lneg
simpleOp Int64SllOp = Just $ normalOp lshl
simpleOp Int64SraOp = Just $ normalOp lshr
simpleOp Int64SrlOp = Just $ normalOp lushr
simpleOp Int2Int64 = Just $ normalOp $ gconv jint jlong
simpleOp Int642Int = Just $ normalOp $ gconv jlong jint
simpleOp Word2Word64 = Just $ unsignedExtend . head
-- TODO: Right conversion?
simpleOp Word64ToWord = Just $ normalOp $ gconv jlong jint
simpleOp DecodeDoubleInteger = Just $ normalOp $ gconv jlong jint
simpleOp IndexJByteArrayOp = Just $ normalOp $ gaload jbyte
simpleOp ReadJByteArrayOp = Just $ normalOp $ gaload jbyte
simpleOp WriteJByteArrayOp = Just $ normalOp $ gastore jbyte
simpleOp NewJByteArrayOp = Just $ normalOp $ new (jarray jbyte)
simpleOp NewJBooleanArrayOp = Just $ normalOp $ new (jarray jbool)
simpleOp ReadJBooleanArrayOp = Just $ normalOp $ gaload jbool
simpleOp WriteJBooleanArrayOp = Just $ normalOp $ gastore jbool
simpleOp NewJCharArrayOp = Just $ normalOp $ new (jarray jchar)
simpleOp ReadJCharArrayOp = Just $ normalOp $ gaload jchar
simpleOp WriteJCharArrayOp = Just $ normalOp $ gastore jchar
simpleOp NewJShortArrayOp = Just $ normalOp $ new (jarray jshort)
simpleOp ReadJShortArrayOp = Just $ normalOp $ gaload jshort
simpleOp WriteJShortArrayOp = Just $ normalOp $ gastore jshort
simpleOp NewJIntArrayOp = Just $ normalOp $ new (jarray jint)
simpleOp ReadJIntArrayOp = Just $ normalOp $ gaload jint
simpleOp WriteJIntArrayOp = Just $ normalOp $ gastore jint
simpleOp NewJLongArrayOp = Just $ normalOp $ new (jarray jlong)
simpleOp ReadJLongArrayOp = Just $ normalOp $ gaload jlong
simpleOp WriteJLongArrayOp = Just $ normalOp $ gastore jlong
simpleOp NewJFloatArrayOp = Just $ normalOp $ new (jarray jfloat)
simpleOp ReadJFloatArrayOp = Just $ normalOp $ gaload jfloat
simpleOp WriteJFloatArrayOp = Just $ normalOp $ gastore jfloat
simpleOp NewJDoubleArrayOp = Just $ normalOp $ new (jarray jdouble)
simpleOp ReadJDoubleArrayOp = Just $ normalOp $ gaload jdouble
simpleOp WriteJDoubleArrayOp = Just $ normalOp $ gastore jdouble
-- TODO: Take care of converting the StackMapTable as well
simpleOp Int2JBoolOp = Just idOp
simpleOp JByte2CharOp = Just $ normalOp preserveByte
simpleOp JByte2IntOp = Just idOp
simpleOp Int2JByteOp = Just $ normalOp $ gconv jint jbyte
simpleOp JShort2IntOp = Just idOp
simpleOp Int2JShortOp = Just $ normalOp $ gconv jint jshort
simpleOp JChar2WordOp = Just $ normalOp preserveShort
simpleOp Word2JCharOp = Just $ normalOp $ gconv jint jchar
-- MutVar ops
simpleOp ReadMutVarOp = Just $ normalOp mutVarValue
simpleOp WriteMutVarOp = Just $ normalOp mutVarSetValue
simpleOp SameMutVarOp = Just $ intCompOp if_acmpeq
-- Addr# ops
-- WARNING: Addr2IntOp and Int2AddrOp are unsafe: once more than 2GB has been
-- allocated, addresses no longer fit in an int and can no longer be addressed.
simpleOp Addr2IntOp = Just $ normalOp $ gconv jlong jint
simpleOp Int2AddrOp = Just $ normalOp $ gconv jint jlong
simpleOp Addr2Int64Op = Just idOp
simpleOp Int642AddrOp = Just idOp
simpleOp AddrAddOp = Just $ \[addr, n] ->
addr <> n <> gconv jint jlong <> ladd
simpleOp AddrSubOp = Just $ normalOp (lsub <> gconv jlong jint)
-- TODO: Is this the right implementation?
simpleOp AddrRemOp = Just $ \[addr, n] ->
addr <> gconv jlong jint <> n <> irem
simpleOp AddrGtOp = Just $ typedCmp jlong ifgt
simpleOp AddrGeOp = Just $ typedCmp jlong ifge
simpleOp AddrEqOp = Just $ typedCmp jlong ifeq
simpleOp AddrNeOp = Just $ typedCmp jlong ifne
simpleOp AddrLtOp = Just $ typedCmp jlong iflt
simpleOp AddrLeOp = Just $ typedCmp jlong ifle
simpleOp IndexOffAddrOp_Char = Just $ addrIndexOp jbyte preserveByte
simpleOp IndexOffAddrOp_WideChar = Just $ addrIndexOp jint mempty
simpleOp IndexOffAddrOp_Int = Just $ addrIndexOp jint mempty
simpleOp IndexOffAddrOp_Word = Just $ addrIndexOp jint mempty
simpleOp IndexOffAddrOp_Addr = Just $ addrIndexOp jlong mempty
simpleOp IndexOffAddrOp_Float = Just $ addrIndexOp jfloat mempty
simpleOp IndexOffAddrOp_Double = Just $ addrIndexOp jdouble mempty
simpleOp IndexOffAddrOp_StablePtr = Just $ addrIndexOp jint mempty
simpleOp IndexOffAddrOp_Int8 = Just $ addrIndexOp jbyte preserveByte
simpleOp IndexOffAddrOp_Int16 = Just $ addrIndexOp jshort preserveShort
simpleOp IndexOffAddrOp_Int32 = Just $ addrIndexOp jint mempty
simpleOp IndexOffAddrOp_Int64 = Just $ addrIndexOp jlong mempty
simpleOp IndexOffAddrOp_Word8 = Just $ addrIndexOp jbyte preserveByte
simpleOp IndexOffAddrOp_Word16 = Just $ addrIndexOp jshort preserveShort
simpleOp IndexOffAddrOp_Word32 = Just $ addrIndexOp jint mempty
simpleOp IndexOffAddrOp_Word64 = Just $ addrIndexOp jlong mempty
simpleOp ReadOffAddrOp_Char = Just $ addrIndexOp jbyte preserveByte
simpleOp ReadOffAddrOp_WideChar = Just $ addrIndexOp jint mempty
simpleOp ReadOffAddrOp_Int = Just $ addrIndexOp jint mempty
simpleOp ReadOffAddrOp_Word = Just $ addrIndexOp jint mempty
simpleOp ReadOffAddrOp_Addr = Just $ addrIndexOp jlong mempty
simpleOp ReadOffAddrOp_Float = Just $ addrIndexOp jfloat mempty
simpleOp ReadOffAddrOp_Double = Just $ addrIndexOp jdouble mempty
simpleOp ReadOffAddrOp_StablePtr = Just $ addrIndexOp jint mempty
simpleOp ReadOffAddrOp_Int8 = Just $ addrIndexOp jbyte preserveByte
simpleOp ReadOffAddrOp_Int16 = Just $ addrIndexOp jshort preserveShort
simpleOp ReadOffAddrOp_Int32 = Just $ addrIndexOp jint mempty
simpleOp ReadOffAddrOp_Int64 = Just $ addrIndexOp jlong mempty
simpleOp ReadOffAddrOp_Word8 = Just $ addrIndexOp jbyte preserveByte
simpleOp ReadOffAddrOp_Word16 = Just $ addrIndexOp jshort preserveShort
simpleOp ReadOffAddrOp_Word32 = Just $ addrIndexOp jint mempty
simpleOp ReadOffAddrOp_Word64 = Just $ addrIndexOp jlong mempty
simpleOp WriteOffAddrOp_Char = Just $ addrWriteOp jbyte mempty
simpleOp WriteOffAddrOp_WideChar = Just $ addrWriteOp jint mempty
simpleOp WriteOffAddrOp_Int = Just $ addrWriteOp jint mempty
simpleOp WriteOffAddrOp_Word = Just $ addrWriteOp jint mempty
simpleOp WriteOffAddrOp_Addr = Just $ addrWriteOp jlong mempty
simpleOp WriteOffAddrOp_Float = Just $ addrWriteOp jfloat mempty
simpleOp WriteOffAddrOp_Double = Just $ addrWriteOp jdouble mempty
-- TODO: Verify writes for Word/Int 8/16 - add additional casts?
simpleOp WriteOffAddrOp_StablePtr = Just $ addrWriteOp jint mempty
simpleOp WriteOffAddrOp_Int8 = Just $ addrWriteOp jbyte preserveByte
simpleOp WriteOffAddrOp_Int16 = Just $ addrWriteOp jshort preserveShort
simpleOp WriteOffAddrOp_Int32 = Just $ addrWriteOp jint mempty
simpleOp WriteOffAddrOp_Int64 = Just $ addrWriteOp jlong mempty
simpleOp WriteOffAddrOp_Word8 = Just $ addrWriteOp jbyte preserveByte
simpleOp WriteOffAddrOp_Word16 = Just $ addrWriteOp jshort preserveShort
simpleOp WriteOffAddrOp_Word32 = Just $ addrWriteOp jint mempty
simpleOp WriteOffAddrOp_Word64 = Just $ addrWriteOp jlong mempty
-- TODO: Verify that narrowing / preserving are compatible with GHC
-- Narrowing ops
simpleOp Narrow8IntOp = Just $ normalOp $ preserveByte
simpleOp Narrow16IntOp = Just $ normalOp $ preserveShort
simpleOp Narrow32IntOp = Just idOp
simpleOp Narrow8WordOp = Just $ normalOp $ preserveByte
simpleOp Narrow16WordOp = Just $ normalOp $ preserveShort
simpleOp Narrow32WordOp = Just idOp
-- Misc
simpleOp SameTVarOp = Just $ intCompOp if_acmpeq
simpleOp SameMVarOp = Just $ intCompOp if_acmpeq
simpleOp EqStablePtrOp = Just $ intCompOp if_icmpeq
simpleOp EqStableNameOp = Just $ intCompOp if_icmpeq
simpleOp SameMutableByteArrayOp = Just $ intCompOp if_acmpeq
simpleOp ReallyUnsafePtrEqualityOp = Just $ intCompOp if_acmpeq
simpleOp StableNameToIntOp = Just idOp
simpleOp TouchOp = Just $ const mempty
simpleOp CopyAddrToByteArrayOp = Just $ normalOp $
invokestatic $ mkMethodRef stgByteArray "copyAddrToByteArray"
[jlong, stgByteArrayType, jint, jint] void
simpleOp CopyMutableByteArrayToAddrOp = Just $ normalOp $
invokestatic $ mkMethodRef stgByteArray "copyByteArrayToAddr"
[stgByteArrayType, jint, jlong, jint] void
simpleOp CopyByteArrayToAddrOp = Just $ normalOp $
invokestatic $ mkMethodRef stgByteArray "copyByteArrayToAddr"
[stgByteArrayType, jint, jlong, jint] void
simpleOp CopyByteArrayOp = Just $ normalOp $
invokestatic $ mkMethodRef stgByteArray "copyByteArray"
[stgByteArrayType, jint, stgByteArrayType, jint, jint] void
simpleOp CopyMutableByteArrayOp = Just $ normalOp $
invokestatic $ mkMethodRef stgByteArray "copyByteArray"
[stgByteArrayType, jint, stgByteArrayType, jint, jint] void
simpleOp StablePtr2AddrOp = Just $ normalOp $ gconv jint jlong
simpleOp Addr2StablePtrOp = Just $ normalOp $ gconv jlong jint
simpleOp SizeofMutableByteArrayOp = Just $ normalOp byteArraySize
simpleOp GetSizeofMutableByteArrayOp = Just $ normalOp byteArraySize
simpleOp SizeofByteArrayOp = Just $ normalOp byteArraySize
-- Sparks
-- TODO: Implement
simpleOp ParOp = Just $ \_ -> iconst jint 0
simpleOp IsNullObjectOp = Just $ \[o] -> o <> ifnull (iconst jint 1) (iconst jint 0)
simpleOp _ = Nothing
popCntOp, clzOp, ctzOp :: Code
popCntOp = invokestatic $ mkMethodRef "java/lang/Integer" "bitCount" [jint] (ret jint)
clzOp = invokestatic $ mkMethodRef "java/lang/Integer" "numberOfLeadingZeros" [jint] (ret jint)
ctzOp = invokestatic $ mkMethodRef "java/lang/Integer" "numberOfTrailingZeros" [jint] (ret jint)
floatMathEndoOp :: Text -> Code
floatMathEndoOp f = gconv jfloat jdouble <> doubleMathEndoOp f <> gconv jdouble jfloat
floatMathOp :: Text -> [FieldType] -> FieldType -> Code
floatMathOp f args ret = gconv jfloat jdouble <> doubleMathOp f args ret <> gconv jdouble jfloat
doubleMathOp :: Text -> [FieldType] -> FieldType -> Code
doubleMathOp f args ret = invokestatic $ mkMethodRef "java/lang/Math" f args (Just ret)
doubleMathEndoOp :: Text -> Code
doubleMathEndoOp f = doubleMathOp f [jdouble] jdouble
indexMultiplier :: FieldType -> Code
indexMultiplier ft
| size == 1 = mempty
| otherwise = iconst jint (fromIntegral size)
<> imul
where size = fieldByteSize ft
addrIndexOp :: FieldType -> Code -> [Code] -> Code
addrIndexOp ft resCode = \[this, ix] ->
this
<> ix
<> indexMultiplier ft
<> gconv jint jlong
<> ladd
<> addressGet ft
<> resCode
addrWriteOp :: FieldType -> Code -> [Code] -> Code
addrWriteOp ft argCode = \[this, ix, val] ->
this
<> ix
<> indexMultiplier ft
<> gconv jint jlong
<> ladd
<> val
<> argCode
<> addressPut ft
byteArrayIndexOp :: FieldType -> Code -> [Code] -> Code
byteArrayIndexOp ft resCode = \[this, ix] ->
addrIndexOp ft resCode [this <> byteArrayBuf, ix]
byteArrayWriteOp :: FieldType -> Code -> [Code] -> Code
byteArrayWriteOp ft argCode = \[this, ix, val] ->
addrWriteOp ft argCode [this <> byteArrayBuf, ix, val]
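-- A rough sketch of what the helpers above emit (illustrative, not literal
-- bytecode): for an element type of byte size s, addrIndexOp reads the field at
-- base + ix * s after widening the scaled int offset to a long, addrWriteOp
-- stores to the same computed address, and the byteArray variants do the same
-- against the array's backing buffer. E.g. indexing jdouble element i touches
-- the 8 bytes at addr + 8 * i.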
preserveByte :: Code
preserveByte = iconst jint 0xFF <> iand
preserveShort :: Code
preserveShort = iconst jint 0xFFFF <> iand
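-- preserveByte/preserveShort emulate unsigned narrowing on the JVM, which has no
-- unsigned primitive types: masking with 0xFF / 0xFFFF clears the sign-extended
-- upper bits, so an int holding -1 (0xFFFFFFFF) comes out as 255 after preserveByte.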
unsignedOp :: Code -> [Code] -> Code
unsignedOp op [arg1, arg2]
= unsignedExtend arg1
<> unsignedExtend arg2
<> op
<> gconv jlong jint
unsignedOp _ _ = error "unsignedOp: expected exactly two argument codes"
typedCmp :: FieldType -> (Code -> Code -> Code) -> [Code] -> Code
typedCmp ft ifop [arg1, arg2]
= gcmp ft arg1 arg2
<> ifop (iconst jint 1) (iconst jint 0)
typedCmp _ _ _ = error "typedCmp: expected exactly two argument codes"
unsignedCmp :: (Code -> Code -> Code) -> [Code] -> Code
unsignedCmp ifop args
= typedCmp jlong ifop $ map unsignedExtend args
unsignedExtend :: Code -> Code
unsignedExtend i = i <> gconv jint jlong <> lconst 0xFFFFFFFF <> land
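-- unsignedExtend reinterprets an int as an unsigned 32-bit value inside a long:
-- the int-to-long conversion sign-extends, and the 0xFFFFFFFF mask then clears
-- the upper word, so -1 :: int becomes 4294967295 :: long. unsignedOp uses this
-- to run Word# arithmetic at 64 bits before truncating back to an int, and
-- unsignedLongCmp below handles 64-bit words by biasing both operands with
-- Long.MIN_VALUE so that a signed comparison orders them as unsigned values.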
lONG_MIN_VALUE :: Code
lONG_MIN_VALUE = lconst (-9223372036854775808)
unsignedLongCmp :: (Code -> Code -> Code) -> [Code] -> Code
unsignedLongCmp ifop args
= typedCmp jlong ifop $ map addMin args
where addMin x = x <> lONG_MIN_VALUE <> ladd
| pparkkin/eta | compiler/ETA/CodeGen/Prim.hs | bsd-3-clause | 45,153 | 0 | 20 | 9,250 | 12,972 | 6,440 | 6,532 | 866 | 3 |
module Data.List.TypeLevel.Intersection where
import Data.Constraint
import Data.List.TypeLevel.Cmp
import Data.List.TypeLevel.Subtraction (Subtraction)
import qualified Data.List.TypeLevel.Subtraction as Subtraction
import Data.List.TypeLevel.Union (Union)
import Data.List.TypeLevel.Witness
import qualified Data.List.TypeLevel.Witness.BoundedList as BoundedList
import qualified Data.List.TypeLevel.Witness.OrdList as OrdList
import Data.Tuple.TypeLevel
import Data.Type.Equality
import Data.Vinyl.Core hiding (Dict)
import Data.Vinyl.DictFun (DictFun (..))
type Intersection as bs = Subtraction as (Subtraction as bs)
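-- Intersection is expressed as a double subtraction, the usual identity
-- as `intersect` bs = as \\ (as \\ bs). Assuming 'Subtraction as bs' keeps the
-- elements of @as@ that are not in @bs@ (an assumption about that module, not a
-- statement of its docs), a list like '[a, b, c] intersected with '[b, d] first
-- drops to '[a, c], and subtracting that from the original again leaves '[b].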
-- Note that 'rec' is left-biased: the result is built from (and ordered by) the first list.
rec :: Rec CmpDict ls -> Rec CmpDict rs -> Rec f ls -> Rec f (Intersection ls rs)
rec lsCmp rsCmp ls = Subtraction.rec lsCmp (Subtraction.dict lsCmp rsCmp) ls
dict :: Rec CmpDict ls -> Rec CmpDict rs -> Rec CmpDict (Intersection ls rs)
dict ls rs = rec ls rs ls
commutativity :: Rec CmpDict as -> Rec CmpDict bs -> OrdList as -> OrdList bs
-> Intersection as bs :~: Intersection bs as
commutativity = go
where
go :: forall as bs. Rec CmpDict as -> Rec CmpDict bs -> OrdList as -> OrdList bs
-> Intersection as bs :~: Intersection bs as
go RNil RNil OrdListNil OrdListNil = Refl
go as RNil _ OrdListNil = case Subtraction.leftIdentity as of
Refl -> case Subtraction.rightIdentity as of
Refl -> case Subtraction.zeroIdentity as of
Refl -> Refl
go (a@DictFun :& asNext) (b@DictFun :& bsNext) asOrd bsOrd = let
asOrdNext = OrdList.tail asOrd
bsOrdNext = OrdList.tail bsOrd
in case compareTypes (proxyFst a) (proxyFst b) of
CmpEQ -> case eqTProxy (proxySnd a) (proxySnd b) of
Nothing -> error "intersection commutativity failure"
Just Refl -> case tupleEquality a b of
Sub Dict -> case go asNext bsNext asOrdNext bsOrdNext of
Refl -> case Subtraction.upperBound2 (OrdList.toBoundedList asOrd) (OrdList.toBoundedList bsOrd) asNext bsNext asOrdNext bsOrdNext of
(BoundedListCons,BoundedListCons) -> Refl
(BoundedListCons,BoundedListNil) -> Refl
(BoundedListNil,BoundedListCons) -> Refl
(BoundedListNil,BoundedListNil) -> Refl
CmpLT -> case go (a :& asNext) bsNext asOrd bsOrdNext of
Refl -> case selfEquality (proxyFst b) of
Refl -> Refl
CmpGT -> case go asNext (b :& bsNext) asOrdNext bsOrd of
Refl -> case selfEquality (proxyFst a) of
Refl -> Refl
-- proof :: BoundedList b as -> proxy1 bs
-- -> Subtraction as (b ': bs) :~: Subtraction as bs
-- proof b _ = case b of
-- BoundedListNil -> Refl
-- BoundedListCons -> Refl
| andrewthad/vinyl-vectors | src/Data/List/TypeLevel/Intersection.hs | bsd-3-clause | 2,890 | 0 | 27 | 751 | 815 | 429 | 386 | -1 | -1 |
{-# LANGUAGE FlexibleInstances, MultiParamTypeClasses #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
module Parsec (module AP, Parser, LineParser) where
import Control.Applicative as AP hiding (many,optional,(<|>))
import qualified Data.Text.Lazy as L
import Text.Parsec as AP hiding (satisfy, ParseError, errorPos)
import Text.Parsec.Error as AP
type Parser = Parsec L.Text ()
type LineParser = Parsec [L.Text] ()
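-- 'Parser' runs over a single lazy 'L.Text' stream, while 'LineParser' runs over
-- input that has already been split into lines (one 'L.Text' per element).
-- Neither parser carries any user state.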
| kazu-yamamoto/piki | src/Parsec.hs | bsd-3-clause | 413 | 0 | 7 | 54 | 108 | 72 | 36 | 9 | 0 |
module Sudoku.Control.MoveSpec (main, spec) where
import Data.Set(singleton)
import Test.Hspec
import Sudoku.Control.Move
import Sudoku.Data.Board.Internal
main :: IO ()
main = hspec spec
spec :: Spec
spec = do
describe "Move properties:" $ do
let set = Set "0" "1" "1"
let set2 = Set "3" "4" "5"
let check = Check
let erase = Erase "0" "1"
let erase2 = Erase "0" "4"
let reset = Reset
let quit = Quit
context "Show:" $ do
it "should work for Set" $ do
show set `shouldBe` "Set \"0\" \"1\" \"1\""
show set2 `shouldBe` "Set \"3\" \"4\" \"5\""
it "should work for Check" $
show check `shouldBe` "Check"
it "should work for Erase" $ do
show erase `shouldBe` "Erase \"0\" \"1\""
show erase2 `shouldBe` "Erase \"0\" \"4\""
it "should work for Reset" $
show reset `shouldBe` "Reset"
it "should work for Quit" $
show quit `shouldBe` "Quit"
context "Equality:" $ do
it "should hold between moves that are the same." $ do
set `shouldBe` set
check `shouldBe` check
erase `shouldBe` erase
reset `shouldBe` reset
quit `shouldBe` quit
it "should not hold between different kinds of moves." $ do
set `shouldNotBe` check
set `shouldNotBe` erase
set `shouldNotBe` reset
set `shouldNotBe` quit
check `shouldNotBe` erase
check `shouldNotBe` reset
check `shouldNotBe` quit
erase `shouldNotBe` reset
erase `shouldNotBe` quit
reset `shouldNotBe` quit
it "should not hold between moves with different data members." $ do
set `shouldNotBe` set2
erase `shouldNotBe` erase2
describe "MoveError properties:" $ do
let r = 100::Int
let rs = show r
let c = 100::Int
let cs = show c
let v = 100::Int
let vs = show v
let squares = singleton $ Loc 1 2
let ssquares = show squares
context "Show:" $ do
context "NaNError:" $
it "should display correctly." $
show (NaNError "q") `shouldBe` ("Value " ++ "q" ++ " is not a number.")
it "OutOfBoundsError should display correctly." $
show (OutOfBoundsError r c) `shouldBe` ("Square (" ++ rs ++ ", " ++ cs ++
") is out of bounds.")
it "InvalidValueError should display correctly." $
show (InvalidValueError v) `shouldBe` ("Value " ++ vs ++ " is invalid.")
            it "InvalidBoardError should display correctly." $
show (InvalidBoardError squares) `shouldBe` ("Board is invalid. " ++
"Invalid squares: " ++ ssquares)
it "OtherError should display correctly." $
show (OtherError "fish") `shouldBe` ("General error: " ++ "fish")
            it "QuitError should display correctly." $
show QuitError `shouldBe` "Asked or required to quit!"
| andrewmichaud/JustSudoku | test/Sudoku/Control/MoveSpec.hs | bsd-3-clause | 3,362 | 0 | 18 | 1,353 | 830 | 408 | 422 | 76 | 1 |
{-# language CPP #-}
-- | = Name
--
-- VK_NV_shader_subgroup_partitioned - device extension
--
-- == VK_NV_shader_subgroup_partitioned
--
-- [__Name String__]
-- @VK_NV_shader_subgroup_partitioned@
--
-- [__Extension Type__]
-- Device extension
--
-- [__Registered Extension Number__]
-- 199
--
-- [__Revision__]
-- 1
--
-- [__Extension and Version Dependencies__]
--
-- - Requires Vulkan 1.1
--
-- [__Contact__]
--
-- - Jeff Bolz
-- <https://github.com/KhronosGroup/Vulkan-Docs/issues/new?body=[VK_NV_shader_subgroup_partitioned] @jeffbolznv%0A<<Here describe the issue or question you have about the VK_NV_shader_subgroup_partitioned extension>> >
--
-- == Other Extension Metadata
--
-- [__Last Modified Date__]
-- 2018-03-17
--
-- [__Interactions and External Dependencies__]
--
-- - This extension requires
-- <https://htmlpreview.github.io/?https://github.com/KhronosGroup/SPIRV-Registry/blob/master/extensions/NV/SPV_NV_shader_subgroup_partitioned.html SPV_NV_shader_subgroup_partitioned>
--
-- - This extension provides API support for
-- <https://github.com/KhronosGroup/GLSL/blob/master/extensions/nv/GL_NV_shader_subgroup_partitioned.txt GL_NV_shader_subgroup_partitioned>
--
-- [__Contributors__]
--
-- - Jeff Bolz, NVIDIA
--
-- == Description
--
-- This extension enables support for a new class of
-- <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/html/vkspec.html#shaders-group-operations group operations>
-- on
-- <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/html/vkspec.html#shaders-scope-subgroup subgroups>
-- via the
-- <https://github.com/KhronosGroup/GLSL/blob/master/extensions/nv/GL_NV_shader_subgroup_partitioned.txt GL_NV_shader_subgroup_partitioned>
-- GLSL extension and
-- <https://htmlpreview.github.io/?https://github.com/KhronosGroup/SPIRV-Registry/blob/master/extensions/NV/SPV_NV_shader_subgroup_partitioned.html SPV_NV_shader_subgroup_partitioned>
-- SPIR-V extension. Support for these new operations is advertised via the
-- 'Vulkan.Core11.Enums.SubgroupFeatureFlagBits.SUBGROUP_FEATURE_PARTITIONED_BIT_NV'
-- bit.
--
-- This extension requires Vulkan 1.1, for general subgroup support.
--
-- == New Enum Constants
--
-- - 'NV_SHADER_SUBGROUP_PARTITIONED_EXTENSION_NAME'
--
-- - 'NV_SHADER_SUBGROUP_PARTITIONED_SPEC_VERSION'
--
-- - Extending
-- 'Vulkan.Core11.Enums.SubgroupFeatureFlagBits.SubgroupFeatureFlagBits':
--
-- - 'Vulkan.Core11.Enums.SubgroupFeatureFlagBits.SUBGROUP_FEATURE_PARTITIONED_BIT_NV'
--
-- == Version History
--
-- - Revision 1, 2018-03-17 (Jeff Bolz)
--
-- - Internal revisions
--
-- == See Also
--
-- No cross-references are available
--
-- == Document Notes
--
-- For more information, see the
-- <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/html/vkspec.html#VK_NV_shader_subgroup_partitioned Vulkan Specification>
--
-- This page is a generated document. Fixes and changes should be made to
-- the generator scripts, not directly.
module Vulkan.Extensions.VK_NV_shader_subgroup_partitioned ( NV_SHADER_SUBGROUP_PARTITIONED_SPEC_VERSION
, pattern NV_SHADER_SUBGROUP_PARTITIONED_SPEC_VERSION
, NV_SHADER_SUBGROUP_PARTITIONED_EXTENSION_NAME
, pattern NV_SHADER_SUBGROUP_PARTITIONED_EXTENSION_NAME
) where
import Data.String (IsString)
type NV_SHADER_SUBGROUP_PARTITIONED_SPEC_VERSION = 1
-- No documentation found for TopLevel "VK_NV_SHADER_SUBGROUP_PARTITIONED_SPEC_VERSION"
pattern NV_SHADER_SUBGROUP_PARTITIONED_SPEC_VERSION :: forall a . Integral a => a
pattern NV_SHADER_SUBGROUP_PARTITIONED_SPEC_VERSION = 1
type NV_SHADER_SUBGROUP_PARTITIONED_EXTENSION_NAME = "VK_NV_shader_subgroup_partitioned"
-- No documentation found for TopLevel "VK_NV_SHADER_SUBGROUP_PARTITIONED_EXTENSION_NAME"
pattern NV_SHADER_SUBGROUP_PARTITIONED_EXTENSION_NAME :: forall a . (Eq a, IsString a) => a
pattern NV_SHADER_SUBGROUP_PARTITIONED_EXTENSION_NAME = "VK_NV_shader_subgroup_partitioned"
| expipiplus1/vulkan | src/Vulkan/Extensions/VK_NV_shader_subgroup_partitioned.hs | bsd-3-clause | 4,234 | 0 | 8 | 696 | 200 | 154 | 46 | -1 | -1 |
{-# LANGUAGE ConstraintKinds, KindSignatures, DataKinds, ScopedTypeVariables, DeriveFunctor, RankNTypes, ViewPatterns, MultiParamTypeClasses, FlexibleInstances, FlexibleContexts, DeriveDataTypeable, DeriveGeneric, TupleSections #-}
module BlobStore where
import TypedBinary
import qualified Data.ByteString.Lazy as L
import qualified Data.ByteString.Lazy.Builder as Builder
import qualified Data.HashMap.Strict as HM
import Crypto.Hash
import Data.IORef
import qualified Data.Hashable as H
-- import qualified Data.Cache.LRU as LRU
import Control.Applicative
import Control.Monad.Catch
import Data.Typeable
import GHC.Generics hiding (from, to)
import qualified Data.Binary.Get as Bin
import Control.DeepSeq
data Address = SHA512Key (Digest SHA512)
deriving (Eq, Ord, Show, Generic)
instance Grammatical Address
class ToFileName a where
toFileName :: a -> FilePath
instance ToFileName Address where
toFileName (SHA512Key k) = show k
{-
instance Hashable Address where
hashWithSalt salt (SHA512Key k) = hashWithSalt salt (toBytes k)
class Addressable a o where
address :: o -> a
verifyAddress :: o -> a -> Bool
instance Addressable Address B.ByteString where
address o = SHA512Key $ SHA512.hash o
verifyAddress o (SHA512Key k) = SHA512.hash o == k
data Decorated a = Decorated a B.ByteString
deriving (Eq, Show)
instance Eq a => Addressable a (Decorated a) where
address (Decorated a _) = a
verifyAddress (Decorated a1 _) a2 = a1 == a2
instance Byteable (Decorated a) where
toBytes (Decorated _ o) = o
type Put a x = (Addressable a x, Byteable x)
decorate :: (Addressable a o, Byteable o) => o -> Decorated a
decorate o = Decorated (address o) (toBytes o)
class Get f a x where
undecorate :: Decorated a -> f x
undecorate (Decorated a x) = unroll a x
unroll :: a -> B.ByteString -> f x
unroll a x = undecorate (Decorated a x)
instance Monad m => Get m a (Decorated a) where
undecorate = return
instance Monad m => Get m a B.ByteString where
unroll _ = return
-}
data Store f a o = Store
{ store :: o -> f a
, load :: a -> f o
}
data GrammarException = GrammarException String
deriving (Show, Typeable)
instance Exception GrammarException
grammarStore :: (Functor f, Monad f, MonadCatch f) => Grammar o -> Store f a L.ByteString -> Store f a o
grammarStore g st = Store { store = doStore, load = doLoad }
where doStore x = case writeDefault g x of
Left s -> throwM (GrammarException s)
Right o -> store st $ Builder.toLazyByteString o
doLoad a = do o <- load st a
case Bin.runGetOrFail (parseFull g) o of
Left (_, _, s) -> throwM (GrammarException s)
Right (_, _, x) -> return x
data UnknownAddress = UnknownAddress
deriving (Show, Typeable)
instance Exception UnknownAddress
memoryStore :: (Eq a, H.Hashable a) => IORef (HM.HashMap a o) -> Store IO a (a, o)
memoryStore mapRef = Store { store = doStore, load = doLoad }
where doStore (a, o) = atomicModifyIORef' mapRef (\m -> (HM.insert a o m, a))
doLoad a = maybe (throwM UnknownAddress) (return . (a, )) . HM.lookup a =<< readIORef mapRef
newMemoryStore :: (Eq a, H.Hashable a) => IO (Store IO a (a, o))
newMemoryStore = memoryStore <$> newIORef HM.empty
{-
lruCache :: (Ord a, Put a o, Get IO a o) => IORef (LRU.LRU a B.ByteString) -> Store IO a o
lruCache cacheRef = Store { store = doStore, load = doLoad }
where doStore (decorate -> Decorated a o) = atomicModifyIORef' cacheRef (\m -> (LRU.insert a o m, a))
doLoad a = maybe (throwM UnknownAddress) (unroll a) =<< atomicModifyIORef' cacheRef (LRU.lookup a)
newLRUCache :: (Ord a, Put a o, Get IO a o) => Maybe Integer -> IO (Store IO a o)
newLRUCache len = lruCache <$> newIORef (LRU.newLRU len)
-}
fsStore :: (ToFileName a) => FilePath -> Store IO a (a, L.ByteString)
fsStore dir = Store { store = doStore, load = doLoad }
where
addrPath k = dir ++ "/" ++ toFileName k
doStore (a, o) = putStrLn ("Write " ++ addrPath a ++ " : " ++ show o) >> (a <$ L.writeFile (addrPath a) o)
doLoad a = do o <- L.readFile (addrPath a); putStrLn $ "Read " ++ addrPath a ++ " : " ++ show o; o `deepseq` return (a, o)
hashStore :: (Functor f) => Store f Address (Address, L.ByteString) -> Store f Address L.ByteString
hashStore st = Store { store = doStore, load = doLoad }
where
doStore o = store st (SHA512Key (hashlazy o), o)
doLoad a = snd <$> load st a
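-- A small usage sketch composing the pieces above into a content-addressed,
-- on-disk store for any type that has a 'Grammar'. The directory path is purely
-- illustrative and the helper name is not part of any existing API.
exampleFileStore :: Grammar o -> Store IO Address o
exampleFileStore g = grammarStore g (hashStore (fsStore "/tmp/blobs"))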
{-
data InvalidObject = InvalidObject
deriving (Show, Typeable)
instance Exception InvalidObject
verify :: (Functor f, MonadCatch f, Put a o, Get f a o, Eq a, Addressable a o) => Store f a o -> Store f a o
verify st = Store { store = doStore, load = doLoad }
where doStore x = do a <- store st x
when (a /= address x) $ throwM InvalidObject
return a
doLoad a = do o <- load st a
when (a /= address o) $ throwM InvalidObject
return o
-}
{-
-- possible implementation of <|>
orM :: Monad f => f (Maybe a) -> f (Maybe a) -> f (Maybe a)
orM m n = do x <- m
case x of
Just _ -> return x
Nothing -> n
type Rel a = a -> a -> a
duplicated :: Monad f => Rel (f ()) -> Rel (f (Maybe Decorated)) ->
RawStore p1 f -> RawStore p2 f -> RawStore p3 f
duplicated (<&>) (<|>) a b = RawStore { store = \o -> store a o <&> store b o
, load = \i -> load a i <|> load b i }
duplicatedSerial :: Monad f => RawStore p1 f -> RawStore p2 f -> RawStore p2 f
duplicatedSerial = duplicated (>>) orM
cache :: Monad f => RawStore Cached f -> RawStore p f -> RawStore p f
cache = duplicatedSerial
multi :: Monad f => (Address -> f (RawStore p f)) -> RawStore p f
multi locate = RawStore { store = doStore, load = doLoad }
where doStore o@(Decorated a _) = locate a >>= (`store` o)
doLoad a = locate a >>= (`load` a)
-}
| aristidb/datastorage | src/BlobStore.hs | bsd-3-clause | 6,103 | 0 | 14 | 1,580 | 1,039 | 555 | 484 | 55 | 3 |
module Main (main) where
import Graphics.UI.Gtk
import Graphics.UI.Gtk.WebKit.WebView
import System.FilePath
import System.Environment(getArgs)
main :: IO ()
main = do
[url, path] <- getArgs
webSnap url path
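-- Expected invocation (inferred from the pattern match above, not from any
-- documented CLI): exactly two arguments, a URL and an output file, e.g.
--   websnap http://example.com shot.png
-- Any other argument count makes the pattern match in 'main' fail at runtime.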
webSnap :: String -> FilePath -> IO ()
webSnap url output = do
initGUI
w <- offscreenWindowNew
wv <- webViewNew
set w
[ containerChild := wv
, windowDefaultWidth := 1024
, windowDefaultHeight := 768
]
webViewLoadUri wv url
widgetShowAll w
wv `on` loadFinished $ \_ -> savePage w output
mainGUI
savePage :: OffscreenWindow -> FilePath -> IO ()
savePage w f = do
p <- offscreenWindowGetPixbuf w
case p of
Nothing -> mainQuit
Just pixbuf -> do
pixbufSave pixbuf f "png" []
mainQuit
| jrb/websnap | src/Main.hs | bsd-3-clause | 756 | 0 | 13 | 188 | 255 | 127 | 128 | 29 | 2 |
module Main where
import Interpreter (interpret)
import Tokens (tokenize)
import Parser (parse)
import System.Environment (getArgs)
import Text.Show.Pretty (ppShow)
main :: IO ()
main = getArgs >>= run
run :: [String] -> IO ()
run ("c":args) = c args
run ("jit":args) = jit args
run ("i":args) = i args
run ("ast":args) = ast args
run _ = putStrLn "Usage:\n Interpret: rumex i FILE [INPUT]\n Compile: rumex c FILE\n JIT: rumex jit FILE\n Build AST: rumex ast FILE"
c :: [String] -> IO ()
c [file] = error "Compiler not implemented"
jit :: [String] -> IO ()
jit [file] = jit [file, ""]
jit [file, inp] = error "JIT not implemented"
i :: [String] -> IO ()
i [file] = i [file, ""]
i [file, inp] = do
src <- readFile file
putStrLn $ interpret inp src
ast :: [String] -> IO ()
ast [file] = do
src <- readFile file
putStrLn $ ppShow $ parse $ tokenize src
| tcsavage/rumex | src/Main.hs | bsd-3-clause | 876 | 0 | 9 | 185 | 382 | 200 | 182 | 28 | 1 |
{-# OPTIONS -fno-warn-type-defaults #-}
{-# LANGUAGE ExtendedDefaultRules #-}
{-# LANGUAGE OverloadedStrings #-}
-- | Foundation layout elements. See
-- <http://foundation.zurb.com/> for more information.
module Lucid.Foundation
( module Lucid.Foundation.Structure
, module Lucid.Foundation.Navigation
, module Lucid.Foundation.Media
, module Lucid.Foundation.Forms
, module Lucid.Foundation.Buttons
, module Lucid.Foundation.Typography
, module Lucid.Foundation.Callouts
, module Lucid.Foundation.Content
) where
import Lucid.Foundation.Structure
import Lucid.Foundation.Navigation
import Lucid.Foundation.Media
import Lucid.Foundation.Forms
import Lucid.Foundation.Buttons
import Lucid.Foundation.Typography
import Lucid.Foundation.Callouts
import Lucid.Foundation.Content
import Lucid.Base
import Lucid.Html5
import qualified Data.Text as T
import Data.Monoid
| athanclark/lucid-foundation | src/Lucid/Foundation.hs | bsd-3-clause | 888 | 0 | 5 | 103 | 140 | 97 | 43 | 24 | 0 |
-----------------------------------------------------------------------------
-- |
-- Module : Plugins.Monitors.Batt
-- Copyright : (c) 2010, 2011, 2012, 2013 Jose A Ortega
-- (c) 2010 Andrea Rossato, Petr Rockai
-- License : BSD-style (see LICENSE)
--
-- Maintainer : Jose A. Ortega Ruiz <[email protected]>
-- Stability : unstable
-- Portability : unportable
--
-- A battery monitor for Xmobar
--
-----------------------------------------------------------------------------
module Plugins.Monitors.Batt ( battConfig, runBatt, runBatt' ) where
import Control.Exception (SomeException, handle)
import Plugins.Monitors.Common
import System.FilePath ((</>))
import System.IO (IOMode(ReadMode), hGetLine, withFile)
import System.Posix.Files (fileExist)
import System.Console.GetOpt
data BattOpts = BattOpts
{ onString :: String
, offString :: String
, idleString :: String
, posColor :: Maybe String
, lowWColor :: Maybe String
, mediumWColor :: Maybe String
, highWColor :: Maybe String
, lowThreshold :: Float
, highThreshold :: Float
, onlineFile :: FilePath
, scale :: Float
}
defaultOpts :: BattOpts
defaultOpts = BattOpts
{ onString = "On"
, offString = "Off"
, idleString = "On"
, posColor = Nothing
, lowWColor = Nothing
, mediumWColor = Nothing
, highWColor = Nothing
, lowThreshold = -12
, highThreshold = -10
, onlineFile = "AC/online"
, scale = 1e6
}
options :: [OptDescr (BattOpts -> BattOpts)]
options =
[ Option "O" ["on"] (ReqArg (\x o -> o { onString = x }) "") ""
, Option "o" ["off"] (ReqArg (\x o -> o { offString = x }) "") ""
, Option "i" ["idle"] (ReqArg (\x o -> o { idleString = x }) "") ""
, Option "p" ["positive"] (ReqArg (\x o -> o { posColor = Just x }) "") ""
, Option "l" ["low"] (ReqArg (\x o -> o { lowWColor = Just x }) "") ""
, Option "m" ["medium"] (ReqArg (\x o -> o { mediumWColor = Just x }) "") ""
, Option "h" ["high"] (ReqArg (\x o -> o { highWColor = Just x }) "") ""
, Option "L" ["lowt"] (ReqArg (\x o -> o { lowThreshold = read x }) "") ""
, Option "H" ["hight"] (ReqArg (\x o -> o { highThreshold = read x }) "") ""
, Option "f" ["online"] (ReqArg (\x o -> o { onlineFile = x }) "") ""
, Option "s" ["scale"] (ReqArg (\x o -> o {scale = read x}) "") ""
]
parseOpts :: [String] -> IO BattOpts
parseOpts argv =
case getOpt Permute options argv of
(o, _, []) -> return $ foldr id defaultOpts o
(_, _, errs) -> ioError . userError $ concat errs
data Result = Result Float Float Float String | NA
sysDir :: FilePath
sysDir = "/sys/class/power_supply"
battConfig :: IO MConfig
battConfig = mkMConfig
"Batt: <watts>, <left>% / <timeleft>" -- template
["leftbar", "left", "acstatus", "timeleft", "watts"] -- replacements
data Files = Files
{ fFull :: String
, fNow :: String
, fVoltage :: String
, fCurrent :: String
, isCurrent :: Bool
} | NoFiles
data Battery = Battery
{ full :: !Float
, now :: !Float
, power :: !Float
}
safeFileExist :: String -> String -> IO Bool
safeFileExist d f = handle noErrors $ fileExist (d </> f)
where noErrors = const (return False) :: SomeException -> IO Bool
batteryFiles :: String -> IO Files
batteryFiles bat =
do is_charge <- exists "charge_now"
is_energy <- if is_charge then return False else exists "energy_now"
is_power <- exists "power_now"
plain <- if is_charge then exists "charge_full" else exists "energy_full"
let cf = if is_power then "power_now" else "current_now"
sf = if plain then "" else "_design"
return $ case (is_charge, is_energy) of
(True, _) -> files "charge" cf sf is_power
(_, True) -> files "energy" cf sf is_power
_ -> NoFiles
where prefix = sysDir </> bat
exists = safeFileExist prefix
files ch cf sf ip = Files { fFull = prefix </> ch ++ "_full" ++ sf
, fNow = prefix </> ch ++ "_now"
, fCurrent = prefix </> cf
, fVoltage = prefix </> "voltage_now"
, isCurrent = not ip}
haveAc :: FilePath -> IO Bool
haveAc f =
handle onError $ withFile (sysDir </> f) ReadMode (fmap (== "1") . hGetLine)
where onError = const (return False) :: SomeException -> IO Bool
readBattery :: Float -> Files -> IO Battery
readBattery _ NoFiles = return $ Battery 0 0 0
readBattery sc files =
do a <- grab $ fFull files
b <- grab $ fNow files
d <- grab $ fCurrent files
let sc' = if isCurrent files then sc / 10 else sc
return $ Battery (3600 * a / sc') -- wattseconds
(3600 * b / sc') -- wattseconds
(d / sc') -- watts
where grab f = handle onError $ withFile f ReadMode (fmap read . hGetLine)
onError = const (return (-1)) :: SomeException -> IO Float
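-- The arithmetic above assumes the usual sysfs convention of micro-units
-- (e.g. energy_* in µWh and power_now in µW): dividing by 'scale' (1e6 by
-- default) yields watt-hours, and the 3600 factor turns those into
-- watt-seconds, so a 50 Wh full charge (50e6 µWh) comes out as 180000 Ws.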
readBatteries :: BattOpts -> [Files] -> IO Result
readBatteries opts bfs =
do bats <- mapM (readBattery (scale opts)) (take 3 bfs)
ac <- haveAc (onlineFile opts)
let sign = if ac then 1 else -1
ft = sum (map full bats)
left = if ft > 0 then sum (map now bats) / ft else 0
watts = sign * sum (map power bats)
idle = watts == 0
time = if idle then 0 else sum $ map time' bats
mwatts = if idle then 1 else sign * watts
time' b = (if ac then full b - now b else now b) / mwatts
acstr = if idle then idleString opts else
if ac then onString opts else offString opts
return $ if isNaN left then NA else Result left watts time acstr
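-- Time remaining is simply energy over power: when discharging it is
-- now / watts (time to empty), on AC it is (full - now) / watts (time to full),
-- summed over the batteries, with mwatts pinned to 1 when idle to avoid a
-- division by zero. E.g. 64800 Ws left at a 12 W drain gives 5400 s, which
-- fmtTime below renders as "1:30".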
runBatt :: [String] -> Monitor String
runBatt = runBatt' ["BAT0","BAT1","BAT2"]
runBatt' :: [String] -> [String] -> Monitor String
runBatt' bfs args = do
opts <- io $ parseOpts args
c <- io $ readBatteries opts =<< mapM batteryFiles bfs
suffix <- getConfigValue useSuffix
d <- getConfigValue decDigits
case c of
Result x w t s ->
do l <- fmtPercent x
ws <- fmtWatts w opts suffix d
parseTemplate (l ++ [s, fmtTime $ floor t, ws])
NA -> getConfigValue naString
where fmtPercent :: Float -> Monitor [String]
fmtPercent x = do
let x' = minimum [1, x]
p <- showPercentWithColors x'
b <- showPercentBar (100 * x') x'
return [b, p]
fmtWatts x o s d = do
ws <- showWithPadding $ showDigits d x ++ (if s then "W" else "")
return $ color x o ws
fmtTime :: Integer -> String
fmtTime x = hours ++ ":" ++ if length minutes == 2
then minutes else '0' : minutes
where hours = show (x `div` 3600)
minutes = show ((x `mod` 3600) `div` 60)
maybeColor Nothing str = str
maybeColor (Just c) str = "<fc=" ++ c ++ ">" ++ str ++ "</fc>"
color x o | x >= 0 = maybeColor (posColor o)
| -x >= highThreshold o = maybeColor (highWColor o)
| -x >= lowThreshold o = maybeColor (mediumWColor o)
| otherwise = maybeColor (lowWColor o)
| apoikos/pkg-xmobar | src/Plugins/Monitors/Batt.hs | bsd-3-clause | 7,148 | 0 | 17 | 2,046 | 2,519 | 1,343 | 1,176 | 161 | 9 |
{-|
Module : Pipeline
Description : Pipeline worker logic
Not working - needs refactor after last changes in Worker module.
-}
module Pipeline
where
import Protocol
import Reader
import Task as T
import qualified Worker as W
import qualified Data.List as DL
import qualified Data.Set as Set
import System.Directory (getCurrentDirectory)
import Control.Monad
import Control.Monad.Trans (lift)
import Control.Monad.Trans.Maybe
import Control.Exception (handle)
-- TODO: functions in class Stage:
--   input chain, output chain
--   input_hook - allows the user to specify the order in which the input labels should be iterated over
--   done - function called after all the processing
-- The order of invocation of the task entry points of a stage
-- is: *input_hook*, *init*, *process*, and *done*, where *init* and
-- *done* are optional and called only once, while *process* is called once for every task input.
data Grouping
= Split
| Group_label
| Group_node
| Group_node_label
| Group_all
data Stage p = Stage {
process_fun :: T.Process p,
params :: p,
name :: String,
grouping :: Grouping
}
type Pipeline p = [Stage p]
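-- A minimal sketch of a three-stage pipeline; the stage functions and the ()
-- params are hypothetical placeholders, not part of this module:
--
-- > wordCount :: Pipeline ()
-- > wordCount = [ Stage mapFun () "map" Split
-- >             , Stage shuffleFun () "shuffle" Group_node
-- >             , Stage reduceFun () "reduce" Group_all
-- >             ]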
-- | Stage names in a pipeline have to be unique; this returns 'True' when they are.
-- The grouping of a stage should be one of split, group_label, group_all,
-- group_node and group_node_label.
-- This function is only used on small lists, so building the intermediate Set is cheap.
check_pipeline :: Pipeline p -> Bool
check_pipeline pipeline = (Set.size $ Set.fromList $ map name pipeline) == length pipeline
stage_names :: Pipeline p -> [String]
stage_names pipeline =
map name pipeline
get_process_fun :: Pipeline p-> String -> Maybe (T.Process p)
get_process_fun pipeline stage_name =
DL.lookup stage_name assoc
where assoc = map (\p -> (name p, process_fun p)) pipeline
run_task :: FilePath -> String -> Task -> [Input] -> T.Process p -> MaybeT IO Master_msg
run_task pwd file_templ task list_inputs process_fun = do
-- TODO call init function
-- TODO call done function
outputs <- lift $ W.run_stage pwd file_templ task list_inputs process_fun --check combine, sort flags + input_hook fun
mapM W.send_outputs outputs
exchange_msg W_done
-- | Analogous to classic worker run
run :: Pipeline p -> MaybeT IO ()
run pipeline = do
pwd <- lift getCurrentDirectory
lift send_worker
W.expect_ok
M_task task <- exchange_msg W_task
let task_stage = stage task
let file_templ = task_stage ++ "_out_"
inputs_list <- W.get_inputs []
case (get_process_fun pipeline task_stage) of
Just process_fun -> do run_task pwd file_templ task inputs_list process_fun
Nothing -> do exchange_msg $ W_fatal "Non-existent stage"
return ()
-- | For example, a pipeline with stages named "map" (Split), "shuffle" (Group_node)
-- and "reduce" (Group_all), each built with the 'Stage' constructor as sketched above.
start_pipeline :: Pipeline p -> IO ()
start_pipeline pipeline = do
result <- runMaybeT $ run pipeline
case result of
Nothing -> (runMaybeT $ exchange_msg $ W_fatal "Protocol error") >> return ()
Just _ -> return ()
| zuzia/haskell_worker | src/Pipeline.hs | bsd-3-clause | 3,020 | 0 | 13 | 612 | 663 | 345 | 318 | 57 | 2 |
{-# LANGUAGE TemplateHaskell #-}
module Pinchot.SyntaxTree.Wrappers where
import qualified Control.Lens as Lens
import Data.List.NonEmpty (NonEmpty)
import Data.Maybe (catMaybes)
import qualified Language.Haskell.TH as T
import Pinchot.Names
import Pinchot.Rules
import Pinchot.Types
-- # Wrapped
-- | Creates a 'Lens.Wrapped' instance for each 'Rule' and its
-- ancestors, if there is an instance.
-- Only 'Pinchot.terminal', 'Pinchot.wrap',
-- 'Pinchot.opt', 'Pinchot.star', and 'Pinchot.plus'
-- get instances of 'Lens.Wrapped'.
--
-- This must be
-- spliced in the same module in which the syntax tree types are
-- created; this way, no orphans are created. Since ancestors are
-- included, you can get the entire tree of types that you need by
-- applying this function to a single start symbol.
--
-- Example: "Pinchot.Examples.SyntaxTrees".
wrappedInstances
:: [Rule t]
-> T.DecsQ
wrappedInstances
= sequence
. catMaybes
. fmap singleWrappedInstance
. families
-- | Creates a 'Lens.Wrapped' instance for the 'Rule', if there is
-- one. Only 'Pinchot.terminal', 'Pinchot.wrap',
-- 'Pinchot.opt', 'Pinchot.star', and 'Pinchot.plus'
-- get instances of 'Wrapped'.
-- This must be spliced in the same module in which the
-- syntax tree types are created.
singleWrappedInstance
:: Rule t
-> Maybe (T.Q T.Dec)
singleWrappedInstance (Rule nm _ ty) = case ty of
Terminal _ -> Just $ wrappedTerminal nm
Wrap (Rule inner _ _) -> Just $ wrappedWrap inner nm
Opt (Rule inner _ _) -> Just $ wrappedOpt inner nm
Star (Rule inner _ _) -> Just $ wrappedStar inner nm
Plus (Rule inner _ _) -> Just $ wrappedPlus inner nm
_ -> Nothing
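-- As an illustration, for a hypothetical rule named @Foo@ whose wrapped type is
-- @Inner@, 'makeWrapped' below produces an instance of this shape:
--
-- > instance Lens.Wrapped (Foo t a) where
-- >   type Unwrapped (Foo t a) = Inner
-- >   _Wrapped' = Lens.iso (\(Foo x) -> x) Foo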
makeWrapped
:: T.TypeQ
-- ^ Name of wrapped type
-> String
-- ^ Name of wrapper type
-> T.Q T.Dec
makeWrapped wrappedType nm = T.instanceD (return []) typ decs
where
name = T.mkName nm
typ = (T.conT ''Lens.Wrapped) `T.appT`
((T.conT name)
`T.appT` (typeT)
`T.appT` (typeA))
decs = [assocType, wrapper]
where
assocType = T.tySynInstD ''Lens.Unwrapped
(T.tySynEqn [T.conT name
`T.appT` (typeT)
`T.appT` (typeA)]
wrappedType)
wrapper = T.funD 'Lens._Wrapped'
[T.clause [] (T.normalB body) []]
where
body = (T.varE 'Lens.iso)
`T.appE` unwrap
`T.appE` doWrap
where
unwrap = do
local <- T.newName "_local"
let lambPat = T.conP name [T.varP local]
T.lamE [lambPat] (T.varE local)
doWrap = do
local <- T.newName "_local"
let expn = (T.conE name) `T.appE` (T.varE local)
lambPat = T.varP local
T.lamE [lambPat] expn
wrappedOpt
:: String
-- ^ Wrapped rule name
-> String
-- ^ Wrapping Rule name
-> T.Q T.Dec
wrappedOpt wrappedName = makeWrapped maybeName
where
maybeName = (T.conT ''Maybe)
`T.appT`
((T.conT (T.mkName wrappedName))
`T.appT` (typeT)
`T.appT` (typeA))
wrappedTerminal
:: String
-- ^ Wrapper Rule name
-> T.Q T.Dec
wrappedTerminal = makeWrapped
[t| ( $(typeT), $(typeA) ) |]
wrappedTerminals
:: String
-- ^ Wrapper Rule name
-> T.Q T.Dec
wrappedTerminals = makeWrapped
[t| [ ($(typeT), $(typeA)) ] |]
wrappedStar
:: String
-- ^ Wrapped rule name
-> String
-- ^ Wrapping Rule name
-> T.Q T.Dec
wrappedStar wrappedName = makeWrapped innerName
where
innerName =
[t| [ $(T.conT (T.mkName wrappedName)) $(typeT)
$(typeA) ] |]
wrappedPlus
:: String
-- ^ Wrapped rule name
-> String
-- ^ Wrapping Rule name
-> T.Q T.Dec
wrappedPlus wrappedName = makeWrapped tupName
where
tupName = [t| NonEmpty ( $(T.conT (T.mkName wrappedName))
$(typeT)
$(typeA)) |]
wrappedWrap
:: String
-- ^ Wrapped rule name
-> String
-- ^ Wrapping Rule name
-> T.Q T.Dec
wrappedWrap wrappedName = makeWrapped innerName
where
innerName =
((T.conT (T.mkName wrappedName))
`T.appT` (typeT)
`T.appT` (typeA))
| massysett/pinchot | pinchot/lib/Pinchot/SyntaxTree/Wrappers.hs | bsd-3-clause | 4,267 | 0 | 21 | 1,219 | 1,025 | 560 | 465 | 102 | 6 |
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE CPP #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TypeSynonymInstances #-}
import Prelude
import qualified Control.Exception as E
import Control.Monad
import Data.Aeson
import Data.Aeson.Types (Parser)
import qualified Data.ByteString.Lazy as BL
import Data.Char (isSpace, toLower)
import Data.List (isInfixOf, sort)
import qualified Data.Map as M
import System.Directory
import System.Environment (getArgs)
import System.Exit
import System.FilePath
import System.IO.Temp (withSystemTempDirectory)
import System.Process
import Text.CSL
import Text.CSL.Compat.Pandoc (writeHtmlString)
import Text.CSL.Reference
import Text.CSL.Style hiding (Number)
import Text.Pandoc (Block (..), Format (..), Inline (..),
Pandoc (..), bottomUp, nullMeta)
import qualified Text.Pandoc.UTF8 as UTF8
import Text.Printf
data TestCase = TestCase{
testMode :: Mode -- mode
, testBibopts :: BibOpts -- bibsection
, testCitations :: [CiteObject] -- citations
, testCitationItems :: Citations -- citation-items
, testCsl :: Style -- csl
, testAbbreviations :: Abbreviations -- abbreviations
, testReferences :: [Reference] -- input
, testResult :: String -- result
} deriving (Show)
data Mode = CitationMode
| CitationRTFMode
| BibliographyMode
| BibliographyHeaderMode
| BibliographyNoSortMode
deriving Show
instance FromJSON Mode where
parseJSON (String "citation") = return CitationMode
parseJSON (String "citation-rtf") = return CitationRTFMode
parseJSON (String "bibliography") = return BibliographyMode
parseJSON (String "bibliography-header") = return BibliographyHeaderMode
parseJSON (String "bibliography-nosort") = return BibliographyNoSortMode
parseJSON _ = fail "Unknown mode"
instance FromJSON TestCase where
parseJSON (Object v) = TestCase <$>
v .: "mode" <*>
v .:? "bibsection" .!= Select [] [] <*>
((v .: "citations") >>= parseCitations) <*>
v .:? "citation_items" .!= [] <*>
(parseCSL <$> (v .: "csl")) <*>
v .:? "abbreviations" .!= (Abbreviations M.empty) <*>
v .: "input" <*>
v .: "result"
where parseCitations :: Data.Aeson.Value -> Parser [CiteObject]
parseCitations x@Array{} = parseJSON x
parseCitations _ = return []
parseJSON _ = fail "Could not parse test case"
newtype CiteObject =
CiteObject { unCiteObject :: [Cite] } deriving Show
instance FromJSON CiteObject where
parseJSON (Array v) =
case fromJSON (Array v) of
Success [Object x, Array _, Array _] ->
CiteObject <$> x .: "citationItems"
Error e -> fail $ "Could not parse CiteObject: " ++ e
x -> fail $ "Could not parse CiteObject" ++ show x
parseJSON x = fail $ "Could not parse CiteObject " ++ show x
#if MIN_VERSION_aeson(0,10,0)
#else
instance FromJSON [CiteObject] where
parseJSON (Array v) = mapM parseJSON $ V.toList v
parseJSON _ = return []
#endif
data TestResult =
Passed
| Skipped
| Failed
| Errored
deriving (Show, Eq)
testDir :: FilePath
testDir = "citeproc-test" </> "processor-tests" </> "machines"
handler :: FilePath -> E.SomeException -> IO TestResult
handler path e = do
putStrLn $ "[ERROR] " ++ path ++ "\n" ++ show e
return Errored
runTest :: FilePath -> IO TestResult
runTest path = E.handle (handler path) $ do
raw <- BL.readFile path
let testCase = either error id $ eitherDecode raw
let procOpts' = ProcOpts (testBibopts testCase) False
style <- localizeCSL Nothing
$ (testCsl testCase) { styleAbbrevs = testAbbreviations testCase }
let refs = testReferences testCase
let cites = map unCiteObject (testCitations testCase) ++ testCitationItems testCase
let cites' = if null cites
then [map (\ref -> emptyCite{ citeId = unLiteral $ refId ref}) refs]
else cites
let expected = adjustEntities $ fixBegins $ trimEnd $ testResult testCase
let mode = testMode testCase
let assemble BibliographyMode xs =
"<div class=\"csl-bib-body\">\n" ++
unlines (map (\x -> " <div class=\"csl-entry\">" ++ x ++
"</div>") xs) ++ "</div>\n"
assemble _ xs = unlines xs
case mode of
BibliographyHeaderMode -> do
putStrLn $ "[SKIPPED] " ++ path ++ "\n"
return Skipped
BibliographyNoSortMode -> do
putStrLn $ "[SKIPPED] " ++ path ++ "\n"
return Skipped
_ -> do
let result = assemble mode
$ map (inlinesToString . renderPandoc style) $
(case mode of {CitationMode -> citations; _ -> bibliography})
$ citeproc procOpts' style refs cites'
if result == expected
then do
putStrLn $ "[PASSED] " ++ path ++ "\n"
return Passed
else do
putStrLn $ "[FAILED] " ++ path
showDiff expected result
putStrLn ""
return Failed
trimEnd :: String -> String
trimEnd = reverse . ('\n':) . dropWhile isSpace . reverse
-- this is designed to mimic the test suite's output:
inlinesToString :: [Inline] -> String
inlinesToString ils =
writeHtmlString
$ bottomUp (concatMap adjustSpans)
$ Pandoc nullMeta [Plain ils]
-- We want & instead of &#38; etc.
adjustEntities :: String -> String
adjustEntities ('&':'#':'3':'8':';':xs) = "&" ++ adjustEntities xs
adjustEntities (x:xs) = x : adjustEntities xs
adjustEntities [] = []
-- citeproc-js test suite expects "citations" to be formatted like
-- .. [0] Smith (2007)
-- >> [1] Jones (2008)
-- To get a meaningful comparison, we remove this.
fixBegins :: String -> String
fixBegins = unlines . map fixLine . lines
where fixLine ('.':'.':'[':xs) = dropWhile isSpace $ dropWhile (not . isSpace) xs
fixLine ('>':'>':'[':xs) = dropWhile isSpace $ dropWhile (not . isSpace) xs
fixLine xs = xs
-- adjust the spans so we fit what the test suite expects.
adjustSpans :: Inline -> [Inline]
adjustSpans (Note [Para xs]) = xs
adjustSpans (Link _ ils _) = ils
adjustSpans (Span ("",[],[]) xs) = xs
adjustSpans (Span ("",["nocase"],[]) xs) = xs
adjustSpans (Span ("",["citeproc-no-output"],[]) _) =
[Str "[CSL STYLE ERROR: reference with no printed form.]"]
adjustSpans (Span (id',classes,kvs) ils) =
[Span (id',classes',kvs') ils]
where classes' = filter (`notElem` ["csl-no-emph","csl-no-strong","csl-no-smallcaps"]) classes
kvs' = if null styles then kvs else (("style", concat styles) : kvs)
styles = ["font-style:normal;" | "csl-no-emph" `elem` classes]
++ ["font-weight:normal;" | "csl-no-strong" `elem` classes]
++ ["font-variant:normal;" | "csl-no-smallcaps" `elem` classes]
adjustSpans (Emph xs) =
RawInline (Format "html") "<i>" : xs ++ [RawInline (Format "html") "</i>"]
adjustSpans (Strong xs) =
RawInline (Format "html") "<b>" : xs ++ [RawInline (Format "html") "</b>"]
adjustSpans (SmallCaps xs) =
RawInline (Format "html") "<span style=\"font-variant:small-caps;\">" : xs ++ [RawInline (Format "html") "</span>"]
adjustSpans x = [x]
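-- For instance, the class-to-style rewriting above turns
--   Span ("", ["csl-no-emph"], []) [Str "x"]
-- into
--   [Span ("", [], [("style", "font-style:normal;")]) [Str "x"]]
-- which is the inline-CSS form the test suite's expected output uses.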
showDiff :: String -> String -> IO ()
showDiff expected' result' =
withSystemTempDirectory "test-pandoc-citeproc-XXX" $ \fp -> do
let expectedf = fp </> "expected"
let actualf = fp </> "actual"
UTF8.writeFile expectedf expected'
UTF8.writeFile actualf result'
withDirectory fp $ void $ rawSystem "diff" ["-u","expected","actual"]
withDirectory :: FilePath -> IO a -> IO a
withDirectory fp action = do
oldDir <- getCurrentDirectory
setCurrentDirectory fp
result <- action
setCurrentDirectory oldDir
return result
main :: IO ()
main = do
args <- getArgs
let matchesPattern x
| null args = True
| otherwise = any (`isInfixOf` (map toLower x))
(map (map toLower . takeBaseName) args)
exists <- doesDirectoryExist testDir
unless exists $ do
putStrLn "Downloading test suite"
_ <- rawSystem "git" ["clone", "https://github.com/citation-style-language/test-suite.git", "citeproc-test"]
withDirectory "citeproc-test" $
void $ rawSystem "python" ["processor.py", "--grind"]
testFiles <- if any ('/' `elem`) args
then return args
else (map (testDir </>) . sort .
filter matchesPattern .
filter (\f -> takeExtension f == ".json"))
<$> getDirectoryContents testDir
results <- mapM runTest testFiles
let numpasses = length $ filter (== Passed) results
let numskipped = length $ filter (== Skipped) results
let numfailures = length $ filter (== Failed) results
let numerrors = length $ filter (== Errored) results
putStrLn $ show numpasses ++ " passed; " ++ show numfailures ++
" failed; " ++ show numskipped ++ " skipped; " ++
show numerrors ++ " errored."
let summary = unlines $ zipWith (\fp res -> printf "%-10s %s" (show res) fp) testFiles results
when (null args) $ do -- write log if complete test suite run
ex <- doesFileExist "test-citeproc.log"
when ex $ do
putStrLn "Copying existing test-citeproc.log to test-citeproc.log.old"
copyFile "test-citeproc.log" "test-citeproc.log.old"
putStrLn "Writing test-citeproc.log."
UTF8.writeFile "test-citeproc.log" summary
exitWith $ if numfailures == 0
then ExitSuccess
else ExitFailure $ numfailures + numerrors
| adunning/pandoc-citeproc | tests/test-citeproc.hs | bsd-3-clause | 10,230 | 0 | 22 | 2,876 | 2,808 | 1,447 | 1,361 | 221 | 7 |
{-# LANGUAGE ImplicitParams #-}
module Data.Macho.Types where
import Data.Int
import Data.Word
import Data.Bits
import Data.Binary.Get
import Data.Binary.Put
import Data.Bimap (Bimap)
import qualified Data.Bimap as Bimap
import Control.Monad
import Control.Applicative
import qualified Data.ByteString as B
import qualified Data.ByteString.Char8 as C
import qualified Data.ByteString.Internal as B
import qualified Data.ByteString.Lazy as L
data MH_MAGIC
= MH_MAGIC32
| MH_MAGIC64
| MH_CIGAM32
| MH_CIGAM64
deriving (Ord, Eq, Show, Enum)
magic :: Bimap Word32 MH_MAGIC
magic = Bimap.fromList
[ (0xfeedface, MH_MAGIC32)
, (0xfeedfacf, MH_MAGIC64)
, (0xcefaedfe, MH_CIGAM32)
, (0xcffaedfe, MH_CIGAM64)
]
macho_to_magic = (magic Bimap.!)
macho_from_magic = (magic Bimap.!>)
bitfield_le off sz word = (word `shiftL` (32 - off - sz)) `shiftR` (32 - sz)
bitfield_be off sz word = (word `shiftL` off) `shiftR` (32 - sz)
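-- Both extract an sz-bit field from a 32-bit word; they differ only in whether
-- the offset counts from the least or the most significant end. For a Word32,
-- bitfield_le 8 8 0x12345678 == 0x56, while bitfield_be 8 8 0x12345678 == 0x34.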
newtype Decoder a = Decoder { runDecoder :: MachoBinary -> Get a }
decode ds bs = do
b <- binary
return $ runGet (runDecoder ds b) bs
getWord :: Decoder Word64
getWord = do
is64 <- is64bit
if is64 then getWord64 else fromIntegral <$> getWord32
binary :: Decoder MachoBinary
binary = Decoder pure
lift :: Get a -> Decoder a
lift g = Decoder (\_ -> g)
instance Functor Decoder where
fmap = liftM
instance Applicative Decoder where
pure = return
(<*>) = ap
instance Monad Decoder where
return x = Decoder (\_ -> return x)
Decoder f >>= g = Decoder $ \h -> do x <- f h; runDecoder (g x) h
class MonadDecoder m where
is64bit :: m Bool
getWord16 :: m Word16
getWord32 :: m Word32
getWord64 :: m Word64
bitfield :: Int -> Int -> Word32 -> m Word32
instance MonadDecoder Decoder where
is64bit = Decoder (pure . _is64bit)
getWord16 = Decoder _getWord16
getWord32 = Decoder _getWord32
getWord64 = Decoder _getWord64
bitfield i j x = Decoder (\h -> pure $ _bitfield h i j x)
data MachoBinary = MachoBinary
{ _is64bit :: Bool
, _getWord16 :: Get Word16
, _getWord32 :: Get Word32
, _getWord64 :: Get Word64
, _putWord16 :: Word16 -> Put
, _putWord32 :: Word32 -> Put
, _putWord64 :: Word64 -> Put
, _bitfield :: Int -> Int -> Word32 -> Word32
}
le_binary = MachoBinary
{ _is64bit = False
, _getWord16 = getWord16le
, _getWord32 = getWord32le
, _getWord64 = getWord64le
, _putWord16 = putWord16le
, _putWord32 = putWord32le
, _putWord64 = putWord64le
, _bitfield = bitfield_le
}
be_binary = MachoBinary
{ _is64bit = False
, _getWord16 = getWord16be
, _getWord32 = getWord32be
, _getWord64 = getWord64be
, _putWord16 = putWord16be
, _putWord32 = putWord32be
, _putWord64 = putWord64be
, _bitfield = bitfield_be
}
macho_binary MH_MAGIC32 = le_binary
macho_binary MH_MAGIC64 = le_binary { _is64bit = True }
macho_binary MH_CIGAM32 = be_binary
macho_binary MH_CIGAM64 = be_binary { _is64bit = True }
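-- As wired up here, the magic number fixes both the byte order and the word
-- size of the decoder: MH_MAGIC* selects little-endian Gets, MH_CIGAM* selects
-- big-endian ones, and the 64-bit variants set _is64bit so that getWord reads
-- a full 64-bit word instead of widening a 32-bit one.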
data CPU_TYPE
= CPU_TYPE_X86
| CPU_TYPE_X86_64
| CPU_TYPE_ARM
| CPU_TYPE_POWERPC
| CPU_TYPE_POWERPC64
deriving (Ord, Show, Eq, Enum)
cputype :: Bimap Word32 CPU_TYPE
cputype = Bimap.fromList
[ (0x00000007, CPU_TYPE_X86)
, (0x01000007, CPU_TYPE_X86_64)
, (0x0000000c, CPU_TYPE_ARM)
, (0x00000012, CPU_TYPE_POWERPC)
, (0x01000012, CPU_TYPE_POWERPC64)
]
mach_to_cputype = (cputype Bimap.!)
mach_from_cputype = (cputype Bimap.!>)
data CPU_SUBTYPE
= CPU_SUBTYPE_INTEL
| CPU_SUBTYPE_I386_ALL
| CPU_SUBTYPE_386
| CPU_SUBTYPE_486
| CPU_SUBTYPE_486SX
| CPU_SUBTYPE_PENT
| CPU_SUBTYPE_PENTPRO
| CPU_SUBTYPE_PENTII_M3
| CPU_SUBTYPE_PENTII_M5
| CPU_SUBTYPE_CELERON
| CPU_SUBTYPE_CELERON_MOBILE
| CPU_SUBTYPE_PENTIUM_3
| CPU_SUBTYPE_PENTIUM_3_M
| CPU_SUBTYPE_PENTIUM_3_XEON
| CPU_SUBTYPE_PENTIUM_M
| CPU_SUBTYPE_PENTIUM_4
| CPU_SUBTYPE_PENTIUM_4_M
| CPU_SUBTYPE_ITANIUM
| CPU_SUBTYPE_ITANIUM_2
| CPU_SUBTYPE_XEON
| CPU_SUBTYPE_XEON_MP
| CPU_SUBTYPE_INTEL_FAMILY
| CPU_SUBTYPE_INTEL_FAMILY_MAX
| CPU_SUBTYPE_INTEL_MODEL
| CPU_SUBTYPE_INTEL_MODEL_ALL
| CPU_SUBTYPE_X86_ALL
| CPU_SUBTYPE_X86_64_ALL
| CPU_SUBTYPE_X86_ARCH1
| CPU_SUBTYPE_POWERPC_ALL
| CPU_SUBTYPE_POWERPC_601
| CPU_SUBTYPE_POWERPC_602
| CPU_SUBTYPE_POWERPC_603
| CPU_SUBTYPE_POWERPC_603e
| CPU_SUBTYPE_POWERPC_603ev
| CPU_SUBTYPE_POWERPC_604
| CPU_SUBTYPE_POWERPC_604e
| CPU_SUBTYPE_POWERPC_620
| CPU_SUBTYPE_POWERPC_750
| CPU_SUBTYPE_POWERPC_7400
| CPU_SUBTYPE_POWERPC_7450
| CPU_SUBTYPE_POWERPC_970
| CPU_SUBTYPE_ARM_ALL
| CPU_SUBTYPE_ARM_V4T
| CPU_SUBTYPE_ARM_V6
deriving (Ord, Show, Eq, Enum)
cpusubtype :: Bimap (CPU_TYPE, Word32) CPU_SUBTYPE
cpusubtype = Bimap.fromList
[ ((CPU_TYPE_X86, 132) , CPU_SUBTYPE_486SX)
, ((CPU_TYPE_X86, 5) , CPU_SUBTYPE_PENT)
, ((CPU_TYPE_X86, 22) , CPU_SUBTYPE_PENTPRO)
, ((CPU_TYPE_X86, 54) , CPU_SUBTYPE_PENTII_M3)
, ((CPU_TYPE_X86, 86) , CPU_SUBTYPE_PENTII_M5)
, ((CPU_TYPE_X86, 103) , CPU_SUBTYPE_CELERON)
, ((CPU_TYPE_X86, 119) , CPU_SUBTYPE_CELERON_MOBILE)
, ((CPU_TYPE_X86, 8) , CPU_SUBTYPE_PENTIUM_3)
, ((CPU_TYPE_X86, 24) , CPU_SUBTYPE_PENTIUM_3_M)
, ((CPU_TYPE_X86, 40) , CPU_SUBTYPE_PENTIUM_3_XEON)
, ((CPU_TYPE_X86, 9) , CPU_SUBTYPE_PENTIUM_M)
, ((CPU_TYPE_X86, 10) , CPU_SUBTYPE_PENTIUM_4)
, ((CPU_TYPE_X86, 26) , CPU_SUBTYPE_PENTIUM_4_M)
, ((CPU_TYPE_X86, 11) , CPU_SUBTYPE_ITANIUM)
, ((CPU_TYPE_X86, 27) , CPU_SUBTYPE_ITANIUM_2)
, ((CPU_TYPE_X86, 12) , CPU_SUBTYPE_XEON)
, ((CPU_TYPE_X86, 28) , CPU_SUBTYPE_XEON_MP)
, ((CPU_TYPE_X86, 3) , CPU_SUBTYPE_X86_ALL)
, ((CPU_TYPE_X86, 4) , CPU_SUBTYPE_X86_ARCH1)
, ((CPU_TYPE_X86_64, 3) , CPU_SUBTYPE_X86_64_ALL)
, ((CPU_TYPE_POWERPC, 0) , CPU_SUBTYPE_POWERPC_ALL)
, ((CPU_TYPE_POWERPC, 1) , CPU_SUBTYPE_POWERPC_601)
, ((CPU_TYPE_POWERPC, 2) , CPU_SUBTYPE_POWERPC_602)
, ((CPU_TYPE_POWERPC, 3) , CPU_SUBTYPE_POWERPC_603)
, ((CPU_TYPE_POWERPC, 4) , CPU_SUBTYPE_POWERPC_603e)
, ((CPU_TYPE_POWERPC, 5) , CPU_SUBTYPE_POWERPC_603ev)
, ((CPU_TYPE_POWERPC, 6) , CPU_SUBTYPE_POWERPC_604)
, ((CPU_TYPE_POWERPC, 7) , CPU_SUBTYPE_POWERPC_604e)
, ((CPU_TYPE_POWERPC, 8) , CPU_SUBTYPE_POWERPC_620)
, ((CPU_TYPE_POWERPC, 9) , CPU_SUBTYPE_POWERPC_750)
, ((CPU_TYPE_POWERPC, 10) , CPU_SUBTYPE_POWERPC_7400)
, ((CPU_TYPE_POWERPC, 11) , CPU_SUBTYPE_POWERPC_7450)
, ((CPU_TYPE_POWERPC, 100) , CPU_SUBTYPE_POWERPC_970)
, ((CPU_TYPE_POWERPC64, 0) , CPU_SUBTYPE_POWERPC_ALL)
, ((CPU_TYPE_POWERPC64, 1) , CPU_SUBTYPE_POWERPC_601)
, ((CPU_TYPE_POWERPC64, 2) , CPU_SUBTYPE_POWERPC_602)
, ((CPU_TYPE_POWERPC64, 3) , CPU_SUBTYPE_POWERPC_603)
, ((CPU_TYPE_POWERPC64, 4) , CPU_SUBTYPE_POWERPC_603e)
, ((CPU_TYPE_POWERPC64, 5) , CPU_SUBTYPE_POWERPC_603ev)
, ((CPU_TYPE_POWERPC64, 6) , CPU_SUBTYPE_POWERPC_604)
, ((CPU_TYPE_POWERPC64, 7) , CPU_SUBTYPE_POWERPC_604e)
, ((CPU_TYPE_POWERPC64, 8) , CPU_SUBTYPE_POWERPC_620)
, ((CPU_TYPE_POWERPC64, 9) , CPU_SUBTYPE_POWERPC_750)
, ((CPU_TYPE_POWERPC64, 10) , CPU_SUBTYPE_POWERPC_7400)
, ((CPU_TYPE_POWERPC64, 11) , CPU_SUBTYPE_POWERPC_7450)
, ((CPU_TYPE_POWERPC64, 100), CPU_SUBTYPE_POWERPC_970)
, ((CPU_TYPE_ARM, 0) , CPU_SUBTYPE_ARM_ALL)
, ((CPU_TYPE_ARM, 5) , CPU_SUBTYPE_ARM_V4T)
, ((CPU_TYPE_ARM, 6) , CPU_SUBTYPE_ARM_V6)
]
mach_to_cpusubtype = curry (cpusubtype Bimap.!)
mach_from_cpusubtype = (cpusubtype Bimap.!>)
data MachoHeader = MachoHeader
{ mh_cputype :: CPU_TYPE -- ^ CPU family the Mach-O executes on.
, mh_cpusubtype :: CPU_SUBTYPE -- ^ Specific CPU type the Mach-O executes on.
, mh_filetype :: MH_FILETYPE -- ^ Type of Mach-o file.
, mh_flags :: [MH_FLAGS] -- ^ Flags.
} deriving (Show, Eq)
data MH_FILETYPE
= MH_OBJECT -- ^ relocatable object file
| MH_EXECUTE -- ^ demand paged executable file
| MH_CORE -- ^ core file
| MH_PRELOAD -- ^ preloaded executable file
| MH_DYLIB -- ^ dynamically bound shared library
| MH_DYLINKER -- ^ dynamic link editor
| MH_BUNDLE -- ^ dynamically bound bundle file
| MH_DYLIB_STUB -- ^ shared library stub for static. linking only, no section contents
| MH_DSYM -- ^ companion file with only debug. sections
| MH_KEXT_BUNDLE
deriving (Ord, Show, Eq, Enum)
mach_filetype 0x1 = MH_OBJECT
mach_filetype 0x2 = MH_EXECUTE
mach_filetype 0x4 = MH_CORE
mach_filetype 0x5 = MH_PRELOAD
mach_filetype 0x6 = MH_DYLIB
mach_filetype 0x7 = MH_DYLINKER
mach_filetype 0x8 = MH_BUNDLE
mach_filetype 0x9 = MH_DYLIB_STUB
mach_filetype 0xa = MH_DSYM
mach_filetype 0xb = MH_KEXT_BUNDLE
data MH_FLAGS
= MH_NOUNDEFS -- ^ the object file has no undefined references
| MH_INCRLINK -- ^ the object file is the output of an incremental link against a base file and can't be link edited again
| MH_DYLDLINK -- ^ the object file is input for the dynamic linker and can't be staticly link edited again
| MH_BINDATLOAD -- ^ the object file's undefined references are bound by the dynamic linker when loaded.
| MH_PREBOUND -- ^ the file has its dynamic undefined references prebound.
| MH_SPLIT_SEGS -- ^ the file has its read-only and read-write segments split
| MH_LAZY_INIT
| MH_TWOLEVEL -- ^ the image is using two-level name space bindings
| MH_FORCE_FLAT -- ^ the executable is forcing all images to use flat name space bindings
| MH_NOMULTIDEFS -- ^ this umbrella guarantees no multiple defintions of symbols in its sub-images so the two-level namespace hints can always be used.
| MH_NOFIXPREBINDING -- ^ do not have dyld notify the prebinding agent about this executable
| MH_PREBINDABLE -- ^ the binary is not prebound but can have its prebinding redone. only used when MH_PREBOUND is not set.
| MH_ALLMODSBOUND -- ^ indicates that this binary binds to all two-level namespace modules of its dependent libraries. only used when MH_PREBINDABLE and MH_TWOLEVEL are both set.
| MH_SUBSECTIONS_VIA_SYMBOLS -- ^ safe to divide up the sections into sub-sections via symbols for dead code stripping
| MH_CANONICAL -- ^ the binary has been canonicalized via the unprebind operation
| MH_WEAK_DEFINES -- ^ the final linked image contains external weak symbols
| MH_BINDS_TO_WEAK -- ^ the final linked image uses weak symbols
| MH_ALLOW_STACK_EXECUTION -- ^ When this bit is set, all stacks in the task will be given stack execution privilege. Only used in MH_EXECUTE filetypes.
| MH_DEAD_STRIPPABLE_DYLIB
| MH_ROOT_SAFE -- ^ When this bit is set, the binary declares it is safe for use in processes with uid zero
| MH_SETUID_SAFE -- ^ When this bit is set, the binary declares it is safe for use in processes when issetugid() is true
| MH_NO_REEXPORTED_DYLIBS -- ^ When this bit is set on a dylib, the static linker does not need to examine dependent dylibs to see if any are re-exported
| MH_PIE -- ^ When this bit is set, the OS will load the main executable at a random address. Only used in MH_EXECUTE filetypes.
deriving (Ord, Show, Eq, Enum)
data LC_COMMAND
= LC_SEGMENT MachoSegment -- ^ segment of this file to be mapped
| LC_SYMTAB [MachoSymbol] B.ByteString -- ^ static link-edit symbol table and stab info
| LC_SYMSEG
| LC_THREAD [(Word32, [Word32])] -- ^ thread state information (list of (flavor, [long]) pairs)
  | LC_UNIXTHREAD [(Word32, [Word32])] -- ^ unix thread state information (includes a stack) (list of (flavor, [long]) pairs)
| LC_LOADFVMLIB
| LC_IDFVMLIB
| LC_IDENT
| LC_FVMFILE
| LC_PREPAGE
| LC_DYSYMTAB MachoDynamicSymbolTable -- ^ dynamic link-edit symbol table info
| LC_LOAD_DYLIB String Word32 Word32 Word32 -- ^ load a dynamically linked shared library (name, timestamp, current version, compatibility version)
| LC_ID_DYLIB String Word32 Word32 Word32 -- ^ dynamically linked shared lib ident (name, timestamp, current version, compatibility version)
| LC_LOAD_DYLINKER String -- ^ load a dynamic linker (name of dynamic linker)
| LC_ID_DYLINKER String -- ^ dynamic linker identification (name of dynamic linker)
| LC_PREBOUND_DYLIB String [Word8] -- ^ modules prebound for a dynamically linked shared library (name, list of module indices)
| LC_ROUTINES Word32 Word32 -- ^ image routines (virtual address of initialization routine, module index where it resides)
| LC_SUB_FRAMEWORK String -- ^ sub framework (name)
| LC_SUB_UMBRELLA String -- ^ sub umbrella (name)
| LC_SUB_CLIENT String -- ^ sub client (name)
| LC_SUB_LIBRARY String -- ^ sub library (name)
  | LC_TWOLEVEL_HINTS [(Word32, Word32)] -- ^ two-level namespace lookup hints (list of (subimage index, symbol table index) pairs)
| LC_PREBIND_CKSUM Word32 -- ^ prebind checksum (checksum)
| LC_LOAD_WEAK_DYLIB String Word32 Word32 Word32 -- ^ load a dynamically linked shared library that is allowed to be missing (symbols are weak imported) (name, timestamp, current version, compatibility version)
  | LC_SEGMENT_64 MachoSegment -- ^ 64-bit segment of this file to be mapped
| LC_ROUTINES_64 Word64 Word64 -- ^ 64-bit image routines (virtual address of initialization routine, module index where it resides)
| LC_UUID [Word8] -- ^ the uuid for an image or its corresponding dsym file (8 element list of bytes)
| LC_RPATH String -- ^ runpath additions (path)
  | LC_CODE_SIGNATURE Word32 Word32 -- ^ location of code signature
  | LC_SEGMENT_SPLIT_INFO Word32 Word32 -- ^ location of info to split segments
| LC_REEXPORT_DYLIB
| LC_LAZY_LOAD_DYLIB
| LC_ENCRYPTION_INFO Word32 B.ByteString
| LC_DYLD_INFO
| LC_DYLD_INFO_ONLY
deriving (Show, Eq)
data VM_PROT
= VM_PROT_READ -- ^ read permission
| VM_PROT_WRITE -- ^ write permission
| VM_PROT_EXECUTE -- ^ execute permission
deriving (Ord, Show, Eq, Enum)
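-- An assumed helper (not in the original module): decode a Mach-O protection
-- bitmask into the flags above, using the standard mach/vm_prot.h encoding
-- (VM_PROT_READ = 0x1, VM_PROT_WRITE = 0x2, VM_PROT_EXECUTE = 0x4).
vm_prot_flags :: Word32 -> [VM_PROT]
vm_prot_flags w = [ p | (i, p) <- [ (0, VM_PROT_READ)
                                  , (1, VM_PROT_WRITE)
                                  , (2, VM_PROT_EXECUTE) ]
                      , testBit w i ]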
data MachoSegment = MachoSegment
{ seg_segname :: String -- ^ segment name
, seg_vmaddr :: Word64 -- ^ virtual address where the segment is loaded
, seg_vmsize :: Word64 -- ^ size of segment at runtime
, seg_fileoff :: Word64 -- ^ file offset of the segment
, seg_filesize :: Word64 -- ^ size of segment in file
, seg_maxprot :: [VM_PROT] -- ^ maximum virtual memory protection
, seg_initprot :: [VM_PROT] -- ^ initial virtual memory protection
, seg_flags :: [SG_FLAG] -- ^ segment flags
, seg_sections :: [MachoSection] -- ^ sections owned by this segment
} deriving (Show, Eq)
data Macho = Macho
{ m_header :: MachoHeader -- ^ Header information.
, m_commands :: [LC_COMMAND] -- ^ List of load commands describing Mach-O contents.
} deriving (Show, Eq)
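-- An illustrative helper (assumed, not part of the original API): collect the
-- segments described by a Macho's load commands, covering both the 32-bit and
-- 64-bit segment commands.
machoSegments :: Macho -> [MachoSegment]
machoSegments m = concatMap segOf (m_commands m)
  where segOf (LC_SEGMENT    s) = [s]
        segOf (LC_SEGMENT_64 s) = [s]
        segOf _                 = []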
data SG_FLAG
= SG_HIGHVM -- ^ The file contents for this segment is for the high part of the VM space, the low part is zero filled (for stacks in core files).
| SG_NORELOC -- ^ This segment has nothing that was relocated in it and nothing relocated to it, that is it may be safely replaced without relocation.
deriving (Show, Eq)
data MachoSection = MachoSection
{ sec_sectname :: String -- ^ name of section
, sec_segname :: String -- ^ name of segment that should own this section
    , sec_addr       :: Word64        -- ^ virtual memory address for section
, sec_size :: Word64 -- ^ size of section
, sec_align :: Int -- ^ alignment required by section (literal form, not power of two, e.g. 8 not 3)
, sec_relocs :: [Relocation] -- ^ relocations for this section
, sec_type :: S_TYPE -- ^ type of section
, sec_user_attrs :: [S_USER_ATTR] -- ^ user attributes of section
    , sec_sys_attrs  :: [S_SYS_ATTR]  -- ^ system attributes of section
} deriving (Show, Eq)
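-- A small usage sketch (assumed helper, not in the original module): look up a
-- section by segment and section name, e.g. "__TEXT" / "__text".
findSection :: String -> String -> [MachoSection] -> Maybe MachoSection
findSection seg sect secs =
    case [ s | s <- secs, sec_segname s == seg, sec_sectname s == sect ] of
        (s:_) -> Just s
        []    -> Nothing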
data S_TYPE
= S_REGULAR -- ^ regular section
| S_ZEROFILL -- ^ zero fill on demand section
| S_CSTRING_LITERALS -- ^ section with only literal C strings
| S_4BYTE_LITERALS -- ^ section with only 4 byte literals
| S_8BYTE_LITERALS -- ^ section with only 8 byte literals
| S_LITERAL_POINTERS -- ^ section with only pointers to literals
| S_NON_LAZY_SYMBOL_POINTERS -- ^ section with only non-lazy symbol pointers
| S_LAZY_SYMBOL_POINTERS -- ^ section with only lazy symbol pointers
    | S_SYMBOL_STUBS               -- ^ section with only symbol stubs, byte size of stub in the reserved2 field
| S_MOD_INIT_FUNC_POINTERS -- ^ section with only function pointers for initialization
| S_MOD_TERM_FUNC_POINTERS -- ^ section with only function pointers for termination
| S_COALESCED -- ^ section contains symbols that are to be coalesced
| S_GB_ZEROFILL -- ^ zero fill on demand section (that can be larger than 4 gigabytes)
| S_INTERPOSING -- ^ section with only pairs of function pointers for interposing
| S_16BYTE_LITERALS -- ^ section with only 16 byte literals
| S_DTRACE_DOF -- ^ section contains DTrace Object Format
| S_LAZY_DYLIB_SYMBOL_POINTERS -- ^ section with only lazy symbol pointers to lazy loaded dylibs
deriving (Show, Eq)
sectionType flags = case flags .&. 0x000000ff of
0x00 -> S_REGULAR
0x01 -> S_ZEROFILL
0x02 -> S_CSTRING_LITERALS
0x03 -> S_4BYTE_LITERALS
0x04 -> S_8BYTE_LITERALS
0x05 -> S_LITERAL_POINTERS
0x06 -> S_NON_LAZY_SYMBOL_POINTERS
0x07 -> S_LAZY_SYMBOL_POINTERS
0x08 -> S_SYMBOL_STUBS
0x09 -> S_MOD_INIT_FUNC_POINTERS
0x0a -> S_MOD_TERM_FUNC_POINTERS
0x0b -> S_COALESCED
0x0c -> S_GB_ZEROFILL
0x0d -> S_INTERPOSING
0x0e -> S_16BYTE_LITERALS
0x0f -> S_DTRACE_DOF
0x10 -> S_LAZY_DYLIB_SYMBOL_POINTERS
data S_USER_ATTR
= S_ATTR_PURE_INSTRUCTIONS -- ^ section contains only true machine instructions
    | S_ATTR_NO_TOC               -- ^ section contains coalesced symbols that are not to be in a ranlib table of contents
| S_ATTR_STRIP_STATIC_SYMS -- ^ ok to strip static symbols in this section in files with the MH_DYLDLINK flag
| S_ATTR_NO_DEAD_STRIP -- ^ no dead stripping
| S_ATTR_LIVE_SUPPORT -- ^ blocks are live if they reference live blocks
| S_ATTR_SELF_MODIFYING_CODE -- ^ used with i386 code stubs written on by dyld
| S_ATTR_DEBUG -- ^ a debug section
deriving (Show, Eq)
data S_SYS_ATTR
    = S_ATTR_SOME_INSTRUCTIONS -- ^ section contains some machine instructions
| S_ATTR_EXT_RELOC -- ^ section has external relocation entries
| S_ATTR_LOC_RELOC -- ^ section has local relocation entries
deriving (Show, Eq)
data N_TYPE
= N_UNDF -- ^ undefined symbol, n_sect is 0
| N_ABS -- ^ absolute symbol, does not need relocation, n_sect is 0
| N_SECT -- ^ symbol is defined in section n_sect
| N_PBUD -- ^ symbol is undefined and the image is using a prebound value for the symbol, n_sect is 0
| N_INDR -- ^ symbol is defined to be the same as another symbol. n_value is a string table offset indicating the name of that symbol
| N_GSYM -- ^ stab global symbol: name,,0,type,0
| N_FNAME -- ^ stab procedure name (f77 kludge): name,,0,0,0
| N_FUN -- ^ stab procedure: name,,n_sect,linenumber,address
| N_STSYM -- ^ stab static symbol: name,,n_sect,type,address
| N_LCSYM -- ^ stab .lcomm symbol: name,,n_sect,type,address
| N_BNSYM -- ^ stab begin nsect sym: 0,,n_sect,0,address
| N_OPT -- ^ stab emitted with gcc2_compiled and in gcc source
| N_RSYM -- ^ stab register sym: name,,0,type,register
| N_SLINE -- ^ stab src line: 0,,n_sect,linenumber,address
| N_ENSYM -- ^ stab end nsect sym: 0,,n_sect,0,address
| N_SSYM -- ^ stab structure elt: name,,0,type,struct_offset
| N_SO -- ^ stab source file name: name,,n_sect,0,address
| N_OSO -- ^ stab object file name: name,,0,0,st_mtime
| N_LSYM -- ^ stab local sym: name,,0,type,offset
| N_BINCL -- ^ stab include file beginning: name,,0,0,sum
| N_SOL -- ^ stab #included file name: name,,n_sect,0,address
| N_PARAMS -- ^ stab compiler parameters: name,,0,0,0
| N_VERSION -- ^ stab compiler version: name,,0,0,0
| N_OLEVEL -- ^ stab compiler -O level: name,,0,0,0
| N_PSYM -- ^ stab parameter: name,,0,type,offset
| N_EINCL -- ^ stab include file end: name,,0,0,0
| N_ENTRY -- ^ stab alternate entry: name,,n_sect,linenumber,address
| N_LBRAC -- ^ stab left bracket: 0,,0,nesting level,address
| N_EXCL -- ^ stab deleted include file: name,,0,0,sum
| N_RBRAC -- ^ stab right bracket: 0,,0,nesting level,address
| N_BCOMM -- ^ stab begin common: name,,0,0,0
| N_ECOMM -- ^ stab end common: name,,n_sect,0,0
| N_ECOML -- ^ stab end common (local name): 0,,n_sect,0,address
| N_LENG -- ^ stab second stab entry with length information
| N_PC -- ^ stab global pascal symbol: name,,0,subtype,line
deriving (Show, Eq)
n_type 0x00 = N_UNDF
n_type 0x01 = N_ABS
n_type 0x07 = N_SECT
n_type 0x06 = N_PBUD
n_type 0x05 = N_INDR
n_type 0x20 = N_GSYM
n_type 0x22 = N_FNAME
n_type 0x24 = N_FUN
n_type 0x26 = N_STSYM
n_type 0x28 = N_LCSYM
n_type 0x2e = N_BNSYM
n_type 0x3c = N_OPT
n_type 0x40 = N_RSYM
n_type 0x44 = N_SLINE
n_type 0x4e = N_ENSYM
n_type 0x60 = N_SSYM
n_type 0x64 = N_SO
n_type 0x66 = N_OSO
n_type 0x80 = N_LSYM
n_type 0x82 = N_BINCL
n_type 0x84 = N_SOL
n_type 0x86 = N_PARAMS
n_type 0x88 = N_VERSION
n_type 0x8A = N_OLEVEL
n_type 0xa0 = N_PSYM
n_type 0xa2 = N_EINCL
n_type 0xa4 = N_ENTRY
n_type 0xc0 = N_LBRAC
n_type 0xc2 = N_EXCL
n_type 0xe0 = N_RBRAC
n_type 0xe2 = N_BCOMM
n_type 0xe4 = N_ECOMM
n_type 0xe8 = N_ECOML
n_type 0xfe = N_LENG
n_type 0x30 = N_PC
data REFERENCE_FLAG
= REFERENCE_FLAG_UNDEFINED_NON_LAZY -- ^ reference to an external non-lazy symbol
| REFERENCE_FLAG_UNDEFINED_LAZY -- ^ reference to an external lazy symbol
| REFERENCE_FLAG_DEFINED -- ^ symbol is defined in this module
| REFERENCE_FLAG_PRIVATE_DEFINED -- ^ symbol is defined in this module and visible only to modules within this shared library
| REFERENCE_FLAG_PRIVATE_UNDEFINED_NON_LAZY -- ^ reference to an external non-lazy symbol and visible only to modules within this shared library
| REFERENCE_FLAG_PRIVATE_UNDEFINED_LAZY -- ^ reference to an external lazy symbol and visible only to modules within this shared library
| REFERENCED_DYNAMICALLY -- ^ set for all symbols referenced by dynamic loader APIs
| N_WEAK_REF -- ^ indicates the symbol is a weak reference, set to 0 if definition cannot be found
| N_WEAK_DEF -- ^ indicates the symbol is a weak definition, will be overridden by a strong definition at link-time
| LIBRARY_ORDINAL Word16 -- ^ for two-level mach-o objects, specifies the index of the library in which this symbol is defined. zero specifies current image.
deriving (Show, Eq)
reference_flag_lo16 0 = REFERENCE_FLAG_UNDEFINED_NON_LAZY
reference_flag_lo16 1 = REFERENCE_FLAG_UNDEFINED_LAZY
reference_flag_lo16 2 = REFERENCE_FLAG_DEFINED
reference_flag_lo16 3 = REFERENCE_FLAG_PRIVATE_DEFINED
reference_flag_lo16 4 = REFERENCE_FLAG_PRIVATE_UNDEFINED_NON_LAZY
reference_flag_lo16 5 = REFERENCE_FLAG_PRIVATE_UNDEFINED_LAZY
reference_flag_hi16 word = reference_flag_hi16_ 16 word
where reference_flag_hi16_ 0 word = []
reference_flag_hi16_ 1 word | testBit word 0 = REFERENCED_DYNAMICALLY : reference_flag_hi16 0
reference_flag_hi16_ 3 word | testBit word 2 = N_WEAK_REF : reference_flag_hi16 1
reference_flag_hi16_ 4 word | testBit word 3 = N_WEAK_DEF : reference_flag_hi16 1
reference_flag_hi16_ n word = reference_flag_hi16_ (n-1) word
data MachoSymbol = MachoSymbol
{ sym_name :: String -- ^ symbol name
, sym_type :: N_TYPE -- ^ symbol type
, sym_pext :: Bool -- ^ true if limited global scope
, sym_ext :: Bool -- ^ true if external symbol
, sym_sect :: Word8 -- ^ section index where the symbol can be found
, sym_flags :: Either Word16 [REFERENCE_FLAG] -- ^ for stab entries, Left Word16 is the uninterpreted flags field, otherwise Right [REFERENCE_FLAG] are the symbol flags
    , sym_value :: Word64                          -- ^ symbol value, 32-bit symbol values are promoted to 64-bit for simplicity
} deriving (Show, Eq)
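-- Another assumed example (not part of the original API): the names of all
-- symbols marked external, e.g. taken from an LC_SYMTAB load command.
externalSymbolNames :: [MachoSymbol] -> [String]
externalSymbolNames syms = [ sym_name s | s <- syms, sym_ext s ]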
data DylibModule = DylibModule
{ dylib_module_name_offset :: Word32 -- ^ module name string table offset
, dylib_ext_def_sym :: (Word32, Word32) -- ^ (initial, count) pair of symbol table indices for externally defined symbols
, dylib_ref_sym :: (Word32, Word32) -- ^ (initial, count) pair of symbol table indices for referenced symbols
, dylib_local_sym :: (Word32, Word32) -- ^ (initial, count) pair of symbol table indices for local symbols
, dylib_ext_rel :: (Word32, Word32) -- ^ (initial, count) pair of symbol table indices for externally referenced symbols
, dylib_init :: (Word32, Word32) -- ^ (initial, count) pair of symbol table indices for the index of the module init section and the number of init pointers
, dylib_term :: (Word32, Word32) -- ^ (initial, count) pair of symbol table indices for the index of the module term section and the number of term pointers
, dylib_objc_module_info_addr :: Word64 -- ^ statically linked address of the start of the data for this module in the __module_info section in the __OBJC segment
, dylib_objc_module_info_size :: Word32 -- ^ number of bytes of data for this module that are used in the __module_info section in the __OBJC segment
} deriving (Show, Eq)
-- | Platform-specific relocation types.
data R_TYPE
= GENERIC_RELOC_VANILLA
| GENERIC_RELOC_PAIR
| GENERIC_RELOC_SECTDIFF
| GENERIC_RELOC_LOCAL_SECTDIFF
| GENERIC_RELOC_PB_LA_PTR
| ARM_RELOC_VANILLA
| ARM_RELOC_PAIR
| ARM_RELOC_SECTDIFF
| ARM_RELOC_LOCAL_SECTDIFF
| ARM_RELOC_PB_LA_PTR
| ARM_RELOC_BR24
| ARM_THUMB_RELOC_BR22
| X86_64_RELOC_BRANCH
| X86_64_RELOC_GOT_LOAD
| X86_64_RELOC_GOT
| X86_64_RELOC_SIGNED
| X86_64_RELOC_UNSIGNED
| X86_64_RELOC_SUBTRACTOR
| X86_64_RELOC_SIGNED_1
| X86_64_RELOC_SIGNED_2
| X86_64_RELOC_SIGNED_4
| PPC_RELOC_VANILLA
| PPC_RELOC_PAIR
| PPC_RELOC_BR14
| PPC_RELOC_BR24
| PPC_RELOC_HI16
| PPC_RELOC_LO16
| PPC_RELOC_HA16
| PPC_RELOC_LO14
| PPC_RELOC_SECTDIFF
| PPC_RELOC_LOCAL_SECTDIFF
| PPC_RELOC_PB_LA_PTR
| PPC_RELOC_HI16_SECTDIFF
| PPC_RELOC_LO16_SECTDIFF
| PPC_RELOC_HA16_SECTDIFF
| PPC_RELOC_JBSR
| PPC_RELOC_LO14_SECTDIFF
deriving (Ord, Show, Eq, Enum)
r_type 0 CPU_TYPE_X86 = GENERIC_RELOC_VANILLA
r_type 1 CPU_TYPE_X86 = GENERIC_RELOC_PAIR
r_type 2 CPU_TYPE_X86 = GENERIC_RELOC_SECTDIFF
r_type 3 CPU_TYPE_X86 = GENERIC_RELOC_LOCAL_SECTDIFF
r_type 4 CPU_TYPE_X86 = GENERIC_RELOC_PB_LA_PTR
r_type 0 CPU_TYPE_ARM = ARM_RELOC_VANILLA
r_type 1 CPU_TYPE_ARM = ARM_RELOC_PAIR
r_type 2 CPU_TYPE_ARM = ARM_RELOC_SECTDIFF
r_type 3 CPU_TYPE_ARM = ARM_RELOC_LOCAL_SECTDIFF
r_type 4 CPU_TYPE_ARM = ARM_RELOC_PB_LA_PTR
r_type 5 CPU_TYPE_ARM = ARM_RELOC_BR24
r_type 6 CPU_TYPE_ARM = ARM_THUMB_RELOC_BR22
r_type 0 CPU_TYPE_X86_64 = X86_64_RELOC_UNSIGNED
r_type 1 CPU_TYPE_X86_64 = X86_64_RELOC_SIGNED
r_type 2 CPU_TYPE_X86_64 = X86_64_RELOC_BRANCH
r_type 3 CPU_TYPE_X86_64 = X86_64_RELOC_GOT_LOAD
r_type 4 CPU_TYPE_X86_64 = X86_64_RELOC_GOT
r_type 5 CPU_TYPE_X86_64 = X86_64_RELOC_SUBTRACTOR
r_type 6 CPU_TYPE_X86_64 = X86_64_RELOC_SIGNED_1
r_type 7 CPU_TYPE_X86_64 = X86_64_RELOC_SIGNED_2
r_type 8 CPU_TYPE_X86_64 = X86_64_RELOC_SIGNED_4
r_type 0 CPU_TYPE_POWERPC = PPC_RELOC_VANILLA
r_type 1 CPU_TYPE_POWERPC = PPC_RELOC_PAIR
r_type 2 CPU_TYPE_POWERPC = PPC_RELOC_BR14
r_type 3 CPU_TYPE_POWERPC = PPC_RELOC_BR24
r_type 4 CPU_TYPE_POWERPC = PPC_RELOC_HI16
r_type 5 CPU_TYPE_POWERPC = PPC_RELOC_LO16
r_type 6 CPU_TYPE_POWERPC = PPC_RELOC_HA16
r_type 7 CPU_TYPE_POWERPC = PPC_RELOC_LO14
r_type 8 CPU_TYPE_POWERPC = PPC_RELOC_SECTDIFF
r_type 9 CPU_TYPE_POWERPC = PPC_RELOC_PB_LA_PTR
r_type 10 CPU_TYPE_POWERPC = PPC_RELOC_HI16_SECTDIFF
r_type 11 CPU_TYPE_POWERPC = PPC_RELOC_LO16_SECTDIFF
r_type 12 CPU_TYPE_POWERPC = PPC_RELOC_HA16_SECTDIFF
r_type 13 CPU_TYPE_POWERPC = PPC_RELOC_JBSR
r_type 14 CPU_TYPE_POWERPC = PPC_RELOC_LO14_SECTDIFF
r_type 15 CPU_TYPE_POWERPC = PPC_RELOC_LOCAL_SECTDIFF
r_type 0 CPU_TYPE_POWERPC64 = PPC_RELOC_VANILLA
r_type 1 CPU_TYPE_POWERPC64 = PPC_RELOC_PAIR
r_type 2 CPU_TYPE_POWERPC64 = PPC_RELOC_BR14
r_type 3 CPU_TYPE_POWERPC64 = PPC_RELOC_BR24
r_type 4 CPU_TYPE_POWERPC64 = PPC_RELOC_HI16
r_type 5 CPU_TYPE_POWERPC64 = PPC_RELOC_LO16
r_type 6 CPU_TYPE_POWERPC64 = PPC_RELOC_HA16
r_type 7 CPU_TYPE_POWERPC64 = PPC_RELOC_LO14
r_type 8 CPU_TYPE_POWERPC64 = PPC_RELOC_SECTDIFF
r_type 9 CPU_TYPE_POWERPC64 = PPC_RELOC_PB_LA_PTR
r_type 10 CPU_TYPE_POWERPC64 = PPC_RELOC_HI16_SECTDIFF
r_type 11 CPU_TYPE_POWERPC64 = PPC_RELOC_LO16_SECTDIFF
r_type 12 CPU_TYPE_POWERPC64 = PPC_RELOC_HA16_SECTDIFF
r_type 13 CPU_TYPE_POWERPC64 = PPC_RELOC_JBSR
r_type 14 CPU_TYPE_POWERPC64 = PPC_RELOC_LO14_SECTDIFF
r_type 15 CPU_TYPE_POWERPC64 = PPC_RELOC_LOCAL_SECTDIFF
data Relocation
= RelocationInfo
{ ri_address :: Int32 -- ^ offset from start of section to place to be relocated
, ri_symbolnum :: Word32 -- ^ index into symbol or section table
, ri_pcrel :: Bool -- ^ indicates if the item to be relocated is part of an instruction containing PC-relative addressing
, ri_length :: Word32 -- ^ length of item containing address to be relocated (literal form (4) instead of power of two (2))
, ri_extern :: Bool -- ^ indicates whether symbolnum is an index into the symbol table (True) or section table (False)
, ri_type :: R_TYPE -- ^ relocation type
}
| ScatteredRelocationInfo
{ rs_pcrel :: Bool -- ^ indicates if the item to be relocated is part of an instruction containing PC-relative addressing
, rs_length :: Word32 -- ^ length of item containing address to be relocated (literal form (4) instead of power of two (2))
, rs_type :: R_TYPE -- ^ relocation type
, rs_address :: Word32 -- ^ offset from start of section to place to be relocated
, rs_value :: Int32 -- ^ address of the relocatable expression for the item in the file that needs to be updated if the address is changed
}
deriving (Show, Eq)
data MachoDynamicSymbolTable = MachoDynamicSymbolTable
{ localSyms :: (Word32, Word32) -- ^ symbol table index and count for local symbols
, extDefSyms :: (Word32, Word32) -- ^ symbol table index and count for externally defined symbols
, undefSyms :: (Word32, Word32) -- ^ symbol table index and count for undefined symbols
, tocEntries :: [(Word32, Word32)] -- ^ list of symbol index and module index pairs
, modules :: [DylibModule] -- ^ modules
, extRefSyms :: [Word32] -- ^ list of external reference symbol indices
, indirectSyms :: [Word32] -- ^ list of indirect symbol indices
    , extRels       :: [Relocation]       -- ^ external relocations
, locRels :: [Relocation] -- ^ local relocations
} deriving (Show, Eq)
| copumpkin/macho | src/Data/Macho/Types.hs | bsd-3-clause | 33,246 | 0 | 12 | 8,379 | 4,894 | 2,953 | 1,941 | 619 | 17 |
{-
(c) The University of Glasgow 2006
(c) The GRASP/AQUA Project, Glasgow University, 1992-1998
\section[TcMonoType]{Typechecking user-specified @MonoTypes@}
-}
{-# LANGUAGE CPP, TupleSections, MultiWayIf #-}
module TcHsType (
-- Type signatures
kcHsSigType, tcClassSigType,
tcHsSigType, tcHsSigWcType,
funsSigCtxt, addSigCtxt,
tcHsClsInstType,
tcHsDeriv, tcHsVectInst,
tcHsTypeApp,
UserTypeCtxt(..),
tcImplicitTKBndrs, tcImplicitTKBndrsType, tcExplicitTKBndrs,
-- Type checking type and class decls
kcLookupKind, kcTyClTyVars, tcTyClTyVars,
tcHsConArgType, tcDataKindSig,
-- Kind-checking types
-- No kind generalisation, no checkValidType
tcWildCardBinders,
kcHsTyVarBndrs,
tcHsLiftedType, tcHsOpenType,
tcHsLiftedTypeNC, tcHsOpenTypeNC,
tcLHsType, tcCheckLHsType,
tcHsContext, tcLHsPredType, tcInferApps, tcInferArgs,
solveEqualities, -- useful re-export
kindGeneralize,
-- Sort-checking kinds
tcLHsKind,
-- Pattern type signatures
tcHsPatSigType, tcPatSig, funAppCtxt
) where
#include "HsVersions.h"
import HsSyn
import TcRnMonad
import TcEvidence
import TcEnv
import TcMType
import TcValidity
import TcUnify
import TcIface
import TcSimplify ( solveEqualities )
import TcType
import Inst ( tcInstBinders, tcInstBindersX )
import Type
import Kind
import RdrName( lookupLocalRdrOcc )
import Var
import VarSet
import TyCon
import ConLike
import DataCon
import TysPrim ( tYPE )
import Class
import Name
import NameEnv
import NameSet
import VarEnv
import TysWiredIn
import BasicTypes
import SrcLoc
import Constants ( mAX_CTUPLE_SIZE )
import ErrUtils( MsgDoc )
import Unique
import Util
import UniqSupply
import Outputable
import FastString
import PrelNames hiding ( wildCardName )
import qualified GHC.LanguageExtensions as LangExt
import Maybes
import Data.List ( partition )
import Control.Monad
{-
----------------------------
General notes
----------------------------
Unlike with expressions, type-checking types both does some checking and
desugars at the same time. This is necessary because we often want to perform
equality checks on the types right away, and it would be incredibly painful
to do this on un-desugared types. Luckily, desugared types are close enough
to HsTypes to make the error messages sane.
During type-checking, we perform as little validity checking as possible.
This is because some type-checking is done in a mutually-recursive knot, and
if we look too closely at the tycons, we'll loop. This is why we always must
use mkNakedTyConApp and mkNakedAppTys, etc., which never look at a tycon.
The mkNaked... functions don't uphold Type invariants, but zonkTcTypeToType
will repair this for us. Note that zonkTcType *is* safe within a knot, and
can be done repeatedly with no ill effect: it just squeezes out metavariables.
Generally, after type-checking, you will want to do validity checking, say
with TcValidity.checkValidType.
Validity checking
~~~~~~~~~~~~~~~~~
Some of the validity check could in principle be done by the kind checker,
but not all:
- During desugaring, we normalise by expanding type synonyms. Only
after this step can we check things like type-synonym saturation
e.g. type T k = k Int
type S a = a
Then (T S) is ok, because T is saturated; (T S) expands to (S Int);
        and then S is saturated. This is a GHC extension (a small sketch follows this list).
- Similarly, also a GHC extension, we look through synonyms before complaining
about the form of a class or instance declaration
- Ambiguity checks involve functional dependencies, and it's easier to wait
until knots have been resolved before poking into them
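  A small standalone sketch of the synonym-saturation point above (illustration
  only; it assumes the LiberalTypeSynonyms extension):

      {-# LANGUAGE LiberalTypeSynonyms #-}
      type T k = k Int
      type S a = a

      f :: T S -> Bool    -- accepted: T S expands to S Int, and then to Int
      f = (> 0)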
Also, in a mutually recursive group of types, we can't look at the TyCon until we've
finished building the loop. So to keep things simple, we postpone most validity
checking until step (3).
Knot tying
~~~~~~~~~~
During step (1) we might fault in a TyCon defined in another module, and it might
(via a loop) refer back to a TyCon defined in this module. So when we tie a big
knot around type declarations with ARecThing, so that the fault-in code can get
the TyCon being defined.
%************************************************************************
%* *
Check types AND do validity checking
* *
************************************************************************
-}
funsSigCtxt :: [Located Name] -> UserTypeCtxt
-- Returns FunSigCtxt, with no redundant-context-reporting,
-- from a list of located names
funsSigCtxt (L _ name1 : _) = FunSigCtxt name1 False
funsSigCtxt [] = panic "funSigCtxt"
addSigCtxt :: UserTypeCtxt -> LHsType Name -> TcM a -> TcM a
addSigCtxt ctxt sig_ty thing_inside
= setSrcSpan (getLoc sig_ty) $
addErrCtxt (pprSigCtxt ctxt empty (ppr sig_ty)) $
thing_inside
tcHsSigWcType :: UserTypeCtxt -> LHsSigWcType Name -> TcM Type
-- This one is used when we have a LHsSigWcType, but in
-- a place where wildcards aren't allowed. The renamer has
-- already checked this, so we can simply ignore it.
tcHsSigWcType ctxt sig_ty = tcHsSigType ctxt (dropWildCards sig_ty)
kcHsSigType :: [Located Name] -> LHsSigType Name -> TcM ()
kcHsSigType names (HsIB { hsib_body = hs_ty
, hsib_vars = sig_vars })
= addSigCtxt (funsSigCtxt names) hs_ty $
discardResult $
tcImplicitTKBndrsType sig_vars $
tc_lhs_type typeLevelMode hs_ty liftedTypeKind
tcClassSigType :: [Located Name] -> LHsSigType Name -> TcM Type
-- Does not do validity checking; this must be done outside
-- the recursive class declaration "knot"
tcClassSigType names sig_ty
= addSigCtxt (funsSigCtxt names) (hsSigType sig_ty) $
do { ty <- tc_hs_sig_type sig_ty liftedTypeKind
; kindGeneralizeType ty }
tcHsSigType :: UserTypeCtxt -> LHsSigType Name -> TcM Type
-- Does validity checking
tcHsSigType ctxt sig_ty
= addSigCtxt ctxt (hsSigType sig_ty) $
do { kind <- case expectedKindInCtxt ctxt of
AnythingKind -> newMetaKindVar
TheKind k -> return k
OpenKind -> do { rr <- newFlexiTyVarTy runtimeRepTy
; return $ tYPE rr }
-- The kind is checked by checkValidType, and isn't necessarily
-- of kind * in a Template Haskell quote eg [t| Maybe |]
; ty <- tc_hs_sig_type sig_ty kind
-- Generalise here: see Note [Kind generalisation]
; do_kind_gen <- decideKindGeneralisationPlan ty
; ty <- if do_kind_gen
then kindGeneralizeType ty
else zonkTcType ty
; checkValidType ctxt ty
; return ty }
tc_hs_sig_type :: LHsSigType Name -> Kind -> TcM Type
-- Does not do validity checking or zonking
tc_hs_sig_type (HsIB { hsib_body = hs_ty
, hsib_vars = sig_vars }) kind
= do { (tkvs, ty) <- solveEqualities $
tcImplicitTKBndrsType sig_vars $
tc_lhs_type typeLevelMode hs_ty kind
; return (mkSpecForAllTys tkvs ty) }
-----------------
tcHsDeriv :: LHsSigType Name -> TcM ([TyVar], Class, [Type], Kind)
-- Like tcHsSigType, but for the ...deriving( C t1 ty2 ) clause
-- Returns the C, [ty1, ty2], and the kind of C's *next* argument
-- E.g. class C (a::*) (b::k->k)
-- data T a b = ... deriving( C Int )
-- returns ([k], C, [k, Int], k->k)
-- Also checks that (C ty1 ty2 arg) :: Constraint
-- if arg has a suitable kind
tcHsDeriv hs_ty
= do { arg_kind <- newMetaKindVar
-- always safe to kind-generalize, because there
-- can be no covars in an outer scope
; ty <- checkNoErrs $
-- avoid redundant error report with "illegal deriving", below
tc_hs_sig_type hs_ty (mkFunTy arg_kind constraintKind)
; ty <- kindGeneralizeType ty -- also zonks
; arg_kind <- zonkTcType arg_kind
; let (tvs, pred) = splitForAllTys ty
; case getClassPredTys_maybe pred of
Just (cls, tys) -> return (tvs, cls, tys, arg_kind)
Nothing -> failWithTc (text "Illegal deriving item" <+> quotes (ppr hs_ty)) }
tcHsClsInstType :: UserTypeCtxt -- InstDeclCtxt or SpecInstCtxt
-> LHsSigType Name
-> TcM ([TyVar], ThetaType, Class, [Type])
-- Like tcHsSigType, but for a class instance declaration
tcHsClsInstType user_ctxt hs_inst_ty
= setSrcSpan (getLoc (hsSigType hs_inst_ty)) $
do { inst_ty <- tc_hs_sig_type hs_inst_ty constraintKind
; inst_ty <- kindGeneralizeType inst_ty
; checkValidInstance user_ctxt hs_inst_ty inst_ty }
-- Used for 'VECTORISE [SCALAR] instance' declarations
tcHsVectInst :: LHsSigType Name -> TcM (Class, [Type])
tcHsVectInst ty
| Just (L _ cls_name, tys) <- hsTyGetAppHead_maybe (hsSigType ty)
-- Ignoring the binders looks pretty dodgy to me
= do { (cls, cls_kind) <- tcClass cls_name
; (applied_class, _res_kind)
<- tcInferApps typeLevelMode cls_name (mkClassPred cls []) cls_kind tys
; case tcSplitTyConApp_maybe applied_class of
Just (_tc, args) -> ASSERT( _tc == classTyCon cls )
return (cls, args)
_ -> failWithTc (text "Too many arguments passed to" <+> ppr cls_name) }
| otherwise
= failWithTc $ text "Malformed instance type"
----------------------------------------------
-- | Type-check a visible type application
tcHsTypeApp :: LHsWcType Name -> Kind -> TcM Type
tcHsTypeApp wc_ty kind
| HsWC { hswc_wcs = sig_wcs, hswc_ctx = extra, hswc_body = hs_ty } <- wc_ty
= ASSERT( isNothing extra ) -- handled in RnTypes.rnExtraConstraintWildCard
tcWildCardBinders sig_wcs $ \ _ ->
do { ty <- tcCheckLHsType hs_ty kind
; ty <- zonkTcType ty
; checkValidType TypeAppCtxt ty
; return ty }
-- NB: we don't call emitWildcardHoleConstraints here, because
-- we want any holes in visible type applications to be used
-- without fuss. No errors, warnings, extensions, etc.
{-
These functions are used during knot-tying in
type and class declarations, when we have to
separate kind-checking, desugaring, and validity checking
************************************************************************
* *
The main kind checker: no validity checks here
%* *
%************************************************************************
First a couple of simple wrappers for kcHsType
-}
tcHsConArgType :: NewOrData -> LHsType Name -> TcM Type
-- Permit a bang, but discard it
tcHsConArgType NewType bty = tcHsLiftedType (getBangType bty)
-- Newtypes can't have bangs, but we don't check that
  -- until checkValidDataCon, so we do not want to crash here
tcHsConArgType DataType bty = tcHsOpenType (getBangType bty)
-- Can't allow an unlifted type for newtypes, because we're effectively
-- going to remove the constructor while coercing it to a lifted type.
-- And newtypes can't be bang'd
---------------------------
tcHsOpenType, tcHsLiftedType,
tcHsOpenTypeNC, tcHsLiftedTypeNC :: LHsType Name -> TcM TcType
-- Used for type signatures
-- Do not do validity checking
tcHsOpenType ty = addTypeCtxt ty $ tcHsOpenTypeNC ty
tcHsLiftedType ty = addTypeCtxt ty $ tcHsLiftedTypeNC ty
tcHsOpenTypeNC ty = do { ek <- ekOpen
; tc_lhs_type typeLevelMode ty ek }
tcHsLiftedTypeNC ty = tc_lhs_type typeLevelMode ty liftedTypeKind
-- Like tcHsType, but takes an expected kind
tcCheckLHsType :: LHsType Name -> Kind -> TcM Type
tcCheckLHsType hs_ty exp_kind
= addTypeCtxt hs_ty $
tc_lhs_type typeLevelMode hs_ty exp_kind
tcLHsType :: LHsType Name -> TcM (TcType, TcKind)
-- Called from outside: set the context
tcLHsType ty = addTypeCtxt ty (tc_infer_lhs_type typeLevelMode ty)
---------------------------
-- | Should we generalise the kind of this type signature?
-- We *should* generalise if the type mentions no scoped type variables
-- or if NoMonoLocalBinds is set. Otherwise, nope.
decideKindGeneralisationPlan :: Type -> TcM Bool
decideKindGeneralisationPlan ty
= do { mono_locals <- xoptM LangExt.MonoLocalBinds
; in_scope <- getInLocalScope
; let fvs = tyCoVarsOfTypeList ty
should_gen = not mono_locals || all (not . in_scope . getName) fvs
; traceTc "decideKindGeneralisationPlan"
(vcat [ text "type:" <+> ppr ty
, text "ftvs:" <+> ppr fvs
, text "should gen?" <+> ppr should_gen ])
; return should_gen }
{-
************************************************************************
* *
Type-checking modes
* *
************************************************************************
The kind-checker is parameterised by a TcTyMode, which contains some
information about where we're checking a type.
The renamer issues errors about what it can. All errors issued here must
concern things that the renamer can't handle.
-}
-- | Info about the context in which we're checking a type. Currently,
-- differentiates only between types and kinds, but this will likely
-- grow, at least to include the distinction between patterns and
-- not-patterns.
newtype TcTyMode
  = TcTyMode { mode_level :: TypeOrKind  -- TypeLevel <=> type, KindLevel <=> kind
}
typeLevelMode :: TcTyMode
typeLevelMode = TcTyMode { mode_level = TypeLevel }
kindLevelMode :: TcTyMode
kindLevelMode = TcTyMode { mode_level = KindLevel }
-- switch to kind level
kindLevel :: TcTyMode -> TcTyMode
kindLevel mode = mode { mode_level = KindLevel }
instance Outputable TcTyMode where
ppr = ppr . mode_level
{-
Note [Bidirectional type checking]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
In expressions, whenever we see a polymorphic identifier, say `id`, we are
free to instantiate it with metavariables, knowing that we can always
re-generalize with type-lambdas when necessary. For example:
rank2 :: (forall a. a -> a) -> ()
x = rank2 id
When checking the body of `x`, we can instantiate `id` with a metavariable.
Then, when we're checking the application of `rank2`, we notice that we really
need a polymorphic `id`, and then re-generalize over the unconstrained
metavariable.
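  A compilable version of that fragment, as a sketch only (the body of rank2
  is not given above, so the definition below is an assumption), with RankNTypes:

     {-# LANGUAGE RankNTypes #-}
     rank2 :: (forall a. a -> a) -> ()
     rank2 _ = ()

     x :: ()
     x = rank2 id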
In types, however, we're not so lucky, because *we cannot re-generalize*!
There is no lambda. So, we must be careful only to instantiate at the last
possible moment, when we're sure we're never going to want the lost polymorphism
again. This is done in calls to tcInstBinders and tcInstBindersX.
To implement this behavior, we use bidirectional type checking, where we
explicitly think about whether we know the kind of the type we're checking
or not. Note that there is a difference between not knowing a kind and
knowing a metavariable kind: the metavariables are TauTvs, and cannot become
forall-quantified kinds. Previously (before dependent types), there were
no higher-rank kinds, and so we could instantiate early and be sure that
no types would have polymorphic kinds, and so we could always assume that
the kind of a type was a fresh metavariable. Not so anymore, thus the
need for two algorithms.
For HsType forms that can never be kind-polymorphic, we implement only the
"down" direction, where we safely assume a metavariable kind. For HsType forms
that *can* be kind-polymorphic, we implement just the "up" (functions with
"infer" in their name) version, as we gain nothing by also implementing the
"down" version.
Note [Future-proofing the type checker]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
As discussed in Note [Bidirectional type checking], each HsType form is
handled in *either* tc_infer_hs_type *or* tc_hs_type. These functions
are mutually recursive, so that either one can work for any type former.
But, we want to make sure that our pattern-matches are complete. So,
we have a bunch of repetitive code just so that we get warnings if we're
missing any patterns.
-}
-- | Check and desugar a type, returning the core type and its
-- possibly-polymorphic kind. Much like 'tcInferRho' at the expression
-- level.
tc_infer_lhs_type :: TcTyMode -> LHsType Name -> TcM (TcType, TcKind)
tc_infer_lhs_type mode (L span ty)
= setSrcSpan span $
do { traceTc "tc_infer_lhs_type:" (ppr ty)
; tc_infer_hs_type mode ty }
-- | Infer the kind of a type and desugar. This is the "up" type-checker,
-- as described in Note [Bidirectional type checking]
tc_infer_hs_type :: TcTyMode -> HsType Name -> TcM (TcType, TcKind)
tc_infer_hs_type mode (HsTyVar (L _ tv)) = tcTyVar mode tv
tc_infer_hs_type mode (HsAppTy ty1 ty2)
= do { let (fun_ty, arg_tys) = splitHsAppTys ty1 [ty2]
; (fun_ty', fun_kind) <- tc_infer_lhs_type mode fun_ty
; fun_kind' <- zonkTcType fun_kind
; tcInferApps mode fun_ty fun_ty' fun_kind' arg_tys }
tc_infer_hs_type mode (HsParTy t) = tc_infer_lhs_type mode t
tc_infer_hs_type mode (HsOpTy lhs (L _ op) rhs)
| not (op `hasKey` funTyConKey)
= do { (op', op_kind) <- tcTyVar mode op
; op_kind' <- zonkTcType op_kind
; tcInferApps mode op op' op_kind' [lhs, rhs] }
tc_infer_hs_type mode (HsKindSig ty sig)
= do { sig' <- tc_lhs_kind (kindLevel mode) sig
; ty' <- tc_lhs_type mode ty sig'
; return (ty', sig') }
tc_infer_hs_type mode (HsDocTy ty _) = tc_infer_lhs_type mode ty
tc_infer_hs_type _ (HsCoreTy ty) = return (ty, typeKind ty)
tc_infer_hs_type mode other_ty
= do { kv <- newMetaKindVar
; ty' <- tc_hs_type mode other_ty kv
; return (ty', kv) }
tc_lhs_type :: TcTyMode -> LHsType Name -> TcKind -> TcM TcType
tc_lhs_type mode (L span ty) exp_kind
= setSrcSpan span $
do { traceTc "tc_lhs_type:" (ppr ty $$ ppr exp_kind)
; tc_hs_type mode ty exp_kind }
------------------------------------------
tc_fun_type :: TcTyMode -> LHsType Name -> LHsType Name -> TcKind -> TcM TcType
tc_fun_type mode ty1 ty2 exp_kind = case mode_level mode of
TypeLevel ->
do { arg_rr <- newFlexiTyVarTy runtimeRepTy
; res_rr <- newFlexiTyVarTy runtimeRepTy
; ty1' <- tc_lhs_type mode ty1 (tYPE arg_rr)
; ty2' <- tc_lhs_type mode ty2 (tYPE res_rr)
; checkExpectedKind (mkFunTy ty1' ty2') liftedTypeKind exp_kind }
KindLevel -> -- no representation polymorphism in kinds. yet.
do { ty1' <- tc_lhs_type mode ty1 liftedTypeKind
; ty2' <- tc_lhs_type mode ty2 liftedTypeKind
; checkExpectedKind (mkFunTy ty1' ty2') liftedTypeKind exp_kind }
------------------------------------------
-- See also Note [Bidirectional type checking]
tc_hs_type :: TcTyMode -> HsType Name -> TcKind -> TcM TcType
tc_hs_type mode (HsParTy ty) exp_kind = tc_lhs_type mode ty exp_kind
tc_hs_type mode (HsDocTy ty _) exp_kind = tc_lhs_type mode ty exp_kind
tc_hs_type _ ty@(HsBangTy {}) _
-- While top-level bangs at this point are eliminated (eg !(Maybe Int)),
-- other kinds of bangs are not (eg ((!Maybe) Int)). These kinds of
-- bangs are invalid, so fail. (#7210)
= failWithTc (text "Unexpected strictness annotation:" <+> ppr ty)
tc_hs_type _ ty@(HsRecTy _) _
-- Record types (which only show up temporarily in constructor
-- signatures) should have been removed by now
= failWithTc (text "Record syntax is illegal here:" <+> ppr ty)
-- This should never happen; type splices are expanded by the renamer
tc_hs_type _ ty@(HsSpliceTy {}) _exp_kind
= failWithTc (text "Unexpected type splice:" <+> ppr ty)
---------- Functions and applications
tc_hs_type mode (HsFunTy ty1 ty2) exp_kind
= tc_fun_type mode ty1 ty2 exp_kind
tc_hs_type mode (HsOpTy ty1 (L _ op) ty2) exp_kind
| op `hasKey` funTyConKey
= tc_fun_type mode ty1 ty2 exp_kind
--------- Foralls
tc_hs_type mode (HsForAllTy { hst_bndrs = hs_tvs, hst_body = ty }) exp_kind
= fmap fst $
tcExplicitTKBndrs hs_tvs $ \ tvs' ->
-- Do not kind-generalise here! See Note [Kind generalisation]
-- Why exp_kind? See Note [Body kind of HsForAllTy]
do { ty' <- tc_lhs_type mode ty exp_kind
; let bound_vars = allBoundVariables ty'
bndrs = mkNamedBinders Specified tvs'
; return (mkForAllTys bndrs ty', bound_vars) }
tc_hs_type mode (HsQualTy { hst_ctxt = ctxt, hst_body = ty }) exp_kind
| null (unLoc ctxt)
= tc_lhs_type mode ty exp_kind
| otherwise
= do { ctxt' <- tc_hs_context mode ctxt
-- See Note [Body kind of a HsQualTy]
; ty' <- if isConstraintKind exp_kind
then tc_lhs_type mode ty constraintKind
else do { ek <- ekOpen -- The body kind (result of the
-- function) can be * or #, hence ekOpen
; ty <- tc_lhs_type mode ty ek
; checkExpectedKind ty liftedTypeKind exp_kind }
; return (mkPhiTy ctxt' ty') }
--------- Lists, arrays, and tuples
tc_hs_type mode (HsListTy elt_ty) exp_kind
= do { tau_ty <- tc_lhs_type mode elt_ty liftedTypeKind
; checkWiredInTyCon listTyCon
; checkExpectedKind (mkListTy tau_ty) liftedTypeKind exp_kind }
tc_hs_type mode (HsPArrTy elt_ty) exp_kind
= do { MASSERT( isTypeLevel (mode_level mode) )
; tau_ty <- tc_lhs_type mode elt_ty liftedTypeKind
; checkWiredInTyCon parrTyCon
; checkExpectedKind (mkPArrTy tau_ty) liftedTypeKind exp_kind }
-- See Note [Distinguishing tuple kinds] in HsTypes
-- See Note [Inferring tuple kinds]
tc_hs_type mode (HsTupleTy HsBoxedOrConstraintTuple hs_tys) exp_kind
-- (NB: not zonking before looking at exp_k, to avoid left-right bias)
| Just tup_sort <- tupKindSort_maybe exp_kind
= traceTc "tc_hs_type tuple" (ppr hs_tys) >>
tc_tuple mode tup_sort hs_tys exp_kind
| otherwise
= do { traceTc "tc_hs_type tuple 2" (ppr hs_tys)
; (tys, kinds) <- mapAndUnzipM (tc_infer_lhs_type mode) hs_tys
; kinds <- mapM zonkTcType kinds
-- Infer each arg type separately, because errors can be
-- confusing if we give them a shared kind. Eg Trac #7410
-- (Either Int, Int), we do not want to get an error saying
-- "the second argument of a tuple should have kind *->*"
; let (arg_kind, tup_sort)
= case [ (k,s) | k <- kinds
, Just s <- [tupKindSort_maybe k] ] of
((k,s) : _) -> (k,s)
[] -> (liftedTypeKind, BoxedTuple)
-- In the [] case, it's not clear what the kind is, so guess *
; tys' <- sequence [ setSrcSpan loc $
checkExpectedKind ty kind arg_kind
| ((L loc _),ty,kind) <- zip3 hs_tys tys kinds ]
; finish_tuple tup_sort tys' (map (const arg_kind) tys') exp_kind }
tc_hs_type mode (HsTupleTy hs_tup_sort tys) exp_kind
= tc_tuple mode tup_sort tys exp_kind
where
tup_sort = case hs_tup_sort of -- Fourth case dealt with above
HsUnboxedTuple -> UnboxedTuple
HsBoxedTuple -> BoxedTuple
HsConstraintTuple -> ConstraintTuple
_ -> panic "tc_hs_type HsTupleTy"
--------- Promoted lists and tuples
tc_hs_type mode (HsExplicitListTy _k tys) exp_kind
= do { tks <- mapM (tc_infer_lhs_type mode) tys
; (taus', kind) <- unifyKinds tks
; let ty = (foldr (mk_cons kind) (mk_nil kind) taus')
; checkExpectedKind ty (mkListTy kind) exp_kind }
where
mk_cons k a b = mkTyConApp (promoteDataCon consDataCon) [k, a, b]
mk_nil k = mkTyConApp (promoteDataCon nilDataCon) [k]
tc_hs_type mode (HsExplicitTupleTy _ tys) exp_kind
-- using newMetaKindVar means that we force instantiations of any polykinded
-- types. At first, I just used tc_infer_lhs_type, but that led to #11255.
= do { ks <- replicateM arity newMetaKindVar
; taus <- zipWithM (tc_lhs_type mode) tys ks
; let kind_con = tupleTyCon Boxed arity
ty_con = promotedTupleDataCon Boxed arity
tup_k = mkTyConApp kind_con ks
; checkExpectedKind (mkTyConApp ty_con (ks ++ taus)) tup_k exp_kind }
where
arity = length tys
--------- Constraint types
tc_hs_type mode (HsIParamTy n ty) exp_kind
= do { MASSERT( isTypeLevel (mode_level mode) )
; ty' <- tc_lhs_type mode ty liftedTypeKind
; let n' = mkStrLitTy $ hsIPNameFS n
; ipClass <- tcLookupClass ipClassName
; checkExpectedKind (mkClassPred ipClass [n',ty'])
constraintKind exp_kind }
tc_hs_type mode (HsEqTy ty1 ty2) exp_kind
= do { (ty1', kind1) <- tc_infer_lhs_type mode ty1
; (ty2', kind2) <- tc_infer_lhs_type mode ty2
; ty2'' <- checkExpectedKind ty2' kind2 kind1
; eq_tc <- tcLookupTyCon eqTyConName
; let ty' = mkNakedTyConApp eq_tc [kind1, ty1', ty2'']
; checkExpectedKind ty' constraintKind exp_kind }
--------- Literals
tc_hs_type _ (HsTyLit (HsNumTy _ n)) exp_kind
= do { checkWiredInTyCon typeNatKindCon
; checkExpectedKind (mkNumLitTy n) typeNatKind exp_kind }
tc_hs_type _ (HsTyLit (HsStrTy _ s)) exp_kind
= do { checkWiredInTyCon typeSymbolKindCon
; checkExpectedKind (mkStrLitTy s) typeSymbolKind exp_kind }
--------- Potentially kind-polymorphic types: call the "up" checker
-- See Note [Future-proofing the type checker]
tc_hs_type mode ty@(HsTyVar {}) ek = tc_infer_hs_type_ek mode ty ek
tc_hs_type mode ty@(HsAppTy {}) ek = tc_infer_hs_type_ek mode ty ek
tc_hs_type mode ty@(HsOpTy {}) ek = tc_infer_hs_type_ek mode ty ek
tc_hs_type mode ty@(HsKindSig {}) ek = tc_infer_hs_type_ek mode ty ek
tc_hs_type mode ty@(HsCoreTy {}) ek = tc_infer_hs_type_ek mode ty ek
tc_hs_type _ (HsWildCardTy wc) exp_kind
= do { let name = wildCardName wc
; tv <- tcLookupTyVar name
; checkExpectedKind (mkTyVarTy tv) (tyVarKind tv) exp_kind }
-- disposed of by renamer
tc_hs_type _ ty@(HsAppsTy {}) _
  = pprPanic "tc_hs_type HsAppsTy" (ppr ty)
---------------------------
-- | Call 'tc_infer_hs_type' and check its result against an expected kind.
tc_infer_hs_type_ek :: TcTyMode -> HsType Name -> TcKind -> TcM TcType
tc_infer_hs_type_ek mode ty ek
= do { (ty', k) <- tc_infer_hs_type mode ty
; checkExpectedKind ty' k ek }
---------------------------
tupKindSort_maybe :: TcKind -> Maybe TupleSort
tupKindSort_maybe k
| Just (k', _) <- splitCastTy_maybe k = tupKindSort_maybe k'
| Just k' <- coreView k = tupKindSort_maybe k'
| isConstraintKind k = Just ConstraintTuple
| isLiftedTypeKind k = Just BoxedTuple
| otherwise = Nothing
tc_tuple :: TcTyMode -> TupleSort -> [LHsType Name] -> TcKind -> TcM TcType
tc_tuple mode tup_sort tys exp_kind
= do { arg_kinds <- case tup_sort of
BoxedTuple -> return (nOfThem arity liftedTypeKind)
UnboxedTuple -> do { rrs <- newFlexiTyVarTys arity runtimeRepTy
; return $ map tYPE rrs }
ConstraintTuple -> return (nOfThem arity constraintKind)
; tau_tys <- zipWithM (tc_lhs_type mode) tys arg_kinds
; finish_tuple tup_sort tau_tys arg_kinds exp_kind }
where
arity = length tys
finish_tuple :: TupleSort
-> [TcType] -- ^ argument types
-> [TcKind] -- ^ of these kinds
-> TcKind -- ^ expected kind of the whole tuple
-> TcM TcType
finish_tuple tup_sort tau_tys tau_kinds exp_kind
= do { traceTc "finish_tuple" (ppr res_kind $$ ppr tau_kinds $$ ppr exp_kind)
; let arg_tys = case tup_sort of
-- See also Note [Unboxed tuple RuntimeRep vars] in TyCon
UnboxedTuple -> map (getRuntimeRepFromKind "finish_tuple") tau_kinds
++ tau_tys
BoxedTuple -> tau_tys
ConstraintTuple -> tau_tys
; tycon <- case tup_sort of
ConstraintTuple
| arity > mAX_CTUPLE_SIZE
-> failWith (bigConstraintTuple arity)
| otherwise -> tcLookupTyCon (cTupleTyConName arity)
BoxedTuple -> do { let tc = tupleTyCon Boxed arity
; checkWiredInTyCon tc
; return tc }
UnboxedTuple -> return (tupleTyCon Unboxed arity)
; checkExpectedKind (mkTyConApp tycon arg_tys) res_kind exp_kind }
where
arity = length tau_tys
res_kind = case tup_sort of
UnboxedTuple -> tYPE unboxedTupleRepDataConTy
BoxedTuple -> liftedTypeKind
ConstraintTuple -> constraintKind
bigConstraintTuple :: Arity -> MsgDoc
bigConstraintTuple arity
= hang (text "Constraint tuple arity too large:" <+> int arity
<+> parens (text "max arity =" <+> int mAX_CTUPLE_SIZE))
2 (text "Instead, use a nested tuple")
---------------------------
-- | Apply a type of a given kind to a list of arguments. This instantiates
-- invisible parameters as necessary. However, it does *not* necessarily
-- apply all the arguments, if the kind runs out of binders.
-- This takes an optional @VarEnv Kind@ which maps kind variables to kinds.
-- These kinds should be used to instantiate invisible kind variables;
-- they come from an enclosing class for an associated type/data family.
-- This version will instantiate all invisible arguments left over after
-- the visible ones.
tcInferArgs :: Outputable fun
=> fun -- ^ the function
-> [TyBinder] -- ^ function kind's binders
-> Maybe (VarEnv Kind) -- ^ possibly, kind info (see above)
-> [LHsType Name] -- ^ args
-> TcM (TCvSubst, [TyBinder], [TcType], [LHsType Name], Int)
-- ^ (instantiating subst, un-insted leftover binders,
-- typechecked args, untypechecked args, n)
tcInferArgs fun binders mb_kind_info args
= do { (subst, leftover_binders, args', leftovers, n)
<- tc_infer_args typeLevelMode fun binders mb_kind_info args 1
-- now, we need to instantiate any remaining invisible arguments
; let (invis_bndrs, other_binders) = span isInvisibleBinder leftover_binders
; (subst', invis_args)
<- tcInstBindersX subst mb_kind_info invis_bndrs
; return ( subst'
, other_binders
, args' `chkAppend` invis_args
, leftovers, n ) }
-- | See comments for 'tcInferArgs'. But this version does not instantiate
-- any remaining invisible arguments.
tc_infer_args :: Outputable fun
=> TcTyMode
-> fun -- ^ the function
-> [TyBinder] -- ^ function kind's binders (zonked)
-> Maybe (VarEnv Kind) -- ^ possibly, kind info (see above)
-> [LHsType Name] -- ^ args
-> Int -- ^ number to start arg counter at
-> TcM (TCvSubst, [TyBinder], [TcType], [LHsType Name], Int)
tc_infer_args mode orig_ty binders mb_kind_info orig_args n0
= do { traceTc "tcInferApps" (ppr binders $$ ppr orig_args)
; go emptyTCvSubst binders orig_args n0 [] }
where
go subst binders [] n acc
= return ( subst, binders, reverse acc, [], n )
-- when we call this when checking type family patterns, we really
-- do want to instantiate all invisible arguments. During other
-- typechecking, we don't.
go subst binders all_args n acc
| (inv_binders, other_binders) <- span isInvisibleBinder binders
, not (null inv_binders)
= do { (subst', args') <- tcInstBindersX subst mb_kind_info inv_binders
; go subst' other_binders all_args n (reverse args' ++ acc) }
go subst (binder:binders) (arg:args) n acc
= ASSERT( isVisibleBinder binder )
do { arg' <- addErrCtxt (funAppCtxt orig_ty arg n) $
tc_lhs_type mode arg (substTyUnchecked subst $ binderType binder)
; let subst' = case binderVar_maybe binder of
Just tv -> extendTvSubst subst tv arg'
Nothing -> subst
; go subst' binders args (n+1) (arg' : acc) }
go subst [] all_args n acc
= return (subst, [], reverse acc, all_args, n)
-- | Applies a type to a list of arguments. Always consumes all the
-- arguments.
tcInferApps :: Outputable fun
=> TcTyMode
-> fun -- ^ Function (for printing only)
-> TcType -- ^ Function (could be knot-tied)
-> TcKind -- ^ Function kind (zonked)
-> [LHsType Name] -- ^ Args
-> TcM (TcType, TcKind) -- ^ (f args, result kind)
tcInferApps mode orig_ty ty ki args = go ty ki args 1
where
go fun fun_kind [] _ = return (fun, fun_kind)
go fun fun_kind args n
| let (binders, res_kind) = splitPiTys fun_kind
, not (null binders)
= do { (subst, leftover_binders, args', leftover_args, n')
<- tc_infer_args mode orig_ty binders Nothing args n
; let fun_kind' = substTyUnchecked subst $
mkForAllTys leftover_binders res_kind
; go (mkNakedAppTys fun args') fun_kind' leftover_args n' }
go fun fun_kind all_args@(arg:args) n
= do { (co, arg_k, res_k) <- matchExpectedFunKind (length all_args)
fun fun_kind
; arg' <- addErrCtxt (funAppCtxt orig_ty arg n) $
tc_lhs_type mode arg arg_k
; go (mkNakedAppTy (fun `mkNakedCastTy` co) arg')
res_k args (n+1) }
--------------------------
checkExpectedKind :: TcType -- the type whose kind we're checking
-> TcKind -- the known kind of that type, k
-> TcKind -- the expected kind, exp_kind
-> TcM TcType -- a possibly-inst'ed, casted type :: exp_kind
-- Instantiate a kind (if necessary) and then call unifyType
-- (checkExpectedKind ty act_kind exp_kind)
-- checks that the actual kind act_kind is compatible
-- with the expected kind exp_kind
checkExpectedKind ty act_kind exp_kind
= do { (ty', act_kind') <- instantiate ty act_kind exp_kind
; let origin = TypeEqOrigin { uo_actual = act_kind'
, uo_expected = mkCheckExpType exp_kind
, uo_thing = Just $ mkTypeErrorThing ty'
}
; co_k <- uType origin KindLevel act_kind' exp_kind
; traceTc "checkExpectedKind" (vcat [ ppr act_kind
, ppr exp_kind
, ppr co_k ])
; let result_ty = ty' `mkNakedCastTy` co_k
; return result_ty }
where
-- we need to make sure that both kinds have the same number of implicit
-- foralls out front. If the actual kind has more, instantiate accordingly.
-- Otherwise, just pass the type & kind through -- the errors are caught
-- in unifyType.
instantiate :: TcType -- the type
-> TcKind -- of this kind
-> TcKind -- but expected to be of this one
-> TcM ( TcType -- the inst'ed type
, TcKind ) -- its new kind
instantiate ty act_ki exp_ki
= let (exp_bndrs, _) = splitPiTysInvisible exp_ki in
instantiateTyN (length exp_bndrs) ty act_ki
-- | Instantiate a type to have at most @n@ invisible arguments.
instantiateTyN :: Int -- ^ @n@
-> TcType -- ^ the type
-> TcKind -- ^ its kind
-> TcM (TcType, TcKind) -- ^ The inst'ed type with kind
instantiateTyN n ty ki
= let (bndrs, inner_ki) = splitPiTysInvisible ki
num_to_inst = length bndrs - n
-- NB: splitAt is forgiving with invalid numbers
(inst_bndrs, leftover_bndrs) = splitAt num_to_inst bndrs
in
if num_to_inst <= 0 then return (ty, ki) else
do { (subst, inst_args) <- tcInstBinders inst_bndrs
; let rebuilt_ki = mkForAllTys leftover_bndrs inner_ki
ki' = substTy subst rebuilt_ki
; return (mkNakedAppTys ty inst_args, ki') }
---------------------------
tcHsContext :: LHsContext Name -> TcM [PredType]
tcHsContext = tc_hs_context typeLevelMode
tcLHsPredType :: LHsType Name -> TcM PredType
tcLHsPredType = tc_lhs_pred typeLevelMode
tc_hs_context :: TcTyMode -> LHsContext Name -> TcM [PredType]
tc_hs_context mode ctxt = mapM (tc_lhs_pred mode) (unLoc ctxt)
tc_lhs_pred :: TcTyMode -> LHsType Name -> TcM PredType
tc_lhs_pred mode pred = tc_lhs_type mode pred constraintKind
---------------------------
tcTyVar :: TcTyMode -> Name -> TcM (TcType, TcKind)
-- See Note [Type checking recursive type and class declarations]
-- in TcTyClsDecls
tcTyVar mode name -- Could be a tyvar, a tycon, or a datacon
= do { traceTc "lk1" (ppr name)
; thing <- tcLookup name
; case thing of
ATyVar _ tv -> return (mkTyVarTy tv, tyVarKind tv)
ATcTyCon tc_tc -> do { check_tc tc_tc
; tc <- get_loopy_tc name tc_tc
; handle_tyfams tc tc_tc }
-- mkNakedTyConApp: see Note [Type-checking inside the knot]
-- NB: we really should check if we're at the kind level
-- and if the tycon is promotable if -XNoTypeInType is set.
-- But this is a terribly large amount of work! Not worth it.
AGlobal (ATyCon tc)
-> do { check_tc tc
; handle_tyfams tc tc }
AGlobal (AConLike (RealDataCon dc))
-> do { data_kinds <- xoptM LangExt.DataKinds
; unless (data_kinds || specialPromotedDc dc) $
promotionErr name NoDataKindsDC
; type_in_type <- xoptM LangExt.TypeInType
; unless ( type_in_type ||
( isTypeLevel (mode_level mode) &&
isLegacyPromotableDataCon dc ) ||
( isKindLevel (mode_level mode) &&
specialPromotedDc dc ) ) $
promotionErr name NoTypeInTypeDC
; let tc = promoteDataCon dc
; return (mkNakedTyConApp tc [], tyConKind tc) }
APromotionErr err -> promotionErr name err
_ -> wrongThingErr "type" thing name }
where
check_tc :: TyCon -> TcM ()
check_tc tc = do { type_in_type <- xoptM LangExt.TypeInType
; data_kinds <- xoptM LangExt.DataKinds
; unless (isTypeLevel (mode_level mode) ||
data_kinds ||
isKindTyCon tc) $
promotionErr name NoDataKindsTC
; unless (isTypeLevel (mode_level mode) ||
type_in_type ||
isLegacyPromotableTyCon tc) $
promotionErr name NoTypeInTypeTC }
-- if we are type-checking a type family tycon, we must instantiate
-- any invisible arguments right away. Otherwise, we get #11246
handle_tyfams :: TyCon -- the tycon to instantiate (might be loopy)
-> TyCon -- a non-loopy version of the tycon
-> TcM (TcType, TcKind)
handle_tyfams tc tc_tc
| mightBeUnsaturatedTyCon tc_tc
= do { traceTc "tcTyVar2a" (ppr tc_tc $$ ppr tc_kind)
; return (ty, tc_kind) }
| otherwise
= do { (tc_ty, kind) <- instantiateTyN 0 ty tc_kind
-- tc and tc_ty must not be traced here, because that would
-- force the evaluation of a potentially knot-tied variable (tc),
-- and the typechecker would hang, as per #11708
; traceTc "tcTyVar2b" (vcat [ ppr tc_tc <+> dcolon <+> ppr tc_kind
, ppr kind ])
; return (tc_ty, kind) }
where
ty = mkNakedTyConApp tc []
tc_kind = tyConKind tc_tc
get_loopy_tc :: Name -> TyCon -> TcM TyCon
-- Return the knot-tied global TyCon if there is one
-- Otherwise the local TcTyCon; we must be doing kind checking
-- but we still want to return a TyCon of some sort to use in
-- error messages
get_loopy_tc name tc_tc
= do { env <- getGblEnv
; case lookupNameEnv (tcg_type_env env) name of
Just (ATyCon tc) -> return tc
_ -> do { traceTc "lk1 (loopy)" (ppr name)
; return tc_tc } }
tcClass :: Name -> TcM (Class, TcKind)
tcClass cls -- Must be a class
= do { thing <- tcLookup cls
; case thing of
ATcTyCon tc -> return (aThingErr "tcClass" cls, tyConKind tc)
AGlobal (ATyCon tc)
| Just cls <- tyConClass_maybe tc
-> return (cls, tyConKind tc)
_ -> wrongThingErr "class" thing cls }
aThingErr :: String -> Name -> b
-- The type checker for types is sometimes called simply to
-- do *kind* checking; and in that case it ignores the type
-- returned. Which is a good thing since it may not be available yet!
aThingErr str x = pprPanic "AThing evaluated unexpectedly" (text str <+> ppr x)
{-
Note [Type-checking inside the knot]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Suppose we are checking the argument types of a data constructor. We
must zonk the types before making the DataCon, because once built we
can't change it. So we must traverse the type.
BUT the parent TyCon is knot-tied, so we can't look at it yet.
So we must be careful not to use "smart constructors" for types that
look at the TyCon or Class involved.
* Hence the use of mkNakedXXX functions. These do *not* enforce
the invariants (for example that we use (ForAllTy (Anon s) t) rather
than (TyConApp (->) [s,t])).
* The zonking functions establish invariants (even zonkTcType, a change from
previous behaviour). So we must never inspect the result of a
zonk that might mention a knot-tied TyCon. This is generally OK
because we zonk *kinds* while kind-checking types. And the TyCons
in kinds shouldn't be knot-tied, because they come from a previous
mutually recursive group.
* TcHsSyn.zonkTcTypeToType also can safely check/establish
invariants.
This is horribly delicate. I hate it. A good example of how
delicate it is can be seen in Trac #7903.
Note [Body kind of a HsForAllTy]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
The body of a forall is usually a type, but in principle
there's no reason to prohibit *unlifted* types.
In fact, GHC can itself construct a function with an
unboxed tuple inside a for-all (via CPR analysis; see
typecheck/should_compile/tc170).
Moreover in instance heads we get forall-types with
kind Constraint.
It's tempting to check that the body kind is either * or #. But this is
wrong. For example:
class C a b
newtype N = Mk Foo deriving (C a)
We're doing newtype-deriving for C. But notice how `a` isn't in scope in
the predicate `C a`. So we quantify, yielding `forall a. C a` even though
`C a` has kind `* -> Constraint`. The `forall a. C a` is a bit cheeky, but
convenient. Bottom line: don't check for * or # here.
Note [Body kind of a HsQualTy]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
If ctxt is non-empty, the HsQualTy really is a /function/, so the
kind of the result really is '*', and in that case the kind of the
body-type can be lifted or unlifted.
However, consider
instance Eq a => Eq [a] where ...
or
f :: (Eq a => Eq [a]) => blah
In both cases the body-kind of the HsQualTy is Constraint rather than *.
Rather crudely we tell the difference by looking at exp_kind. It's
very convenient to typecheck instance types like any other HsSigType.
Admittedly the '(Eq a => Eq [a]) => blah' case is erroneous, but it's
better to reject in checkValidType. If we say that the body kind
should be '*' we risk getting TWO error messages, one saying that Eq
[a] doesn't have kind '*', and one saying that we need a Constraint to
the left of the outer (=>).
How do we figure out the right body kind? Well, it's a bit of a
kludge: I just look at the expected kind. If it's Constraint, we
must be in this instance situation context. It's a kludge because it
wouldn't work if any unification was involved to compute that result
kind -- but it isn't. (The true way might be to use the 'mode'
parameter, but that seemed like a sledgehammer to crack a nut.)
Note [Inferring tuple kinds]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Given a tuple type (a,b,c), which the parser labels as HsBoxedOrConstraintTuple,
we try to figure out whether it's a tuple of kind * or Constraint.
Step 1: look at the expected kind
Step 2: infer argument kinds
If after Step 2 it's not clear from the arguments that it's
Constraint, then it must be *. Once we have decided, we re-check
the arguments to give good error messages
in e.g. `(Maybe, Maybe)`
Note that we will still fail to infer the correct kind in this case:
type T a = ((a,a), D a)
type family D :: Constraint -> Constraint
While kind checking T, we do not yet know the kind of D, so we will default the
kind of T to * -> *. It works if we annotate `a` with kind `Constraint`.
Note [Desugaring types]
~~~~~~~~~~~~~~~~~~~~~~~
The type desugarer is phase 2 of dealing with HsTypes. Specifically:
* It transforms from HsType to Type
* It zonks any kinds. The returned type should have no mutable kind
or type variables (hence returning Type not TcType):
- any unconstrained kind variables are defaulted to (Any *) just
as in TcHsSyn.
- there are no mutable type variables because we are
kind-checking a type
Reason: the returned type may be put in a TyCon or DataCon where
it will never subsequently be zonked.
You might worry about nested scopes:
..a:kappa in scope..
let f :: forall b. T '[a,b] -> Int
In this case, f's type could have a mutable kind variable kappa in it;
and we might then default it to (Any *) when dealing with f's type
signature. But we don't expect this to happen because we can't get a
lexically scoped type variable with a mutable kind variable in it. A
delicate point, this. If it becomes an issue we might need to
distinguish top-level from nested uses.
Moreover
* it cannot fail,
* it does no unifications
* it does no validity checking, except for structural matters, such as
(a) spurious ! annotations.
(b) a class used as a type
Note [Kind of a type splice]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider these terms, each with TH type splice inside:
[| e1 :: Maybe $(..blah..) |]
[| e2 :: $(..blah..) |]
When kind-checking the type signature, we'll kind-check the splice
$(..blah..); we want to give it a kind that can fit in any context,
as if $(..blah..) :: forall k. k.
In the e1 example, the context of the splice fixes kappa to *. But
in the e2 example, we'll desugar the type, zonking the kind unification
variables as we go. When we encounter the unconstrained kappa, we
want to default it to '*', not to (Any *).
Help functions for type applications
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-}
addTypeCtxt :: LHsType Name -> TcM a -> TcM a
-- Wrap a context around it only if we want to show that context.
-- Omit invisible ones and ones users won't grok
addTypeCtxt (L _ ty) thing
= addErrCtxt doc thing
where
doc = text "In the type" <+> quotes (ppr ty)
{-
************************************************************************
* *
Type-variable binders
*                                                                      *
************************************************************************
Note [Scope-check inferred kinds]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider
data SameKind :: k -> k -> *
foo :: forall a (b :: Proxy a) (c :: Proxy d). SameKind b c
d has no binding site. So it gets bound implicitly, at the top. The
problem is that d's kind mentions `a`. So it's all ill-scoped.
The way we check for this is to gather all variables *bound* in a
type variable's scope. The type variable's kind should not mention
any of these variables. That is, d's kind can't mention a, b, or c.
We can't just check to make sure that d's kind is in scope, because
we might be about to kindGeneralize.
A little messy, but it works.
Note [Dependent LHsQTyVars]
~~~~~~~~~~~~~~~~~~~~~~~~~~~
We track (in the renamer) which explicitly bound variables in a
LHsQTyVars are manifestly dependent; only precisely these variables
may be used within the LHsQTyVars. We must do this so that kcHsTyVarBndrs
can produce the right TcTyBinders, and tell Anon vs. Named. Earlier,
I thought it would work simply to do a free-variable check during
kcHsTyVarBndrs, but this is bogus, because there may be unsolved
equalities about. And we don't want to eagerly solve the equalities,
because we may get further information after kcHsTyVarBndrs is called.
(Recall that kcHsTyVarBndrs is usually called from getInitialKind.
The only other case is in kcConDecl.) This is what implements the rule
that all variables intended to be dependent must be manifestly so.
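For example, in
    data T k (a :: k) = MkT (Proxy a)
the binder k is manifestly dependent: it occurs syntactically in the kind
signature of the later binder a. In
    data S a b = MkS a b
neither binder is.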
Sidenote: It's quite possible that later, we'll consider (t -> s)
as a degenerate case of some (pi (x :: t) -> s) and then this will
all get more permissive.
-}
tcWildCardBinders :: [Name]
-> ([(Name, TcTyVar)] -> TcM a)
-> TcM a
-- Use the Unique from the specified Name; don't clone it. There is
-- no need to clone, and not doing so avoids the need to return a list
-- of pairs to bring into scope.
tcWildCardBinders wcs thing_inside
= do { wcs <- mapM new_wildcard wcs
; tcExtendTyVarEnv2 wcs $
thing_inside wcs }
where
new_wildcard :: Name -> TcM (Name, TcTyVar)
new_wildcard name = do { kind <- newMetaKindVar
; tv <- newFlexiTyVar kind
; return (name, tv) }
-- | Kind-check a 'LHsQTyVars'. If the decl under consideration has a complete,
-- user-supplied kind signature (CUSK), generalise the result.
-- Used in 'getInitialKind' (for tycon kinds and other kinds)
-- and in kind-checking (but not for tycon kinds, which are checked with
-- tcTyClDecls). See also Note [Complete user-supplied kind signatures] in
-- HsDecls.
--
-- This function does not do telescope checking.
kcHsTyVarBndrs :: Bool -- ^ True <=> the decl being checked has a CUSK
-> Bool -- ^ True <=> the decl is an open type/data family
-> Bool -- ^ True <=> all the hsq_implicit are *kind* vars
-- (will give these kind * if -XNoTypeInType)
-> LHsQTyVars Name
-> TcM (Kind, r) -- ^ the result kind, possibly with other info
-> TcM ([TcTyBinder], TcKind, r)
-- ^ The bound variables in the kind, the result kind,
-- with the other info.
-- Always returns Named binders; sort out Named vs. Anon
-- yourself.
kcHsTyVarBndrs cusk open_fam all_kind_vars
(HsQTvs { hsq_implicit = kv_ns, hsq_explicit = hs_tvs
, hsq_dependent = dep_names }) thing_inside
| cusk
= do { kv_kinds <- mk_kv_kinds
; let scoped_kvs = zipWith new_skolem_tv kv_ns kv_kinds
; tcExtendTyVarEnv2 (kv_ns `zip` scoped_kvs) $
do { (tvs, binders, res_kind, stuff) <- solveEqualities $
bind_telescope hs_tvs thing_inside
-- Now, because we're in a CUSK, quantify over the mentioned
-- kind vars, in dependency order.
; binders <- mapM zonkTcTyBinder binders
; res_kind <- zonkTcType res_kind
; let qkvs = tyCoVarsOfTypeWellScoped (mkForAllTys binders res_kind)
-- the visibility of tvs doesn't matter here; we just
-- want the free variables not to include the tvs
-- if there are any meta-tvs left, the user has lied about having
-- a CUSK. Error.
; let (meta_tvs, good_tvs) = partition isMetaTyVar qkvs
; when (not (null meta_tvs)) $
report_non_cusk_tvs (qkvs ++ tvs)
; return ( mkNamedBinders Specified good_tvs ++ binders
, res_kind, stuff ) }}
| otherwise
= do { kv_kinds <- mk_kv_kinds
; scoped_kvs <- zipWithM newSigTyVar kv_ns kv_kinds
-- the names must line up in splitTelescopeTvs
; (_, binders, res_kind, stuff)
<- tcExtendTyVarEnv2 (kv_ns `zip` scoped_kvs) $
bind_telescope hs_tvs thing_inside
; return (binders, res_kind, stuff) }
where
-- if -XNoTypeInType and we know all the implicits are kind vars,
-- just give the kind *. This prevents test
-- dependent/should_fail/KindLevelsB from compiling, as it should
mk_kv_kinds :: TcM [Kind]
mk_kv_kinds = do { typeintype <- xoptM LangExt.TypeInType
; if not typeintype && all_kind_vars
then return (map (const liftedTypeKind) kv_ns)
else mapM (const newMetaKindVar) kv_ns }
-- there may be dependency between the explicit "ty" vars. So, we have
-- to handle them one at a time.
bind_telescope :: [LHsTyVarBndr Name]
-> TcM (Kind, r)
-> TcM ([TcTyVar], [TyBinder], TcKind, r)
bind_telescope [] thing
= do { (res_kind, stuff) <- thing
; return ([], [], res_kind, stuff) }
bind_telescope (L _ hs_tv : hs_tvs) thing
= do { tv_pair@(tv, _) <- kc_hs_tv hs_tv
-- NB: Bring all tvs into scope, even non-dependent ones,
-- as they're needed in type synonyms, data constructors, etc.
; (tvs, binders, res_kind, stuff) <- bind_unless_scoped tv_pair $
bind_telescope hs_tvs $
thing
-- See Note [Dependent LHsQTyVars]
; let new_binder | hsTyVarName hs_tv `elemNameSet` dep_names
= mkNamedBinder Visible tv
| otherwise
= mkAnonBinder (tyVarKind tv)
; return ( tv : tvs
, new_binder : binders
, res_kind, stuff ) }
-- | Bind the tyvar in the env't unless the bool is True
bind_unless_scoped :: (TcTyVar, Bool) -> TcM a -> TcM a
bind_unless_scoped (_, True) thing_inside = thing_inside
bind_unless_scoped (tv, False) thing_inside
= tcExtendTyVarEnv [tv] thing_inside
kc_hs_tv :: HsTyVarBndr Name -> TcM (TcTyVar, Bool)
kc_hs_tv (UserTyVar (L _ name))
= do { tv_pair@(tv, scoped) <- tcHsTyVarName Nothing name
-- Open type/data families default their variables to kind *.
; when (open_fam && not scoped) $ -- (don't default class tyvars)
discardResult $ unifyKind (Just (mkTyVarTy tv)) liftedTypeKind
(tyVarKind tv)
; return tv_pair }
kc_hs_tv (KindedTyVar (L _ name) lhs_kind)
= do { kind <- tcLHsKind lhs_kind
; tcHsTyVarName (Just kind) name }
report_non_cusk_tvs all_tvs
= do { all_tvs <- mapM zonkTyCoVarKind all_tvs
; let (_, tidy_tvs) = tidyOpenTyCoVars emptyTidyEnv all_tvs
(meta_tvs, other_tvs) = partition isMetaTyVar tidy_tvs
; addErr $
            vcat [ text "You have written a *complete user-supplied kind signature*,"
, hang (text "but the following variable" <> plural meta_tvs <+>
isOrAre meta_tvs <+> text "undetermined:")
2 (vcat (map pp_tv meta_tvs))
, text "Perhaps add a kind signature."
, hang (text "Inferred kinds of user-written variables:")
2 (vcat (map pp_tv other_tvs)) ] }
where
pp_tv tv = ppr tv <+> dcolon <+> ppr (tyVarKind tv)
tcImplicitTKBndrs :: [Name]
-> TcM (a, TyVarSet) -- vars are bound somewhere in the scope
-- see Note [Scope-check inferred kinds]
-> TcM ([TcTyVar], a)
tcImplicitTKBndrs = tcImplicitTKBndrsX (tcHsTyVarName Nothing)
-- | Convenient specialization
tcImplicitTKBndrsType :: [Name]
-> TcM Type
-> TcM ([TcTyVar], Type)
tcImplicitTKBndrsType var_ns thing_inside
= tcImplicitTKBndrs var_ns $
do { res_ty <- thing_inside
; return (res_ty, allBoundVariables res_ty) }
-- this more general variant is needed in tcHsPatSigType.
-- See Note [Pattern signature binders]
tcImplicitTKBndrsX :: (Name -> TcM (TcTyVar, Bool)) -- new_tv function
-> [Name]
-> TcM (a, TyVarSet)
-> TcM ([TcTyVar], a)
-- Returned TcTyVars have the supplied Names
-- i.e. no cloning of fresh names
tcImplicitTKBndrsX new_tv var_ns thing_inside
= do { tkvs_pairs <- mapM new_tv var_ns
; let must_scope_tkvs = [ tkv | (tkv, False) <- tkvs_pairs ]
tkvs = map fst tkvs_pairs
; (result, bound_tvs) <- tcExtendTyVarEnv must_scope_tkvs $
thing_inside
-- it's possible that we guessed the ordering of variables
-- wrongly. Adjust.
; tkvs <- mapM zonkTyCoVarKind tkvs
; let extra = text "NB: Implicitly-bound variables always come" <+>
text "before other ones."
; checkValidInferredKinds tkvs bound_tvs extra
; let final_tvs = toposortTyVars tkvs
; traceTc "tcImplicitTKBndrs" (ppr var_ns $$ ppr final_tvs)
; return (final_tvs, result) }
tcExplicitTKBndrs :: [LHsTyVarBndr Name]
-> ([TyVar] -> TcM (a, TyVarSet))
-- ^ Thing inside returns the set of variables bound
-- in the scope. See Note [Scope-check inferred kinds]
-> TcM (a, TyVarSet) -- ^ returns augmented bound vars
-- No cloning: returned TyVars have the same Name as the incoming LHsTyVarBndrs
tcExplicitTKBndrs orig_hs_tvs thing_inside
= go orig_hs_tvs $ \ tvs ->
do { (result, bound_tvs) <- thing_inside tvs
-- Issue an error if the ordering is bogus.
-- See Note [Bad telescopes] in TcValidity.
; tvs <- checkZonkValidTelescope (interppSP orig_hs_tvs) tvs empty
; checkValidInferredKinds tvs bound_tvs empty
; traceTc "tcExplicitTKBndrs" $
vcat [ text "Hs vars:" <+> ppr orig_hs_tvs
, text "tvs:" <+> sep (map pprTvBndr tvs) ]
; return (result, bound_tvs `unionVarSet` mkVarSet tvs)
}
where
go [] thing = thing []
go (L _ hs_tv : hs_tvs) thing
= do { tv <- tcHsTyVarBndr hs_tv
; tcExtendTyVarEnv [tv] $
go hs_tvs $ \ tvs ->
thing (tv : tvs) }
tcHsTyVarBndr :: HsTyVarBndr Name -> TcM TcTyVar
-- Return a SkolemTv TcTyVar, initialised with a kind variable.
-- Typically the Kind inside the HsTyVarBndr will be a tyvar
-- with a mutable kind in it.
-- NB: These variables must not be in scope. This function
-- is not appropriate for use with associated types, for example.
--
-- Returned TcTyVar has the same name; no cloning
--
-- See also Note [Associated type tyvar names] in Class
tcHsTyVarBndr (UserTyVar (L _ name))
= do { kind <- newMetaKindVar
; return (mkTcTyVar name kind (SkolemTv False)) }
tcHsTyVarBndr (KindedTyVar (L _ name) kind)
= do { kind <- tcLHsKind kind
; return (mkTcTyVar name kind (SkolemTv False)) }
-- | Produce a tyvar of the given name (with the kind provided, or
-- otherwise a meta-var kind). If
-- the name is already in scope, return the scoped variable, checking
-- to make sure the known kind matches any kind provided. The
-- second return value says whether the variable is in scope (True)
-- or not (False). (Use this for associated types, for example.)
tcHsTyVarName :: Maybe Kind -> Name -> TcM (TcTyVar, Bool)
tcHsTyVarName m_kind name
= do { mb_tv <- tcLookupLcl_maybe name
; case mb_tv of
Just (ATyVar _ tv)
-> do { whenIsJust m_kind $ \ kind ->
discardResult $
unifyKind (Just (mkTyVarTy tv)) kind (tyVarKind tv)
; return (tv, True) }
_ -> do { kind <- maybe newMetaKindVar return m_kind
; return (mkTcTyVar name kind vanillaSkolemTv, False) }}
-- makes a new skolem tv
new_skolem_tv :: Name -> Kind -> TcTyVar
new_skolem_tv n k = mkTcTyVar n k vanillaSkolemTv
------------------
kindGeneralizeType :: Type -> TcM Type
-- Result is zonked
kindGeneralizeType ty
= do { kvs <- kindGeneralize ty
; zonkTcType (mkInvForAllTys kvs ty) }
kindGeneralize :: TcType -> TcM [KindVar]
-- Quantify the free kind variables of a kind or type
-- In the latter case the type is closed, so it has no free
-- type variables. So in both cases, all the free vars are kind vars
kindGeneralize kind_or_type
= do { kvs <- zonkTcTypeAndFV kind_or_type
; let dvs = DV { dv_kvs = kvs, dv_tvs = emptyVarSet }
; gbl_tvs <- tcGetGlobalTyCoVars -- Already zonked
; quantifyZonkedTyVars gbl_tvs dvs }
{-
Note [Kind generalisation]
~~~~~~~~~~~~~~~~~~~~~~~~~~
We do kind generalisation only at the outer level of a type signature.
For example, consider
T :: forall k. k -> *
f :: (forall a. T a -> Int) -> Int
When kind-checking f's type signature we generalise the kind at
the outermost level, thus:
f1 :: forall k. (forall (a:k). T k a -> Int) -> Int -- YES!
and *not* at the inner forall:
f2 :: (forall k. forall (a:k). T k a -> Int) -> Int -- NO!
Reason: same as for HM inference on value level declarations,
we want to infer the most general type. The f2 type signature
would be *less applicable* than f1, because it requires a more
polymorphic argument.
NB: There are no explicit kind variables written in f's signature.
When there are, the renamer adds these kind variables to the list of
variables bound by the forall, so you can indeed have a type that's
higher-rank in its kind. But only by explicit request.
Note [Kinds of quantified type variables]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
tcTyVarBndrsGen quantifies over a specified list of type variables,
*and* over the kind variables mentioned in the kinds of those tyvars.
Note that we must zonk those kinds (obviously) but less obviously, we
must return type variables whose kinds are zonked too. Example
(a :: k7) where k7 := k9 -> k9
We must return
[k9, a:k9->k9]
and NOT
[k9, a:k7]
Reason: we're going to turn this into a for-all type,
forall k9. forall (a:k7). blah
which the type checker will then instantiate, and instantiate does not
look through unification variables!
Hence using zonked_kinds when forming tvs'.
Note [Typechecking telescopes]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
The function tcTyClTyVars has to bind the scoped type and kind
variables in a telescope. For example:
class Foo k (t :: Proxy k -> k2) where ...
By the time [kt]cTyClTyVars is called, we know *something* about the kind of Foo,
at least that it has the form
Foo :: forall (k2 :: mk2). forall (k :: mk1) -> (Proxy mk1 k -> k2) -> Constraint
if it has a CUSK (Foo does not, in point of fact) or
Foo :: forall (k :: mk1) -> (Proxy mk1 k -> k2) -> Constraint
if it does not, where mk1 and mk2 are meta-kind variables (mk1, mk2 :: *).
When calling tcTyClTyVars, this kind is further generalized w.r.t. any
free variables appearing in mk1 or mk2. So, mk_tvs must handle
that possibility. Perhaps we discover that mk1 := Maybe k3 and mk2 := *,
so we have
Foo :: forall (k3 :: *). forall (k2 :: *). forall (k :: Maybe k3) ->
(Proxy (Maybe k3) k -> k2) -> Constraint
We now have several sorts of variables to think about:
1) The variable k3 is not mentioned in the source at all. It is neither
explicitly bound nor ever used. It is *not* a scoped kind variable,
and should not be bound when type-checking the scope of the telescope.
2) The variable k2 is mentioned in the source, but it is not explicitly
bound. It *is* a scoped kind variable, and will appear in the
hsq_implicit field of a LHsTyVarBndrs.
2a) In the non-CUSK case, these variables won't have been generalized
yet and don't appear in the looked-up kind. So we just return these
in a NameSet.
3) The variable k is mentioned in the source with an explicit binding.
It *is* a scoped type variable, and will appear in the
hsq_explicit field of a LHsTyVarBndrs.
4) The variable t is bound in the source, but it is never mentioned later
in the kind. It *is* a scoped variable (it can appear in the telescope
scope, even though it is non-dependent), and will appear in the
hsq_explicit field of a LHsTyVarBndrs.
splitTelescopeTvs walks through the output of a splitPiTys on the
telescope head's kind (Foo, in our example), creating a list of tyvars
to be bound within the telescope scope. It must simultaneously walk
through the hsq_implicit and hsq_explicit fields of a LHsTyVarBndrs.
Comments in the code refer back to the cases in this Note.
Cases (1) and (2) can be mixed together, but these cases must appear before
cases (3) and (4) (the implicitly bound vars always precede the explicitly
bound ones). So, we handle the lists in two stages (mk_tvs and
mk_tvs2).
As a further wrinkle, it's possible that the variables in case (2) have
been reordered. This is because hsq_implicit is ordered by the renamer,
but there may be dependency among the variables. Of course, the order in
the kind must take dependency into account. So we use a NameSet to keep
these straightened out.
Note [Free-floating kind vars]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider
data T = MkT (forall (a :: k). Proxy a)
-- from test ghci/scripts/T7873
This is not an existential datatype, but a higher-rank one. Note
the forall to the right of MkT. Also consider
data S a = MkS (Proxy (a :: k))
According to the rules around implicitly-bound kind variables, those
k's scope over the whole declarations. The renamer grabs it and adds it
to the hsq_implicit field of the HsQTyVars of the tycon. So it must
be in scope during type-checking, but we want to reject T while accepting
S.
Why reject T? Because the kind variable isn't fixed by anything. For
a variable like k to be implicit, it needs to be mentioned in the kind
of a tycon tyvar. But it isn't.
Why accept S? Because kind inference tells us that a has kind k, so it's
all OK.
Here's the approach: in the first pass ("kind-checking") we just bring
k into scope. In the second pass, we certainly hope that k has been
integrated into the type's (generalized) kind, and so it should be found
by splitTelescopeTvs. If it's not, then we must have a definition like
T, and we reject. (But see Note [Tiresome kind matching] about some extra
processing necessary in the second pass.)
Note [Tiresome kind matching]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Because of the use of SigTvs in kind inference (see #11203, for example)
sometimes kind variables come into tcTyClTyVars (the second, desugaring
pass in TcTyClDecls) with the wrong names. The best way to fix this up
is just to unify the kinds, again. So we return HsKind/Kind pairs from
splitTelescopeTvs that can get unified in tcTyClTyVars, but only if there
are kind vars that didn't link up in splitTelescopeTvs.
-}
--------------------
-- getInitialKind has made a suitably-shaped kind for the type or class
-- Unpack it, and attribute those kinds to the type variables
-- Extend the env with bindings for the tyvars, taken from
-- the kind of the tycon/class. Give it to the thing inside, and
-- check the result kind matches
kcLookupKind :: Name -> TcM ([TyBinder], Kind)
kcLookupKind nm
= do { tc_ty_thing <- tcLookup nm
; case tc_ty_thing of
ATcTyCon tc -> return (tyConBinders tc, tyConResKind tc)
AGlobal (ATyCon tc) -> return (tyConBinders tc, tyConResKind tc)
_ -> pprPanic "kcLookupKind" (ppr tc_ty_thing) }
-- See Note [Typechecking telescopes]
splitTelescopeTvs :: [TyBinder] -- telescope binders
-> LHsQTyVars Name
-> ( [TyVar] -- scoped type variables
, NameSet -- ungeneralized implicit variables (case 2a)
, [TyVar] -- implicit type variables (cases 1 & 2)
, [TyVar] -- explicit type variables (cases 3 & 4)
, [(LHsKind Name, Kind)] ) -- see Note [Tiresome kind matching]
splitTelescopeTvs bndrs tvbs@(HsQTvs { hsq_implicit = hs_kvs
, hsq_explicit = hs_tvs })
= let (scoped_tvs, non_cusk_imp_names, imp_tvs, exp_tvs, kind_matches)
= mk_tvs [] [] bndrs (mkNameSet hs_kvs) hs_tvs
in
(scoped_tvs, non_cusk_imp_names, imp_tvs, exp_tvs, kind_matches)
where
mk_tvs :: [TyVar] -- scoped tv accum (reversed)
-> [TyVar] -- implicit tv accum (reversed)
-> [TyBinder]
-> NameSet -- implicit variables
-> [LHsTyVarBndr Name] -- explicit variables
-> ( [TyVar] -- the tyvars to be lexically bound
, NameSet -- Case 2a names
, [TyVar] -- implicit tyvars
, [TyVar] -- explicit tyvars
, [(LHsKind Name, Kind)] ) -- tiresome kind matches
mk_tvs scoped_tv_acc imp_tv_acc (bndr : bndrs) all_hs_kvs all_hs_tvs
| Just tv <- binderVar_maybe bndr
, isInvisibleBinder bndr
, let tv_name = getName tv
, tv_name `elemNameSet` all_hs_kvs
= mk_tvs (tv : scoped_tv_acc) (tv : imp_tv_acc)
bndrs (all_hs_kvs `delFromNameSet` tv_name) all_hs_tvs -- Case (2)
| Just tv <- binderVar_maybe bndr
, isInvisibleBinder bndr
= mk_tvs scoped_tv_acc (tv : imp_tv_acc)
bndrs all_hs_kvs all_hs_tvs -- Case (1)
-- there may actually still be some hs_kvs, if we're kind checking
-- a non-CUSK. The kinds *aren't* generalized, so we won't see them
-- here.
mk_tvs scoped_tv_acc imp_tv_acc all_bndrs all_hs_kvs all_hs_tvs
= let (scoped, exp_tvs, kind_matches)
= mk_tvs2 scoped_tv_acc [] [] all_bndrs all_hs_tvs in
(scoped, all_hs_kvs, reverse imp_tv_acc, exp_tvs, kind_matches)
-- no more Case (1) or (2)
-- This can't handle Case (1) or Case (2) from [Typechecking telescopes]
mk_tvs2 :: [TyVar]
-> [TyVar] -- new parameter: explicit tv accum (reversed)
-> [(LHsKind Name, Kind)] -- tiresome kind matches accum
-> [TyBinder]
-> [LHsTyVarBndr Name]
-> ( [TyVar]
, [TyVar] -- explicit tvs only
, [(LHsKind Name, Kind)] ) -- tiresome kind matches
mk_tvs2 scoped_tv_acc exp_tv_acc kind_match_acc (bndr : bndrs) (hs_tv : hs_tvs)
| Just tv <- binderVar_maybe bndr
= ASSERT2( isVisibleBinder bndr, err_doc )
ASSERT( getName tv == hsLTyVarName hs_tv )
mk_tvs2 (tv : scoped_tv_acc) (tv : exp_tv_acc) kind_match_acc' bndrs hs_tvs
-- Case (3)
| otherwise
= ASSERT( isVisibleBinder bndr )
let tv = mkTyVar (hsLTyVarName hs_tv) (binderType bndr) in
mk_tvs2 (tv : scoped_tv_acc) (tv : exp_tv_acc) kind_match_acc' bndrs hs_tvs
-- Case (4)
where
err_doc = vcat [ ppr (bndr : bndrs)
, ppr (hs_tv : hs_tvs)
, ppr tvbs ]
kind_match_acc' = case hs_tv of
L _ (UserTyVar {}) -> kind_match_acc
L _ (KindedTyVar _ hs_kind) -> (hs_kind, kind) : kind_match_acc
where kind = binderType bndr
mk_tvs2 scoped_tv_acc exp_tv_acc kind_match_acc [] [] -- All done!
= ( reverse scoped_tv_acc
, reverse exp_tv_acc
, kind_match_acc ) -- no need to reverse; it's not ordered
mk_tvs2 _ _ _ all_bndrs all_hs_tvs
= pprPanic "splitTelescopeTvs 2" (vcat [ ppr all_bndrs
, ppr all_hs_tvs ])
-----------------------
-- | "Kind check" the tyvars to a tycon. This is used during the "kind-checking"
-- pass in TcTyClsDecls. (Never in getInitialKind, never in the
-- "type-checking"/desugaring pass.) It works only for LHsQTyVars associated
-- with a tycon, whose kind is known (partially) via getInitialKinds.
-- Never emits constraints, though the thing_inside might.
kcTyClTyVars :: Name -- ^ of the tycon
-> LHsQTyVars Name
-> TcM a -> TcM a
kcTyClTyVars tycon hs_tvs thing_inside
= do { (binders, res_kind) <- kcLookupKind tycon
; let (scoped_tvs, non_cusk_kv_name_set, all_kvs, all_tvs, _)
= splitTelescopeTvs binders hs_tvs
; traceTc "kcTyClTyVars splitTelescopeTvs:"
(vcat [ text "Tycon:" <+> ppr tycon
, text "Binders:" <+> ppr binders
, text "res_kind:" <+> ppr res_kind
, text "hs_tvs:" <+> ppr hs_tvs
, text "scoped tvs:" <+> pprWithCommas pprTvBndr scoped_tvs
, text "implicit tvs:" <+> pprWithCommas pprTvBndr all_kvs
, text "explicit tvs:" <+> pprWithCommas pprTvBndr all_tvs
, text "non-CUSK kvs:" <+> ppr non_cusk_kv_name_set ] )
-- need to look up the non-cusk kvs in order to get their
-- kinds right, in case the kinds were informed by
-- the getInitialKinds pass
; let non_cusk_kv_names = nameSetElems non_cusk_kv_name_set
free_kvs = tyCoVarsOfTypes $
map tyVarKind (all_kvs ++ all_tvs)
mk_kv kv_name = case lookupVarSetByName free_kvs kv_name of
Just kv -> return kv
Nothing ->
-- this kv isn't mentioned in the
-- LHsQTyVars at all. But maybe
-- it's mentioned in the body
-- In any case, just gin up a
-- meta-kind for it
do { kv_kind <- newMetaKindVar
; return (new_skolem_tv kv_name kv_kind) }
; non_cusk_kvs <- mapM mk_kv non_cusk_kv_names
-- The non_cusk_kvs are still scoped; they are mentioned by
-- name by the user
; tcExtendTyVarEnv (non_cusk_kvs ++ scoped_tvs) $
thing_inside }
tcTyClTyVars :: Name -> LHsQTyVars Name -- LHS of the type or class decl
-> ([TyVar] -> [TyVar] -> [TyBinder] -> Kind -> TcM a) -> TcM a
-- ^ Used for the type variables of a type or class decl
-- on the second full pass (type-checking/desugaring) in TcTyClDecls.
-- This is *not* used in the initial-kind run, nor in the "kind-checking" pass.
-- Accordingly, everything passed to the continuation is fully zonked.
--
-- (tcTyClTyVars T [a,b] thing_inside)
-- where T : forall k1 k2 (a:k1 -> *) (b:k1). k2 -> *
-- calls thing_inside with arguments
-- [k1,k2] [a,b] [k1:*, k2:*, a:k1 -> *, b:k1] (k2 -> *)
-- having also extended the type environment with bindings
-- for k1,k2,a,b
--
-- Never emits constraints.
--
-- The LHsTyVarBndrs is always user-written, and the full, generalised
-- kind of the tycon is available in the local env.
tcTyClTyVars tycon hs_tvs thing_inside
= do { (binders, res_kind) <- kcLookupKind tycon
; let ( scoped_tvs, float_kv_name_set, all_kvs
, all_tvs, kind_matches )
= splitTelescopeTvs binders hs_tvs
; traceTc "tcTyClTyVars splitTelescopeTvs:"
(vcat [ text "Tycon:" <+> ppr tycon
, text "Binders:" <+> ppr binders
, text "res_kind:" <+> ppr res_kind
, text "hs_tvs.hsq_implicit:" <+> ppr (hsq_implicit hs_tvs)
, text "hs_tvs.hsq_explicit:" <+> ppr (hsq_explicit hs_tvs)
, text "scoped tvs:" <+> pprWithCommas pprTvBndr scoped_tvs
, text "implicit tvs:" <+> pprWithCommas pprTvBndr all_kvs
, text "explicit tvs:" <+> pprWithCommas pprTvBndr all_tvs
, text "floating kvs:" <+> ppr float_kv_name_set
, text "Tiresome kind matches:" <+> ppr kind_matches ] )
; float_kvs <- deal_with_float_kvs float_kv_name_set kind_matches
scoped_tvs all_tvs
; tcExtendTyVarEnv (float_kvs ++ scoped_tvs) $
-- the float_kvs are already in the all_kvs
thing_inside all_kvs all_tvs binders res_kind }
where
-- See Note [Free-floating kind vars]
deal_with_float_kvs float_kv_name_set kind_matches scoped_tvs all_tvs
| isEmptyNameSet float_kv_name_set
= return []
| otherwise
= do { -- the floating kvs might just be renamed
-- see Note [Tiresome kind matching]
; let float_kv_names = nameSetElems float_kv_name_set
; float_kv_kinds <- mapM (const newMetaKindVar) float_kv_names
; float_kvs <- zipWithM newSigTyVar float_kv_names float_kv_kinds
; discardResult $
tcExtendTyVarEnv (float_kvs ++ scoped_tvs) $
solveEqualities $
forM kind_matches $ \ (hs_kind, kind) ->
do { tc_kind <- tcLHsKind hs_kind
; unifyKind noThing tc_kind kind }
; zonked_kvs <- mapM ((return . tcGetTyVar "tcTyClTyVars") <=< zonkTcTyVar)
float_kvs
; let (still_sigs, matched_up) = partition isSigTyVar zonked_kvs
-- the still_sigs didn't match with anything. They must be
-- "free-floating", as in Note [Free-floating kind vars]
; checkNoErrs $ mapM_ (report_floating_kv all_tvs) still_sigs
-- the matched up kvs are proper scoped kvs.
; return matched_up }
report_floating_kv all_tvs kv
= addErr $
vcat [ text "Kind variable" <+> quotes (ppr kv) <+>
text "is implicitly bound in datatype"
, quotes (ppr tycon) <> comma <+>
text "but does not appear as the kind of any"
, text "of its type variables. Perhaps you meant"
, text "to bind it (with TypeInType) explicitly somewhere?"
, if null all_tvs then empty else
hang (text "Type variables with inferred kinds:")
2 (pprTvBndrs all_tvs) ]
-----------------------------------
tcDataKindSig :: Kind -> TcM ([TyVar], [TyBinder], Kind)
-- GADT decls can have a (perhaps partial) kind signature
-- e.g. data T :: * -> * -> * where ...
-- This function makes up suitable (kinded) type variables for
-- the argument kinds, and checks that the result kind is indeed *.
-- We use it also to make up argument type variables for data instances.
-- Never emits constraints.
-- Returns the new TyVars, the extracted TyBinders, and the new, reduced
-- result kind (which should always be Type or a synonym thereof)
tcDataKindSig kind
= do { checkTc (isLiftedTypeKind res_kind) (badKindSig kind)
; span <- getSrcSpanM
; us <- newUniqueSupply
; rdr_env <- getLocalRdrEnv
; let uniqs = uniqsFromSupply us
occs = [ occ | str <- allNameStrings
, let occ = mkOccName tvName str
, isNothing (lookupLocalRdrOcc rdr_env occ) ]
-- Note [Avoid name clashes for associated data types]
-- NB: Use the tv from a binder if there is one. Otherwise,
-- we end up inventing a new Unique for it, and any other tv
-- that mentions the first ends up with the wrong kind.
; return ( [ tv
| ((bndr, occ), uniq) <- bndrs `zip` occs `zip` uniqs
, let tv | Just bndr_tv <- binderVar_maybe bndr
= bndr_tv
| otherwise
= mk_tv span uniq occ (binderType bndr) ]
, bndrs, res_kind ) }
where
(bndrs, res_kind) = splitPiTys kind
mk_tv loc uniq occ kind
= mkTyVar (mkInternalName uniq occ loc) kind
badKindSig :: Kind -> SDoc
badKindSig kind
= hang (text "Kind signature on data type declaration has non-* return kind")
2 (ppr kind)
{-
Note [Avoid name clashes for associated data types]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider class C a b where
data D b :: * -> *
When typechecking the decl for D, we'll invent an extra type variable
for D, to fill out its kind. Ideally we don't want this type variable
to be 'a', because when pretty printing we'll get
class C a b where
data D b a0
(NB: the tidying happens in the conversion to IfaceSyn, which happens
as part of pretty-printing a TyThing.)
That's why we look in the LocalRdrEnv to see what's in scope. This is
important only to get nice-looking output when doing ":info C" in GHCi.
It isn't essential for correctness.
************************************************************************
* *
Scoped type variables
* *
************************************************************************
tcAddScopedTyVars is used for scoped type variables added by pattern
type signatures
e.g. \ ((x::a), (y::a)) -> x+y
They never have explicit kinds (because this is source-code only)
They are mutable (because they can get bound to a more specific type).
Usually we kind-infer and expand type splices, and then
typecheck/desugar the type. That doesn't work well for scoped type
variables, because they scope left-right in patterns. (e.g. in the
example above, the 'a' in (y::a) is bound by the 'a' in (x::a).)
The current not-very-good plan is to
* find all the types in the patterns
* find their free tyvars
* do kind inference
* bring the kinded type vars into scope
* BUT throw away the kind-checked type
(we'll kind-check it again when we type-check the pattern)
This is bad because throwing away the kind checked type throws away
its splices. But too bad for now. [July 03]
Historical note:
We no longer specify that these type variables must be universally
quantified (lots of email on the subject). If you want to put that
back in, you need to
a) Do a checkSigTyVars after thing_inside
b) More insidiously, don't pass in expected_ty, else
we unify with it too early and checkSigTyVars barfs
Instead you have to pass in a fresh ty var, and unify
it with expected_ty afterwards
-}
tcHsPatSigType :: UserTypeCtxt
-> LHsSigWcType Name -- The type signature
-> TcM ( Type -- The signature
, [TcTyVar] -- The new bit of type environment, binding
-- the scoped type variables
, [(Name, TcTyVar)] ) -- The wildcards
-- Used for type-checking type signatures in
-- (a) patterns e.g f (x::Int) = e
-- (b) RULE forall bndrs e.g. forall (x::Int). f x = x
--
-- This may emit constraints
tcHsPatSigType ctxt sig_ty
| HsIB { hsib_vars = sig_vars, hsib_body = wc_ty } <- sig_ty
, HsWC { hswc_wcs = sig_wcs, hswc_ctx = extra, hswc_body = hs_ty } <- wc_ty
= ASSERT( isNothing extra ) -- No extra-constraint wildcard in pattern sigs
addSigCtxt ctxt hs_ty $
tcWildCardBinders sig_wcs $ \ wcs ->
do { emitWildCardHoleConstraints wcs
; (vars, sig_ty) <- tcImplicitTKBndrsX new_tkv sig_vars $
do { ty <- tcHsLiftedType hs_ty
; return (ty, allBoundVariables ty) }
; sig_ty <- zonkTcType sig_ty
-- don't use zonkTcTypeToType; it mistreats wildcards
; checkValidType ctxt sig_ty
; traceTc "tcHsPatSigType" (ppr sig_vars)
; return (sig_ty, vars, wcs) }
where
new_tkv name -- See Note [Pattern signature binders]
= (, False) <$> -- "False" means that these tyvars aren't yet in scope
do { kind <- newMetaKindVar
; case ctxt of
RuleSigCtxt {} -> return $ new_skolem_tv name kind
_ -> newSigTyVar name kind }
-- See Note [Unifying SigTvs]
tcPatSig :: Bool -- True <=> pattern binding
-> LHsSigWcType Name
-> ExpSigmaType
-> TcM (TcType, -- The type to use for "inside" the signature
[TcTyVar], -- The new bit of type environment, binding
-- the scoped type variables
[(Name, TcTyVar)], -- The wildcards
HsWrapper) -- Coercion due to unification with actual ty
-- Of shape: res_ty ~ sig_ty
tcPatSig in_pat_bind sig res_ty
= do { (sig_ty, sig_tvs, sig_wcs) <- tcHsPatSigType PatSigCtxt sig
-- sig_tvs are the type variables free in 'sig',
-- and not already in scope. These are the ones
-- that should be brought into scope
; if null sig_tvs then do {
-- Just do the subsumption check and return
wrap <- addErrCtxtM (mk_msg sig_ty) $
tcSubTypeET_NC PatSigCtxt res_ty sig_ty
; return (sig_ty, [], sig_wcs, wrap)
} else do
-- Type signature binds at least one scoped type variable
-- A pattern binding cannot bind scoped type variables
-- It is more convenient to make the test here
-- than in the renamer
{ when in_pat_bind (addErr (patBindSigErr sig_tvs))
-- Check that all newly-in-scope tyvars are in fact
-- constrained by the pattern. This catches tiresome
-- cases like
-- type T a = Int
-- f :: Int -> Int
-- f (x :: T a) = ...
-- Here 'a' doesn't get a binding. Sigh
; let bad_tvs = [ tv | tv <- sig_tvs
, not (tv `elemVarSet` exactTyCoVarsOfType sig_ty) ]
; checkTc (null bad_tvs) (badPatSigTvs sig_ty bad_tvs)
-- Now do a subsumption check of the pattern signature against res_ty
; wrap <- addErrCtxtM (mk_msg sig_ty) $
tcSubTypeET_NC PatSigCtxt res_ty sig_ty
-- Phew!
; return (sig_ty, sig_tvs, sig_wcs, wrap)
} }
where
mk_msg sig_ty tidy_env
= do { (tidy_env, sig_ty) <- zonkTidyTcType tidy_env sig_ty
; res_ty <- readExpType res_ty -- should be filled in by now
; (tidy_env, res_ty) <- zonkTidyTcType tidy_env res_ty
; let msg = vcat [ hang (text "When checking that the pattern signature:")
4 (ppr sig_ty)
, nest 2 (hang (text "fits the type of its context:")
2 (ppr res_ty)) ]
; return (tidy_env, msg) }
patBindSigErr :: [TcTyVar] -> SDoc
patBindSigErr sig_tvs
= hang (text "You cannot bind scoped type variable" <> plural sig_tvs
<+> pprQuotedList sig_tvs)
2 (text "in a pattern binding signature")
{-
Note [Pattern signature binders]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider
data T = forall a. T a (a->Int)
  f (T x (f :: a->Int)) = blah
Here
* The pattern (T p1 p2) creates a *skolem* type variable 'a_sk',
It must be a skolem so that it retains its identity, and
TcErrors.getSkolemInfo can thereby find the binding site for the skolem.
* The type signature pattern (f :: a->Int) binds "a" -> a_sig in the envt
* Then unification makes a_sig := a_sk
That's why we must make a_sig a MetaTv (albeit a SigTv),
not a SkolemTv, so that it can unify to a_sk.
For RULE binders, though, things are a bit different (yuk).
RULE "foo" forall (x::a) (y::[a]). f x y = ...
Here this really is the binding site of the type variable so we'd like
to use a skolem, so that we get a complaint if we unify two of them
together.
Note [Unifying SigTvs]
~~~~~~~~~~~~~~~~~~~~~~
ALAS we have no decent way of avoiding two SigTvs getting unified.
Consider
   f (x::(a,b)) (y::c) = [fst x, y]
Here we'd really like to complain that 'a' and 'c' are unified. But
for the reasons above we can't make a,b,c into skolems, so they
are just SigTvs that can unify. And indeed, this would be ok,
f x (y::c) = case x of
(x1 :: a1, True) -> [x,y]
(x1 :: a2, False) -> [x,y,y]
Here the type of x's first component is called 'a1' in one branch and
'a2' in the other. We could try insisting on the same OccName, but
they definitely won't have the same lexical Name.
I think we could solve this by recording in a SigTv a list of all the
in-scope variables that it should not unify with, but it's fiddly.
************************************************************************
* *
Checking kinds
* *
************************************************************************
-}
-- | Produce a 'TcKind' suitable for checking a type that can be * or #.
ekOpen :: TcM TcKind
ekOpen = do { rr <- newFlexiTyVarTy runtimeRepTy
; return (tYPE rr) }
unifyKinds :: [(TcType, TcKind)] -> TcM ([TcType], TcKind)
unifyKinds act_kinds
= do { kind <- newMetaKindVar
; let check (ty, act_kind) = checkExpectedKind ty act_kind kind
; tys' <- mapM check act_kinds
; return (tys', kind) }
{-
************************************************************************
* *
Sort checking kinds
* *
************************************************************************
tcLHsKind converts a user-written kind to an internal, sort-checked kind.
It does sort checking and desugaring at the same time, in one single pass.
-}
tcLHsKind :: LHsKind Name -> TcM Kind
tcLHsKind = tc_lhs_kind kindLevelMode
tc_lhs_kind :: TcTyMode -> LHsKind Name -> TcM Kind
tc_lhs_kind mode k
= addErrCtxt (text "In the kind" <+> quotes (ppr k)) $
tc_lhs_type (kindLevel mode) k liftedTypeKind
promotionErr :: Name -> PromotionErr -> TcM a
promotionErr name err
= failWithTc (hang (pprPECategory err <+> quotes (ppr name) <+> text "cannot be used here")
2 (parens reason))
where
reason = case err of
FamDataConPE -> text "it comes from a data family instance"
NoDataKindsTC -> text "Perhaps you intended to use DataKinds"
NoDataKindsDC -> text "Perhaps you intended to use DataKinds"
NoTypeInTypeTC -> text "Perhaps you intended to use TypeInType"
NoTypeInTypeDC -> text "Perhaps you intended to use TypeInType"
PatSynPE -> text "Pattern synonyms cannot be promoted"
_ -> text "it is defined and used in the same recursive group"
{-
************************************************************************
* *
Scoped type variables
* *
************************************************************************
-}
badPatSigTvs :: TcType -> [TyVar] -> SDoc
badPatSigTvs sig_ty bad_tvs
= vcat [ fsep [text "The type variable" <> plural bad_tvs,
quotes (pprWithCommas ppr bad_tvs),
text "should be bound by the pattern signature" <+> quotes (ppr sig_ty),
text "but are actually discarded by a type synonym" ]
, text "To fix this, expand the type synonym"
, text "[Note: I hope to lift this restriction in due course]" ]
{-
************************************************************************
* *
Error messages and such
* *
************************************************************************
-}
-- | Make an appropriate message for an error in a function argument.
-- Used for both expressions and types.
funAppCtxt :: (Outputable fun, Outputable arg) => fun -> arg -> Int -> SDoc
funAppCtxt fun arg arg_no
= hang (hsep [ text "In the", speakNth arg_no, ptext (sLit "argument of"),
quotes (ppr fun) <> text ", namely"])
2 (quotes (ppr arg))
| tjakway/ghcjvm | compiler/typecheck/TcHsType.hs | bsd-3-clause | 94,245 | 369 | 38 | 26,999 | 12,753 | 7,421 | 5,332 | -1 | -1 |
module Db (Band(..), Connection, createConnection, destroyConnection, migrate, selectBands) where
import Data.Text (Text)
import Database.PostgreSQL.Simple (Connection, close, connectPostgreSQL, query_, withTransaction)
import Database.PostgreSQL.Simple.FromRow (FromRow, fromRow, field)
import Database.PostgreSQL.Simple.Migration (MigrationCommand(..), MigrationContext(..), MigrationResult(..),
runMigration)
import Config
data Band = Band {
bandId :: Int,
bandName :: Text
} deriving Show
instance FromRow Band where
fromRow = Band <$> field <*> field
createConnection :: Config -> IO Connection
createConnection dbConfig = do
    url <- require dbConfig "url"
    connectPostgreSQL url
destroyConnection :: Connection -> IO ()
destroyConnection = close
migrate :: Config -> Connection -> IO (MigrationResult String)
migrate config connection =
do
migrationsPath <- require config "migrations_path"
withTransaction connection $ runMigration $
MigrationContext MigrationInitialization True connection
withTransaction connection $ runMigration $
MigrationContext (MigrationDirectory migrationsPath) True connection
selectBands :: Connection -> IO [Band]
selectBands connection =
query_ connection "SELECT id, name FROM bands"
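-- A minimal usage sketch (assumed: a configurator 'Config' value carrying
-- "url" and "migrations_path" keys; only this module's exports and Prelude
-- are used):
--
-- > run dbConfig = do
-- >   conn  <- createConnection dbConfig
-- >   _     <- migrate dbConfig conn
-- >   bands <- selectBands conn
-- >   mapM_ print bands
-- >   destroyConnection conn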
| b0oh/heroku-docker-haskell-stack-example | src/Db.hs | isc | 1,228 | 0 | 10 | 218 | 311 | 173 | 138 | 28 | 1 |
-- module Practice where
times3PlusY = x * 3 + y
where x = 3
y = 1000
times5 = x * 5
where y = 10
x = 10 * 5 + y
divideProblem = z / x + y
where x = 7
y = negate x
z = y * 10
mult1 = x * y
where x = 5
y = 6
| brodyberg/Notes | ProjectRosalind.hsproj/LearnHaskell/lib/HaskellBook/Practice.hs | mit | 278 | 0 | 8 | 137 | 114 | 63 | 51 | 13 | 1 |
module Pos.Crypto.Signing.Types.Tag
( SignTag(..)
) where
import Universum
import Formatting (bprint, shown)
import qualified Formatting.Buildable
-- | To protect against replay attacks (i.e. when an attacker intercepts a
-- signed piece of data and later sends it again), we add a tag to all data
-- that we sign. This ensures that even if some bytestring can be
-- deserialized into two different types of messages (A and B), the attacker
-- can't take message A and send it as message B.
--
-- We also automatically add the network tag ('protocolMagic') whenever it
-- makes sense, to ensure that things intended for testnet won't work for
-- mainnet.
data SignTag
= SignForTestingOnly -- ^ Anything (to be used for testing only)
| SignTx -- ^ Tx: @TxSigData@
| SignRedeemTx -- ^ Redeem tx: @TxSigData@
| SignVssCert -- ^ Vss certificate: @(VssPublicKey, EpochIndex)@
| SignUSProposal -- ^ Update proposal: @UpdateProposalToSign@
| SignCommitment -- ^ Commitment: @(EpochIndex, Commitment)@
| SignUSVote -- ^ US proposal vote: @(UpId, Bool)@
| SignMainBlock -- ^ Main block: @MainToSign@
| SignMainBlockLight
| SignMainBlockHeavy
| SignProxySK -- ^ Proxy key: @ProxySecretKey@
deriving (Eq, Ord, Show, Generic, Typeable)
-- TODO: it would be nice if we couldn't use 'SignTag' with wrong
-- types. Maybe something with GADTs and data families?
instance Buildable SignTag where
build = bprint shown
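-- A usage sketch (hypothetical: the signing function itself lives elsewhere in
-- Pos.Crypto.Signing, and its exact name and argument order are assumptions):
--
-- > txSig = sign protocolMagic SignTx signingKey txSigData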
| input-output-hk/pos-haskell-prototype | crypto/Pos/Crypto/Signing/Types/Tag.hs | mit | 1,594 | 0 | 6 | 415 | 134 | 90 | 44 | 20 | 0 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- Module : Network.AWS.EC2.ReportInstanceStatus
-- Copyright : (c) 2013-2014 Brendan Hay <[email protected]>
-- License : This Source Code Form is subject to the terms of
-- the Mozilla Public License, v. 2.0.
-- A copy of the MPL can be found in the LICENSE file or
-- you can obtain it at http://mozilla.org/MPL/2.0/.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : experimental
-- Portability : non-portable (GHC extensions)
--
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- | Submits feedback about the status of an instance. The instance must be in the 'running' state. If your experience with the instance differs from the instance
-- status returned by 'DescribeInstanceStatus', use 'ReportInstanceStatus' to report
-- your experience with the instance. Amazon EC2 collects this information to
-- improve the accuracy of status checks.
--
-- Use of this action does not change the value returned by 'DescribeInstanceStatus'.
--
-- <http://docs.aws.amazon.com/AWSEC2/latest/APIReference/ApiReference-query-ReportInstanceStatus.html>
module Network.AWS.EC2.ReportInstanceStatus
(
-- * Request
ReportInstanceStatus
-- ** Request constructor
, reportInstanceStatus
-- ** Request lenses
, risDescription
, risDryRun
, risEndTime
, risInstances
, risReasonCodes
, risStartTime
, risStatus
-- * Response
, ReportInstanceStatusResponse
-- ** Response constructor
, reportInstanceStatusResponse
) where
import Network.AWS.Prelude
import Network.AWS.Request.Query
import Network.AWS.EC2.Types
import qualified GHC.Exts
data ReportInstanceStatus = ReportInstanceStatus
{ _risDescription :: Maybe Text
, _risDryRun :: Maybe Bool
, _risEndTime :: Maybe ISO8601
, _risInstances :: List "InstanceId" Text
, _risReasonCodes :: List "item" ReportInstanceReasonCodes
, _risStartTime :: Maybe ISO8601
, _risStatus :: ReportStatusType
} deriving (Eq, Read, Show)
-- | 'ReportInstanceStatus' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'risDescription' @::@ 'Maybe' 'Text'
--
-- * 'risDryRun' @::@ 'Maybe' 'Bool'
--
-- * 'risEndTime' @::@ 'Maybe' 'UTCTime'
--
-- * 'risInstances' @::@ ['Text']
--
-- * 'risReasonCodes' @::@ ['ReportInstanceReasonCodes']
--
-- * 'risStartTime' @::@ 'Maybe' 'UTCTime'
--
-- * 'risStatus' @::@ 'ReportStatusType'
--
reportInstanceStatus :: ReportStatusType -- ^ 'risStatus'
-> ReportInstanceStatus
reportInstanceStatus p1 = ReportInstanceStatus
{ _risStatus = p1
, _risDryRun = Nothing
, _risInstances = mempty
, _risStartTime = Nothing
, _risEndTime = Nothing
, _risReasonCodes = mempty
, _risDescription = Nothing
}
-- | Descriptive text about the health state of your instance.
risDescription :: Lens' ReportInstanceStatus (Maybe Text)
risDescription = lens _risDescription (\s a -> s { _risDescription = a })
risDryRun :: Lens' ReportInstanceStatus (Maybe Bool)
risDryRun = lens _risDryRun (\s a -> s { _risDryRun = a })
-- | The time at which the reported instance health state ended.
risEndTime :: Lens' ReportInstanceStatus (Maybe UTCTime)
risEndTime = lens _risEndTime (\s a -> s { _risEndTime = a }) . mapping _Time
-- | One or more instances.
risInstances :: Lens' ReportInstanceStatus [Text]
risInstances = lens _risInstances (\s a -> s { _risInstances = a }) . _List
-- | One or more reason codes that describe the health state of your instance.
--
-- 'instance-stuck-in-state': My instance is stuck in a state.
--
-- 'unresponsive': My instance is unresponsive.
--
-- 'not-accepting-credentials': My instance is not accepting my credentials.
--
-- 'password-not-available': A password is not available for my instance.
--
-- 'performance-network': My instance is experiencing performance problems which
-- I believe are network related.
--
-- 'performance-instance-store': My instance is experiencing performance problems
-- which I believe are related to the instance stores.
--
-- 'performance-ebs-volume': My instance is experiencing performance problems
-- which I believe are related to an EBS volume.
--
-- 'performance-other': My instance is experiencing performance problems.
--
-- 'other': [explain using the description parameter]
--
--
risReasonCodes :: Lens' ReportInstanceStatus [ReportInstanceReasonCodes]
risReasonCodes = lens _risReasonCodes (\s a -> s { _risReasonCodes = a }) . _List
-- | The time at which the reported instance health state began.
risStartTime :: Lens' ReportInstanceStatus (Maybe UTCTime)
risStartTime = lens _risStartTime (\s a -> s { _risStartTime = a }) . mapping _Time
-- | The status of all instances listed.
risStatus :: Lens' ReportInstanceStatus ReportStatusType
risStatus = lens _risStatus (\s a -> s { _risStatus = a })
data ReportInstanceStatusResponse = ReportInstanceStatusResponse
deriving (Eq, Ord, Read, Show, Generic)
-- | 'ReportInstanceStatusResponse' constructor.
reportInstanceStatusResponse :: ReportInstanceStatusResponse
reportInstanceStatusResponse = ReportInstanceStatusResponse
instance ToPath ReportInstanceStatus where
toPath = const "/"
instance ToQuery ReportInstanceStatus where
toQuery ReportInstanceStatus{..} = mconcat
[ "Description" =? _risDescription
, "DryRun" =? _risDryRun
, "EndTime" =? _risEndTime
, "InstanceId" `toQueryList` _risInstances
, "ReasonCode" `toQueryList` _risReasonCodes
, "StartTime" =? _risStartTime
, "Status" =? _risStatus
]
instance ToHeaders ReportInstanceStatus
instance AWSRequest ReportInstanceStatus where
type Sv ReportInstanceStatus = EC2
type Rs ReportInstanceStatus = ReportInstanceStatusResponse
request = post "ReportInstanceStatus"
response = nullResponse ReportInstanceStatusResponse
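-- A usage sketch (hypothetical: 'send' is the usual amazonka request runner, and
-- 'someStatus' / 'someReason' stand in for constructors of 'ReportStatusType' and
-- 'ReportInstanceReasonCodes' from "Network.AWS.EC2.Types"):
--
-- > send $ reportInstanceStatus someStatus
-- >      & risInstances .~ ["i-1234567890abcdef0"]
-- >      & risReasonCodes .~ [someReason]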
| kim/amazonka | amazonka-ec2/gen/Network/AWS/EC2/ReportInstanceStatus.hs | mpl-2.0 | 6,453 | 0 | 10 | 1,287 | 802 | 489 | 313 | 81 | 1 |
{-# OPTIONS_GHC -Wall -Wno-unticked-promoted-constructors -Wno-orphans -Wno-type-defaults #-}
{-# OPTIONS_GHC -ddump-tc-trace #-}
module Holo.Record
( Vocab(..)
, Definition(..)
)
where
import ExternalImports
import qualified Graphics.Cairo as Cr
import Holo.Instances
import Holo.Input
import Holo.Item
import qualified Holo.Port as Port
import Holo.Widget
-- * Lifted records (depends on Widgety Text instance)
--
instance SOP.Generic Port.Settings
instance SOP.HasDatatypeInfo Port.Settings
instance SOP.Generic Cr.FontSpec
instance SOP.HasDatatypeInfo Cr.FontSpec
instance SOP.Generic Cr.FontSizeRequest
instance SOP.HasDatatypeInfo Cr.FontSizeRequest
setAE ∷ AElt → Widget i a → Widget i a
setAE ae (Widget' (_,a,b,c)) = Widget' (ae,a,b,c)
instance {-# OVERLAPPABLE #-}
( Typeable a
, SOP.Generic a, SOP.HasDatatypeInfo a, SOP.Code a ~ xss
, SOP.All2 (Present i) xss
, MonadW i t r m
) ⇒ Widgety i a where
dynWidget' ae tok voc da = do
lbs ← getSubLBinds @i ae
w ← runWidgetMLBinds @i lbs $ do
recover (Proxy @(Present i)) (Proxy @(i, a))
(\_p _dti → pure 0) -- XXX: stub that'll obviously break sums -- should be a dynamic for choice
(recoverFieldWidget (tok, voc, da))
setAE ae <$> finaliseNodeWidget w
instance {-# OVERLAPPABLE #-}
(Typeable a
, SOP.Generic a, SOP.HasDatatypeInfo a, SOP.Code a ~ xss
, SOP.All2 (Present i) xss
, MonadW i t r m
) ⇒ Present i a where
present ae voc initial = do
lbs ← getSubLBinds @i ae
w ← runWidgetMLBinds @i lbs $ do
recover (Proxy @(Present i)) (Proxy @(i, a))
(\_p _dti → pure 0) -- XXX: stub that'll obviously break sums -- should be a dynamic for choice
(\a b c d e f g → finaliseNodeWidget =<<
recoverFieldPresent (voc, initial) a b c d e f g)
setAE ae <$> finaliseNodeWidget w
dynPresent ae voc da = do
lbs ← getSubLBinds @i ae
w ← runWidgetMLBinds @i lbs $ do
recover (Proxy @(Present i)) (Proxy @(i, a))
(\_px _dti→ pure 0) -- XXX: stub that'll obviously break sums -- should be a dynamic for choice
(\a b c d e f g → --finaliseNodeWidget =<<
recoverFieldPresentDynamic (voc, da) a b c d e f g)
setAE ae <$> finaliseNodeWidget w
recoverFieldWidget ∷ ∀ i t r m u f xss xs.
( MonadW i t r m
, SOP.HasDatatypeInfo u, SOP.Code u ~ xss
, As TextLine, Present i Text
, Typeable f
, Present i f
)
⇒ (Port.IdToken, Vocab i (Present i), Dynamic t u)
→ ReadFieldT (Present i) i m u f xss xs
recoverFieldWidget (tok, voc, dRec) _pC _pIAF _dtinfo _consNr _cinfo (FieldInfo fname) proj =
mapDesig @i @f voc
\(_ ∷ n)→ do
Widget' (ae,sD,iD,vD) ← dynWidget' @i @(Denoted n) (AElt $ pack fname) tok voc (forget ∘ proj <$> dRec)
ivD ← interpretate @i vD
pure $ Widget' (ae,sD,iD,ivD)
recoverFieldPresent ∷ ∀ i t r m u a xss xs.
( MonadW i t r m
, SOP.HasDatatypeInfo u, SOP.Code u ~ xss
, As TextLine, Present i Text
, Typeable a
, Present i a
)
⇒ (Vocab i (Present i), u)
→ ReadFieldT (Present i) i m u a xss xs
recoverFieldPresent (voc, initV ∷ u) _pC _pIAF _dtinfo _consNr _cinfo (FieldInfo fname) proj = do
let fname' = pack fname
tok ← Port.newId $ "record label '" <> fname' <> "'"
let addLabel "" x = x
addLabel lab x = hbox [ (defLeaf ∷ (x ~ TextLine, As x, Top (Denoted x))
⇒ Port.IdToken → x → Denoted x → Blank i)
tok TextLine (pack lab <> ": ")
, x
]
Widget' (ae, subsD, item, val) ← present @i (AElt $ pack fname) voc (proj initV)
pure $ Widget' (ae, subsD, addLabel fname <$> item, val)
recoverFieldPresentDynamic
∷ ∀ i t r m a f xss xs.
( MonadW i t r m
, HasCallStack, Typeable f
, Named a
, SOP.Generic a
, SOP.HasDatatypeInfo a
, SOP.Code a ~ xss, SOP.All2 (Present i) xss
)
⇒ (Vocab i (Present i), Dynamic t a)
→ ReadFieldT (Present i) i m a f xss xs
recoverFieldPresentDynamic (voc, dRec) _pC _pIAF _dtinfo _consNr _cinfo (FieldInfo fname) proj = do
let fname' = pack fname
tok ← Port.newId $ "record label '" <> fname' <> "'"
let addLabel "" x = x
addLabel lab x = hbox [ (defLeaf ∷ (x ~ TextLine, As x, Top (Denoted x))
⇒ Port.IdToken → x → Denoted x → Blank i)
tok TextLine (pack lab <> ": ")
, x
]
Widget' (ae, subsD, item, val) ← dynPresent @i (AElt $ pack fname) voc (proj <$> dRec)
pure $ Widget' (ae, subsD, addLabel fname <$> item, val)
| deepfire/mood | src/Holo/Record.hs | agpl-3.0 | 4,843 | 0 | 18 | 1,400 | 1,857 | 956 | 901 | -1 | -1 |
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<helpset version="2.0" xml:lang="fil-PH">
<title>Gabay sa pagsisimula</title>
<maps>
<homeID>top</homeID>
<mapref location="map.jhm"/>
</maps>
<view>
<name>TOC</name>
<label>Mga Nilalaman</label>
<type>org.zaproxy.zap.extension.help.ZapTocView</type>
<data>toc.xml</data>
</view>
<view>
<name>Index</name>
<label>Indeks</label>
<type>javax.help.IndexView</type>
<data>index.xml</data>
</view>
<view>
<name>Search</name>
<label>Paghahanap</label>
<type>javax.help.SearchView</type>
<data engine="com.sun.java.help.search.DefaultSearchEngine">
JavaHelpSearch
</data>
</view>
<view>
<name>Favorites</name>
<label>Mga paborito</label>
<type>javax.help.FavoritesView</type>
</view>
</helpset> | veggiespam/zap-extensions | addOns/gettingStarted/src/main/javahelp/org/zaproxy/zap/extension/gettingStarted/resources/help_fil_PH/helpset_fil_PH.hs | apache-2.0 | 980 | 84 | 52 | 160 | 400 | 210 | 190 | -1 | -1 |
import qualified Tuura.PGminer.Main
main :: IO ()
main = Tuura.PGminer.Main.main
| tuura/process-mining | pgminer/Main.hs | bsd-3-clause | 82 | 0 | 6 | 11 | 27 | 16 | 11 | 3 | 1 |
module Web.Types where
import Web.Spock.Safe
import Formalize.Types
type SessionVal = Maybe SessionId
type FormalizeApp ctx = SpockCtxM ctx () SessionVal AppState ()
type FormalizeAction ctx a = SpockActionCtx ctx () SessionVal AppState a
data AppConfig = AppConfig
{ cPort :: Int
, cPath :: FilePath
, cSMTP :: SMTPInfo
}
data AppState = AppState
{ sPath :: FilePath
, sSMTP :: SMTPInfo
}
| Lepovirta/Crystallize | app/Web/Types.hs | bsd-3-clause | 443 | 0 | 8 | 116 | 117 | 70 | 47 | 13 | 0 |
-- | Contains actions for voting on posts and comments. There are functions
-- for upvoting ('upvotePost', 'upvoteComment'), downvoting ('downvotePost',
-- 'downvoteComment') as well as removing votes that have already been cast
-- ('unvotePost', 'unvoteComment').
--
-- Please note that automated voting (i.e. by a bot, as opposed to being
-- specifically ordered to by a person) is strictly against the Reddit rules,
-- and is a very effective way of getting your bot shadowbanned.
module Reddit.Actions.Voting
( upvotePost
, downvotePost
, unvotePost
, upvoteComment
, downvoteComment
, unvoteComment ) where
import Reddit.Routes.Vote (VoteDirection(..))
import Reddit.Types
import Reddit.Types.Empty
import Reddit.Types.Reddit
import qualified Reddit.Routes as Route
vote :: (Monad m, Thing a) => VoteDirection -> a -> RedditT m ()
vote dir = nothing . runRoute . Route.vote dir
-- | Upvote a post.
upvotePost :: Monad m => PostID -> RedditT m ()
upvotePost = vote UpVote
-- | Downvote a post.
downvotePost :: Monad m => PostID -> RedditT m ()
downvotePost = vote DownVote
-- | Remove a vote from a post.
unvotePost :: Monad m => PostID -> RedditT m ()
unvotePost = vote RemoveVote
-- | Upvote a comment.
upvoteComment :: Monad m => CommentID -> RedditT m ()
upvoteComment = vote UpVote
-- | Downvote a comment.
downvoteComment :: Monad m => CommentID -> RedditT m ()
downvoteComment = vote DownVote
-- | Remove a previously-cast vote from a comment.
unvoteComment :: Monad m => CommentID -> RedditT m ()
unvoteComment = vote RemoveVote
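-- A minimal usage sketch, not part of the library's API: cast an upvote on
-- a post and then retract it. 'demoVoteCycle' is a hypothetical helper added
-- here purely for illustration; it assumes 'PostID' and 'RedditT' come from
-- the "Reddit.Types" modules imported above. Note again that automated
-- voting is against the Reddit rules.
demoVoteCycle :: Monad m => PostID -> RedditT m ()
demoVoteCycle pid = upvotePost pid >> unvotePost pid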
| intolerable/reddit | src/Reddit/Actions/Voting.hs | bsd-2-clause | 1,573 | 0 | 9 | 281 | 323 | 177 | 146 | 26 | 1 |
-- | This module reexports the modules that every program using SubHask will need.
-- You should import it instead of Prelude.
module SubHask
( module SubHask.Algebra
, module SubHask.Category
, module SubHask.Compatibility.Base
, module SubHask.Internal.Prelude
, module SubHask.Monad
, module SubHask.SubType
) where
import SubHask.Algebra
import SubHask.Category
import SubHask.Compatibility.Base
import SubHask.Internal.Prelude
import SubHask.Monad
import SubHask.SubType
| abailly/subhask | src/SubHask.hs | bsd-3-clause | 505 | 0 | 5 | 85 | 78 | 52 | 26 | 13 | 0 |
{-# OPTIONS_GHC -fwarn-incomplete-patterns #-}
module Idris.Core.DeepSeq where
import Idris.Core.TT
import Idris.Core.CaseTree
import Idris.Core.Evaluate
import Control.DeepSeq
instance NFData Name where
rnf (UN x1) = rnf x1 `seq` ()
rnf (NS x1 x2) = rnf x1 `seq` rnf x2 `seq` ()
rnf (MN x1 x2) = rnf x1 `seq` rnf x2 `seq` ()
rnf NErased = ()
rnf (SN x1) = rnf x1 `seq` ()
rnf (SymRef x1) = rnf x1 `seq` ()
instance NFData SpecialName where
rnf (WhereN x1 x2 x3) = rnf x1 `seq` rnf x2 `seq` rnf x3 `seq` ()
rnf (WithN x1 x2) = rnf x1 `seq` rnf x2 `seq` ()
rnf (InstanceN x1 x2) = rnf x1 `seq` rnf x2 `seq` ()
rnf (ParentN x1 x2) = rnf x1 `seq` rnf x2 `seq` ()
rnf (MethodN x1) = rnf x1 `seq` ()
rnf (CaseN x1) = rnf x1 `seq` ()
rnf (ElimN x1) = rnf x1 `seq` ()
rnf (InstanceCtorN x1) = rnf x1 `seq` ()
rnf (MetaN x1 x2) = rnf x1 `seq` rnf x2 `seq` ()
instance NFData Universe where
rnf NullType = ()
rnf UniqueType = ()
rnf AllTypes = ()
instance NFData Raw where
rnf (Var x1) = rnf x1 `seq` ()
rnf (RBind x1 x2 x3) = rnf x1 `seq` rnf x2 `seq` rnf x3 `seq` ()
rnf (RApp x1 x2) = rnf x1 `seq` rnf x2 `seq` ()
rnf RType = ()
rnf (RUType x1) = rnf x1 `seq` ()
rnf (RForce x1) = rnf x1 `seq` ()
rnf (RConstant x1) = x1 `seq` ()
instance NFData FC where
rnf (FC x1 x2 x3) = rnf x1 `seq` rnf x2 `seq` rnf x3 `seq` ()
rnf NoFC = ()
rnf (FileFC f) = rnf f `seq` ()
instance NFData Provenance where
rnf ExpectedType = ()
rnf InferredVal = ()
rnf GivenVal = ()
rnf (SourceTerm x1) = rnf x1 `seq` ()
rnf (TooManyArgs x1) = rnf x1 `seq` ()
instance NFData UConstraint where
rnf (ULT x1 x2) = rnf x1 `seq` rnf x2 `seq` ()
rnf (ULE x1 x2) = rnf x1 `seq` rnf x2 `seq` ()
instance NFData ConstraintFC where
rnf (ConstraintFC x1 x2) = rnf x1 `seq` rnf x2 `seq` ()
instance NFData Err where
rnf (Msg x1) = rnf x1 `seq` ()
rnf (InternalMsg x1) = rnf x1 `seq` ()
rnf (CantUnify x1 x2 x3 x4 x5 x6)
= rnf x1 `seq`
rnf x2 `seq` rnf x3 `seq` rnf x4 `seq` rnf x5 `seq` rnf x6 `seq` ()
rnf (InfiniteUnify x1 x2 x3)
= rnf x1 `seq` rnf x2 `seq` rnf x3 `seq` ()
rnf (CantConvert x1 x2 x3)
= rnf x1 `seq` rnf x2 `seq` rnf x3 `seq` ()
rnf (UnifyScope x1 x2 x3 x4)
= rnf x1 `seq` rnf x2 `seq` rnf x3 `seq` rnf x4 `seq` ()
rnf (ElaboratingArg x1 x2 x3 x4)
= rnf x1 `seq` rnf x2 `seq` rnf x3 `seq` rnf x4 `seq` ()
rnf (CantInferType x1) = rnf x1 `seq` ()
rnf (CantMatch x1) = rnf x1 `seq` ()
rnf (ReflectionError x1 x2) = rnf x1 `seq` rnf x2 `seq` ()
rnf (ReflectionFailed x1 x2) = rnf x1 `seq` rnf x2 `seq` ()
rnf (CantSolveGoal x1 x2) = rnf x1 `seq` rnf x2 `seq` ()
rnf (UniqueError x1 x2) = rnf x1 `seq` rnf x2 `seq` ()
rnf (UniqueKindError x1 x2) = rnf x1 `seq` rnf x2 `seq` ()
rnf (NotEquality x1 x2) = rnf x1 `seq` rnf x2 `seq` ()
rnf (NonFunctionType x1 x2) = rnf x1 `seq` rnf x2 `seq` ()
rnf (CantIntroduce x1) = rnf x1 `seq` ()
rnf (TooManyArguments x1) = rnf x1 `seq` ()
rnf (WithFnType x1) = rnf x1 `seq` ()
rnf (NoSuchVariable x1) = rnf x1 `seq` ()
rnf (NoTypeDecl x1) = rnf x1 `seq` ()
rnf (NotInjective x1 x2 x3)
= rnf x1 `seq` rnf x2 `seq` rnf x3 `seq` ()
rnf (CantResolve x1 x2) = rnf x1 `seq` rnf x2 `seq` ()
rnf (InvalidTCArg x1 x2) = rnf x1 `seq` rnf x2 `seq` ()
rnf (CantResolveAlts x1) = rnf x1 `seq` ()
rnf (NoValidAlts x1) = rnf x1 `seq` ()
rnf (IncompleteTerm x1) = rnf x1 `seq` ()
rnf (NoEliminator x1 x2) = rnf x1 `seq` rnf x2 `seq` ()
rnf (UniverseError x1 x2 x3 x4 x5) = rnf x1 `seq` rnf x2 `seq` rnf x3 `seq` rnf x4 `seq` rnf x5 `seq` ()
rnf ProgramLineComment = ()
rnf (Inaccessible x1) = rnf x1 `seq` ()
rnf (UnknownImplicit x1 x2) = rnf x1 `seq` rnf x2 `seq` ()
rnf (NonCollapsiblePostulate x1) = rnf x1 `seq` ()
rnf (AlreadyDefined x1) = rnf x1 `seq` ()
rnf (ProofSearchFail x1) = rnf x1 `seq` ()
rnf (NoRewriting x1) = rnf x1 `seq` ()
rnf (At x1 x2) = rnf x1 `seq` rnf x2 `seq` ()
rnf (Elaborating x1 x2 x3)
= rnf x1 `seq` rnf x2 `seq` rnf x3 `seq` ()
rnf (ProviderError x1) = rnf x1 `seq` ()
rnf (LoadingFailed x1 x2) = rnf x1 `seq` rnf x2 `seq` ()
rnf (ElabScriptDebug x1 x2 x3) = rnf x1 `seq` rnf x2 `seq` rnf x3 `seq` ()
rnf (ElabScriptStuck x1) = rnf x1 `seq` ()
rnf (RunningElabScript x1) = rnf x1 `seq` ()
instance NFData ErrorReportPart where
rnf (TextPart x1) = rnf x1 `seq` ()
rnf (TermPart x1) = rnf x1 `seq` ()
rnf (RawPart x1) = rnf x1 `seq` ()
rnf (NamePart x1) = rnf x1 `seq` ()
rnf (SubReport x1) = rnf x1 `seq` ()
instance NFData ImplicitInfo where
rnf (Impl x1) = rnf x1 `seq` ()
instance (NFData b) => NFData (Binder b) where
rnf (Lam x1) = rnf x1 `seq` ()
rnf (Pi x1 x2 x3) = rnf x1 `seq` rnf x2 `seq` rnf x3 `seq` ()
rnf (Let x1 x2) = rnf x1 `seq` rnf x2 `seq` ()
rnf (NLet x1 x2) = rnf x1 `seq` rnf x2 `seq` ()
rnf (Hole x1) = rnf x1 `seq` ()
rnf (GHole x1 x2 x3) = rnf x1 `seq` rnf x2 `seq` rnf x3 `seq` ()
rnf (Guess x1 x2) = rnf x1 `seq` rnf x2 `seq` ()
rnf (PVar x1) = rnf x1 `seq` ()
rnf (PVTy x1) = rnf x1 `seq` ()
instance NFData UExp where
rnf (UVar x1) = rnf x1 `seq` ()
rnf (UVal x1) = rnf x1 `seq` ()
instance NFData NameType where
rnf Bound = ()
rnf Ref = ()
rnf (DCon x1 x2 x3) = rnf x1 `seq` rnf x2 `seq` rnf x3 `seq` ()
rnf (TCon x1 x2) = rnf x1 `seq` rnf x2 `seq` ()
instance NFData NativeTy where
rnf IT8 = ()
rnf IT16 = ()
rnf IT32 = ()
rnf IT64 = ()
instance NFData IntTy where
rnf (ITFixed x1) = rnf x1 `seq` ()
rnf ITNative = ()
rnf ITBig = ()
rnf ITChar = ()
instance NFData ArithTy where
rnf (ATInt x1) = rnf x1 `seq` ()
rnf ATFloat = ()
instance NFData Const where
rnf (I x1) = rnf x1 `seq` ()
rnf (BI x1) = rnf x1 `seq` ()
rnf (Fl x1) = rnf x1 `seq` ()
rnf (Ch x1) = rnf x1 `seq` ()
rnf (Str x1) = rnf x1 `seq` ()
rnf (B8 x1) = rnf x1 `seq` ()
rnf (B16 x1) = rnf x1 `seq` ()
rnf (B32 x1) = rnf x1 `seq` ()
rnf (B64 x1) = rnf x1 `seq` ()
rnf (AType x1) = rnf x1 `seq` ()
rnf WorldType = ()
rnf TheWorld = ()
rnf StrType = ()
rnf VoidType = ()
rnf Forgot = ()
instance (NFData n) => NFData (TT n) where
rnf (P x1 x2 x3) = rnf x1 `seq` rnf x2 `seq` rnf x3 `seq` ()
rnf (V x1) = rnf x1 `seq` ()
rnf (Bind x1 x2 x3) = rnf x1 `seq` rnf x2 `seq` rnf x3 `seq` ()
rnf (App x1 x2 x3) = rnf x2 `seq` rnf x3 `seq` ()
rnf (Constant x1) = rnf x1 `seq` ()
rnf (Proj x1 x2) = rnf x1 `seq` rnf x2 `seq` ()
rnf Erased = ()
rnf Impossible = ()
rnf (TType x1) = rnf x1 `seq` ()
rnf (UType _) = ()
instance NFData Accessibility where
rnf Public = ()
rnf Frozen = ()
rnf Hidden = ()
instance NFData Totality where
rnf (Total x1) = rnf x1 `seq` ()
rnf Productive = ()
rnf (Partial x1) = rnf x1 `seq` ()
rnf Unchecked = ()
rnf Generated = ()
instance NFData PReason where
rnf (Other x1) = rnf x1 `seq` ()
rnf Itself = ()
rnf NotCovering = ()
rnf NotPositive = ()
rnf (UseUndef x1) = rnf x1 `seq` ()
rnf ExternalIO = ()
rnf BelieveMe = ()
rnf (Mutual x1) = rnf x1 `seq` ()
rnf NotProductive = ()
instance NFData MetaInformation where
rnf EmptyMI = ()
rnf (DataMI x1) = rnf x1 `seq` ()
instance NFData Def where
rnf (Function x1 x2) = rnf x1 `seq` rnf x2 `seq` ()
rnf (TyDecl x1 x2) = rnf x1 `seq` rnf x2 `seq` ()
rnf (Operator x1 x2 x3) = rnf x1 `seq` rnf x2 `seq` rnf x3 `seq` ()
rnf (CaseOp x1 x2 x3 x4 x5 x6)
= rnf x1 `seq`
rnf x2 `seq` rnf x3 `seq` rnf x4 `seq` rnf x5 `seq` rnf x6 `seq` ()
instance NFData CaseInfo where
rnf (CaseInfo x1 x2 x3) = rnf x1 `seq` rnf x2 `seq` rnf x3 `seq` ()
instance NFData CaseDefs where
rnf (CaseDefs x1 x2 x3 x4)
= rnf x1 `seq` rnf x2 `seq` rnf x3 `seq` rnf x4 `seq` ()
instance (NFData t) => NFData (SC' t) where
rnf (Case _ x1 x2) = rnf x1 `seq` rnf x2 `seq` ()
rnf (ProjCase x1 x2) = rnf x1 `seq` rnf x2 `seq` ()
rnf (STerm x1) = rnf x1 `seq` ()
rnf (UnmatchedCase x1) = rnf x1 `seq` ()
rnf ImpossibleCase = ()
instance (NFData t) => NFData (CaseAlt' t) where
rnf (ConCase x1 x2 x3 x4)
= x1 `seq` x2 `seq` x3 `seq` rnf x4 `seq` ()
rnf (FnCase x1 x2 x3) = x1 `seq` x2 `seq` rnf x3 `seq` ()
rnf (ConstCase x1 x2) = x1 `seq` rnf x2 `seq` ()
rnf (SucCase x1 x2) = x1 `seq` rnf x2 `seq` ()
rnf (DefaultCase x1) = rnf x1 `seq` ()
| osa1/Idris-dev | src/Idris/Core/DeepSeq.hs | bsd-3-clause | 9,370 | 0 | 12 | 3,184 | 4,944 | 2,594 | 2,350 | 216 | 0 |
{-# LANGUAGE TemplateHaskell #-}
module System.Taffybar.DBus.Client.UPower where
import DBus.Generation
import System.FilePath
import System.Taffybar.DBus.Client.Params
import System.Taffybar.DBus.Client.Util
generateClientFromFile
defaultRecordGenerationParams { recordName = Just "UPowerInfo"
, recordPrefix = "upi"
}
uPowerGenerationParams { genObjectPath = Just uPowerBaseObjectPath }
False $
"dbus-xml" </> "org.freedesktop.UPower.xml"
| teleshoes/taffybar | src/System/Taffybar/DBus/Client/UPower.hs | bsd-3-clause | 515 | 0 | 10 | 113 | 82 | 50 | 32 | 12 | 0 |
{-# LANGUAGE CPP #-}
{-# LANGUAGE OverloadedStrings #-}
{-
Copyright (C) 2013-2015 John MacFarlane <[email protected]>
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
-}
{- |
Module : Text.Pandoc.Writers.OPML
Copyright : Copyright (C) 2013-2015 John MacFarlane
License : GNU GPL, version 2 or above
Maintainer : John MacFarlane <[email protected]>
Stability : alpha
Portability : portable
Conversion of 'Pandoc' documents to OPML XML.
-}
module Text.Pandoc.Writers.OPML ( writeOPML) where
import Text.Pandoc.Definition
import Text.Pandoc.XML
import Text.Pandoc.Writers.Shared
import Text.Pandoc.Shared
import Text.Pandoc.Options
import Text.Pandoc.Templates (renderTemplate')
import Text.Pandoc.Writers.HTML (writeHtmlString)
import Text.Pandoc.Writers.Markdown (writeMarkdown)
import Text.Pandoc.Pretty
import Text.Pandoc.Compat.Time
import qualified Text.Pandoc.Builder as B
-- | Convert Pandoc document to string in OPML format.
writeOPML :: WriterOptions -> Pandoc -> String
writeOPML opts (Pandoc meta blocks) =
let elements = hierarchicalize blocks
colwidth = if writerWrapText opts == WrapAuto
then Just $ writerColumns opts
else Nothing
meta' = B.setMeta "date" (B.str $ convertDate $ docDate meta) meta
Just metadata = metaToJSON opts
(Just . writeMarkdown def . Pandoc nullMeta)
(Just . trimr . writeMarkdown def . Pandoc nullMeta .
(\ils -> [Plain ils]))
meta'
main = render colwidth $ vcat (map (elementToOPML opts) elements)
context = defField "body" main metadata
in if writerStandalone opts
then renderTemplate' (writerTemplate opts) context
else main
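-- Illustrative convenience wrapper (an assumption, not part of pandoc's
-- public API): render a document to OPML using the default writer options,
-- relying only on 'writeOPML' above and 'def' from Text.Pandoc.Options.
writeOPMLDefault :: Pandoc -> String
writeOPMLDefault = writeOPML def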
writeHtmlInlines :: [Inline] -> String
writeHtmlInlines ils = trim $ writeHtmlString def
$ Pandoc nullMeta [Plain ils]
-- date format: RFC 822: Thu, 14 Jul 2005 23:41:05 GMT
showDateTimeRFC822 :: UTCTime -> String
showDateTimeRFC822 = formatTime defaultTimeLocale "%a, %d %b %Y %X %Z"
convertDate :: [Inline] -> String
convertDate ils = maybe "" showDateTimeRFC822 $
#if MIN_VERSION_time(1,5,0)
parseTimeM True
#else
parseTime
#endif
defaultTimeLocale "%F" =<< (normalizeDate $ stringify ils)
-- | Convert an Element to OPML.
elementToOPML :: WriterOptions -> Element -> Doc
elementToOPML _ (Blk _) = empty
elementToOPML opts (Sec _ _num _ title elements) =
let isBlk (Blk _) = True
isBlk _ = False
fromBlk (Blk x) = x
fromBlk _ = error "fromBlk called on non-block"
(blocks, rest) = span isBlk elements
attrs = [("text", writeHtmlInlines title)] ++
[("_note", writeMarkdown def (Pandoc nullMeta
(map fromBlk blocks)))
| not (null blocks)]
in inTags True "outline" attrs $
vcat (map (elementToOPML opts) rest)
| janschulz/pandoc | src/Text/Pandoc/Writers/OPML.hs | gpl-2.0 | 3,536 | 0 | 16 | 833 | 675 | 360 | 315 | 53 | 3 |
<?xml version="1.0" encoding="UTF-8"?><!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<helpset version="2.0" xml:lang="bs-BA">
<title>Code Dx | ZAP Extension</title>
<maps>
<homeID>top</homeID>
<mapref location="map.jhm"/>
</maps>
<view>
<name>TOC</name>
<label>Contents</label>
<type>org.zaproxy.zap.extension.help.ZapTocView</type>
<data>toc.xml</data>
</view>
<view>
<name>Index</name>
<label>Index</label>
<type>javax.help.IndexView</type>
<data>index.xml</data>
</view>
<view>
<name>Search</name>
<label>Search</label>
<type>javax.help.SearchView</type>
<data engine="com.sun.java.help.search.DefaultSearchEngine">
JavaHelpSearch
</data>
</view>
<view>
<name>Favorites</name>
<label>Favorites</label>
<type>javax.help.FavoritesView</type>
</view>
</helpset> | thc202/zap-extensions | addOns/codedx/src/main/javahelp/org/zaproxy/zap/extension/codedx/resources/help_bs_BA/helpset_bs_BA.hs | apache-2.0 | 968 | 78 | 66 | 159 | 413 | 209 | 204 | -1 | -1 |
module B2 where
import Control.Parallel.Strategies (rseq, rpar,
runEval)
qsort ((x : xs))
= lsort_2 ++ ([x] ++ hsort)
where
      lsort = qsort (filter (< x) xs)
      hsort = qsort (filter (>= x) xs)
lsort_2
=
runEval
(do lsort_2 <- rpar lsort
return lsort_2)
| RefactoringTools/HaRe | old/testing/evalMonad/B2AST.hs | bsd-3-clause | 374 | 0 | 12 | 168 | 122 | 67 | 55 | 12 | 1 |
module Main where
import Criterion.Main
import Algo.ListRank (listRank)
import Algo.Rootfix (rootfix)
import Algo.Leaffix (leaffix)
import Algo.AwShCC (awshcc)
import Algo.HybCC (hybcc)
import Algo.Quickhull (quickhull)
import Algo.Spectral ( spectral )
import Algo.Tridiag ( tridiag )
import TestData.ParenTree ( parenTree )
import TestData.Graph ( randomGraph )
import TestData.Random ( randomVector )
import Data.Vector.Unboxed ( Vector )
size :: Int
size = 100000
main = lparens `seq` rparens `seq`
nodes `seq` edges1 `seq` edges2 `seq`
do
as <- randomVector size :: IO (Vector Double)
bs <- randomVector size :: IO (Vector Double)
cs <- randomVector size :: IO (Vector Double)
ds <- randomVector size :: IO (Vector Double)
sp <- randomVector (floor $ sqrt $ fromIntegral size)
:: IO (Vector Double)
as `seq` bs `seq` cs `seq` ds `seq` sp `seq`
defaultMain [ bench "listRank" $ whnf listRank size
, bench "rootfix" $ whnf rootfix (lparens, rparens)
, bench "leaffix" $ whnf leaffix (lparens, rparens)
, bench "awshcc" $ whnf awshcc (nodes, edges1, edges2)
, bench "hybcc" $ whnf hybcc (nodes, edges1, edges2)
, bench "quickhull" $ whnf quickhull (as,bs)
, bench "spectral" $ whnf spectral sp
, bench "tridiag" $ whnf tridiag (as,bs,cs,ds)
]
where
(lparens, rparens) = parenTree size
(nodes, edges1, edges2) = randomGraph size
| dolio/vector | benchmarks/Main.hs | bsd-3-clause | 1,686 | 0 | 13 | 559 | 538 | 294 | 244 | 36 | 1 |
{- |
Module : Numeric.GSL
Copyright : (c) Alberto Ruiz 2006-7
License : GPL-style
Maintainer : Alberto Ruiz (aruiz at um dot es)
Stability : provisional
Portability : uses -fffi and -fglasgow-exts
This module reexports all available GSL functions.
The GSL special functions are in the separate package hmatrix-special.
-}
module Numeric.GSL (
module Numeric.GSL.Integration
, module Numeric.GSL.Differentiation
, module Numeric.GSL.Fourier
, module Numeric.GSL.Polynomials
, module Numeric.GSL.Minimization
, module Numeric.GSL.Root
, module Numeric.GSL.ODE
, module Numeric.GSL.Fitting
, module Data.Complex
, setErrorHandlerOff
) where
import Numeric.GSL.Integration
import Numeric.GSL.Differentiation
import Numeric.GSL.Fourier
import Numeric.GSL.Polynomials
import Numeric.GSL.Minimization
import Numeric.GSL.Root
import Numeric.GSL.ODE
import Numeric.GSL.Fitting
import Data.Complex
-- | This action removes the GSL default error handler (which aborts the program), so that
-- GSL errors can be handled by Haskell (using Control.Exception) and ghci doesn't abort.
foreign import ccall unsafe "GSL/gsl-aux.h no_abort_on_error" setErrorHandlerOff :: IO ()
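-- A minimal sketch ('withErrorHandlerOff' is a hypothetical helper, not part
-- of this module's API): disable the default GSL error handler before running
-- an IO action, so that GSL errors surface as catchable Haskell exceptions
-- instead of aborting the program.
withErrorHandlerOff :: IO a -> IO a
withErrorHandlerOff action = setErrorHandlerOff >> action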
| abakst/liquidhaskell | benchmarks/hmatrix-0.15.0.1/lib/Numeric/GSL.hs | bsd-3-clause | 1,190 | 0 | 7 | 172 | 148 | 99 | 49 | 21 | 0 |
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE ScopedTypeVariables #-}
module PostgREST.App (app, sqlError, isSqlError) where
import Control.Monad (join)
import Control.Arrow ((***), second)
import Control.Applicative
import Data.Text hiding (map)
import Data.Maybe (fromMaybe, mapMaybe)
import Text.Regex.TDFA ((=~))
import Data.Ord (comparing)
import Data.Ranged.Ranges (emptyRange)
import qualified Data.HashMap.Strict as M
import Data.String.Conversions (cs)
import Data.CaseInsensitive (original)
import Data.List (sortBy)
import Data.Functor.Identity
import qualified Data.Set as S
import qualified Data.ByteString.Lazy as BL
import qualified Data.ByteString.Char8 as BS
import qualified Blaze.ByteString.Builder as BB
import qualified Data.Csv as CSV
import Network.HTTP.Types.Status
import Network.HTTP.Types.Header
import Network.HTTP.Types.URI (parseSimpleQuery)
import Network.HTTP.Base (urlEncodeVars)
import Network.Wai
import Network.Wai.Internal (Response(..))
import Data.Aeson
import Data.Monoid
import qualified Data.Vector as V
import qualified Hasql as H
import qualified Hasql.Backend as B
import qualified Hasql.Postgres as P
import PostgREST.Config (AppConfig(..))
import PostgREST.Auth
import PostgREST.PgQuery
import PostgREST.RangeQuery
import PostgREST.PgStructure
import Prelude
app :: AppConfig -> BL.ByteString -> Request -> H.Tx P.Postgres s Response
app conf reqBody req =
case (path, verb) of
([], _) -> do
body <- encode <$> tables (cs schema)
return $ responseLBS status200 [jsonH] $ cs body
([table], "OPTIONS") -> do
let qt = qualify table
cols <- columns qt
pkey <- map cs <$> primaryKeyColumns qt
return $ responseLBS status200 [jsonH, allOrigins]
$ encode (TableOptions cols pkey)
([table], "GET") ->
if range == Just emptyRange
then return $ responseLBS status416 [] "HTTP Range error"
else do
let qt = qualify table
select = B.Stmt "select " V.empty True <>
parentheticT (
whereT qt qq $ countRows qt
) <> commaq <> (
asJsonWithCount
. limitT range
. orderT (orderParse qq)
. whereT qt qq
$ selectStar qt
)
row <- H.maybeEx select
let (tableTotal, queryTotal, body) =
fromMaybe (0, 0, Just "" :: Maybe Text) row
from = fromMaybe 0 $ rangeOffset <$> range
to = from+queryTotal-1
contentRange = contentRangeH from to tableTotal
status = rangeStatus from to tableTotal
canonical = urlEncodeVars
. sortBy (comparing fst)
. map (join (***) cs)
. parseSimpleQuery
$ rawQueryString req
return $ responseLBS status
[jsonH, contentRange,
("Content-Location",
"/" <> cs table <>
if Prelude.null canonical then "" else "?" <> cs canonical
)
] (cs $ fromMaybe "[]" body)
(["postgrest", "users"], "POST") -> do
let user = decode reqBody :: Maybe AuthUser
case user of
Nothing -> return $ responseLBS status400 [jsonH] $
encode . object $ [("message", String "Failed to parse user.")]
Just u -> do
_ <- addUser (cs $ userId u)
(cs $ userPass u) (cs $ userRole u)
return $ responseLBS status201
[ jsonH
, (hLocation, "/postgrest/users?id=eq." <> cs (userId u))
] ""
(["postgrest", "tokens"], "POST") ->
case jwtSecret of
"secret" -> return $ responseLBS status500 [jsonH] $
encode . object $ [("message", String "JWT Secret is set as \"secret\" which is an unsafe default.")]
_ -> do
let user = decode reqBody :: Maybe AuthUser
case user of
Nothing -> return $ responseLBS status400 [jsonH] $
encode . object $ [("message", String "Failed to parse user.")]
Just u -> do
setRole authenticator
login <- signInRole (cs $ userId u)
(cs $ userPass u)
case login of
LoginSuccess role uid ->
return $ responseLBS status201 [ jsonH ] $
encode . object $ [("token", String $ tokenJWT jwtSecret uid role)]
_ -> return $ responseLBS status401 [jsonH] $
encode . object $ [("message", String "Failed authentication.")]
([table], "POST") -> do
let qt = qualify table
echoRequested = lookup "Prefer" hdrs == Just "return=representation"
parsed :: Either String (V.Vector Text, V.Vector (V.Vector Value))
parsed = if lookup "Content-Type" hdrs == Just "text/csv"
then do
rows <- CSV.decode CSV.NoHeader reqBody
if V.null rows then Left "CSV requires header"
else Right (V.head rows, (V.map $ V.map $ parseCsvCell . cs) (V.tail rows))
else eitherDecode reqBody >>= \val ->
case val of
Object obj -> Right . second V.singleton . V.unzip . V.fromList $
M.toList obj
_ -> Left "Expecting single JSON object or CSV rows"
case parsed of
Left err -> return $ responseLBS status400 [] $
encode . object $ [("message", String $ "Failed to parse JSON payload. " <> cs err)]
Right toBeInserted -> do
rows :: [Identity Text] <- H.listEx $ uncurry (insertInto qt) toBeInserted
let inserted :: [Object] = mapMaybe (decode . cs . runIdentity) rows
primaryKeys <- primaryKeyColumns qt
let responses = flip map inserted $ \obj -> do
let primaries =
if Prelude.null primaryKeys
then obj
else M.filterWithKey (const . (`elem` primaryKeys)) obj
let params = urlEncodeVars
$ map (\t -> (cs $ fst t, cs (paramFilter $ snd t)))
$ sortBy (comparing fst) $ M.toList primaries
responseLBS status201
[ jsonH
, (hLocation, "/" <> cs table <> "?" <> cs params)
] $ if echoRequested then encode obj else ""
return $ multipart status201 responses
([table], "PUT") ->
handleJsonObj reqBody $ \obj -> do
let qt = qualify table
primaryKeys <- primaryKeyColumns qt
let specifiedKeys = map (cs . fst) qq
if S.fromList primaryKeys /= S.fromList specifiedKeys
then return $ responseLBS status405 []
"You must speficy all and only primary keys as params"
else do
tableCols <- map (cs . colName) <$> columns qt
let cols = map cs $ M.keys obj
if S.fromList tableCols == S.fromList cols
then do
let vals = M.elems obj
H.unitEx $ iffNotT
(whereT qt qq $ update qt cols vals)
(insertSelect qt cols vals)
return $ responseLBS status204 [ jsonH ] ""
else return $ if Prelude.null tableCols
then responseLBS status404 [] ""
else responseLBS status400 []
"You must specify all columns in PUT request"
([table], "PATCH") ->
handleJsonObj reqBody $ \obj -> do
let qt = qualify table
up = returningStarT
. whereT qt qq
$ update qt (map cs $ M.keys obj) (M.elems obj)
patch = withT up "t" $ B.Stmt
"select count(t), array_to_json(array_agg(row_to_json(t)))::character varying"
V.empty True
row <- H.maybeEx patch
let (queryTotal, body) =
fromMaybe (0 :: Int, Just "" :: Maybe Text) row
r = contentRangeH 0 (queryTotal-1) queryTotal
echoRequested = lookup "Prefer" hdrs == Just "return=representation"
s = case () of _ | queryTotal == 0 -> status404
| echoRequested -> status200
| otherwise -> status204
return $ responseLBS s [ jsonH, r ] $ if echoRequested then cs $ fromMaybe "[]" body else ""
([table], "DELETE") -> do
let qt = qualify table
let del = countT
. returningStarT
. whereT qt qq
$ deleteFrom qt
row <- H.maybeEx del
let (Identity deletedCount) = fromMaybe (Identity 0 :: Identity Int) row
return $ if deletedCount == 0
then responseLBS status404 [] ""
else responseLBS status204 [("Content-Range", "*/"<> cs (show deletedCount))] ""
(_, _) ->
return $ responseLBS status404 [] ""
where
path = pathInfo req
verb = requestMethod req
qq = queryString req
qualify = QualifiedTable schema
hdrs = requestHeaders req
schema = requestedSchema (cs $ configV1Schema conf) hdrs
authenticator = cs $ configDbUser conf
jwtSecret = cs $ configJwtSecret conf
range = rangeRequested hdrs
allOrigins = ("Access-Control-Allow-Origin", "*") :: Header
sqlError :: t
sqlError = undefined
isSqlError :: t
isSqlError = undefined
rangeStatus :: Int -> Int -> Int -> Status
rangeStatus from to total
| from > total = status416
| (1 + to - from) < total = status206
| otherwise = status200
contentRangeH :: Int -> Int -> Int -> Header
contentRangeH from to total =
("Content-Range",
if total == 0 || from > total
then "*/" <> cs (show total)
else cs (show from) <> "-"
<> cs (show to) <> "/"
<> cs (show total)
)
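-- Illustrative value (an assumption, kept only as documentation): serving
-- rows 0..24 of a 100-row table yields ("Content-Range", "0-24/100").
exampleContentRange :: Header
exampleContentRange = contentRangeH 0 24 100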
requestedSchema :: Text -> RequestHeaders -> Text
requestedSchema v1schema hdrs =
case verStr of
Just [[_, ver]] -> if ver == "1" then v1schema else cs ver
_ -> v1schema
where verRegex = "version[ ]*=[ ]*([0-9]+)" :: BS.ByteString
accept = cs <$> lookup hAccept hdrs :: Maybe BS.ByteString
verStr = (=~ verRegex) <$> accept :: Maybe [[BS.ByteString]]
jsonH :: Header
jsonH = (hContentType, "application/json")
handleJsonObj :: BL.ByteString -> (Object -> H.Tx P.Postgres s Response)
-> H.Tx P.Postgres s Response
handleJsonObj reqBody handler = do
let p = eitherDecode reqBody
case p of
Left err ->
return $ responseLBS status400 [jsonH] jErr
where
jErr = encode . object $
[("message", String $ "Failed to parse JSON payload. " <> cs err)]
Right (Object o) -> handler o
Right _ ->
return $ responseLBS status400 [jsonH] jErr
where
jErr = encode . object $
[("message", String "Expecting a JSON object")]
parseCsvCell :: BL.ByteString -> Value
parseCsvCell s = if s == "NULL" then Null else String $ cs s
multipart :: Status -> [Response] -> Response
multipart _ [] = responseLBS status204 [] ""
multipart _ [r] = r
multipart s rs =
responseLBS s [(hContentType, "multipart/mixed; boundary=\"postgrest_boundary\"")] $
BL.intercalate "\n--postgrest_boundary\n" (map renderResponseBody rs)
where
renderHeader :: Header -> BL.ByteString
renderHeader (k, v) = cs (original k) <> ": " <> cs v
renderResponseBody :: Response -> BL.ByteString
renderResponseBody (ResponseBuilder _ headers b) =
BL.intercalate "\n" (map renderHeader headers)
<> "\n\n" <> BB.toLazyByteString b
renderResponseBody _ = error
"Unable to create multipart response from non-ResponseBuilder"
data TableOptions = TableOptions {
tblOptcolumns :: [Column]
, tblOptpkey :: [Text]
}
instance ToJSON TableOptions where
toJSON t = object [
"columns" .= tblOptcolumns t
, "pkey" .= tblOptpkey t ]
| nayosx/postgrest | src/PostgREST/App.hs | mit | 12,040 | 0 | 35 | 3,963 | 3,665 | 1,883 | 1,782 | -1 | -1 |
{-# LANGUAGE OverloadedStrings #-}
{-
Copyright (C) 2006-2015 John MacFarlane <[email protected]>
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
-}
{- |
Module : Text.Pandoc.Writers.AsciiDoc
Copyright : Copyright (C) 2006-2015 John MacFarlane
License : GNU GPL, version 2 or above
Maintainer : John MacFarlane <[email protected]>
Stability : alpha
Portability : portable
Conversion of 'Pandoc' documents to asciidoc.
Note that some information may be lost in conversion, due to
expressive limitations of asciidoc. Footnotes and table cells with
paragraphs (or other block items) are not possible in asciidoc.
If pandoc encounters one of these, it will insert a message indicating
that it has omitted the construct.
AsciiDoc: <http://www.methods.co.nz/asciidoc/>
-}
module Text.Pandoc.Writers.AsciiDoc (writeAsciiDoc) where
import Text.Pandoc.Definition
import Text.Pandoc.Templates (renderTemplate')
import Text.Pandoc.Shared
import Text.Pandoc.Writers.Shared
import Text.Pandoc.Options
import Text.Pandoc.Parsing hiding (blankline, space)
import Data.Maybe (fromMaybe)
import Data.List ( stripPrefix, intersperse, intercalate )
import Text.Pandoc.Pretty
import Control.Monad.State
import qualified Data.Map as M
import Data.Aeson (Value(String), fromJSON, toJSON, Result(..))
import qualified Data.Text as T
import Control.Applicative ((<*), (*>))
data WriterState = WriterState { defListMarker :: String
, orderedListLevel :: Int
, bulletListLevel :: Int
, intraword :: Bool
}
-- | Convert Pandoc to AsciiDoc.
writeAsciiDoc :: WriterOptions -> Pandoc -> String
writeAsciiDoc opts document =
evalState (pandocToAsciiDoc opts document) WriterState{
defListMarker = "::"
, orderedListLevel = 1
, bulletListLevel = 1
, intraword = False
}
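-- Illustrative wrapper (an assumption, not part of pandoc's API): render a
-- document with line wrapping disabled while leaving the caller's other
-- options untouched.
writeAsciiDocNoWrap :: WriterOptions -> Pandoc -> String
writeAsciiDocNoWrap opts = writeAsciiDoc opts { writerWrapText = False }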
-- | Return asciidoc representation of document.
pandocToAsciiDoc :: WriterOptions -> Pandoc -> State WriterState String
pandocToAsciiDoc opts (Pandoc meta blocks) = do
let titleblock = not $ null (docTitle meta) && null (docAuthors meta) &&
null (docDate meta)
let colwidth = if writerWrapText opts
then Just $ writerColumns opts
else Nothing
metadata <- metaToJSON opts
(fmap (render colwidth) . blockListToAsciiDoc opts)
(fmap (render colwidth) . inlineListToAsciiDoc opts)
meta
let addTitleLine (String t) = String $
t <> "\n" <> T.replicate (T.length t) "="
addTitleLine x = x
let metadata' = case fromJSON metadata of
Success m -> toJSON $ M.adjust addTitleLine
("title" :: T.Text) m
_ -> metadata
body <- blockListToAsciiDoc opts blocks
let main = render colwidth body
let context = defField "body" main
$ defField "toc"
(writerTableOfContents opts && writerStandalone opts)
$ defField "titleblock" titleblock
$ metadata'
if writerStandalone opts
then return $ renderTemplate' (writerTemplate opts) context
else return main
-- | Escape special characters for AsciiDoc.
escapeString :: String -> String
escapeString = escapeStringUsing escs
where escs = backslashEscapes "{"
-- | Ordered list start parser for use in Para below.
olMarker :: Parser [Char] ParserState Char
olMarker = do (start, style', delim) <- anyOrderedListMarker
if delim == Period &&
(style' == UpperAlpha || (style' == UpperRoman &&
start `elem` [1, 5, 10, 50, 100, 500, 1000]))
then spaceChar >> spaceChar
else spaceChar
-- | True if string begins with an ordered list marker
beginsWithOrderedListMarker :: String -> Bool
beginsWithOrderedListMarker str =
case runParser olMarker defaultParserState "para start" (take 10 str) of
Left _ -> False
Right _ -> True
-- | Convert Pandoc block element to asciidoc.
blockToAsciiDoc :: WriterOptions -- ^ Options
-> Block -- ^ Block element
-> State WriterState Doc
blockToAsciiDoc _ Null = return empty
blockToAsciiDoc opts (Plain inlines) = do
contents <- inlineListToAsciiDoc opts inlines
return $ contents <> blankline
blockToAsciiDoc opts (Para [Image alt (src,'f':'i':'g':':':tit)]) = do
blockToAsciiDoc opts (Para [Image alt (src,tit)])
blockToAsciiDoc opts (Para inlines) = do
contents <- inlineListToAsciiDoc opts inlines
-- escape if para starts with ordered list marker
let esc = if beginsWithOrderedListMarker (render Nothing contents)
then text "\\"
else empty
return $ esc <> contents <> blankline
blockToAsciiDoc _ (RawBlock f s)
| f == "asciidoc" = return $ text s
| otherwise = return empty
blockToAsciiDoc _ HorizontalRule =
return $ blankline <> text "'''''" <> blankline
blockToAsciiDoc opts (Header level (ident,_,_) inlines) = do
contents <- inlineListToAsciiDoc opts inlines
let len = offset contents
  -- idents seem to be empty most of the time and asciidoc will generate them automatically,
  -- so let's make them not show up when null
let identifier = if (null ident) then empty else ("[[" <> text ident <> "]]")
let setext = writerSetextHeaders opts
return $
(if setext
then
identifier $$ contents $$
(case level of
1 -> text $ replicate len '-'
2 -> text $ replicate len '~'
3 -> text $ replicate len '^'
4 -> text $ replicate len '+'
_ -> empty) <> blankline
else
identifier $$ text (replicate level '=') <> space <> contents <> blankline)
blockToAsciiDoc _ (CodeBlock (_,classes,_) str) = return $
flush (attrs <> dashes <> space <> attrs <> cr <> text str <>
cr <> dashes) <> blankline
where dashes = text $ replicate (maximum $ map length $ lines str) '-'
attrs = if null classes
then empty
else text $ intercalate "," $ "code" : classes
blockToAsciiDoc opts (BlockQuote blocks) = do
contents <- blockListToAsciiDoc opts blocks
let isBlock (BlockQuote _) = True
isBlock _ = False
-- if there are nested block quotes, put in an open block
let contents' = if any isBlock blocks
then "--" $$ contents $$ "--"
else contents
let cols = offset contents'
let bar = text $ replicate cols '_'
return $ bar $$ chomp contents' $$ bar <> blankline
blockToAsciiDoc opts (Table caption aligns widths headers rows) = do
caption' <- inlineListToAsciiDoc opts caption
let caption'' = if null caption
then empty
else "." <> caption' <> cr
let isSimple = all (== 0) widths
let relativePercentWidths = if isSimple
then widths
else map (/ (sum widths)) widths
let widths'' :: [Integer]
widths'' = map (floor . (* 100)) relativePercentWidths
-- ensure that the widths sum to 100
let widths' = case widths'' of
_ | isSimple -> widths''
(w:ws) | sum (w:ws) < 100
-> (100 - sum ws) : ws
ws -> ws
let totalwidth :: Integer
totalwidth = floor $ sum widths * 100
let colspec al wi = (case al of
AlignLeft -> "<"
AlignCenter -> "^"
AlignRight -> ">"
AlignDefault -> "") ++
if wi == 0 then "" else (show wi ++ "%")
let headerspec = if all null headers
then empty
else text "options=\"header\","
let widthspec = if totalwidth == 0
then empty
else text "width="
<> doubleQuotes (text $ show totalwidth ++ "%")
<> text ","
let tablespec = text "["
<> widthspec
<> text "cols="
<> doubleQuotes (text $ intercalate ","
$ zipWith colspec aligns widths')
<> text ","
<> headerspec <> text "]"
let makeCell [Plain x] = do d <- blockListToAsciiDoc opts [Plain x]
return $ text "|" <> chomp d
makeCell [Para x] = makeCell [Plain x]
makeCell [] = return $ text "|"
makeCell bs = do d <- blockListToAsciiDoc opts bs
return $ text "a|" $$ d
let makeRow cells = hsep `fmap` mapM makeCell cells
rows' <- mapM makeRow rows
head' <- makeRow headers
let head'' = if all null headers then empty else head'
let colwidth = if writerWrapText opts
then writerColumns opts
else 100000
let maxwidth = maximum $ map offset (head':rows')
let body = if maxwidth > colwidth then vsep rows' else vcat rows'
let border = text $ "|" ++ replicate (max 5 (min maxwidth colwidth) - 1) '='
return $
caption'' $$ tablespec $$ border $$ head'' $$ body $$ border $$ blankline
blockToAsciiDoc opts (BulletList items) = do
contents <- mapM (bulletListItemToAsciiDoc opts) items
return $ cat contents <> blankline
blockToAsciiDoc opts (OrderedList (_start, sty, _delim) items) = do
let sty' = case sty of
UpperRoman -> UpperAlpha
LowerRoman -> LowerAlpha
x -> x
let markers = orderedListMarkers (1, sty', Period) -- start num not used
let markers' = map (\m -> if length m < 3
then m ++ replicate (3 - length m) ' '
else m) markers
contents <- mapM (\(item, num) -> orderedListItemToAsciiDoc opts item num) $
zip markers' items
return $ cat contents <> blankline
blockToAsciiDoc opts (DefinitionList items) = do
contents <- mapM (definitionListItemToAsciiDoc opts) items
return $ cat contents <> blankline
blockToAsciiDoc opts (Div _ bs) = blockListToAsciiDoc opts bs
-- | Convert bullet list item (list of blocks) to asciidoc.
bulletListItemToAsciiDoc :: WriterOptions -> [Block] -> State WriterState Doc
bulletListItemToAsciiDoc opts blocks = do
let addBlock :: Doc -> Block -> State WriterState Doc
addBlock d b | isEmpty d = chomp `fmap` blockToAsciiDoc opts b
addBlock d b@(BulletList _) = do x <- blockToAsciiDoc opts b
return $ d <> cr <> chomp x
addBlock d b@(OrderedList _ _) = do x <- blockToAsciiDoc opts b
return $ d <> cr <> chomp x
addBlock d b = do x <- blockToAsciiDoc opts b
return $ d <> cr <> text "+" <> cr <> chomp x
lev <- bulletListLevel `fmap` get
modify $ \s -> s{ bulletListLevel = lev + 1 }
contents <- foldM addBlock empty blocks
modify $ \s -> s{ bulletListLevel = lev }
let marker = text (replicate lev '*')
return $ marker <> text " " <> contents <> cr
-- | Convert ordered list item (a list of blocks) to asciidoc.
orderedListItemToAsciiDoc :: WriterOptions -- ^ options
-> String -- ^ list item marker
-> [Block] -- ^ list item (list of blocks)
-> State WriterState Doc
orderedListItemToAsciiDoc opts marker blocks = do
let addBlock :: Doc -> Block -> State WriterState Doc
addBlock d b | isEmpty d = chomp `fmap` blockToAsciiDoc opts b
addBlock d b@(BulletList _) = do x <- blockToAsciiDoc opts b
return $ d <> cr <> chomp x
addBlock d b@(OrderedList _ _) = do x <- blockToAsciiDoc opts b
return $ d <> cr <> chomp x
addBlock d b = do x <- blockToAsciiDoc opts b
return $ d <> cr <> text "+" <> cr <> chomp x
lev <- orderedListLevel `fmap` get
modify $ \s -> s{ orderedListLevel = lev + 1 }
contents <- foldM addBlock empty blocks
modify $ \s -> s{ orderedListLevel = lev }
return $ text marker <> text " " <> contents <> cr
-- | Convert definition list item (label, list of blocks) to asciidoc.
definitionListItemToAsciiDoc :: WriterOptions
-> ([Inline],[[Block]])
-> State WriterState Doc
definitionListItemToAsciiDoc opts (label, defs) = do
labelText <- inlineListToAsciiDoc opts label
marker <- defListMarker `fmap` get
if marker == "::"
then modify (\st -> st{ defListMarker = ";;"})
else modify (\st -> st{ defListMarker = "::"})
let divider = cr <> text "+" <> cr
let defsToAsciiDoc :: [Block] -> State WriterState Doc
defsToAsciiDoc ds = (vcat . intersperse divider . map chomp)
`fmap` mapM (blockToAsciiDoc opts) ds
defs' <- mapM defsToAsciiDoc defs
modify (\st -> st{ defListMarker = marker })
let contents = nest 2 $ vcat $ intersperse divider $ map chomp defs'
return $ labelText <> text marker <> cr <> contents <> cr
-- | Convert list of Pandoc block elements to asciidoc.
blockListToAsciiDoc :: WriterOptions -- ^ Options
-> [Block] -- ^ List of block elements
-> State WriterState Doc
blockListToAsciiDoc opts blocks = cat `fmap` mapM (blockToAsciiDoc opts) blocks
-- | Convert list of Pandoc inline elements to asciidoc.
inlineListToAsciiDoc :: WriterOptions -> [Inline] -> State WriterState Doc
inlineListToAsciiDoc opts lst = do
oldIntraword <- gets intraword
setIntraword False
result <- go lst
setIntraword oldIntraword
return result
where go [] = return empty
go (y:x:xs)
| not (isSpacy y) = do
y' <- if isSpacy x
then inlineToAsciiDoc opts y
else withIntraword $ inlineToAsciiDoc opts y
x' <- withIntraword $ inlineToAsciiDoc opts x
xs' <- go xs
return (y' <> x' <> xs')
| x /= Space && x /= LineBreak = do
y' <- withIntraword $ inlineToAsciiDoc opts y
xs' <- go (x:xs)
return (y' <> xs')
go (x:xs) = do
x' <- inlineToAsciiDoc opts x
xs' <- go xs
return (x' <> xs')
isSpacy Space = True
isSpacy LineBreak = True
isSpacy _ = False
setIntraword :: Bool -> State WriterState ()
setIntraword b = modify $ \st -> st{ intraword = b }
withIntraword :: State WriterState a -> State WriterState a
withIntraword p = setIntraword True *> p <* setIntraword False
-- | Convert Pandoc inline element to asciidoc.
inlineToAsciiDoc :: WriterOptions -> Inline -> State WriterState Doc
inlineToAsciiDoc opts (Emph lst) = do
contents <- inlineListToAsciiDoc opts lst
isIntraword <- gets intraword
let marker = if isIntraword then "__" else "_"
return $ marker <> contents <> marker
inlineToAsciiDoc opts (Strong lst) = do
contents <- inlineListToAsciiDoc opts lst
isIntraword <- gets intraword
let marker = if isIntraword then "**" else "*"
return $ marker <> contents <> marker
inlineToAsciiDoc opts (Strikeout lst) = do
contents <- inlineListToAsciiDoc opts lst
return $ "[line-through]*" <> contents <> "*"
inlineToAsciiDoc opts (Superscript lst) = do
contents <- inlineListToAsciiDoc opts lst
return $ "^" <> contents <> "^"
inlineToAsciiDoc opts (Subscript lst) = do
contents <- inlineListToAsciiDoc opts lst
return $ "~" <> contents <> "~"
inlineToAsciiDoc opts (SmallCaps lst) = inlineListToAsciiDoc opts lst
inlineToAsciiDoc opts (Quoted SingleQuote lst) =
inlineListToAsciiDoc opts (Str "`" : lst ++ [Str "'"])
inlineToAsciiDoc opts (Quoted DoubleQuote lst) =
inlineListToAsciiDoc opts (Str "``" : lst ++ [Str "''"])
inlineToAsciiDoc _ (Code _ str) = return $
text "`" <> text (escapeStringUsing (backslashEscapes "`") str) <> "`"
inlineToAsciiDoc _ (Str str) = return $ text $ escapeString str
inlineToAsciiDoc _ (Math InlineMath str) =
return $ "latexmath:[$" <> text str <> "$]"
inlineToAsciiDoc _ (Math DisplayMath str) =
return $ "latexmath:[\\[" <> text str <> "\\]]"
inlineToAsciiDoc _ (RawInline f s)
| f == "asciidoc" = return $ text s
| otherwise = return empty
inlineToAsciiDoc _ (LineBreak) = return $ " +" <> cr
inlineToAsciiDoc _ Space = return space
inlineToAsciiDoc opts (Cite _ lst) = inlineListToAsciiDoc opts lst
inlineToAsciiDoc opts (Link txt (src, _tit)) = do
-- relative: link:downloads/foo.zip[download foo.zip]
-- abs: http://google.cod[Google]
-- or [email protected][email john]
linktext <- inlineListToAsciiDoc opts txt
let isRelative = ':' `notElem` src
let prefix = if isRelative
then text "link:"
else empty
let srcSuffix = fromMaybe src (stripPrefix "mailto:" src)
let useAuto = case txt of
[Str s] | escapeURI s == srcSuffix -> True
_ -> False
return $ if useAuto
then text srcSuffix
else prefix <> text src <> "[" <> linktext <> "]"
inlineToAsciiDoc opts (Image alternate (src, tit)) = do
-- image:images/logo.png[Company logo, title="blah"]
let txt = if (null alternate) || (alternate == [Str ""])
then [Str "image"]
else alternate
linktext <- inlineListToAsciiDoc opts txt
let linktitle = if null tit
then empty
else text $ ",title=\"" ++ tit ++ "\""
return $ "image:" <> text src <> "[" <> linktext <> linktitle <> "]"
inlineToAsciiDoc opts (Note [Para inlines]) =
inlineToAsciiDoc opts (Note [Plain inlines])
inlineToAsciiDoc opts (Note [Plain inlines]) = do
contents <- inlineListToAsciiDoc opts inlines
return $ text "footnote:[" <> contents <> "]"
-- asciidoc can't handle blank lines in notes
inlineToAsciiDoc _ (Note _) = return "[multiblock footnote omitted]"
inlineToAsciiDoc opts (Span _ ils) = inlineListToAsciiDoc opts ils
| ddssff/pandoc | src/Text/Pandoc/Writers/AsciiDoc.hs | gpl-2.0 | 18,861 | 0 | 19 | 5,601 | 5,310 | 2,622 | 2,688 | 351 | 30 |
-----------------------------------------------------------------------------
-- |
-- Module : Distribution.Client.Reporting
-- Copyright : (c) David Waern 2008
-- License : BSD-like
--
-- Maintainer : [email protected]
-- Stability : experimental
-- Portability : portable
--
-- Anonymous build report data structure, printing and parsing
--
-----------------------------------------------------------------------------
module Distribution.Client.BuildReports.Anonymous (
BuildReport(..),
InstallOutcome(..),
Outcome(..),
-- * Constructing and writing reports
new,
-- * parsing and pretty printing
parse,
parseList,
show,
-- showList,
) where
import qualified Distribution.Client.Types as BR
( BuildResult, BuildFailure(..), BuildSuccess(..)
, DocsResult(..), TestsResult(..) )
import Distribution.Client.Utils
( mergeBy, MergeResult(..) )
import qualified Paths_cabal_install (version)
import Distribution.Package
( PackageIdentifier(..), PackageName(..) )
import Distribution.PackageDescription
( FlagName(..), FlagAssignment )
--import Distribution.Version
-- ( Version )
import Distribution.System
( OS, Arch )
import Distribution.Compiler
( CompilerId(..) )
import qualified Distribution.Text as Text
( Text(disp, parse) )
import Distribution.ParseUtils
( FieldDescr(..), ParseResult(..), Field(..)
, simpleField, listField, ppFields, readFields
, syntaxError, locatedErrorMsg )
import Distribution.Simple.Utils
( comparing )
import qualified Distribution.Compat.ReadP as Parse
( ReadP, pfail, munch1, skipSpaces )
import qualified Text.PrettyPrint as Disp
( Doc, render, char, text )
import Text.PrettyPrint
( (<+>), (<>) )
import Data.List
( unfoldr, sortBy )
import Data.Char as Char
( isAlpha, isAlphaNum )
import Prelude hiding (show)
data BuildReport
= BuildReport {
-- | The package this build report is about
package :: PackageIdentifier,
-- | The OS and Arch the package was built on
os :: OS,
arch :: Arch,
-- | The Haskell compiler (and hopefully version) used
compiler :: CompilerId,
-- | The uploading client, ie cabal-install-x.y.z
client :: PackageIdentifier,
-- | Which configurations flags we used
flagAssignment :: FlagAssignment,
-- | Which dependent packages we were using exactly
dependencies :: [PackageIdentifier],
-- | Did installing work ok?
installOutcome :: InstallOutcome,
-- Which version of the Cabal library was used to compile the Setup.hs
-- cabalVersion :: Version,
-- Which build tools we were using (with versions)
-- tools :: [PackageIdentifier],
-- | Configure outcome, did configure work ok?
docsOutcome :: Outcome,
-- | Configure outcome, did configure work ok?
testsOutcome :: Outcome
}
data InstallOutcome
= PlanningFailed
| DependencyFailed PackageIdentifier
| DownloadFailed
| UnpackFailed
| SetupFailed
| ConfigureFailed
| BuildFailed
| TestsFailed
| InstallFailed
| InstallOk
deriving Eq
data Outcome = NotTried | Failed | Ok
deriving Eq
new :: OS -> Arch -> CompilerId -> PackageIdentifier -> FlagAssignment
-> [PackageIdentifier] -> BR.BuildResult -> BuildReport
new os' arch' comp pkgid flags deps result =
BuildReport {
package = pkgid,
os = os',
arch = arch',
compiler = comp,
client = cabalInstallID,
flagAssignment = flags,
dependencies = deps,
installOutcome = convertInstallOutcome,
-- cabalVersion = undefined
docsOutcome = convertDocsOutcome,
testsOutcome = convertTestsOutcome
}
where
convertInstallOutcome = case result of
Left BR.PlanningFailed -> PlanningFailed
Left (BR.DependentFailed p) -> DependencyFailed p
Left (BR.DownloadFailed _) -> DownloadFailed
Left (BR.UnpackFailed _) -> UnpackFailed
Left (BR.ConfigureFailed _) -> ConfigureFailed
Left (BR.BuildFailed _) -> BuildFailed
Left (BR.TestsFailed _) -> TestsFailed
Left (BR.InstallFailed _) -> InstallFailed
Right (BR.BuildOk _ _ _) -> InstallOk
convertDocsOutcome = case result of
Left _ -> NotTried
Right (BR.BuildOk BR.DocsNotTried _ _) -> NotTried
Right (BR.BuildOk BR.DocsFailed _ _) -> Failed
Right (BR.BuildOk BR.DocsOk _ _) -> Ok
convertTestsOutcome = case result of
Left (BR.TestsFailed _) -> Failed
Left _ -> NotTried
Right (BR.BuildOk _ BR.TestsNotTried _) -> NotTried
Right (BR.BuildOk _ BR.TestsOk _) -> Ok
cabalInstallID :: PackageIdentifier
cabalInstallID =
PackageIdentifier (PackageName "cabal-install") Paths_cabal_install.version
-- ------------------------------------------------------------
-- * External format
-- ------------------------------------------------------------
initialBuildReport :: BuildReport
initialBuildReport = BuildReport {
package = requiredField "package",
os = requiredField "os",
arch = requiredField "arch",
compiler = requiredField "compiler",
client = requiredField "client",
flagAssignment = [],
dependencies = [],
installOutcome = requiredField "install-outcome",
-- cabalVersion = Nothing,
-- tools = [],
docsOutcome = NotTried,
testsOutcome = NotTried
}
where
requiredField fname = error ("required field: " ++ fname)
-- -----------------------------------------------------------------------------
-- Parsing
parse :: String -> Either String BuildReport
parse s = case parseFields s of
ParseFailed perror -> Left msg where (_, msg) = locatedErrorMsg perror
ParseOk _ report -> Right report
--FIXME: this does not allow for optional or repeated fields
parseFields :: String -> ParseResult BuildReport
parseFields input = do
fields <- mapM extractField =<< readFields input
let merged = mergeBy (\desc (_,name,_) -> compare (fieldName desc) name)
sortedFieldDescrs
(sortBy (comparing (\(_,name,_) -> name)) fields)
checkMerged initialBuildReport merged
where
extractField :: Field -> ParseResult (Int, String, String)
extractField (F line name value) = return (line, name, value)
extractField (Section line _ _ _) = syntaxError line "Unrecognized stanza"
extractField (IfBlock line _ _ _) = syntaxError line "Unrecognized stanza"
checkMerged report [] = return report
checkMerged report (merged:remaining) = case merged of
InBoth fieldDescr (line, _name, value) -> do
report' <- fieldSet fieldDescr line value report
checkMerged report' remaining
OnlyInRight (line, name, _) ->
syntaxError line ("Unrecognized field " ++ name)
OnlyInLeft fieldDescr ->
fail ("Missing field " ++ fieldName fieldDescr)
parseList :: String -> [BuildReport]
parseList str =
[ report | Right report <- map parse (split str) ]
where
split :: String -> [String]
split = filter (not . null) . unfoldr chunk . lines
chunk [] = Nothing
chunk ls = case break null ls of
(r, rs) -> Just (unlines r, dropWhile null rs)
-- -----------------------------------------------------------------------------
-- Pretty-printing
show :: BuildReport -> String
show = Disp.render . ppFields fieldDescrs
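-- Illustrative round-trip sketch ('roundTrip' is a hypothetical helper, not
-- part of cabal-install): a report rendered with 'show' above is expected to
-- parse back unchanged via 'parse'.
roundTrip :: BuildReport -> Either String BuildReport
roundTrip = parse . show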
-- -----------------------------------------------------------------------------
-- Description of the fields, for parsing/printing
fieldDescrs :: [FieldDescr BuildReport]
fieldDescrs =
[ simpleField "package" Text.disp Text.parse
package (\v r -> r { package = v })
, simpleField "os" Text.disp Text.parse
os (\v r -> r { os = v })
, simpleField "arch" Text.disp Text.parse
arch (\v r -> r { arch = v })
, simpleField "compiler" Text.disp Text.parse
compiler (\v r -> r { compiler = v })
, simpleField "client" Text.disp Text.parse
client (\v r -> r { client = v })
, listField "flags" dispFlag parseFlag
flagAssignment (\v r -> r { flagAssignment = v })
, listField "dependencies" Text.disp Text.parse
dependencies (\v r -> r { dependencies = v })
, simpleField "install-outcome" Text.disp Text.parse
installOutcome (\v r -> r { installOutcome = v })
, simpleField "docs-outcome" Text.disp Text.parse
docsOutcome (\v r -> r { docsOutcome = v })
, simpleField "tests-outcome" Text.disp Text.parse
testsOutcome (\v r -> r { testsOutcome = v })
]
sortedFieldDescrs :: [FieldDescr BuildReport]
sortedFieldDescrs = sortBy (comparing fieldName) fieldDescrs
dispFlag :: (FlagName, Bool) -> Disp.Doc
dispFlag (FlagName name, True) = Disp.text name
dispFlag (FlagName name, False) = Disp.char '-' <> Disp.text name
parseFlag :: Parse.ReadP r (FlagName, Bool)
parseFlag = do
name <- Parse.munch1 (\c -> Char.isAlphaNum c || c == '_' || c == '-')
case name of
('-':flag) -> return (FlagName flag, False)
flag -> return (FlagName flag, True)
instance Text.Text InstallOutcome where
disp PlanningFailed = Disp.text "PlanningFailed"
disp (DependencyFailed pkgid) = Disp.text "DependencyFailed" <+> Text.disp pkgid
disp DownloadFailed = Disp.text "DownloadFailed"
disp UnpackFailed = Disp.text "UnpackFailed"
disp SetupFailed = Disp.text "SetupFailed"
disp ConfigureFailed = Disp.text "ConfigureFailed"
disp BuildFailed = Disp.text "BuildFailed"
disp TestsFailed = Disp.text "TestsFailed"
disp InstallFailed = Disp.text "InstallFailed"
disp InstallOk = Disp.text "InstallOk"
parse = do
name <- Parse.munch1 Char.isAlphaNum
case name of
"PlanningFailed" -> return PlanningFailed
"DependencyFailed" -> do Parse.skipSpaces
pkgid <- Text.parse
return (DependencyFailed pkgid)
"DownloadFailed" -> return DownloadFailed
"UnpackFailed" -> return UnpackFailed
"SetupFailed" -> return SetupFailed
"ConfigureFailed" -> return ConfigureFailed
"BuildFailed" -> return BuildFailed
"TestsFailed" -> return TestsFailed
"InstallFailed" -> return InstallFailed
"InstallOk" -> return InstallOk
_ -> Parse.pfail
instance Text.Text Outcome where
disp NotTried = Disp.text "NotTried"
disp Failed = Disp.text "Failed"
disp Ok = Disp.text "Ok"
parse = do
name <- Parse.munch1 Char.isAlpha
case name of
"NotTried" -> return NotTried
"Failed" -> return Failed
"Ok" -> return Ok
_ -> Parse.pfail
| DavidAlphaFox/ghc | libraries/Cabal/cabal-install/Distribution/Client/BuildReports/Anonymous.hs | bsd-3-clause | 11,532 | 0 | 17 | 3,303 | 2,702 | 1,475 | 1,227 | 222 | 15 |
module Attribute where
import Data.Word (Word32)
data ComponentType =
IntegerSigned
| IntegerUnsigned
| FloatingPoint
data AttributeType = AttributeType {
attribute_component_type :: ComponentType,
attribute_component_count :: Word32,
attribute_component_size :: Word32
}
data Attribute = Attribute {
attribute_name :: String,
attribute_type :: AttributeType
}
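-- Hypothetical example value, added for illustration only: a "position"
-- attribute with three 32-bit floating-point components.
examplePositionAttribute :: Attribute
examplePositionAttribute = Attribute {
  attribute_name = "position",
  attribute_type = AttributeType {
    attribute_component_type = FloatingPoint,
    attribute_component_count = 3,
    attribute_component_size = 32
  }
}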
| io7m/smf | com.io7m.smfj.specification/src/main/resources/com/io7m/smfj/specification/attribute.hs | isc | 358 | 0 | 8 | 61 | 65 | 41 | 24 | 12 | 0 |