code
stringlengths 5
1.03M
| repo_name
stringlengths 5
90
| path
stringlengths 4
158
| license
stringclasses 15
values | size
int64 5
1.03M
| n_ast_errors
int64 0
53.9k
| ast_max_depth
int64 2
4.17k
| n_whitespaces
int64 0
365k
| n_ast_nodes
int64 3
317k
| n_ast_terminals
int64 1
171k
| n_ast_nonterminals
int64 1
146k
| loc
int64 -1
37.3k
| cycloplexity
int64 -1
1.31k
|
---|---|---|---|---|---|---|---|---|---|---|---|---|
import Crypto.Cipher.Benchmarks
import Crypto.Cipher.AES (AES128, AES192, AES256)
-- | Benchmark entry point: run the generic block-cipher benchmark
-- suite over the three AES key sizes. The 'undefined' arguments act
-- only as type proxies for 'GBlockCipher'; they are never forced.
main :: IO ()
main = defaultMain
    [ GBlockCipher (undefined :: AES128)
    , GBlockCipher (undefined :: AES192)
    , GBlockCipher (undefined :: AES256)
    ]
|
moonKimura/cipher-aes-0.2.6
|
Benchmarks/Benchmarks.hs
|
bsd-3-clause
| 223 | 0 | 8 | 35 | 68 | 40 | 28 | 6 | 1 |
import Control.Monad
-- | Number of binary search trees on @n@ nodes (the n-th Catalan
-- number), taken modulo 100000007. Results are memoized through a
-- lazily shared table of all values, so each index is computed at
-- most once across calls.
memorized_func :: Int -> Integer
memorized_func = (table !!)
  where
    -- Shared infinite table: entry i is the answer for i nodes.
    table = map catalan [0..]
    catalan 0 = 1
    catalan n =
      sum [ memorized_func left * memorized_func (n - left - 1)
          | left <- [0 .. n - 1] ] `mod` 100000007
-- | Read a count from the first line, then that many integers (one
-- per line), and print the number of binary search trees for each.
main :: IO ()
main = do
  countLine <- getLine
  queries <- replicateM (read countLine) getLine
  mapM_ (print . memorized_func . (read :: String -> Int)) queries
|
EdisonAlgorithms/HackerRank
|
practice/fp/dp/number-of-binary-search-tree/number-of-binary-search-tree.hs
|
mit
| 406 | 0 | 15 | 136 | 174 | 89 | 85 | 9 | 2 |
{-# LANGUAGE QuasiQuotes #-}
module Vimus.Command.HelpSpec (main, spec) where
import Test.Hspec
import Test.QuickCheck
import Data.String
import Vimus.Command.Help
-- | Entry point so this spec module can be run standalone.
main :: IO ()
main = hspec spec
-- | Behaviour of the 'help' quasi-quoter: leading-space stripping,
-- paragraph separation, automatic word wrapping, and a QuickCheck
-- property bounding the width of produced lines.
-- NOTE(review): blank lines inside the quasi-quotes below are
-- semantic (they separate paragraphs, per the test descriptions).
spec :: Spec
spec = do
  describe "help" $ do
    it "strips leading space" $ do
      unHelp [help|
        foo bar
      |] `shouldBe` ["foo bar"]
    it "combines subsequent non-empty lines" $ do
      unHelp [help|
        foo
        bar
      |] `shouldBe` ["foo bar"]
    it "treats an empty line as a paragraph separator" $ do
      unHelp [help|
        foo

        bar
      |] `shouldBe` ["foo", "bar"]
    it "treats several empty lines as a paragraph separator" $ do
      unHelp [help|
        foo


        bar
      |] `shouldBe` ["foo", "bar"]
    it "does automatic word wrapping" $ do
      unHelp [help|
        Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod
        tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim
        veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea
        commodo consequat. Duis aute irure dolor in reprehenderit in voluptate
        velit esse cillum dolore eu fugiat nulla pariatur. Excepteur sint
        occaecat cupidatat non proident, sunt in culpa qui officia deserunt
        mollit anim id est laborum.
      |] `shouldBe` [
          "Lorem ipsum dolor sit amet, consectetur adipisicing elit,"
        , "sed do eiusmod tempor incididunt ut labore et dolore magna"
        , "aliqua. Ut enim ad minim veniam, quis nostrud exercitation"
        , "ullamco laboris nisi ut aliquip ex ea commodo consequat."
        , "Duis aute irure dolor in reprehenderit in voluptate velit"
        , "esse cillum dolore eu fugiat nulla pariatur. Excepteur sint"
        , "occaecat cupidatat non proident, sunt in culpa qui officia"
        , "deserunt mollit anim id est laborum."
        ]
    it "produces empty output on empty input" $ do
      unHelp [help||] `shouldBe` []
    it "produces empty output on all-whitespace input" $ do
      unHelp [help|
      |] `shouldBe` []
    it "puts a word that is longer than the text width on a separate line" $ do
      unHelp [help|
        http://hackage.haskell.org/packages/archive/base/latest/doc/html/Control-Applicative.html
      |] `shouldBe` ["http://hackage.haskell.org/packages/archive/base/latest/doc/html/Control-Applicative.html"]
    it "corretly handles the case, where the input is exactly one line and one word" $ do
      unHelp [help|
        Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed
      |] `shouldBe` [
          "Lorem ipsum dolor sit amet, consectetur adipisicing elit,"
        , "sed"
        ]
    context "when given arbitrary input" $ do
      it "ensures that lines never exceed the text width (or only consist of a single word)" $ property $
        all (\x -> length x <= 60 || length (words x) == 1) . unHelp . fromString
|
vimus/vimus
|
test/Vimus/Command/HelpSpec.hs
|
mit
| 2,964 | 0 | 23 | 815 | 431 | 236 | 195 | 50 | 1 |
{-
(c) The University of Glasgow 2006
(c) The GRASP/AQUA Project, Glasgow University, 1992-1998
\section[TcMonoType]{Typechecking user-specified @MonoTypes@}
-}
module ETA.TypeCheck.TcHsType (
tcHsSigType, tcHsSigTypeNC, tcHsDeriv, tcHsVectInst,
tcHsInstHead,
UserTypeCtxt(..),
-- Type checking type and class decls
kcLookupKind, kcTyClTyVars, tcTyClTyVars,
tcHsConArgType, tcDataKindSig,
tcClassSigType,
-- Kind-checking types
-- No kind generalisation, no checkValidType
kcHsTyVarBndrs, tcHsTyVarBndrs,
tcHsLiftedType, tcHsOpenType,
tcLHsType, tcCheckLHsType,
tcHsContext, tcInferApps, tcHsArgTys,
kindGeneralize, checkKind,
-- Sort-checking kinds
tcLHsKind,
-- Pattern type signatures
tcHsPatSigType, tcPatSig
) where
import ETA.HsSyn.HsSyn
import ETA.TypeCheck.TcRnMonad
import ETA.TypeCheck.TcEvidence( HsWrapper )
import ETA.TypeCheck.TcEnv
import ETA.TypeCheck.TcMType
import ETA.TypeCheck.TcValidity
import ETA.TypeCheck.TcUnify
import ETA.Iface.TcIface
import ETA.TypeCheck.TcType
import ETA.Types.Type
import ETA.Types.TypeRep( Type(..) ) -- For the mkNakedXXX stuff
import ETA.Types.Kind
import ETA.BasicTypes.RdrName( lookupLocalRdrOcc )
import ETA.BasicTypes.Var
import ETA.BasicTypes.VarSet
import ETA.Types.TyCon
import ETA.BasicTypes.ConLike
import ETA.BasicTypes.DataCon
import ETA.Prelude.TysPrim ( liftedTypeKindTyConName, constraintKindTyConName )
import ETA.Types.Class
import ETA.BasicTypes.Name
import ETA.BasicTypes.NameEnv
import ETA.Prelude.TysWiredIn
import ETA.BasicTypes.BasicTypes
import ETA.BasicTypes.SrcLoc
import ETA.Main.DynFlags ( ExtensionFlag( Opt_DataKinds ), getDynFlags )
import ETA.BasicTypes.Unique
import ETA.BasicTypes.UniqSupply
import ETA.Utils.Outputable
import ETA.Utils.FastString
import ETA.Utils.Util
import Data.Maybe( isNothing )
import Control.Monad ( unless, when, zipWithM )
import ETA.Prelude.PrelNames( ipClassName, funTyConKey, allNameStrings )
{-
----------------------------
General notes
----------------------------
Generally speaking we now type-check types in three phases
1. kcHsType: kind check the HsType
*includes* performing any TH type splices;
so it returns a translated, and kind-annotated, type
2. dsHsType: convert from HsType to Type:
perform zonking
expand type synonyms [mkGenTyApps]
hoist the foralls [tcHsType]
3. checkValidType: check the validity of the resulting type
Often these steps are done one after the other (tcHsSigType).
But in mutually recursive groups of type and class decls we do
1 kind-check the whole group
2 build TyCons/Classes in a knot-tied way
3 check the validity of types in the now-unknotted TyCons/Classes
For example, when we find
(forall a m. m a -> m a)
we bind a,m to kind variables and kind-check (m a -> m a). This makes
a get kind *, and m get kind *->*. Now we typecheck (m a -> m a) in
an environment that binds a and m suitably.
The kind checker passed to tcHsTyVars needs to look at enough to
establish the kind of the tyvar:
* For a group of type and class decls, it's just the group, not
the rest of the program
* For a tyvar bound in a pattern type signature, its the types
mentioned in the other type signatures in that bunch of patterns
* For a tyvar bound in a RULE, it's the type signatures on other
universally quantified variables in the rule
Note that this may occasionally give surprising results. For example:
data T a b = MkT (a b)
Here we deduce a::*->*, b::*
But equally valid would be a::(*->*)-> *, b::*->*
Validity checking
~~~~~~~~~~~~~~~~~
Some of the validity check could in principle be done by the kind checker,
but not all:
- During desugaring, we normalise by expanding type synonyms. Only
after this step can we check things like type-synonym saturation
e.g. type T k = k Int
type S a = a
Then (T S) is ok, because T is saturated; (T S) expands to (S Int);
and then S is saturated. This is a GHC extension.
- Similarly, also a GHC extension, we look through synonyms before complaining
about the form of a class or instance declaration
- Ambiguity checks involve functional dependencies, and it's easier to wait
until knots have been resolved before poking into them
Also, in a mutually recursive group of types, we can't look at the TyCon until we've
finished building the loop. So to keep things simple, we postpone most validity
checking until step (3).
Knot tying
~~~~~~~~~~
During step (1) we might fault in a TyCon defined in another module, and it might
(via a loop) refer back to a TyCon defined in this module. So when we tie a big
knot around type declarations with ARecThing, so that the fault-in code can get
the TyCon being defined.
************************************************************************
* *
Check types AND do validity checking
* *
************************************************************************
-}
-- | Kind-check, kind-generalise, zonk and validity-check a
-- user-written type signature in the given context.
tcHsSigType, tcHsSigTypeNC :: UserTypeCtxt -> LHsType Name -> TcM Type
-- NB: it's important that the foralls that come from the top-level
-- HsForAllTy in hs_ty occur *first* in the returned type.
-- See Note [Scoped] with TcSigInfo
tcHsSigType ctxt hs_ty
  = addErrCtxt (pprSigCtxt ctxt empty (ppr hs_ty)) $
    tcHsSigTypeNC ctxt hs_ty
tcHsSigTypeNC ctxt (L loc hs_ty)
  = setSrcSpan loc $    -- The "In the type..." context
                        -- comes from the caller; hence "NC"
    do { kind <- case expectedKindInCtxt ctxt of
                   Nothing -> newMetaKindVar
                   Just k -> return k
          -- The kind is checked by checkValidType, and isn't necessarily
          -- of kind * in a Template Haskell quote eg [t| Maybe |]
       -- Generalise here: see Note [Kind generalisation]
       ; ty <- tcCheckHsTypeAndGen hs_ty kind
       -- Zonk to expose kind information to checkValidType
       ; ty <- zonkSigType ty
       ; checkValidType ctxt ty
       ; return ty }
-----------------
-- | Kind-check an instance head, kind-generalise it, and run the
-- instance validity checks, returning the decomposed instance type.
tcHsInstHead :: UserTypeCtxt -> LHsType Name -> TcM ([TyVar], ThetaType, Class, [Type])
-- Like tcHsSigTypeNC, but for an instance head.
tcHsInstHead user_ctxt lhs_ty@(L loc hs_ty)
  = setSrcSpan loc $    -- The "In the type..." context comes from the caller
    do { inst_ty <- tc_inst_head hs_ty
       ; kvs <- zonkTcTypeAndFV inst_ty
       ; kvs <- kindGeneralize kvs
       ; inst_ty <- zonkSigType (mkForAllTys kvs inst_ty)
       ; checkValidInstance user_ctxt lhs_ty inst_ty }
-- | Kind-check the body of an instance head at kind Constraint,
-- binding any explicitly forall'd type variables first.
tc_inst_head :: HsType Name -> TcM TcType
tc_inst_head (HsForAllTy _ _ hs_tvs hs_ctxt hs_ty)
  = tcHsTyVarBndrs hs_tvs $ \ tvs ->
    do { ctxt <- tcHsContext hs_ctxt
       ; ty <- tc_lhs_type hs_ty ekConstraint    -- Body for forall has kind Constraint
       ; return (mkSigmaTy tvs ctxt ty) }
tc_inst_head hs_ty
  = tc_hs_type hs_ty ekConstraint
-----------------
tcHsDeriv :: HsType Name -> TcM ([TyVar], Class, [Type], Kind)
-- Like tcHsSigTypeNC, but for the ...deriving( C t1 ty2 ) clause
-- Returns the C, [ty1, ty2], and the kind of C's *next* argument
-- E.g. class C (a::*) (b::k->k)
--      data T a b = ... deriving( C Int )
--    returns ([k], C, [k, Int], k->k)
-- Also checks that (C ty1 ty2 arg) :: Constraint
-- if arg has a suitable kind
tcHsDeriv hs_ty
  = do { arg_kind <- newMetaKindVar
       ; ty <- tcCheckHsTypeAndGen hs_ty (mkArrowKind arg_kind constraintKind)
       ; ty <- zonkSigType ty
       ; arg_kind <- zonkSigType arg_kind
       ; let (tvs, pred) = splitForAllTys ty
       ; case getClassPredTys_maybe pred of
           Just (cls, tys) -> return (tvs, cls, tys, arg_kind)
           Nothing -> failWithTc (ptext (sLit "Illegal deriving item") <+> quotes (ppr hs_ty)) }
-- Used for 'VECTORISE [SCALAR] instance' declarations
--
-- | Split a vectorise-instance type into its class and argument
-- types, kind-checking the arguments against the class's kind.
tcHsVectInst :: LHsType Name -> TcM (Class, [Type])
tcHsVectInst ty
  | Just (L _ cls_name, tys) <- splitLHsClassTy_maybe ty
  = do { (cls, cls_kind) <- tcClass cls_name
       ; (arg_tys, _res_kind) <- tcInferApps cls_name cls_kind tys
       ; return (cls, arg_tys) }
  | otherwise
  = failWithTc $ ptext (sLit "Malformed instance type")
{-
These functions are used during knot-tying in
type and class declarations, when we have to
separate kind-checking, desugaring, and validity checking
************************************************************************
* *
The main kind checker: no validity checks here
* *
************************************************************************
First a couple of simple wrappers for kcHsType
-}
-- | Kind-check a class-method signature at kind * and zonk it.
-- No validity checking here; that happens later, outside the knot.
tcClassSigType :: LHsType Name -> TcM Type
tcClassSigType lhs_ty@(L _ hs_ty)
  = addTypeCtxt lhs_ty $
    do { ty <- tcCheckHsTypeAndGen hs_ty liftedTypeKind
       ; zonkSigType ty }
-- | Kind-check a data-constructor argument type.
tcHsConArgType :: NewOrData -> LHsType Name -> TcM Type
-- Permit a bang, but discard it
tcHsConArgType NewType bty = tcHsLiftedType (getBangType bty)
  -- Newtypes can't have bangs, but we don't check that
  -- until checkValidDataCon, so do not want to crash here
tcHsConArgType DataType bty = tcHsOpenType (getBangType bty)
  -- Can't allow an unlifted type for newtypes, because we're effectively
  -- going to remove the constructor while coercing it to a lifted type.
  -- And newtypes can't be bang'd
---------------------------
-- | Kind-check a list of argument types against their expected
-- kinds, pushing an error context for each argument. @what@
-- describes the enclosing construct; the 1-based position is used
-- in error messages.
tcHsArgTys :: SDoc -> [LHsType Name] -> [Kind] -> TcM [TcType]
tcHsArgTys what tys kinds
  = sequence (zipWith3 check_one tys kinds [1..])
  where
    -- Check a single argument under its own error context.
    check_one ty kind n = addTypeCtxt ty $
                          tc_lhs_type ty (expArgKind what kind n)
-- | Kind-check a list of argument types against their expected
-- kinds. Just like 'tcHsArgTys', but without pushing an
-- 'addTypeCtxt' error context around each argument.
tc_hs_arg_tys :: SDoc -> [LHsType Name] -> [Kind] -> TcM [TcType]
tc_hs_arg_tys what tys kinds
  = sequence (zipWith3 check_one tys kinds [1..])
  where
    check_one ty kind n = tc_lhs_type ty (expArgKind what kind n)
---------------------------
tcHsOpenType, tcHsLiftedType :: LHsType Name -> TcM TcType
-- Used for type signatures
-- Do not do validity checking
-- tcHsOpenType accepts an open result kind (ekOpen);
-- tcHsLiftedType requires a lifted one (ekLifted).
tcHsOpenType ty = addTypeCtxt ty $ tc_lhs_type ty ekOpen
tcHsLiftedType ty = addTypeCtxt ty $ tc_lhs_type ty ekLifted
-- Like tcHsType, but takes an expected kind
-- | Kind-check a located type against a caller-supplied kind,
-- adding a type error context.
tcCheckLHsType :: LHsType Name -> Kind -> TcM Type
tcCheckLHsType hs_ty exp_kind
  = addTypeCtxt hs_ty $
    tc_lhs_type hs_ty (EK exp_kind expectedKindMsg)
-- | Kind-infer a located type, returning both the elaborated type
-- and its kind.
tcLHsType :: LHsType Name -> TcM (TcType, TcKind)
-- Called from outside: set the context
tcLHsType ty = addTypeCtxt ty (tc_infer_lhs_type ty)
---------------------------
tcCheckHsTypeAndGen :: HsType Name -> Kind -> TcM Type
-- Input type is HsType, not LhsType; the caller adds the context
-- Typecheck a type signature, and kind-generalise it
-- The result is not necessarily zonked, and has not been checked for validity
tcCheckHsTypeAndGen hs_ty kind
  = do { ty <- tc_hs_type hs_ty (EK kind expectedKindMsg)
       ; traceTc "tcCheckHsTypeAndGen" (ppr hs_ty)
       -- Quantify over any kind variables still free in ty
       ; kvs <- zonkTcTypeAndFV ty
       ; kvs <- kindGeneralize kvs
       ; return (mkForAllTys kvs ty) }
{-
Like tcExpr, tc_hs_type takes an expected kind which it unifies with
the kind it figures out. When we don't know what kind to expect, we use
tc_infer_lhs_type, to first create a new meta kind variable and use that as
the expected kind.
-}
-- | Kind-check a type whose kind is unknown: allocate a fresh meta
-- kind variable, check against it, and return the elaborated type
-- together with that kind.
tc_infer_lhs_type :: LHsType Name -> TcM (TcType, TcKind)
tc_infer_lhs_type lty = do
  fresh_kv <- newMetaKindVar
  checked <- tc_lhs_type lty (EK fresh_kv expectedKindMsg)
  return (checked, fresh_kv)
-- | Kind-check a located type: set the source span for error
-- messages, then dispatch to 'tc_hs_type'.
tc_lhs_type :: LHsType Name -> ExpKind -> TcM TcType
tc_lhs_type (L span ty) exp_kind
  = setSrcSpan span $
    do { traceTc "tc_lhs_type:" (ppr ty $$ ppr exp_kind)
       ; tc_hs_type ty exp_kind }
-- | Kind-check each (type, expected-kind) pair, in order.
tc_lhs_types :: [(LHsType Name, ExpKind)] -> TcM [TcType]
tc_lhs_types pairs = sequence [ tc_lhs_type ty ek | (ty, ek) <- pairs ]
------------------------------------------
-- | Kind-check a function type (t1 -> t2), building a 'FunTy'.
tc_fun_type :: HsType Name -> LHsType Name -> LHsType Name -> ExpKind -> TcM TcType
-- We need to recognise (->) so that we can construct a FunTy,
-- *and* we need to do by looking at the Name, not the TyCon
-- (see Note [Zonking inside the knot]). For example,
-- consider f :: (->) Int Int (Trac #7312)
tc_fun_type ty ty1 ty2 exp_kind@(EK _ ctxt)
  = do { ty1' <- tc_lhs_type ty1 (EK openTypeKind ctxt)
       ; ty2' <- tc_lhs_type ty2 (EK openTypeKind ctxt)
       ; checkExpectedKind ty liftedTypeKind exp_kind
       ; return (mkFunTy ty1' ty2') }
------------------------------------------
-- | The main workhorse: kind-check a single 'HsType' against an
-- expected kind, returning the elaborated 'TcType'. One equation per
-- source-syntax constructor. No validity checking is done here.
tc_hs_type :: HsType Name -> ExpKind -> TcM TcType
tc_hs_type (HsParTy ty) exp_kind = tc_lhs_type ty exp_kind
tc_hs_type (HsDocTy ty _) exp_kind = tc_lhs_type ty exp_kind
tc_hs_type (HsQuasiQuoteTy {}) _ = panic "tc_hs_type: qq" -- Eliminated by renamer
tc_hs_type ty@(HsBangTy {}) _
    -- While top-level bangs at this point are eliminated (eg !(Maybe Int)),
    -- other kinds of bangs are not (eg ((!Maybe) Int)). These kinds of
    -- bangs are invalid, so fail. (#7210)
    = failWithTc (ptext (sLit "Unexpected strictness annotation:") <+> ppr ty)
tc_hs_type (HsRecTy _) _ = panic "tc_hs_type: record" -- Unwrapped by con decls
      -- Record types (which only show up temporarily in constructor
      -- signatures) should have been removed by now
---------- Functions and applications
tc_hs_type hs_ty@(HsTyVar name) exp_kind
  = do { (ty, k) <- tcTyVar name
       ; checkExpectedKind hs_ty k exp_kind
       ; return ty }
tc_hs_type ty@(HsFunTy ty1 ty2) exp_kind
  = tc_fun_type ty ty1 ty2 exp_kind
tc_hs_type hs_ty@(HsOpTy ty1 (_, l_op@(L _ op)) ty2) exp_kind
  | op `hasKey` funTyConKey
  = tc_fun_type hs_ty ty1 ty2 exp_kind
  | otherwise
  = do { (op', op_kind) <- tcTyVar op
       ; tys' <- tcCheckApps hs_ty l_op op_kind [ty1,ty2] exp_kind
       ; return (mkNakedAppTys op' tys') }
         -- mkNakedAppTys: see Note [Zonking inside the knot]
tc_hs_type hs_ty@(HsAppTy ty1 ty2) exp_kind
--  | L _ (HsTyVar fun) <- fun_ty
--  , fun `hasKey` funTyConKey
--  , [fty1,fty2] <- arg_tys
--  = tc_fun_type hs_ty fty1 fty2 exp_kind
--  | otherwise
  = do { (fun_ty', fun_kind) <- tc_infer_lhs_type fun_ty
       ; arg_tys' <- tcCheckApps hs_ty fun_ty fun_kind arg_tys exp_kind
       ; return (mkNakedAppTys fun_ty' arg_tys') }
         -- mkNakedAppTys: see Note [Zonking inside the knot]
         -- This looks fragile; how do we *know* that fun_ty isn't
         -- a TyConApp, say (which is never supposed to appear in the
         -- function position of an AppTy)?
  where
    (fun_ty, arg_tys) = splitHsAppTys ty1 [ty2]
--------- Foralls
tc_hs_type hs_ty@(HsForAllTy _ _ hs_tvs context ty) exp_kind@(EK exp_k _)
  | isConstraintKind exp_k
  = failWithTc (hang (ptext (sLit "Illegal constraint:")) 2 (ppr hs_ty))
  | otherwise
  = tcHsTyVarBndrs hs_tvs $ \ tvs' ->
    -- Do not kind-generalise here! See Note [Kind generalisation]
    do { ctxt' <- tcHsContext context
       ; ty' <- if null (unLoc context) then -- Plain forall, no context
                  tc_lhs_type ty exp_kind -- Why exp_kind? See Note [Body kind of forall]
                else
                  -- If there is a context, then this forall is really a
                  -- _function_, so the kind of the result really is *
                  -- The body kind (result of the function can be * or #, hence ekOpen
                  do { checkExpectedKind hs_ty liftedTypeKind exp_kind
                     ; tc_lhs_type ty ekOpen }
       ; return (mkSigmaTy tvs' ctxt' ty') }
--------- Lists, arrays, and tuples
tc_hs_type hs_ty@(HsListTy elt_ty) exp_kind
  = do { tau_ty <- tc_lhs_type elt_ty ekLifted
       ; checkExpectedKind hs_ty liftedTypeKind exp_kind
       ; checkWiredInTyCon listTyCon
       ; return (mkListTy tau_ty) }
tc_hs_type hs_ty@(HsPArrTy elt_ty) exp_kind
  = do { tau_ty <- tc_lhs_type elt_ty ekLifted
       ; checkExpectedKind hs_ty liftedTypeKind exp_kind
       ; checkWiredInTyCon parrTyCon
       ; return (mkPArrTy tau_ty) }
-- See Note [Distinguishing tuple kinds] in HsTypes
-- See Note [Inferring tuple kinds]
tc_hs_type hs_ty@(HsTupleTy HsBoxedOrConstraintTuple hs_tys) exp_kind@(EK exp_k _ctxt)
     -- (NB: not zonking before looking at exp_k, to avoid left-right bias)
  | Just tup_sort <- tupKindSort_maybe exp_k
  = traceTc "tc_hs_type tuple" (ppr hs_tys) >>
    tc_tuple hs_ty tup_sort hs_tys exp_kind
  | otherwise
  = do { traceTc "tc_hs_type tuple 2" (ppr hs_tys)
       ; (tys, kinds) <- mapAndUnzipM tc_infer_lhs_type hs_tys
       ; kinds <- mapM zonkTcKind kinds
           -- Infer each arg type separately, because errors can be
           -- confusing if we give them a shared kind. Eg Trac #7410
           -- (Either Int, Int), we do not want to get an error saying
           -- "the second argument of a tuple should have kind *->*"
       ; let (arg_kind, tup_sort)
               = case [ (k,s) | k <- kinds
                              , Just s <- [tupKindSort_maybe k] ] of
                   ((k,s) : _) -> (k,s)
                   [] -> (liftedTypeKind, BoxedTuple)
           -- In the [] case, it's not clear what the kind is, so guess *
       ; sequence_ [ setSrcSpan loc $
                     checkExpectedKind ty kind
                       (expArgKind (ptext (sLit "a tuple")) arg_kind n)
                   | (ty@(L loc _),kind,n) <- zip3 hs_tys kinds [1..] ]
       ; finish_tuple hs_ty tup_sort tys exp_kind }
tc_hs_type hs_ty@(HsTupleTy hs_tup_sort tys) exp_kind
  = tc_tuple hs_ty tup_sort tys exp_kind
  where
    tup_sort = case hs_tup_sort of -- Fourth case dealt with above
                 HsUnboxedTuple -> UnboxedTuple
                 HsBoxedTuple -> BoxedTuple
                 HsConstraintTuple -> ConstraintTuple
                 _ -> panic "tc_hs_type HsTupleTy"
--------- Promoted lists and tuples
tc_hs_type hs_ty@(HsExplicitListTy _k tys) exp_kind
  = do { tks <- mapM tc_infer_lhs_type tys
       ; let taus = map fst tks
       ; kind <- unifyKinds (ptext (sLit "In a promoted list")) tks
       ; checkExpectedKind hs_ty (mkPromotedListTy kind) exp_kind
       ; return (foldr (mk_cons kind) (mk_nil kind) taus) }
  where
    mk_cons k a b = mkTyConApp (promoteDataCon consDataCon) [k, a, b]
    mk_nil k = mkTyConApp (promoteDataCon nilDataCon) [k]
tc_hs_type hs_ty@(HsExplicitTupleTy _ tys) exp_kind
  = do { tks <- mapM tc_infer_lhs_type tys
       ; let n = length tys
             kind_con = promotedTupleTyCon BoxedTuple n
             ty_con = promotedTupleDataCon BoxedTuple n
             (taus, ks) = unzip tks
             tup_k = mkTyConApp kind_con ks
       ; checkExpectedKind hs_ty tup_k exp_kind
       ; return (mkTyConApp ty_con (ks ++ taus)) }
--------- Constraint types
tc_hs_type ipTy@(HsIParamTy n ty) exp_kind
  = do { ty' <- tc_lhs_type ty ekLifted
       ; checkExpectedKind ipTy constraintKind exp_kind
       ; ipClass <- tcLookupClass ipClassName
       ; let n' = mkStrLitTy $ hsIPNameFS n
       ; return (mkClassPred ipClass [n',ty'])
       }
tc_hs_type ty@(HsEqTy ty1 ty2) exp_kind
  = do { (ty1', kind1) <- tc_infer_lhs_type ty1
       ; (ty2', kind2) <- tc_infer_lhs_type ty2
       ; checkExpectedKind ty2 kind2
           (EK kind1 msg_fn)
       ; checkExpectedKind ty constraintKind exp_kind
       ; return (mkNakedTyConApp eqTyCon [kind1, ty1', ty2']) }
  where
    msg_fn pkind = ptext (sLit "The left argument of the equality had kind")
                   <+> quotes (pprKind pkind)
--------- Misc
tc_hs_type (HsKindSig ty sig_k) exp_kind
  = do { sig_k' <- tcLHsKind sig_k
       ; checkExpectedKind ty sig_k' exp_kind
       ; tc_lhs_type ty (EK sig_k' msg_fn) }
  where
    msg_fn pkind = ptext (sLit "The signature specified kind")
                   <+> quotes (pprKind pkind)
tc_hs_type (HsCoreTy ty) exp_kind
  = do { checkExpectedKind ty (typeKind ty) exp_kind
       ; return ty }
-- This should never happen; type splices are expanded by the renamer
tc_hs_type ty@(HsSpliceTy {}) _exp_kind
  = failWithTc (ptext (sLit "Unexpected type splice:") <+> ppr ty)
tc_hs_type (HsWrapTy {}) _exp_kind
  = panic "tc_hs_type HsWrapTy" -- We kind checked something twice
tc_hs_type hs_ty@(HsTyLit (HsNumTy _ n)) exp_kind
  = do { checkExpectedKind hs_ty typeNatKind exp_kind
       ; checkWiredInTyCon typeNatKindCon
       ; return (mkNumLitTy n) }
tc_hs_type hs_ty@(HsTyLit (HsStrTy _ s)) exp_kind
  = do { checkExpectedKind hs_ty typeSymbolKind exp_kind
       ; checkWiredInTyCon typeSymbolKindCon
       ; return (mkStrLitTy s) }
tc_hs_type HsWildcardTy _ = panic "tc_hs_type HsWildcardTy"
-- unnamed wildcards should have been replaced by named wildcards
tc_hs_type hs_ty@(HsNamedWildcardTy name) exp_kind
  = do { (ty, k) <- tcTyVar name
       ; checkExpectedKind hs_ty k exp_kind
       ; return ty }
---------------------------
-- | Classify a kind as a tuple sort: Constraint yields a constraint
-- tuple, * a boxed tuple, and any other kind is not a tuple kind.
tupKindSort_maybe :: TcKind -> Maybe TupleSort
tupKindSort_maybe k =
  if isConstraintKind k
    then Just ConstraintTuple
    else if isLiftedTypeKind k
           then Just BoxedTuple
           else Nothing
-- | Kind-check the component types of a tuple of the given sort,
-- then build the tuple type via 'finish_tuple'.
tc_tuple :: HsType Name -> TupleSort -> [LHsType Name] -> ExpKind -> TcM TcType
tc_tuple hs_ty tup_sort tys exp_kind
  = do { tau_tys <- tc_hs_arg_tys cxt_doc tys (repeat arg_kind)
       ; finish_tuple hs_ty tup_sort tau_tys exp_kind }
  where
    -- Components of a boxed tuple have kind *, of an unboxed tuple
    -- an open kind, of a constraint tuple kind Constraint.
    arg_kind = case tup_sort of
                 BoxedTuple -> liftedTypeKind
                 UnboxedTuple -> openTypeKind
                 ConstraintTuple -> constraintKind
    -- Description used in error messages for the components.
    cxt_doc = case tup_sort of
                BoxedTuple -> ptext (sLit "a tuple")
                UnboxedTuple -> ptext (sLit "an unboxed tuple")
                ConstraintTuple -> ptext (sLit "a constraint tuple")
-- | Shared finish for tuples whose components are already checked:
-- check the tuple's result kind against the expected kind, make sure
-- the tuple TyCon is available, and build the TyConApp.
finish_tuple :: HsType Name -> TupleSort -> [TcType] -> ExpKind -> TcM TcType
finish_tuple hs_ty tup_sort tau_tys exp_kind
  = do { -- The original trace printed exp_kind twice; once is enough.
         traceTc "finish_tuple" (ppr res_kind $$ ppr exp_kind)
       ; checkExpectedKind hs_ty res_kind exp_kind
       ; checkWiredInTyCon tycon
       ; return (mkTyConApp tycon tau_tys) }
  where
    tycon = tupleTyCon tup_sort (length tau_tys)
    -- Unboxed tuples live in kind #, boxed tuples in *, constraint
    -- tuples in Constraint.
    res_kind = case tup_sort of
                 UnboxedTuple -> unliftedTypeKind
                 BoxedTuple -> liftedTypeKind
                 ConstraintTuple -> constraintKind
---------------------------
-- | Kind-check the arguments of a type application against the
-- function's kind, returning the checked arguments together with
-- the kind of the result.
tcInferApps :: Outputable a
            => a
            -> TcKind                  -- Function kind
            -> [LHsType Name]          -- Arg types
            -> TcM ([TcType], TcKind)  -- Kind-checked args
tcInferApps the_fun fun_kind args
  = do { (args_w_kinds, res_kind) <- splitFunKind (ppr the_fun) fun_kind args
       ; args' <- tc_lhs_types args_w_kinds
       ; return (args', res_kind) }
-- | Like 'tcInferApps', but additionally checks that the result
-- kind of the application matches the expected kind.
tcCheckApps :: Outputable a
            => HsType Name              -- The type being checked (for err messages only)
            -> a                        -- The function
            -> TcKind -> [LHsType Name] -- Fun kind and arg types
            -> ExpKind                  -- Expected kind
            -> TcM [TcType]
tcCheckApps hs_ty the_fun fun_kind args exp_kind
  = do { (arg_tys, res_kind) <- tcInferApps the_fun fun_kind args
       ; checkExpectedKind hs_ty res_kind exp_kind
       ; return arg_tys }
---------------------------
-- | Walk down a function kind, pairing each argument with the
-- expected kind of that argument position, and return the residual
-- result kind. Fails if there are more arguments than arrows.
splitFunKind :: SDoc -> TcKind -> [b] -> TcM ([(b,ExpKind)], TcKind)
splitFunKind the_fun fun_kind args
  = go 1 fun_kind args
  where
    go _ fk [] = return ([], fk)
    go arg_no fk (arg:args)
      = do { mb_fk <- matchExpectedFunKind fk
           ; case mb_fk of
               Nothing -> failWithTc too_many_args
               Just (ak,fk') -> do { (aks, rk) <- go (arg_no+1) fk' args
                                   ; let exp_kind = expArgKind (quotes the_fun) ak arg_no
                                   ; return ((arg, exp_kind) : aks, rk) } }
    too_many_args = quotes the_fun <+>
                    ptext (sLit "is applied to too many type arguments")
---------------------------
-- | Kind-check every predicate of a source context at kind
-- Constraint.
tcHsContext :: LHsContext Name -> TcM [PredType]
tcHsContext = mapM tcHsLPredType . unLoc
-- | Kind-check a single predicate at kind Constraint.
tcHsLPredType :: LHsType Name -> TcM PredType
tcHsLPredType lpred = tc_lhs_type lpred ekConstraint
---------------------------
tcTyVar :: Name -> TcM (TcType, TcKind)
-- See Note [Type checking recursive type and class declarations]
-- in TcTyClsDecls
tcTyVar name         -- Could be a tyvar, a tycon, or a datacon
  = do { traceTc "lk1" (ppr name)
       ; thing <- tcLookup name
       ; case thing of
           ATyVar _ tv
              | isKindVar tv
              -> failWithTc (ptext (sLit "Kind variable") <+> quotes (ppr tv)
                             <+> ptext (sLit "used as a type"))
              | otherwise
              -> return (mkTyVarTy tv, tyVarKind tv)
           AThing kind -> do { tc <- get_loopy_tc name
                             ; inst_tycon (mkNakedTyConApp tc) kind }
                             -- mkNakedTyConApp: see Note [Zonking inside the knot]
           AGlobal (ATyCon tc) -> inst_tycon (mkTyConApp tc) (tyConKind tc)
           AGlobal (AConLike (RealDataCon dc))
             | Just tc <- promoteDataCon_maybe dc
             -> do { data_kinds <- xoptM Opt_DataKinds
                   ; unless data_kinds $ promotionErr name NoDataKinds
                   ; inst_tycon (mkTyConApp tc) (tyConKind tc) }
             | otherwise -> failWithTc (ptext (sLit "Data constructor") <+> quotes (ppr dc)
                                        <+> ptext (sLit "comes from an un-promotable type")
                                        <+> quotes (ppr (dataConTyCon dc)))
           APromotionErr err -> promotionErr name err
           _ -> wrongThingErr "type" thing name }
  where
    -- Look the (knot-tied) TyCon up in the global type environment;
    -- if it is not there yet, return a lazy error thunk (aThingErr),
    -- which is fine as long as only the kind is consumed.
    get_loopy_tc name
      = do { env <- getGblEnv
           ; case lookupNameEnv (tcg_type_env env) name of
               Just (ATyCon tc) -> return tc
               _ -> return (aThingErr "tcTyVar" name) }
    inst_tycon :: ([Type] -> Type) -> Kind -> TcM (Type, Kind)
    -- Instantiate the polymorphic kind
    -- Lazy in the TyCon
    inst_tycon mk_tc_app kind
      | null kvs
      = return (mk_tc_app [], ki_body)
      | otherwise
      = do { traceTc "lk4" (ppr name <+> dcolon <+> ppr kind)
           ; ks <- mapM (const newMetaKindVar) kvs
           ; return (mk_tc_app ks, substKiWith kvs ks ki_body) }
      where
        (kvs, ki_body) = splitForAllTys kind
-- | Look up a Name that must refer to a class, returning the class
-- and its kind. Tolerates a knot-tied 'AThing' by returning a lazy
-- error thunk for the class component (only the kind may be forced).
tcClass :: Name -> TcM (Class, TcKind)
tcClass cls     -- Must be a class
  = do { thing <- tcLookup cls
       ; case thing of
           AThing kind -> return (aThingErr "tcClass" cls, kind)
           AGlobal (ATyCon tc)
             | Just cls <- tyConClass_maybe tc
             -> return (cls, tyConKind tc)
           _ -> wrongThingErr "class" thing cls }
aThingErr :: String -> Name -> b
-- The type checker for types is sometimes called simply to
-- do *kind* checking; and in that case it ignores the type
-- returned. Which is a good thing since it may not be available yet!
-- If the thunk *is* forced, panic with the call site and name.
aThingErr str x = pprPanic "AThing evaluated unexpectedly" (text str <+> ppr x)
{-
Note [Zonking inside the knot]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Suppose we are checking the argument types of a data constructor. We
must zonk the types before making the DataCon, because once built we
can't change it. So we must traverse the type.
BUT the parent TyCon is knot-tied, so we can't look at it yet.
So we must be careful not to use "smart constructors" for types that
look at the TyCon or Class involved.
* Hence the use of mkNakedXXX functions. These do *not* enforce
the invariants (for example that we use (FunTy s t) rather
than (TyConApp (->) [s,t])).
* Ditto in zonkTcType (which may be applied more than once, eg to
squeeze out kind meta-variables), we are careful not to look at
the TyCon.
* We arrange to call zonkSigType *once* right at the end, and it
does establish the invariants. But in exchange we can't look
at the result (not even its structure) until we have emerged
from the "knot".
* TcHsSyn.zonkTcTypeToType also can safely check/establish
invariants.
This is horribly delicate. I hate it. A good example of how
delicate it is can be seen in Trac #7903.
-}
-- | Build a 'TyConApp' directly:
--   * without being strict in the 'TyCon', and
--   * without enforcing the invariants of 'TyConApp'.
-- A subsequent zonking (see 'zonkSigType') re-establishes the
-- invariants; see Note [Zonking inside the knot].
mkNakedTyConApp :: TyCon -> [Type] -> Type
mkNakedTyConApp = TyConApp
-- | Apply a type to arguments without enforcing the Type
-- invariants and staying lazy in any 'TyCon' involved; see
-- Note [Zonking inside the knot].
mkNakedAppTys :: Type -> [Type] -> Type
mkNakedAppTys fun args = case (fun, args) of
  (_, []) -> fun
  (TyConApp tc tys1, _) -> mkNakedTyConApp tc (tys1 ++ args)
  _ -> foldl AppTy fun args
zonkSigType :: TcType -> TcM TcType
-- Zonk the result of type-checking a user-written type signature
-- It may have kind variables in it, but no meta type variables
-- Because of knot-typing (see Note [Zonking inside the knot])
-- it may need to establish the Type invariants;
-- hence the use of mkTyConApp and mkAppTy
zonkSigType ty
  = go ty
  where
    go (TyConApp tc tys) = do tys' <- mapM go tys
                              return (mkTyConApp tc tys')
                 -- Key point: establish Type invariants!
                 -- See Note [Zonking inside the knot]
    go (LitTy n) = return (LitTy n)
    go (FunTy arg res) = do arg' <- go arg
                            res' <- go res
                            return (FunTy arg' res')
    go (AppTy fun arg) = do fun' <- go fun
                            arg' <- go arg
                            return (mkAppTy fun' arg')
                 -- NB the mkAppTy; we might have instantiated a
                 -- type variable to a type constructor, so we need
                 -- to pull the TyConApp to the top.
    -- The two interesting cases!
    go (TyVarTy tyvar) | isTcTyVar tyvar = zonkTcTyVar tyvar
                       | otherwise = TyVarTy <$> updateTyVarKindM go tyvar
                 -- Ordinary (non Tc) tyvars occur inside quantified types
    go (ForAllTy tv ty) = do { tv' <- zonkTcTyVarBndr tv
                             ; ty' <- go ty
                             ; return (ForAllTy tv' ty') }
{-
Note [Body kind of a forall]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
The body of a forall is usually a type, but in principle
there's no reason to prohibit *unlifted* types.
In fact, GHC can itself construct a function with an
unboxed tuple inside a for-all (via CPR analyis; see
typecheck/should_compile/tc170).
Moreover in instance heads we get forall-types with
kind Constraint.
Moreover if we have a signature
f :: Int#
then we represent it as (HsForAll Implicit [] [] Int#). And this must
be legal! We can't drop the empty forall until *after* typechecking
the body because of kind polymorphism:
Typeable :: forall k. k -> Constraint
data Apply f t = Apply (f t)
-- Apply :: forall k. (k -> *) -> k -> *
instance Typeable Apply where ...
Then the dfun has type
df :: forall k. Typeable ((k->*) -> k -> *) (Apply k)
f :: Typeable Apply
f :: forall (t:k->*) (a:k). t a -> t a
class C a b where
op :: a b -> Typeable Apply
data T a = MkT (Typeable Apply)
| T2 a
T :: * -> *
MkT :: forall k. (Typeable ((k->*) -> k -> *) (Apply k)) -> T a
f :: (forall (k:BOX). forall (t:: k->*) (a:k). t a -> t a) -> Int
f :: (forall a. a -> Typeable Apply) -> Int
So we *must* keep the HsForAll on the instance type
HsForAll Implicit [] [] (Typeable Apply)
so that we do kind generalisation on it.
Really we should check that it's a type of value kind
{*, Constraint, #}, but I'm not doing that yet
Example that should be rejected:
f :: (forall (a:*->*). a) Int
Note [Inferring tuple kinds]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Give a tuple type (a,b,c), which the parser labels as HsBoxedOrConstraintTuple,
we try to figure out whether it's a tuple of kind * or Constraint.
Step 1: look at the expected kind
Step 2: infer argument kinds
If after Step 2 it's not clear from the arguments that it's
Constraint, then it must be *. Once having decided that we re-check
the arguments again to give good error messages
in eg. `(Maybe, Maybe)`
Note that we will still fail to infer the correct kind in this case:
type T a = ((a,a), D a)
type family D :: Constraint -> Constraint
While kind checking T, we do not yet know the kind of D, so we will default the
kind of T to * -> *. It works if we annotate `a` with kind `Constraint`.
Note [Desugaring types]
~~~~~~~~~~~~~~~~~~~~~~~
The type desugarer is phase 2 of dealing with HsTypes. Specifically:
* It transforms from HsType to Type
* It zonks any kinds. The returned type should have no mutable kind
or type variables (hence returning Type not TcType):
- any unconstrained kind variables are defaulted to AnyK just
as in TcHsSyn.
- there are no mutable type variables because we are
kind-checking a type
Reason: the returned type may be put in a TyCon or DataCon where
it will never subsequently be zonked.
You might worry about nested scopes:
..a:kappa in scope..
let f :: forall b. T '[a,b] -> Int
In this case, f's type could have a mutable kind variable kappa in it;
and we might then default it to AnyK when dealing with f's type
signature. But we don't expect this to happen because we can't get a
lexically scoped type variable with a mutable kind variable in it. A
delicate point, this. If it becomes an issue we might need to
distinguish top-level from nested uses.
Moreover
* it cannot fail,
* it does no unifications
* it does no validity checking, except for structural matters, such as
(a) spurious ! annotations.
(b) a class used as a type
Note [Kind of a type splice]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider these terms, each with TH type splice inside:
[| e1 :: Maybe $(..blah..) |]
[| e2 :: $(..blah..) |]
When kind-checking the type signature, we'll kind-check the splice
$(..blah..); we want to give it a kind that can fit in any context,
as if $(..blah..) :: forall k. k.
In the e1 example, the context of the splice fixes kappa to *. But
in the e2 example, we'll desugar the type, zonking the kind unification
variables as we go. When we encounter the unconstrained kappa, we
want to default it to '*', not to AnyK.
Help functions for type applications
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-}
addTypeCtxt :: LHsType Name -> TcM a -> TcM a
-- Wrap an "In the type ..." error context around the enclosed checker.
-- Callers only wrap when the context is worth showing: invisible types
-- and ones the user wouldn't recognise are left unwrapped.
addTypeCtxt (L _ ty) thing
  = addErrCtxt (ptext (sLit "In the type") <+> quotes (ppr ty)) thing
{-
************************************************************************
* *
Type-variable binders
* *
************************************************************************
-}
mkKindSigVar :: Name -> TcM KindVar
-- Make a kind variable that keeps exactly the given Name (no cloning).
-- If the name is already bound in the local environment to a kind
-- variable, reuse that variable; otherwise build a fresh skolem.
mkKindSigVar n = do
  mb_thing <- tcLookupLcl_maybe n
  case mb_thing of
    Just (AThing k) | Just kvar <- getTyVar_maybe k -> return kvar
    _ -> return (mkTcTyVar n superKind (SkolemTv False))
kcScopedKindVars :: [Name] -> TcM a -> TcM a
-- Given tyvar binders like  [a (b :: k -> *) (c :: k)]
-- bring each scoped kind variable (k in this example) into scope for
-- the continuation, bound to a fresh signature variable.
kcScopedKindVars kv_ns thing_inside = do
  -- NB: mutable signature variables, so they can unify later
  kvs <- mapM (\n -> newSigTyVar n superKind) kv_ns
  tcExtendTyVarEnv2 (zip kv_ns kvs) thing_inside
-- | Kind-check a 'LHsTyVarBndrs'. If the decl under consideration has a complete,
-- user-supplied kind signature (CUSK), generalise the result. Used in 'getInitialKind'
-- and in kind-checking. See also Note [Complete user-supplied kind signatures] in
-- HsDecls.
kcHsTyVarBndrs :: Bool    -- ^ True <=> the decl being checked has a CUSK
               -> LHsTyVarBndrs Name
               -> TcM (Kind, r)   -- ^ the result kind, possibly with other info
               -> TcM (Kind, r)   -- ^ The full kind of the thing being declared,
                                  -- with the other info
kcHsTyVarBndrs cusk (HsQTvs { hsq_kvs = kv_ns, hsq_tvs = hs_tvs }) thing_inside
  = do { -- With a CUSK the scoped kind variables are rigid named variables;
         -- otherwise they are mutable signature variables that can unify
         kvs <- if cusk
                then mapM mkKindSigVar kv_ns
                else mapM (\n -> newSigTyVar n superKind) kv_ns
       ; tcExtendTyVarEnv2 (kv_ns `zip` kvs) $
    do { nks <- mapM (kc_hs_tv . unLoc) hs_tvs
       ; (res_kind, stuff) <- tcExtendKindEnv nks thing_inside
       ; let full_kind = mkArrowKinds (map snd nks) res_kind
             -- NOTE(review): this 'kvs' deliberately shadows the one
             -- bound above; here it is the set of non-meta variables
             -- free in the complete kind, the ones to generalise over
             kvs = filter (not . isMetaTyVar) $
                   varSetElems $ tyVarsOfType full_kind
             gen_kind = if cusk
                        then mkForAllTys kvs full_kind
                        else full_kind
       ; return (gen_kind, stuff) } }
  where
    -- Kind-check a single binder, producing its name and kind
    kc_hs_tv :: HsTyVarBndr Name -> TcM (Name, TcKind)
    kc_hs_tv (UserTyVar n)
      = do { mb_thing <- tcLookupLcl_maybe n
           ; kind <- case mb_thing of
                       Just (AThing k) -> return k
                       _ | cusk        -> return liftedTypeKind
                         | otherwise   -> newMetaKindVar
           ; return (n, kind) }
    kc_hs_tv (KindedTyVar (L _ n) k)
      = do { kind <- tcLHsKind k
             -- In an associated type decl, the type variable may already
             -- be in scope; in that case we want to make sure its kind
             -- matches the one declared here
           ; mb_thing <- tcLookupLcl_maybe n
           ; case mb_thing of
               Nothing          -> return ()
               Just (AThing ks) -> checkKind kind ks
               Just thing       -> pprPanic "check_in_scope" (ppr thing)
           ; return (n, kind) }
tcHsTyVarBndrs :: LHsTyVarBndrs Name
               -> ([TcTyVar] -> TcM r)
               -> TcM r
-- Bind the kind variables to fresh skolem variables, and the type
-- variables to skolems whose kinds are fresh meta-kind variables;
-- then run the continuation over all of them, kind variables first.
tcHsTyVarBndrs (HsQTvs { hsq_kvs = kv_ns, hsq_tvs = hs_tvs }) thing_inside
  = do { kvs <- mapM mkKindSigVar kv_ns
       ; tcExtendTyVarEnv kvs $
         do { tvs <- mapM tcHsTyVarBndr hs_tvs
            ; traceTc "tcHsTyVarBndrs {" (trace_doc kvs tvs)
            ; res <- tcExtendTyVarEnv tvs (thing_inside (kvs ++ tvs))
            ; traceTc "tcHsTyVarBndrs }" (trace_doc kvs tvs)
            ; return res } }
  where
    -- Shared debug-trace payload for entry and exit
    trace_doc kvs tvs = vcat [ text "Hs kind vars:" <+> ppr kv_ns
                             , text "Hs type vars:" <+> ppr hs_tvs
                             , text "Kind vars:" <+> ppr kvs
                             , text "Type vars:" <+> ppr tvs ]
tcHsTyVarBndr :: LHsTyVarBndr Name -> TcM TcTyVar
-- Return a type variable initialised with its declared kind, or with a
-- fresh meta-kind variable when no kind annotation was written.
--
-- If the variable is already in scope, return the existing one instead
-- of introducing a new one.  This can occur in
--   instance C (a,b) where
--     type F (a,b) c = ...
-- Here a,b will be in scope when processing the associated type
-- instance for F.  See Note [Associated type tyvar names] in Class.
tcHsTyVarBndr (L _ hs_tv)
  = do { mb_tv <- tcLookupLcl_maybe name
       ; case mb_tv of
           Just (ATyVar _ tv) -> return tv
           _ -> do { kind <- new_kind
                   ; return (mkTcTyVar name kind (SkolemTv False)) } }
  where
    name = hsTyVarName hs_tv
    new_kind = case hs_tv of
                 UserTyVar {}       -> newMetaKindVar
                 KindedTyVar _ kind -> tcLHsKind kind
------------------
kindGeneralize :: TyVarSet -> TcM [KindVar]
-- Quantify over the kind variables of the given set that are not
-- already fixed by the global environment.
kindGeneralize tkvs
  = do { gbl_tvs <- tcGetGlobalTyVars   -- Already zonked
       ; let kvs_only = filterVarSet isKindVar tkvs
             -- ToDo: remove the isKindVar filter.  Any type variables in
             -- tkvs will be in scope, and hence in gbl_tvs, so after
             -- removing gbl_tvs we should only have kind variables left.
             --
             -- BUT there is a smelly case (to be fixed when TH is
             -- reorganised):
             --      f t = [| e :: $t |]
             -- When typechecking the body of the bracket, we typecheck $t
             -- to a unification variable 'alpha', with no binding forall.
             -- We don't want to kind-quantify it!
       ; quantifyTyVars gbl_tvs kvs_only }
{-
Note [Kind generalisation]
~~~~~~~~~~~~~~~~~~~~~~~~~~
We do kind generalisation only at the outer level of a type signature.
For example, consider
T :: forall k. k -> *
f :: (forall a. T a -> Int) -> Int
When kind-checking f's type signature we generalise the kind at
the outermost level, thus:
f1 :: forall k. (forall (a:k). T k a -> Int) -> Int -- YES!
and *not* at the inner forall:
f2 :: (forall k. forall (a:k). T k a -> Int) -> Int -- NO!
Reason: same as for HM inference on value level declarations,
we want to infer the most general type. The f2 type signature
would be *less applicable* than f1, because it requires a more
polymorphic argument.
Note [Kinds of quantified type variables]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
tcTyVarBndrsGen quantifies over a specified list of type variables,
*and* over the kind variables mentioned in the kinds of those tyvars.
Note that we must zonk those kinds (obviously) but less obviously, we
must return type variables whose kinds are zonked too. Example
(a :: k7) where k7 := k9 -> k9
We must return
[k9, a:k9->k9]
and NOT
[k9, a:k7]
Reason: we're going to turn this into a for-all type,
forall k9. forall (a:k7). blah
which the type checker will then instantiate, and instantiate does not
look through unification variables!
Hence using zonked_kinds when forming tvs'.
-}
--------------------
-- getInitialKind has made a suitably-shaped kind for the type or class
-- Unpack it, and attribute those kinds to the type variables
-- Extend the env with bindings for the tyvars, taken from
-- the kind of the tycon/class. Give it to the thing inside, and
-- check the result kind matches
kcLookupKind :: Name -> TcM Kind
-- Fetch the kind previously recorded for a type or class: either a
-- kind still sitting in the environment as an AThing, or the kind of
-- an already-built TyCon.  Anything else is a compiler bug.
kcLookupKind nm = do
  tc_ty_thing <- tcLookup nm
  case tc_ty_thing of
    AThing k            -> return k
    AGlobal (ATyCon tc) -> return (tyConKind tc)
    _                   -> pprPanic "kcLookupKind" (ppr tc_ty_thing)
kcTyClTyVars :: Name -> LHsTyVarBndrs Name -> TcM a -> TcM a
-- Used for the type variables of a type or class decl,
-- when doing the initial kind-check.  Looks up the kind made by
-- getInitialKind, splits off one argument kind per binder, and
-- extends the kind environment accordingly for the continuation.
kcTyClTyVars name (HsQTvs { hsq_kvs = kvs, hsq_tvs = hs_tvs }) thing_inside
  = kcScopedKindVars kvs $
    do { tc_kind <- kcLookupKind name
       ; let (_, mono_kind)   = splitForAllTys tc_kind
                     -- if we have a FullKindSignature, the tc_kind may already
                     -- be generalized. The kvs get matched up while kind-checking
                     -- the types in kc_tv, below
             (arg_ks, _res_k) = splitKindFunTysN (length hs_tvs) mono_kind
                     -- There should be enough arrows, because
                     -- getInitialKinds used the tcdTyVars
       ; name_ks <- zipWithM kc_tv hs_tvs arg_ks
       ; tcExtendKindEnv name_ks thing_inside }
  where
    -- getInitialKind has already gotten the kinds of these type
    -- variables, but tiresomely we need to check them *again*
    -- to match the kind variables they mention against the ones
    -- we've freshly brought into scope
    kc_tv :: LHsTyVarBndr Name -> Kind -> TcM (Name, Kind)
    kc_tv (L _ (UserTyVar n)) exp_k
      = return (n, exp_k)
    kc_tv (L _ (KindedTyVar (L _ n) hs_k)) exp_k
      = do { k <- tcLHsKind hs_k
           ; checkKind k exp_k
           ; return (n, exp_k) }
-----------------------
tcTyClTyVars :: Name -> LHsTyVarBndrs Name      -- LHS of the type or class decl
             -> ([TyVar] -> Kind -> TcM a) -> TcM a
-- Used for the type variables of a type or class decl,
-- on the second pass when constructing the final result
-- (tcTyClTyVars T [a,b] thing_inside)
--   where T : forall k1 k2 (a:k1 -> *) (b:k1). k2 -> *
-- calls thing_inside with arguments
--   [k1,k2,a,b] (k2 -> *)
-- having also extended the type environment with bindings
-- for k1,k2,a,b
--
-- No need to freshen the k's because they are just skolem
-- constants here, and we are at top level anyway.
tcTyClTyVars tycon (HsQTvs { hsq_kvs = hs_kvs, hsq_tvs = hs_tvs }) thing_inside
  = kcScopedKindVars hs_kvs $  -- Bind scoped kind vars to fresh kind univ vars
                               -- There may be fewer of these than the kvs of
                               -- the type constructor, of course
    do { thing <- tcLookup tycon
       ; let { kind = case thing of
                        AThing kind -> kind
                        _ -> panic "tcTyClTyVars"
                     -- We only call tcTyClTyVars during typechecking in
                     -- TcTyClDecls, where the local env is extended with
                     -- the generalized_env (mapping Names to AThings).
             ; (kvs, body)  = splitForAllTys kind
             ; (kinds, res) = splitKindFunTysN (length hs_tvs) body }
       ; tvs <- zipWithM tc_hs_tv hs_tvs kinds
       ; tcExtendTyVarEnv tvs (thing_inside (kvs ++ tvs) res) }
  where
    -- In the case of associated types, the renamer has
    -- ensured that the names are in common
    -- e.g.   class C a_29 where
    --           type T b_30 a_29 :: *
    -- Here the a_29 is shared
    tc_hs_tv (L _ (UserTyVar n)) kind = return (mkTyVar n kind)
    tc_hs_tv (L _ (KindedTyVar (L _ n) hs_k)) kind
      = do { tc_kind <- tcLHsKind hs_k
           ; checkKind kind tc_kind
           ; return (mkTyVar n kind) }
-----------------------------------
tcDataKindSig :: Kind -> TcM [TyVar]
-- GADT decls can have a (perhaps partial) kind signature
--      e.g.  data T :: * -> * -> * where ...
-- This function makes up suitable (kinded) type variables for
-- the argument kinds, and checks that the result kind is indeed *.
-- We use it also to make up argument type variables for data instances.
tcDataKindSig kind
  = do { checkTc (isLiftedTypeKind res_kind) (badKindSig kind)
       ; span    <- getSrcSpanM
       ; us      <- newUniqueSupply
       ; rdr_env <- getLocalRdrEnv
       ; let uniqs = uniqsFromSupply us
             -- Candidate names, skipping any that are already in scope
             occs  = [ occ | str <- allNameStrings
                           , let occ = mkOccName tvName str
                           , isNothing (lookupLocalRdrOcc rdr_env occ) ]
                 -- Note [Avoid name clashes for associated data types]
       ; return [ mk_tv span uniq occ kind
                | ((kind, occ), uniq) <- arg_kinds `zip` occs `zip` uniqs ] }
  where
    (arg_kinds, res_kind) = splitKindFunTys kind
    mk_tv loc uniq occ kind
      = mkTyVar (mkInternalName uniq occ loc) kind
badKindSig :: Kind -> SDoc
-- Error message for a data-declaration kind signature whose result
-- kind is not *.
badKindSig kind = hang herald 2 (ppr kind)
  where
    herald = ptext (sLit "Kind signature on data type declaration has non-* return kind")
{-
Note [Avoid name clashes for associated data types]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider class C a b where
data D b :: * -> *
When typechecking the decl for D, we'll invent an extra type variable
for D, to fill out its kind. Ideally we don't want this type variable
to be 'a', because when pretty printing we'll get
class C a b where
data D b a0
(NB: the tidying happens in the conversion to IfaceSyn, which happens
as part of pretty-printing a TyThing.)
That's why we look in the LocalRdrEnv to see what's in scope. This is
important only to get nice-looking output when doing ":info C" in GHCi.
It isn't essential for correctness.
************************************************************************
* *
Scoped type variables
* *
************************************************************************
tcAddScopedTyVars is used for scoped type variables added by pattern
type signatures
e.g. \ ((x::a), (y::a)) -> x+y
They never have explicit kinds (because this is source-code only)
They are mutable (because they can get bound to a more specific type).
Usually we kind-infer and expand type splices, and then
typecheck/desugar the type.  That doesn't work well for scoped type
variables, because they scope left-right in patterns. (e.g. in the
example above, the 'a' in (y::a) is bound by the 'a' in (x::a).
The current not-very-good plan is to
* find all the types in the patterns
* find their free tyvars
* do kind inference
* bring the kinded type vars into scope
* BUT throw away the kind-checked type
(we'll kind-check it again when we type-check the pattern)
This is bad because throwing away the kind checked type throws away
its splices. But too bad for now. [July 03]
Historical note:
We no longer specify that these type variables must be universally
quantified (lots of email on the subject). If you want to put that
back in, you need to
a) Do a checkSigTyVars after thing_inside
b) More insidiously, don't pass in expected_ty, else
we unify with it too early and checkSigTyVars barfs
Instead you have to pass in a fresh ty var, and unify
it with expected_ty afterwards
-}
tcHsPatSigType :: UserTypeCtxt
               -> HsWithBndrs Name (LHsType Name) -- The type signature
               -> TcM ( Type       -- The signature
                      , [(Name, TcTyVar)]   -- The new bit of type environment, binding
                                            -- the scoped type variables
                      , [(Name, TcTyVar)] ) -- The wildcards
-- Used for type-checking type signatures in
-- (a) patterns           e.g  f (x::Int) = e
-- (b) result signatures  e.g. g x :: Int = e
-- (c) RULE forall bndrs  e.g. forall (x::Int). f x = x
tcHsPatSigType ctxt (HsWB { hswb_cts = hs_ty, hswb_kvs = sig_kvs,
                            hswb_tvs = sig_tvs, hswb_wcs = sig_wcs })
  = addErrCtxt (pprSigCtxt ctxt empty (ppr hs_ty)) $
    do { kvs     <- mapM new_kv sig_kvs
       ; tvs     <- mapM new_tv sig_tvs
       ; nwc_tvs <- mapM newWildcardVarMetaKind sig_wcs
       ; let nwc_binds = sig_wcs `zip` nwc_tvs
             ktv_binds = (sig_kvs `zip` kvs) ++ (sig_tvs `zip` tvs)
       ; sig_ty <- tcExtendTyVarEnv2 (ktv_binds ++ nwc_binds) $
                   tcHsLiftedType hs_ty
       ; sig_ty <- zonkSigType sig_ty
       ; checkValidType ctxt sig_ty
       ; emitWildcardHoleConstraints (zip sig_wcs nwc_tvs)
       ; return (sig_ty, ktv_binds, nwc_binds) }
  where
    new_kv name = new_tkv name superKind
    new_tv name = do { kind <- newMetaKindVar
                     ; new_tkv name kind }
    new_tkv name kind  -- See Note [Pattern signature binders]
      = case ctxt of
          RuleSigCtxt {} -> return (mkTcTyVar name kind (SkolemTv False))
          _              -> newSigTyVar name kind  -- See Note [Unifying SigTvs]
tcPatSig :: Bool                    -- True <=> pattern binding
         -> HsWithBndrs Name (LHsType Name)
         -> TcSigmaType
         -> TcM (TcType,            -- The type to use for "inside" the signature
                 [(Name, TcTyVar)], -- The new bit of type environment, binding
                                    -- the scoped type variables
                 [(Name, TcTyVar)], -- The wildcards
                 HsWrapper)         -- Coercion due to unification with actual ty
                                    -- Of shape:  res_ty ~ sig_ty
tcPatSig in_pat_bind sig res_ty
  = do { (sig_ty, sig_tvs, sig_nwcs) <- tcHsPatSigType PatSigCtxt sig
        -- sig_tvs are the type variables free in 'sig',
        -- and not already in scope. These are the ones
        -- that should be brought into scope

       ; if null sig_tvs then do {
            -- Just do the subsumption check and return
              wrap <- addErrCtxtM (mk_msg sig_ty) $
                      tcSubType_NC PatSigCtxt res_ty sig_ty
            ; return (sig_ty, [], sig_nwcs, wrap)
        } else do
            -- Type signature binds at least one scoped type variable
        {   -- A pattern binding cannot bind scoped type variables
            -- It is more convenient to make the test here
            -- than in the renamer
          when in_pat_bind (addErr (patBindSigErr sig_tvs))

            -- Check that all newly-in-scope tyvars are in fact
            -- constrained by the pattern.  This catches tiresome
            -- cases like
            --      type T a = Int
            --      f :: Int -> Int
            --      f (x :: T a) = ...
            -- Here 'a' doesn't get a binding.  Sigh
        ; let bad_tvs = [ tv | (_, tv) <- sig_tvs
                             , not (tv `elemVarSet` exactTyVarsOfType sig_ty) ]
        ; checkTc (null bad_tvs) (badPatSigTvs sig_ty bad_tvs)

        -- Now do a subsumption check of the pattern signature against res_ty
        ; wrap <- addErrCtxtM (mk_msg sig_ty) $
                  tcSubType_NC PatSigCtxt res_ty sig_ty

        -- Phew!
        ; return (sig_ty, sig_tvs, sig_nwcs, wrap)
        } }
  where
    -- Error context: show the (zonked, tidied) signature and context types
    mk_msg sig_ty tidy_env
      = do { (tidy_env, sig_ty) <- zonkTidyTcType tidy_env sig_ty
           ; (tidy_env, res_ty) <- zonkTidyTcType tidy_env res_ty
           ; let msg = vcat [ hang (ptext (sLit "When checking that the pattern signature:"))
                                 4 (ppr sig_ty)
                            , nest 2 (hang (ptext (sLit "fits the type of its context:"))
                                         2 (ppr res_ty)) ]
           ; return (tidy_env, msg) }
patBindSigErr :: [(Name, TcTyVar)] -> SDoc
-- Error: a pattern binding may not bind scoped type variables.
patBindSigErr sig_tvs
  = hang herald 2 (ptext (sLit "in a pattern binding signature"))
  where
    herald = ptext (sLit "You cannot bind scoped type variable") <> plural sig_tvs
             <+> pprQuotedList (map fst sig_tvs)
{-
Note [Pattern signature binders]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider
data T = forall a. T a (a->Int)
f (T x (f :: a->Int) = blah)
Here
* The pattern (T p1 p2) creates a *skolem* type variable 'a_sk',
It must be a skolem so that that it retains its identity, and
TcErrors.getSkolemInfo can thereby find the binding site for the skolem.
* The type signature pattern (f :: a->Int) binds "a" -> a_sig in the envt
  * Then unification makes a_sig := a_sk
That's why we must make a_sig a MetaTv (albeit a SigTv),
not a SkolemTv, so that it can unify to a_sk.
For RULE binders, though, things are a bit different (yuk).
RULE "foo" forall (x::a) (y::[a]). f x y = ...
Here this really is the binding site of the type variable so we'd like
to use a skolem, so that we get a complaint if we unify two of them
together.
Note [Unifying SigTvs]
~~~~~~~~~~~~~~~~~~~~~~
ALAS we have no decent way of avoiding two SigTvs getting unified.
Consider
f (x::(a,b)) (y::c)) = [fst x, y]
Here we'd really like to complain that 'a' and 'c' are unified. But
for the reasons above we can't make a,b,c into skolems, so they
are just SigTvs that can unify. And indeed, this would be ok,
f x (y::c) = case x of
(x1 :: a1, True) -> [x,y]
(x1 :: a2, False) -> [x,y,y]
Here the type of x's first component is called 'a1' in one branch and
'a2' in the other. We could try insisting on the same OccName, but
they definitely won't have the same lexical Name.
I think we could solve this by recording in a SigTv a list of all the
in-scope variables that it should not unify with, but it's fiddly.
************************************************************************
* *
Checking kinds
* *
************************************************************************
We would like to get a decent error message from
(a) Under-applied type constructors
f :: (Maybe, Maybe)
(b) Over-applied type constructors
f :: Int x -> Int x
-}
-- The ExpKind datatype means "expected kind" and contains
-- some info about just why that kind is expected, to improve
-- the error message on a mis-match
data ExpKind = EK TcKind (TcKind -> SDoc)
   -- The second arg is function that takes a *tidied* version
   -- of the first arg, and produces something like
   --    "Expected kind k"
   --    "Expected a constraint"
   --    "The argument of Maybe should have kind k"

instance Outputable ExpKind where
  ppr (EK k f) = f k

-- Commonly expected kinds, all using the generic message
ekLifted, ekOpen, ekConstraint :: ExpKind
ekLifted     = EK liftedTypeKind expectedKindMsg
ekOpen       = EK openTypeKind expectedKindMsg
ekConstraint = EK constraintKind expectedKindMsg
expectedKindMsg :: TcKind -> SDoc
-- Generic "Expected ..." message for a (tidied) expected kind
expectedKindMsg pkind
  | isConstraintKind pkind = ptext (sLit "Expected a constraint")
  | isOpenTypeKind pkind   = ptext (sLit "Expected a type")
  | otherwise = ptext (sLit "Expected kind") <+> quotes (pprKind pkind)
-- Build an ExpKind for arguments: the message names the argument
-- position ("The Nth argument of ... should have kind ...")
expArgKind :: SDoc -> TcKind -> Int -> ExpKind
expArgKind exp kind arg_no = EK kind msg_fn
  where
    msg_fn pkind
      = sep [ ptext (sLit "The") <+> speakNth arg_no
              <+> ptext (sLit "argument of") <+> exp
            , nest 2 $ ptext (sLit "should have kind")
              <+> quotes (pprKind pkind) ]
unifyKinds :: SDoc -> [(TcType, TcKind)] -> TcM TcKind
-- Make a fresh meta-kind and check each argument's kind against it,
-- reporting mismatches by argument position; returns the common kind.
unifyKinds fun act_kinds
  = do { res_kind <- newMetaKindVar
       ; let check_one (arg_no, (ty, act_kind))
               = checkExpectedKind ty act_kind
                     (expArgKind (quotes fun) res_kind arg_no)
       ; mapM_ check_one ([1..] `zip` act_kinds)
       ; return res_kind }
checkKind :: TcKind -> TcKind -> TcM ()
-- Require the two kinds to unify *exactly* (no sub-kinding accepted);
-- otherwise report a kind mis-match.
checkKind act_kind exp_kind
  = do { mb_subk <- unifyKindX act_kind exp_kind
       ; unless (mb_subk == Just EQ) $
           unifyKindMisMatch act_kind exp_kind }
checkExpectedKind :: Outputable a => a -> TcKind -> ExpKind -> TcM ()
-- A fancy wrapper for 'unifyKindX', which tries
-- to give decent error messages.
--      (checkExpectedKind ty act_kind exp_kind)
-- checks that the actual kind act_kind is compatible
-- with the expected kind exp_kind
-- The first argument, ty, is used only in the error message generation
checkExpectedKind ty act_kind (EK exp_kind ek_ctxt)
  = do { mb_subk <- unifyKindX act_kind exp_kind

         -- Kind unification only generates definite errors
       ; case mb_subk of {
           Just LT -> return () ;    -- act_kind is a sub-kind of exp_kind
           Just EQ -> return () ;    -- The two are equal
           _other  -> do
       {  -- So there's an error
          -- Now to find out what sort
         exp_kind <- zonkTcKind exp_kind
       ; act_kind <- zonkTcKind act_kind
       ; traceTc "checkExpectedKind" (ppr ty $$ ppr act_kind $$ ppr exp_kind)
       ; env0   <- tcInitTidyEnv
       ; dflags <- getDynFlags
       ; let (exp_as, _) = splitKindFunTys exp_kind
             (act_as, _) = splitKindFunTys act_kind
             n_exp_as  = length exp_as
             n_act_as  = length act_as
             n_diff_as = n_act_as - n_exp_as

             (env1, tidy_exp_kind) = tidyOpenKind env0 exp_kind
             (env2, tidy_act_kind) = tidyOpenKind env1 act_kind

             -- True if either kind is a variable occurring in the other;
             -- reported specially, before the arity-mismatch message
             occurs_check
               | Just act_tv <- tcGetTyVar_maybe act_kind
               = check_occ act_tv exp_kind
               | Just exp_tv <- tcGetTyVar_maybe exp_kind
               = check_occ exp_tv act_kind
               | otherwise
               = False

             check_occ tv k = case occurCheckExpand dflags tv k of
                                OC_Occurs -> True
                                _bad      -> False

             err | isLiftedTypeKind exp_kind && isUnliftedTypeKind act_kind
                 = ptext (sLit "Expecting a lifted type, but") <+> quotes (ppr ty)
                     <+> ptext (sLit "is unlifted")

                 | isUnliftedTypeKind exp_kind && isLiftedTypeKind act_kind
                 = ptext (sLit "Expecting an unlifted type, but") <+> quotes (ppr ty)
                     <+> ptext (sLit "is lifted")

                 | occurs_check   -- Must precede the "more args expected" check
                 = ptext (sLit "Kind occurs check") $$ more_info

                 | n_exp_as < n_act_as      -- E.g. [Maybe]
                 = vcat [ ptext (sLit "Expecting") <+>
                          speakN n_diff_as <+> ptext (sLit "more argument")
                          <> (if n_diff_as > 1 then char 's' else empty)
                          <+> ptext (sLit "to") <+> quotes (ppr ty)
                        , more_info ]

                   -- Now n_exp_as >= n_act_as. In the next two cases,
                   -- n_exp_as == 0, and hence so is n_act_as
                 | otherwise                -- E.g. Monad [Int]
                 = more_info

             more_info = sep [ ek_ctxt tidy_exp_kind <> comma
                             , nest 2 $ ptext (sLit "but") <+> quotes (ppr ty)
                               <+> ptext (sLit "has kind") <+> quotes (pprKind tidy_act_kind)]

       ; traceTc "checkExpectedKind 1" (ppr ty $$ ppr tidy_act_kind $$ ppr tidy_exp_kind $$ ppr env1 $$ ppr env2)
       ; failWithTcM (env2, err) } } }
{-
************************************************************************
* *
Sort checking kinds
* *
************************************************************************
tcLHsKind converts a user-written kind to an internal, sort-checked kind.
It does sort checking and desugaring at the same time, in one single pass.
It fails when the kinds are not well-formed (eg. data A :: * Int), or if there
are non-promotable or non-fully applied kinds.
-}
tcLHsKind :: LHsKind Name -> TcM Kind
-- Entry point for sort-checking a user-written kind; wraps the worker
-- in an "In the kind ..." error context.
tcLHsKind k = addErrCtxt ctxt (tc_lhs_kind k)
  where
    ctxt = ptext (sLit "In the kind") <+> quotes (ppr k)
tc_lhs_kind :: LHsKind Name -> TcM Kind
-- Push the kind's source span before handing off to the worker
tc_lhs_kind (L span ki) = setSrcSpan span (tc_hs_kind ki)
-- The main worker: translate each HsKind form to an internal Kind
tc_hs_kind :: HsKind Name -> TcM Kind
tc_hs_kind (HsTyVar tc)    = tc_kind_var_app tc []
tc_hs_kind k@(HsAppTy _ _) = tc_kind_app k []
tc_hs_kind (HsParTy ki)    = tc_lhs_kind ki
tc_hs_kind (HsFunTy ki1 ki2)
  = do { k1 <- tc_lhs_kind ki1
       ; k2 <- tc_lhs_kind ki2
       ; return (mkArrowKind k1 k2) }
tc_hs_kind (HsListTy ki)
  = do { k <- tc_lhs_kind ki
       ; checkWiredInTyCon listTyCon
       ; return (mkPromotedListTy k) }
tc_hs_kind (HsTupleTy _ kis)
  = do { ks <- mapM tc_lhs_kind kis
       ; let tycon = promotedTupleTyCon BoxedTuple (length kis)
       ; checkWiredInTyCon tycon
       ; return (mkTyConApp tycon ks) }
-- Argument not kind-shaped
tc_hs_kind k = pprPanic "tc_hs_kind" (ppr k)
-- Special case for kind application
tc_kind_app :: HsKind Name -> [LHsKind Name] -> TcM Kind
-- Unwind a left-nested application, accumulating the arguments,
-- until we reach the head, which must be a (promoted) constructor.
tc_kind_app (HsAppTy fun arg) pending = tc_kind_app (unLoc fun) (arg : pending)
tc_kind_app (HsTyVar tc) pending
  = do { arg_kis <- mapM tc_lhs_kind pending
       ; tc_kind_var_app tc arg_kis }
tc_kind_app ki _
  = failWithTc (quotes (ppr ki) <+> ptext (sLit "is not a kind constructor"))
tc_kind_var_app :: Name -> [Kind] -> TcM Kind
-- Check a (possibly promoted) head applied to the given argument kinds.
-- Special case for * and Constraint kinds
-- They are kinds already, so we don't need to promote them
tc_kind_var_app name arg_kis
  | name == liftedTypeKindTyConName
    || name == constraintKindTyConName
  = do { unless (null arg_kis)
           (failWithTc (text "Kind" <+> ppr name <+> text "cannot be applied"))
       ; thing <- tcLookup name
       ; case thing of
           AGlobal (ATyCon tc) -> return (mkTyConApp tc [])
           _                   -> panic "tc_kind_var_app 1" }

-- General case
tc_kind_var_app name arg_kis
  = do { thing <- tcLookup name
       ; case thing of
           AGlobal (ATyCon tc)
             -> do { -- Promoting a data type requires -XDataKinds
                     data_kinds <- xoptM Opt_DataKinds
                   ; unless data_kinds $ addErr (dataKindsErr name)
                   ; case promotableTyCon_maybe tc of
                       Just prom_tc | arg_kis `lengthIs` tyConArity prom_tc
                               -> return (mkTyConApp prom_tc arg_kis)
                       Just _  -> tycon_err tc "is not fully applied"
                       Nothing -> tycon_err tc "is not promotable" }

           -- A lexically scoped kind variable
           ATyVar _ kind_var
             | not (isKindVar kind_var)
             -> failWithTc (ptext (sLit "Type variable") <+> quotes (ppr kind_var)
                            <+> ptext (sLit "used as a kind"))
             | not (null arg_kis)  -- Kind variables always have kind BOX,
                                   -- so cannot be applied to anything
             -> failWithTc (ptext (sLit "Kind variable") <+> quotes (ppr name)
                            <+> ptext (sLit "cannot appear in a function position"))
             | otherwise
             -> return (mkAppTys (mkTyVarTy kind_var) arg_kis)

           -- It is in scope, but not what we expected
           AThing _
             | isTyVarName name
             -> failWithTc (ptext (sLit "Type variable") <+> quotes (ppr name)
                            <+> ptext (sLit "used in a kind"))
             | otherwise
             -> failWithTc (hang (ptext (sLit "Type constructor") <+> quotes (ppr name)
                                  <+> ptext (sLit "used in a kind"))
                               2 (ptext (sLit "inside its own recursive group")))

           APromotionErr err -> promotionErr name err
           _ -> wrongThingErr "promoted type" thing name
                -- This really should not happen
       }
  where
    tycon_err tc msg = failWithTc (quotes (ppr tc) <+> ptext (sLit "of kind")
                                   <+> quotes (ppr (tyConKind tc)) <+> ptext (sLit msg))
dataKindsErr :: Name -> SDoc
-- Error for a promoted name used in a kind without -XDataKinds.
dataKindsErr name = hang herald 2 hint
  where
    herald = ptext (sLit "Illegal kind:") <+> quotes (ppr name)
    hint   = ptext (sLit "Perhaps you intended to use DataKinds")
promotionErr :: Name -> PromotionErr -> TcM a
-- Report a name that cannot be used (promoted) in this position;
-- the PromotionErr determines the explanatory parenthetical.
promotionErr name err
  = failWithTc (hang (pprPECategory err <+> quotes (ppr name) <+> ptext (sLit "cannot be used here"))
                   2 (parens reason))
  where
    reason = case err of
               FamDataConPE -> ptext (sLit "it comes from a data family instance")
               NoDataKinds  -> ptext (sLit "Perhaps you intended to use DataKinds")
               _ -> ptext (sLit "it is defined and used in the same recursive group")
{-
************************************************************************
* *
Scoped type variables
* *
************************************************************************
-}
badPatSigTvs :: TcType -> [TyVar] -> SDoc
-- Error: type variables in a pattern signature that are discarded by
-- a type synonym, so they never constrain the pattern.
badPatSigTvs sig_ty bad_tvs
  = vcat [ fsep [ptext (sLit "The type variable") <> plural bad_tvs,
                 quotes (pprWithCommas ppr bad_tvs),
                 ptext (sLit "should be bound by the pattern signature") <+> quotes (ppr sig_ty),
                 ptext (sLit "but are actually discarded by a type synonym") ]
         , ptext (sLit "To fix this, expand the type synonym")
         , ptext (sLit "[Note: I hope to lift this restriction in due course]") ]
unifyKindMisMatch :: TcKind -> TcKind -> TcM a
-- Fail with a "Couldn't match kind" message, zonking both kinds first
-- so the message shows their fully-substituted forms.
unifyKindMisMatch ki1 ki2
  = do { ki1' <- zonkTcKind ki1
       ; ki2' <- zonkTcKind ki2
       ; let msg = hang (ptext (sLit "Couldn't match kind"))
                      2 (sep [ quotes (ppr ki1')
                             , ptext (sLit "against")
                             , quotes (ppr ki2') ])
       ; failWithTc msg }
|
alexander-at-github/eta
|
compiler/ETA/TypeCheck/TcHsType.hs
|
bsd-3-clause
| 68,945 | 106 | 26 | 20,332 | 12,110 | 6,222 | 5,888 | 793 | 8 |
{-# LANGUAGE TemplateHaskell, MultiParamTypeClasses #-}
-- | Free theorems plugin,
-- Don Stewart 2006
module Plugin.FT where
import Plugin
import Plugin.Type (query_ghci)
$(plugin "FT")
instance Module FTModule () where
    -- The @ft <ident>@ command: ask ghci for the identifier's type
    -- (":t"), then pipe that type through the free-theorem generator.
    moduleCmds   _ = ["ft"]
    moduleHelp _ _ = "ft <ident>. Generate theorems for free"
    process_ _ _ s = (liftM unlines . lift . query_ghci ":t") s >>= ios . ft
-- Name of the external helper executable that generates free theorems.
binary :: String
binary = "ftshell"
ft :: String -> IO String
-- Feed a type signature to the external generator and tidy its output:
-- clean each line, drop the empty ones, and indent the rest by one space.
ft src = run binary src tidy
  where
    tidy = unlines . map (' ' :) . filter (not . null) . map cleanit . lines
cleanit :: String -> String
-- Post-process one line of generator output: if the helper process was
-- killed, report that; otherwise strip any non-ASCII characters.
cleanit s
  | regex' "waitForProc" `matches'` s = "Terminated\n"
  | otherwise                         = filter isAscii s
|
zeekay/lambdabot
|
Plugin/FT.hs
|
mit
| 729 | 0 | 12 | 168 | 231 | 118 | 113 | 18 | 1 |
module Lets.Profunctor (
Profunctor(dimap)
) where
import Lets.Data
-- | A profunctor is a binary functor, with the first argument in contravariant
-- (negative) position and the second argument in covariant (positive) position.
class Profunctor p where
  -- | Map over both arguments at once: contravariantly over the
  -- first, covariantly over the second.
  dimap ::
    (b -> a)
    -> (c -> d)
    -> p a c
    -> p b d
instance Profunctor (->) where
  -- Pre-compose on the input, post-compose on the output.
  dimap f g h = g . h . f
instance Profunctor Tagged where
  -- The contravariant argument is phantom in Tagged, so only the
  -- covariant map is applied to the stored value.
  dimap _ g (Tagged x) = Tagged (g x)
|
bitemyapp/lets-lens
|
src/Lets/Profunctor.hs
|
bsd-3-clause
| 462 | 0 | 10 | 114 | 141 | 75 | 66 | 16 | 0 |
-- This module is always included behind the scenes when compiling.
-- It will not bring any code into the system, but brings several
-- names and type definitions into scope that the compiler expects
-- to exist.
module Jhc.Prim.Prim where
import Jhc.Prim.Bits()
import Jhc.Prim.IO()
import Jhc.Prim.List()
-- Wired-in primitive declarations.  These generate no code; they just
-- declare the names and shapes the compiler expects to exist.

-- The function arrow (its kind uses jhc's sub-kinds ?? and ?).
data (->) :: ?? -> ? -> *

infixr 5 :

-- Lists.
data [] a = a : ([] a) | []

-- Booleans, with C representation HsBool.
data {-# CTYPE "HsBool" #-} Bool = False | True

-- Comparison results.
data Ordering = LT | EQ | GT

-- Unit and the tuple family, up to 9 components.
data () = ()
data (,) a b = (,) a b
data (,,) a b c = (,,) a b c
data (,,,) a b c d = (,,,) a b c d
data (,,,,) a b c d e = (,,,,) a b c d e
data (,,,,,) a b c d e f = (,,,,,) a b c d e f
data (,,,,,,) a b c d e f g = (,,,,,,) a b c d e f g
data (,,,,,,,) a b c d e f g h = (,,,,,,,) a b c d e f g h
data (,,,,,,,,) a b c d e f g h i = (,,,,,,,,) a b c d e f g h i
|
hvr/jhc
|
lib/jhc-prim/Jhc/Prim/Prim.hs
|
mit
| 823 | 7 | 13 | 218 | 475 | 284 | 191 | -1 | -1 |
{-# LANGUAGE BangPatterns, GADTs, OverloadedStrings, RecordWildCards #-}
-- |
-- Module : Data.Attoparsec.ByteString.Internal
-- Copyright : Bryan O'Sullivan 2007-2014
-- License : BSD3
--
-- Maintainer : [email protected]
-- Stability : experimental
-- Portability : unknown
--
-- Simple, efficient parser combinators for 'ByteString' strings,
-- loosely based on the Parsec library.
module Data.Attoparsec.ByteString.Internal
(
-- * Parser types
Parser
, Result
-- * Running parsers
, parse
, parseOnly
-- * Combinators
, module Data.Attoparsec.Combinator
-- * Parsing individual bytes
, satisfy
, satisfyWith
, anyWord8
, skip
, word8
, notWord8
-- ** Lookahead
, peekWord8
, peekWord8'
-- ** Byte classes
, inClass
, notInClass
-- * Parsing more complicated structures
, storable
-- * Efficient string handling
, skipWhile
, string
, stringTransform
, take
, scan
, runScanner
, takeWhile
, takeWhile1
, takeTill
-- ** Consume all remaining input
, takeByteString
, takeLazyByteString
-- * Utilities
, endOfLine
, endOfInput
, match
, atEnd
) where
import Control.Applicative ((<|>), (<$>))
import Control.Monad (when)
import Data.Attoparsec.ByteString.Buffer (Buffer, buffer)
import Data.Attoparsec.ByteString.FastSet (charClass, memberWord8)
import Data.Attoparsec.Combinator ((<?>))
import Data.Attoparsec.Internal
import Data.Attoparsec.Internal.Fhthagn (inlinePerformIO)
import Data.Attoparsec.Internal.Types hiding (Parser, Failure, Success)
import Data.ByteString (ByteString)
import Data.Word (Word8)
import Foreign.ForeignPtr (withForeignPtr)
import Foreign.Ptr (castPtr, minusPtr, plusPtr)
import Foreign.Storable (Storable(peek, sizeOf))
import Prelude hiding (getChar, succ, take, takeWhile)
import qualified Data.Attoparsec.ByteString.Buffer as Buf
import qualified Data.Attoparsec.Internal.Types as T
import qualified Data.ByteString as B8
import qualified Data.ByteString.Char8 as B
import qualified Data.ByteString.Internal as B
import qualified Data.ByteString.Lazy as L
import qualified Data.ByteString.Unsafe as B
type Parser = T.Parser ByteString
type Result = IResult ByteString
type Failure r = T.Failure ByteString Buffer r
type Success a r = T.Success ByteString Buffer a r
-- | The parser @satisfy p@ succeeds for any byte for which the
-- predicate @p@ returns 'True', and returns that byte.
--
-- >digit = satisfy isDigit
-- >    where isDigit w = w >= 48 && w <= 57
satisfy :: (Word8 -> Bool) -> Parser Word8
satisfy p = peekWord8' >>= \w ->
  if p w
    then advance 1 >> return w
    else fail "satisfy"
{-# INLINE satisfy #-}
-- | The parser @skip p@ succeeds for any byte for which the predicate
-- @p@ returns 'True', consuming exactly that one byte.
--
-- >skipDigit = skip isDigit
-- >    where isDigit w = w >= 48 && w <= 57
skip :: (Word8 -> Bool) -> Parser ()
skip p = peekWord8' >>= \w ->
  if p w
    then advance 1
    else fail "skip"
-- | The parser @satisfyWith f p@ transforms a byte with @f@, and
-- succeeds if the predicate @p@ returns 'True' on the transformed
-- value. The transformed value is returned.
satisfyWith :: (Word8 -> a) -> (a -> Bool) -> Parser a
satisfyWith f p = do
  c <- f <$> peekWord8'
  if p c
    then advance 1 >> return c
    else fail "satisfyWith"
{-# INLINE satisfyWith #-}
storable :: Storable a => Parser a
storable = hack undefined
where
hack :: Storable b => b -> Parser b
hack dummy = do
(fp,o,_) <- B.toForeignPtr `fmap` take (sizeOf dummy)
return . B.inlinePerformIO . withForeignPtr fp $ \p ->
peek (castPtr $ p `plusPtr` o)
-- | Consume @n@ bytes of input, but succeed only if the predicate
-- returns 'True' on the consumed prefix.
takeWith :: Int -> (ByteString -> Bool) -> Parser ByteString
takeWith n0 p = do
  s <- ensure n
  if p s
    then advance n >> return s
    else fail "takeWith"
  where
    -- A negative request behaves like a request for zero bytes.
    n = max n0 0
-- | Consume exactly @n@ bytes of input.
take :: Int -> Parser ByteString
take n = takeWith n (const True)
{-# INLINE take #-}
-- | @string s@ parses a sequence of bytes that identically match
-- @s@. Returns the parsed string (i.e. @s@). This parser consumes no
-- input if it fails (even if a partial match).
--
-- /Note/: The behaviour of this parser is different to that of the
-- similarly-named parser in Parsec, as this one is all-or-nothing.
-- To illustrate the difference, the following parser will fail under
-- Parsec given an input of @\"for\"@:
--
-- >string "foo" <|> string "for"
--
-- The reason for its failure is that the first branch is a
-- partial match, and will consume the letters @\'f\'@ and @\'o\'@
-- before failing. In attoparsec, the above parser will /succeed/ on
-- that input, because the failed first branch will consume nothing.
string :: ByteString -> Parser ByteString
string s = takeWith (B.length s) (==s)
{-# INLINE string #-}
stringTransform :: (ByteString -> ByteString) -> ByteString
-> Parser ByteString
stringTransform f s = takeWith (B.length s) ((==f s) . f)
{-# INLINE stringTransform #-}
-- | Skip past input for as long as the predicate returns 'True'.
skipWhile :: (Word8 -> Bool) -> Parser ()
skipWhile p = go
where
go = do
t <- B8.takeWhile p <$> get
continue <- inputSpansChunks (B.length t)
when continue go
{-# INLINE skipWhile #-}
-- | Consume input as long as the predicate returns 'False'
-- (i.e. until it returns 'True'), and return the consumed input.
--
-- This parser does not fail. It will return an empty string if the
-- predicate returns 'True' on the first byte of input.
--
-- /Note/: Because this parser does not fail, do not use it with
-- combinators such as 'Control.Applicative.many', because such
-- parsers loop until a failure occurs. Careless use will thus result
-- in an infinite loop.
takeTill :: (Word8 -> Bool) -> Parser ByteString
takeTill p = takeWhile (not . p)
{-# INLINE takeTill #-}
-- | Consume input as long as the predicate returns 'True', and return
-- the consumed input.
--
-- This parser does not fail. It will return an empty string if the
-- predicate returns 'False' on the first byte of input.
--
-- /Note/: Because this parser does not fail, do not use it with
-- combinators such as 'Control.Applicative.many', because such
-- parsers loop until a failure occurs. Careless use will thus result
-- in an infinite loop.
takeWhile :: (Word8 -> Bool) -> Parser ByteString
takeWhile p = B.concat . reverse <$> loop []
  where
    -- Accumulate matching prefixes chunk by chunk (in reverse order)
    -- until the match stops inside the current chunk.
    loop chunks = do
      chunk <- B8.takeWhile p <$> get
      spansMore <- inputSpansChunks (B.length chunk)
      let acc = chunk : chunks
      if spansMore
        then loop acc
        else return acc
{-# INLINE takeWhile #-}
-- | Consume all remaining input, returning it as the list of chunks
-- in which it arrived.
takeRest :: Parser [ByteString]
takeRest = loop []
  where
    loop acc = do
      more <- wantInput
      if not more
        then return (reverse acc)
        else do
          chunk <- get
          advance (B.length chunk)
          loop (chunk : acc)
-- | Consume all remaining input and return it as a single string.
takeByteString :: Parser ByteString
takeByteString = B.concat `fmap` takeRest
-- | Consume all remaining input and return it as a single string.
takeLazyByteString :: Parser L.ByteString
takeLazyByteString = L.fromChunks `fmap` takeRest
-- Strict pair of a byte count and the scanner state, used to return
-- both results of the inner pointer loop without extra laziness.
data T s = T {-# UNPACK #-} !Int s

-- Shared worker behind 'scan' and 'runScanner': repeatedly runs the
-- stateful byte predicate @p@ over input chunks starting from state
-- @s0@, then hands the final state and the matched chunks (in reverse
-- order) to the continuation @f@.
scan_ :: (s -> [ByteString] -> Parser r) -> s -> (s -> Word8 -> Maybe s)
      -> Parser r
scan_ f s0 p = go [] s0
  where
    go acc s1 = do
      -- Scan one chunk with raw pointers for speed; stops at the first
      -- byte the predicate rejects, or at the end of the chunk,
      -- returning how many bytes matched plus the final state.
      let scanner (B.PS fp off len) =
            withForeignPtr fp $ \ptr0 -> do
              let start = ptr0 `plusPtr` off
                  end = start `plusPtr` len
                  inner ptr !s
                    | ptr < end = do
                        w <- peek ptr
                        case p s w of
                          Just s' -> inner (ptr `plusPtr` 1) s'
                          _ -> done (ptr `minusPtr` start) s
                    | otherwise = done (ptr `minusPtr` start) s
                  done !i !s = return (T i s)
              inner start s1
      bs <- get
      -- NOTE(review): inlinePerformIO is only safe here because the
      -- scan merely reads the immutable chunk — confirm no writes.
      let T i s' = inlinePerformIO $ scanner bs
          !h = B.unsafeTake i bs
      continue <- inputSpansChunks i
      if continue
        -- The whole chunk matched; keep scanning into the next chunk.
        then go (h:acc) s'
        else f s' (h:acc)
{-# INLINE scan_ #-}
-- | A stateful scanner. The predicate consumes and transforms a
-- state argument, and each transformed state is passed to successive
-- invocations of the predicate on each byte of the input until one
-- returns 'Nothing' or the input ends.
--
-- This parser does not fail. It will return an empty string if the
-- predicate returns 'Nothing' on the first byte of input.
--
-- /Note/: Because this parser does not fail, do not use it with
-- combinators such as 'Control.Applicative.many', because such
-- parsers loop until a failure occurs. Careless use will thus result
-- in an infinite loop.
scan :: s -> (s -> Word8 -> Maybe s) -> Parser ByteString
scan = scan_ $ \_ chunks ->
case chunks of
[x] -> return x
xs -> return $! B.concat $ reverse xs
{-# INLINE scan #-}
-- | Like 'scan', but generalized to return the final state of the
-- scanner.
runScanner :: s -> (s -> Word8 -> Maybe s) -> Parser (ByteString, s)
runScanner = scan_ $ \s xs -> return (B.concat (reverse xs), s)
{-# INLINE runScanner #-}
-- | Consume input as long as the predicate returns 'True', and return
-- the consumed input.
--
-- This parser requires the predicate to succeed on at least one byte
-- of input: it will fail if the predicate never returns 'True' or if
-- there is no input left.
takeWhile1 :: (Word8 -> Bool) -> Parser ByteString
takeWhile1 p = do
(`when` demandInput) =<< endOfChunk
s <- B8.takeWhile p <$> get
let len = B.length s
if len == 0
then fail "takeWhile1"
else do
advance len
eoc <- endOfChunk
if eoc
then (s<>) `fmap` takeWhile p
else return s
-- | Match any byte in a set.
--
-- >vowel = inClass "aeiou"
--
-- Range notation is supported.
--
-- >halfAlphabet = inClass "a-nA-N"
--
-- To add a literal @\'-\'@ to a set, place it at the beginning or end
-- of the string.
inClass :: String -> Word8 -> Bool
inClass s = (`memberWord8` mySet)
  where mySet = charClass s
        -- NOINLINE ensures the lookup table is built once per class
        -- string and shared, instead of being rebuilt per byte test.
        {-# NOINLINE mySet #-}
{-# INLINE inClass #-}
-- | Match any byte not in a set.
notInClass :: String -> Word8 -> Bool
notInClass s = not . inClass s
{-# INLINE notInClass #-}
-- | Match any byte.
anyWord8 :: Parser Word8
anyWord8 = satisfy $ const True
{-# INLINE anyWord8 #-}
-- | Match a specific byte.
word8 :: Word8 -> Parser Word8
word8 c = satisfy (== c) <?> show c
{-# INLINE word8 #-}
-- | Match any byte except the given one.
notWord8 :: Word8 -> Parser Word8
notWord8 c = satisfy (/= c) <?> "not " ++ show c
{-# INLINE notWord8 #-}
-- | Match any byte, to perform lookahead. Returns 'Nothing' if end of
-- input has been reached. Does not consume any input.
--
-- /Note/: Because this parser does not fail, do not use it with
-- combinators such as 'Control.Applicative.many', because such
-- parsers loop until a failure occurs. Careless use will thus result
-- in an infinite loop.
peekWord8 :: Parser (Maybe Word8)
peekWord8 = T.Parser $ \t pos@(Pos pos_) more _lose succ ->
  case () of
    -- A byte is already buffered: return it, position unchanged.
    _| pos_ < Buf.length t ->
       let !w = Buf.unsafeIndex t pos_
       in succ t pos more (Just w)
     -- No buffered byte and no more input can arrive: end of input.
     | more == Complete ->
       succ t pos more Nothing
     -- Otherwise ask for more input: succeed with the new byte if any
     -- arrives, or with 'Nothing' if the feed is exhausted. Either
     -- way this parser itself succeeds.
     | otherwise ->
       let succ' t' pos' more' = let !w = Buf.unsafeIndex t' pos_
                                 in succ t' pos' more' (Just w)
           lose' t' pos' more' = succ t' pos' more' Nothing
       in prompt t pos more lose' succ'
{-# INLINE peekWord8 #-}
-- | Match any byte, to perform lookahead. Does not consume any
-- input, but will fail if end of input has been reached.
peekWord8' :: Parser Word8
peekWord8' = T.Parser $ \t pos more lose succ ->
if lengthAtLeast pos 1 t
then succ t pos more (Buf.unsafeIndex t (fromPos pos))
else let succ' t' pos' more' bs' = succ t' pos' more' $! B.unsafeHead bs'
in ensureSuspended 1 t pos more lose succ'
{-# INLINE peekWord8' #-}
-- | Match either a single newline character @\'\\n\'@, or a carriage
-- return followed by a newline character @\"\\r\\n\"@.
endOfLine :: Parser ()
endOfLine = lf <|> crlf
  where
    lf = word8 10 >> return ()
    crlf = string "\r\n" >> return ()
-- | Terminal failure continuation.
failK :: Failure a
failK t (Pos pos) _more stack msg = Fail (Buf.unsafeDrop pos t) stack msg
{-# INLINE failK #-}
-- | Terminal success continuation.
successK :: Success a a
successK t (Pos pos) _more a = Done (Buf.unsafeDrop pos t) a
{-# INLINE successK #-}
-- | Run a parser.
parse :: Parser a -> ByteString -> Result a
parse m s = T.runParser m (buffer s) (Pos 0) Incomplete failK successK
{-# INLINE parse #-}
-- | Run a parser that cannot be resupplied via a 'Partial' result.
--
-- This function does not force a parser to consume all of its input.
-- Instead, any residual input will be discarded. To force a parser
-- to consume all of its input, use something like this:
--
-- @
--'parseOnly' (myParser 'Control.Applicative.<*' 'endOfInput')
-- @
parseOnly :: Parser a -> ByteString -> Either String a
parseOnly m s =
  case T.runParser m (buffer s) (Pos 0) Complete failK successK of
    Done _ a     -> Right a
    Fail _ _ err -> Left err
    _            -> error "parseOnly: impossible error!"
{-# INLINE parseOnly #-}
get :: Parser ByteString
get = T.Parser $ \t pos more _lose succ ->
succ t pos more (Buf.unsafeDrop (fromPos pos) t)
{-# INLINE get #-}
endOfChunk :: Parser Bool
endOfChunk = T.Parser $ \t pos more _lose succ ->
succ t pos more (fromPos pos == Buf.length t)
{-# INLINE endOfChunk #-}
inputSpansChunks :: Int -> Parser Bool
inputSpansChunks i = T.Parser $ \t pos_ more _lose succ ->
let pos = pos_ + Pos i
in if fromPos pos < Buf.length t || more == Complete
then succ t pos more False
else let lose' t' pos' more' = succ t' pos' more' False
succ' t' pos' more' = succ t' pos' more' True
in prompt t pos more lose' succ'
{-# INLINE inputSpansChunks #-}
advance :: Int -> Parser ()
advance n = T.Parser $ \t pos more _lose succ ->
succ t (pos + Pos n) more ()
{-# INLINE advance #-}
ensureSuspended :: Int -> Buffer -> Pos -> More
-> Failure r
-> Success ByteString r
-> Result r
ensureSuspended n t pos more lose succ =
runParser (demandInput >> go) t pos more lose succ
where go = T.Parser $ \t' pos' more' lose' succ' ->
if lengthAtLeast pos' n t'
then succ' t' pos' more' (substring pos (Pos n) t')
else runParser (demandInput >> go) t' pos' more' lose' succ'
-- | If at least @n@ elements of input are available, return the
-- current input, otherwise fail.
ensure :: Int -> Parser ByteString
ensure n = T.Parser $ \t pos more lose succ ->
if lengthAtLeast pos n t
then succ t pos more (substring pos (Pos n) t)
-- The uncommon case is kept out-of-line to reduce code size:
else ensureSuspended n t pos more lose succ
-- Non-recursive so the bounds check can be inlined:
{-# INLINE ensure #-}
-- | Return both the result of a parse and the portion of the input
-- that was consumed while it was being parsed.
match :: Parser a -> Parser (ByteString, a)
match p = T.Parser $ \t pos more lose succ ->
let succ' t' pos' more' a =
succ t' pos' more' (substring pos (pos'-pos) t', a)
in runParser p t pos more lose succ'
lengthAtLeast :: Pos -> Int -> Buffer -> Bool
lengthAtLeast (Pos pos) n bs = Buf.length bs >= pos + n
{-# INLINE lengthAtLeast #-}
substring :: Pos -> Pos -> Buffer -> ByteString
substring (Pos pos) (Pos n) = Buf.substring pos n
{-# INLINE substring #-}
|
DavidAlphaFox/ghc
|
utils/haddock/haddock-library/vendor/attoparsec-0.12.1.1/Data/Attoparsec/ByteString/Internal.hs
|
bsd-3-clause
| 15,757 | 0 | 27 | 3,763 | 3,753 | 2,005 | 1,748 | 288 | 3 |
import Test.Cabal.Prelude

-- Test building an executable whose main() function is defined in a C
-- file.
main = setupAndCabalTest (setup_build [])
|
mydaum/cabal
|
cabal-testsuite/PackageTests/CMain/setup.test.hs
|
bsd-3-clause
| 147 | 0 | 7 | 23 | 23 | 13 | 10 | 2 | 1 |
module ControlledVisit where
import Control.Monad (filterM)
import System.Directory (Permissions(..), getModificationTime, getPermissions, getDirectoryContents)
import System.Time (ClockTime(..))
import System.FilePath (takeExtension)
import Control.OldException (bracket, handle)
import System.IO (IOMode(..), hClose, hFileSize, openFile)
import System.FilePath((</>))
import Control.Monad (forM, liftM)
-- | Everything gathered about one filesystem entry. All fields except
-- the path are 'Maybe'-wrapped because collecting them can fail (see
-- 'maybeIO').
data Info = Info {
    infoPath :: FilePath,
    -- | Permissions, when they could be read.
    infoPerms :: Maybe Permissions,
    -- | Size in bytes, when the file could be opened.
    infoSize :: Maybe Integer,
    -- | Last modification time, when available.
    infoModTime :: Maybe ClockTime
  } deriving (Eq, Ord, Show)
-- | Run an IO action, turning any thrown exception into 'Nothing'.
maybeIO :: IO a -> IO (Maybe a)
maybeIO action = handle (const (return Nothing)) (fmap Just action)
-- | Collect whatever metadata can be gathered for a path; each probe
-- that throws simply yields 'Nothing' in the corresponding field.
getInfo :: FilePath -> IO Info
getInfo path = do
  permissions <- maybeIO (getPermissions path)
  bytes      <- maybeIO (bracket (openFile path ReadMode) hClose hFileSize)
  modTime    <- maybeIO (getModificationTime path)
  return (Info path permissions bytes modTime)
-- | An entry counts as a directory when its permissions are known and
-- include 'searchable'; unknown permissions count as not-a-directory.
isDirectory :: Info -> Bool
isDirectory info =
  case infoPerms info of
    Nothing    -> False
    Just perms -> searchable perms
-- | Walk a directory tree, letting @order@ rearrange (or prune) the
-- entries of each directory before they are visited.
traverse :: ([Info] -> [Info]) -> FilePath -> IO [Info]
traverse order path = do
  names <- getUsefulContents path
  contents <- mapM getInfo (path : map (path </>) names)
  children <- forM (order contents) visit
  return (concat children)
  where
    -- Recurse into subdirectories; the directory itself (== path) is
    -- kept as a plain entry to avoid infinite recursion.
    visit info
      | isDirectory info && infoPath info /= path = traverse order (infoPath info)
      | otherwise = return [info]
-- | Directory contents without the "." and ".." pseudo-entries.
getUsefulContents :: FilePath -> IO [String]
getUsefulContents path = do
  names <- getDirectoryContents path
  return [n | n <- names, n /= ".", n /= ".."]
|
pauldoo/scratch
|
RealWorldHaskell/ch09/ControlledVisit.hs
|
isc
| 1,547 | 0 | 14 | 297 | 563 | 300 | 263 | 37 | 2 |
{-# LANGUAGE ExistentialQuantification #-}
-- {-# LANGUAGE GADTs #-}

-- data C a => T a = Cons a
-- requires DatatypeContext
-- "The designers of Haskell 98 do now think, that it was a bad decision to allow constraints on constructors. GHC as of version 7.2 disallows them by default (turn back on with -XDatatypeContexts).|"
-- see: https://wiki.haskell.org/Data_declaration_with_constraint

-- NOTE(review): the line below is a bare constructor signature and will
-- not compile on its own; to be valid it must appear inside a GADT
-- declaration (with the GADTs extension enabled), e.g.
--   data ShowBox1 a where
--     ShowBox1 :: forall a. Show a => a -> ShowBox1 a
ShowBox1 :: forall a. Show a => a -> ShowBox1 a
|
NickAger/LearningHaskell
|
HaskellProgrammingFromFirstPrinciples/Chapter30/ExistentialTypes.hsproj/Main.hs
|
mit
| 446 | 1 | 4 | 76 | 28 | 18 | 10 | -1 | -1 |
{-# LANGUAGE CPP #-}
module BCM.Visualize.Internal.Types where
#if !MIN_VERSION_base(4,8,0)
import Foreign.ForeignPtr.Safe( ForeignPtr, castForeignPtr )
#else
import Foreign.ForeignPtr( ForeignPtr, castForeignPtr )
#endif
import Control.Monad (when)
import Data.Serialize
import Data.Bits( xor, (.&.), unsafeShiftR )
import qualified Data.Vector.Unboxed as U
import Data.Word
import Data.List (foldl')
import qualified Data.ByteString as B
import qualified Data.ByteString.Char8 as BS
import Data.Vector.Storable (Vector)
-- | Value used to identify a png chunk, must be 4 bytes long.
type ChunkSignature = B.ByteString
type Palette = Vector Word8
-- | Generic header used in PNG images.
data PngIHdr = PngIHdr
{ width :: !Word32 -- ^ Image width in number of pixel
, height :: !Word32 -- ^ Image height in number of pixel
, bitDepth :: !Word8 -- ^ Number of bit per sample
, colourType :: !PngImageType -- ^ Kind of png image (greyscale, true color, indexed...)
, compressionMethod :: !Word8 -- ^ Compression method used
, filterMethod :: !Word8 -- ^ Must be 0
, interlaceMethod :: !PngInterlaceMethod -- ^ If the image is interlaced (for progressive rendering)
}
deriving Show
-- | Data structure during real png loading/parsing
data PngRawChunk = PngRawChunk
{ chunkLength :: Word32
, chunkType :: ChunkSignature
, chunkCRC :: Word32
, chunkData :: B.ByteString
}
-- | What kind of information is encoded in the IDAT section
-- of the PngFile
data PngImageType =
PngGreyscale
| PngTrueColour
| PngIndexedColor
| PngGreyscaleWithAlpha
| PngTrueColourWithAlpha
deriving Show
-- | Different known interlace methods for PNG image
data PngInterlaceMethod =
-- | No interlacing, basic data ordering, line by line
-- from left to right.
PngNoInterlace
-- | Use the Adam7 ordering, see `adam7Reordering`
| PngInterlaceAdam7
deriving (Enum, Show)
instance Serialize PngRawChunk where
put chunk = do
putWord32be $ chunkLength chunk
putByteString $ chunkType chunk
when (chunkLength chunk /= 0)
(putByteString $ chunkData chunk)
putWord32be $ chunkCRC chunk
get = do
size <- getWord32be
chunkSig <- getByteString (fromIntegral $ B.length iHDRSignature)
imgData <- if size == 0
then return B.empty
else getByteString (fromIntegral size)
crc <- getWord32be
let computedCrc = pngComputeCrc [chunkSig, imgData]
when (computedCrc `xor` crc /= 0)
(fail $ "Invalid CRC : " ++ show computedCrc ++ ", "
++ show crc)
return PngRawChunk {
chunkLength = size,
chunkData = imgData,
chunkCRC = crc,
chunkType = chunkSig
}
instance Serialize PngImageType where
put PngGreyscale = putWord8 0
put PngTrueColour = putWord8 2
put PngIndexedColor = putWord8 3
put PngGreyscaleWithAlpha = putWord8 4
put PngTrueColourWithAlpha = putWord8 6
get = get >>= imageTypeOfCode
imageTypeOfCode :: Word8 -> Get PngImageType
imageTypeOfCode 0 = return PngGreyscale
imageTypeOfCode 2 = return PngTrueColour
imageTypeOfCode 3 = return PngIndexedColor
imageTypeOfCode 4 = return PngGreyscaleWithAlpha
imageTypeOfCode 6 = return PngTrueColourWithAlpha
imageTypeOfCode _ = fail "Invalid png color code"
instance Serialize PngIHdr where
put hdr = do
putWord32be 13
let inner = runPut $ do
putByteString iHDRSignature
putWord32be $ width hdr
putWord32be $ height hdr
putWord8 $ bitDepth hdr
put $ colourType hdr
put $ compressionMethod hdr
put $ filterMethod hdr
put $ interlaceMethod hdr
crc = pngComputeCrc [inner]
putByteString inner
putWord32be crc
get = do
_size <- getWord32be
ihdrSig <- getByteString (B.length iHDRSignature)
when (ihdrSig /= iHDRSignature)
(fail "Invalid PNG file, wrong ihdr")
w <- getWord32be
h <- getWord32be
depth <- get
colorType <- get
compression <- get
filtermethod <- get
interlace <- get
_crc <- getWord32be
return PngIHdr {
width = w,
height = h,
bitDepth = depth,
colourType = colorType,
compressionMethod = compression,
filterMethod = filtermethod,
interlaceMethod = interlace
}
instance Serialize PngInterlaceMethod where
get = getWord8 >>= \w -> case w of
0 -> return PngNoInterlace
1 -> return PngInterlaceAdam7
_ -> fail "Invalid interlace method"
put PngNoInterlace = putWord8 0
put PngInterlaceAdam7 = putWord8 1
-- signature
-- | Signature signalling that the following data will be a png image
-- in the png bit stream
pngSignature :: ChunkSignature
pngSignature = B.pack [137, 80, 78, 71, 13, 10, 26, 10]
-- | Helper function to help pack signatures.
signature :: String -> ChunkSignature
signature = BS.pack
-- | Signature for the header chunk of png (must be the first)
iHDRSignature :: ChunkSignature
iHDRSignature = signature "IHDR"
-- | Signature for a palette chunk in the pgn file. Must
-- occure before iDAT.
pLTESignature :: ChunkSignature
pLTESignature = signature "PLTE"
-- | Signature for a data chuck (with image parts in it)
iDATSignature :: ChunkSignature
iDATSignature = signature "IDAT"
-- | Signature for the last chunk of a png image, telling
-- the end.
iENDSignature :: ChunkSignature
iENDSignature = signature "IEND"
----------------------------------------------------------------------------
-- | Compute the CRC of a raw buffer, as described in annex D of the PNG
-- specification.
pngComputeCrc :: [B.ByteString] -> Word32
pngComputeCrc = (0xFFFFFFFF `xor`) . B.foldl' updateCrc 0xFFFFFFFF . B.concat
where updateCrc crc val =
let u32Val = fromIntegral val
lutVal = pngCrcTable U.! fromIntegral ((crc `xor` u32Val) .&. 0xFF)
in lutVal `xor` (crc `unsafeShiftR` 8)
-- | From the Annex D of the png specification.
pngCrcTable :: U.Vector Word32
pngCrcTable = U.fromListN 256 [ foldl' updateCrcConstant c [zero .. 7] | c <- [0 .. 255] ]
where zero = 0 :: Int -- To avoid defaulting to Integer
updateCrcConstant c _ | c .&. 1 /= 0 = magicConstant `xor` (c `unsafeShiftR` 1)
| otherwise = c `unsafeShiftR` 1
magicConstant = 0xedb88320 :: Word32
|
kaizhang/BCMtools
|
src/BCM/Visualize/Internal/Types.hs
|
mit
| 6,793 | 0 | 16 | 1,901 | 1,402 | 752 | 650 | 157 | 1 |
module CFDI.Types.PaymentChain where

import CFDI.Chainable
import CFDI.Types.Type
import Data.Text        (Text, pack, unpack)
import Text.Regex       (mkRegex)
import Text.Regex.Posix (matchTest)

-- | Original payment chain ("cadena de pago") carried by the CFDI
-- payments complement.
newtype PaymentChain = PaymentChain Text deriving (Eq, Show)

instance Chainable PaymentChain where
  chain (PaymentChain c) = c

instance Type PaymentChain where
  parseExpr str
    | matchTest regExp str = Right . PaymentChain $ pack str
    | otherwise            = Left $ DoesNotMatchExpr "[^|]{1,8192}"
    where
      -- Fixed: the regex previously accepted any character
      -- ("^.{1,8192}$"), contradicting the reported expression above.
      -- The pipe is the CFDI chain separator and must be excluded,
      -- per the SAT pattern [^|]{1,8192}.
      regExp = mkRegex "^[^|]{1,8192}$"

  render (PaymentChain c) = unpack c
|
yusent/cfdis
|
src/CFDI/Types/PaymentChain.hs
|
mit
| 581 | 0 | 9 | 112 | 177 | 94 | 83 | 15 | 0 |
module Hpcb.Component.Switch (
  tact_switch
) where

import Hpcb.Data.Action
import Hpcb.Data.Base
import Hpcb.Data.Circuit
import Hpcb.Data.Effects
import Hpcb.Data.Layer
import Hpcb.Data.FpElement
import Hpcb.Functions
import Data.Monoid

-- | Generic tactile switch footprint: labels, silkscreen outline,
-- courtyard, and four through-hole pads wired as two nets.
tact_switch ::  String      -- ^ Reference
                -> Circuit
tact_switch ref = footprint ref "TACT_SWITCH" $
  -- Reference label on the silkscreen, value label on the fab layer.
  fpText "reference" ref defaultEffects # translate (V2 0 (-4)) # layer FSilkS
  <> fpText "value" "TACT_SWITCH" defaultEffects # translate (V2 0 4) # layer FFab
  -- Body outline: four edge segments on the silkscreen.
  <> (
    fpLine (V2 (-2) (-3)) (V2 2 (-3))
    <> fpLine (V2 (-2) 3) (V2 2 3)
    <> fpLine (V2 (-3) (-1)) (V2 (-3) 1)
    <> fpLine (V2 3 (-1)) (V2 3 1)
  ) # layer FSilkS # width 0.15
  -- Button circle on the fab layer; courtyard bounding rectangle.
  <> fpCircle 3 # layer FFab # width 0.15
  <> fpRectangle 8.7 6.7 # layer FCrtYd # width 0.05
  -- Four through-hole pads; pad numbers 1 and 2 each appear twice —
  -- presumably the two legs on each side are electrically common
  -- (TODO confirm against the package datasheet).
  <> (
    pad 1 ThroughHole{getDrill=1} Circle (V2 1.7272 1.7272) (newNet ref 1) # translate (V2 (-6.5/2) (-4.5/2))
    <> pad 1 ThroughHole{getDrill=1} Circle (V2 1.7272 1.7272) (newNet ref 1) # translate (V2 (6.5/2) (-4.5/2))
    <> pad 2 ThroughHole{getDrill=1} Circle (V2 1.7272 1.7272) (newNet ref 2) # translate (V2 (-6.5/2) (4.5/2))
    <> pad 2 ThroughHole{getDrill=1} Circle (V2 1.7272 1.7272) (newNet ref 2) # translate (V2 (6.5/2) (4.5/2))
  ) # layers (copperLayers ++ maskLayers)
|
iemxblog/hpcb
|
src/Hpcb/Component/Switch.hs
|
mit
| 1,330 | 0 | 25 | 274 | 651 | 336 | 315 | 29 | 1 |
{-# LANGUAGE RecordWildCards #-}
module TestSuite.Counter
( byteCounterArithmetic
, throughputThresholds
, activationCounterPropagation
, byteCounterPropagation
, patternTimeCounterPropagation
, sleepTimeCounterPropagation
, latencyTimeCounterPropagation
, convert1ByteToThroughput
) where
import Control.Concurrent.STM (STM)
import Control.Monad (forM_)
import Data.Time (NominalDiffTime)
import qualified Data.Vector as Vector
import Generators.Plan ()
import SyntheticWeb.Counter
import SyntheticWeb.Counter.ByteCounter
import SyntheticWeb.Counter.Throughput
import SyntheticWeb.Plan
import SyntheticWeb.Task
import Test.HUnit hiding (assert)
import Test.QuickCheck
import Test.QuickCheck.Monadic
import GHC.Int (Int64)
-- | Byte counters with both directions drawn uniformly from
-- [0, 2000000].
instance Arbitrary ByteCounter where
  arbitrary = do
    dl <- choose (0, 2000000)
    ul <- choose (0, 2000000)
    return (ByteCounter dl ul)
-- | 'addByteCount' must sum the download and upload fields
-- independently.
byteCounterArithmetic :: ByteCounter -> ByteCounter -> Bool
byteCounterArithmetic x y =
  download total == download x + download y
    && upload total == upload x + upload y
  where
    total = x `addByteCount` y
-- | Over a one-second window, 'toThroughput' must report bits
-- (bytes * 8) in the unit appropriate to their magnitude, with unit
-- boundaries at 500, 500000 and 500000000 bits.
throughputThresholds :: ByteCounter -> Bool
throughputThresholds byteCounter@ByteCounter {..} =
  -- Duration 1 second, so the expected bit count is simply bytes * 8.
  let (dl, ul) = toThroughput byteCounter 1
  in dl `matches` (download * 8) && ul `matches` (upload * 8)
  where
    -- Check both the unit choice (magnitude range) and the scaling.
    matches :: Throughput -> Int64 -> Bool
    matches (Bps bits) bits' =
      -- The throughput shall not be scaled.
      bits' < 500 && bits == fromIntegral bits'
    matches (Kbps bits) bits' =
      -- The throughput shall be scaled down by 1000
      bits' < 500000 && bits == fromIntegral bits' * 0.001
    matches (Mbps bits) bits' =
      -- The throughput shall be scaled down by 1000000
      bits' < 500000000 && bits == fromIntegral bits' * 0.000001
    matches (Gbps bits) bits' =
      -- The throughput shall be scaled down by 1000000000
      bits' >= 500000000 && bits == fromIntegral bits' * 0.000000001
activationCounterPropagation :: Plan -> Property
activationCounterPropagation plan@(Plan plan') =
monadicIO $ do
(counters, tasks) <- run (mkTaskSet plan)
run $ Vector.mapM_ (atomically . activatePattern . counterFrom) tasks
FrozenSet (_, g, ps) <- run (freeze counters)
-- Check that the total activations is equal to the number of
-- tasks.
assert $ fromIntegral (Vector.length tasks) == totalActivations g
-- Check that the activations for each pattern is equal to the
-- pattern's weight.
forM_ plan' $ \(Weight w, p) -> do
let [patternCounter] = filter (\c -> patternName c == name p) ps
assert $ fromIntegral w == activations patternCounter
byteCounterPropagation :: Plan -> Property
byteCounterPropagation plan@(Plan plan') =
monadicIO $ do
(counters, tasks) <- run (mkTaskSet plan)
let bytes = ByteCounter { download = 1, upload = 2 }
run $ Vector.mapM_ (atomically . updateByteCount bytes . counterFrom) tasks
FrozenSet (_, g, ps) <- run (freeze counters)
-- Check that the total download bytes is equal to the number of
-- tasks, and that the upload bytes are twice as big.
let numTasks = fromIntegral $ Vector.length tasks
assert $ numTasks == download (totalByteCount g)
assert $ 2 * numTasks == upload (totalByteCount g)
-- Check that the counters for each pattern has the same
-- proportions.
forM_ plan' $ \(Weight w, p) -> do
let [patternCounter] = filter (\c -> patternName c == name p) ps
w' = fromIntegral w
assert $ w' == download (byteCount patternCounter)
assert $ 2 * w' == upload (byteCount patternCounter)
patternTimeCounterPropagation :: Plan -> Property
patternTimeCounterPropagation =
timeCounterPropagation updatePatternTime totalPatternTime patternTime
sleepTimeCounterPropagation :: Plan -> Property
sleepTimeCounterPropagation =
timeCounterPropagation updateSleepTime totalSleepTime sleepTime
latencyTimeCounterPropagation :: Plan -> Property
latencyTimeCounterPropagation =
timeCounterPropagation updateLatencyTime totalLatencyTime latencyTime
timeCounterPropagation :: (NominalDiffTime -> CounterPair -> STM ())
-> (GlobalCounter -> NominalDiffTime)
-> (PatternCounter -> NominalDiffTime)
-> Plan
-> Property
timeCounterPropagation setter ggetter pgetter plan@(Plan plan') =
monadicIO $ do
(counters, tasks) <- run (mkTaskSet plan)
let delta = toEnum 1
run $ Vector.mapM_ (atomically . setter delta . counterFrom) tasks
FrozenSet (_, g, ps) <- run (freeze counters)
-- Check that the total time is equal to the "time" proportial to
-- the number of tasks.
let totTime = toEnum $ Vector.length tasks
assert $ totTime == ggetter g
-- Check that the time for each pattern are the same as the
-- weight.
forM_ plan' $ \(Weight w, p) -> do
let [patternCounter] = filter (\c -> patternName c == name p) ps
delta' = toEnum w
assert $ delta' == pgetter patternCounter
convert1ByteToThroughput :: Assertion
convert1ByteToThroughput = do
let counter1 = ByteCounter 1 0
counter2 = ByteCounter 0 1
counter3 = ByteCounter 1 1
-- For one second duration.
assertEqual "Shall be equal" (Bps 8, Bps 0) $ toThroughput counter1 1.0
assertEqual "Shall be equal" (Bps 0, Bps 8) $ toThroughput counter2 1.0
assertEqual "Shall be equal" (Bps 8, Bps 8) $ toThroughput counter3 1.0
-- For two seconds duration.
assertEqual "Shall be equal" (Bps 4, Bps 0) $ toThroughput counter1 2.0
assertEqual "Shall be equal" (Bps 0, Bps 4) $ toThroughput counter2 2.0
assertEqual "Shall be equal" (Bps 4, Bps 4) $ toThroughput counter3 2.0
|
kosmoskatten/synthetic-web
|
test/TestSuite/Counter.hs
|
mit
| 5,830 | 0 | 19 | 1,359 | 1,579 | 800 | 779 | 107 | 4 |
-- Based on http://decipheringmusictheory.com/?page_id=46
module Other.Counterpoint where

import Mezzo

-- Upper voice, written in Mezzo's note DSL (':|:' chains notes
-- sequentially; qn/en/hn are quarter/eighth/half durations).
v1 = d qn :|: g qn :|: fs qn :|: g en :|: a en :|: bf qn :|: a qn :|: g hn

-- Lower voice; the underscored note names are presumably the octave
-- below — TODO confirm against Mezzo's naming convention.
v2 = d qn :|: ef qn :|: d qn :|: bf_ en :|: a_ en :|: g_ qn :|: fs_ qn :|: g_ hn

-- Score in G minor, checked against Mezzo's strict rule set, with the
-- two voices stacked in parallel by ':-:'.
sco = score setKeySig g_min
            setRuleSet strict
            withMusic (v1 :-: v2)

-- Render the score to a MIDI file with a descriptive title.
main = renderScore "rendered/Counterpoint.mid"
                   "2-voice contrapuntal composition"
                   sco
|
DimaSamoz/mezzo
|
examples/src/Other/Counterpoint.hs
|
mit
| 482 | 0 | 12 | 146 | 161 | 76 | 85 | 10 | 1 |
{%-
set imported_module_names =
    (["API", "Types"] | map("child_module_name") | list) +
    ["Data.Time.Calendar", "Servant"]
-%}
{{hs_copyright}}
-- | Servant application wiring for the generated API (Jinja template).
module {{module_name}}App.App (app) where
{% for m in imported_module_names | sort -%}
import {{m}}
{% endfor %}
-- Sample fixture data returned by the stub handlers.
isaac :: User
isaac = User "Isaac Newton" 372 "[email protected]" (fromGregorian 1683 3 1)
albert :: User
albert = User "Albert Einstein" 136 "[email protected]" (fromGregorian 1905 12 1)
discombobulator :: Widget
discombobulator = Widget "Discombobulator" "Discombobulates according to system settings"
frobber :: Widget
frobber = Widget "Frobber" "Frobs on demand"
-- Handlers are paired with the API's routes positionally via ':<|>'.
apiServer :: Server API
apiServer = return [isaac, albert]
       :<|> return albert
       :<|> return isaac
       :<|> return [discombobulator, frobber]
apiProxy :: Proxy API
apiProxy = Proxy
-- | The WAI application serving the whole API.
app :: Application
app = serve apiProxy apiServer
|
rcook/ptool-templates
|
elm-haskell/app_App.hs
|
mit
| 893 | 29 | 9 | 169 | 274 | 154 | 120 | -1 | -1 |
module TypeKwonDo where

f :: Int -> String
f = undefined

g :: String -> Char
g = undefined

-- | Chain the two conversions above.
h :: Int -> Char
h = g . f

data A
data B
data C

q :: A -> B
q = undefined

w :: B -> C
w = undefined

-- | Chain 'q' then 'w'.
m :: A -> C
m = w . q

-- | Apply @f@, then @f1@, and keep the first component of the pair.
munge :: (x -> y) -> (y -> (w, z)) -> x -> w
munge f f1 = fst . f1 . f
|
raventid/coursera_learning
|
haskell/chpater5/type_kwon_do.hs
|
mit
| 306 | 0 | 9 | 94 | 187 | 103 | 84 | -1 | -1 |
module BinaryTree where
-- | A binary tree: empty, or a node with a left subtree, a value and a
-- right subtree.
data BinaryTree a =
  Leaf
  | Node (BinaryTree a) a (BinaryTree a)
  deriving (Eq, Ord, Show)

-- | Anamorphism for binary trees: the step function either stops
-- ('Nothing' -> 'Leaf') or yields (left seed, node value, right seed).
unfold :: (a -> Maybe (a,b,a)) -> a -> BinaryTree b
unfold step seed =
  case step seed of
    Nothing           -> Leaf
    Just (ls, v, rs)  -> Node (unfold step ls) v (unfold step rs)
-- TODO: it would be very nice to add funny pretty printing like
-- 0
-- / \
-- 1 1
-- / \ / \
-- 2 2 2 2
-- / \ / \ / \ / \
-- 3 3 3 3 3 3 3 3
-- But it seems to be a bit harder for deep trees. I'm not sure I know how to visualize deep tree this way :(
-- | Build the complete tree of the given depth, where every node at
-- distance d from the leaves carries the value d+1.
treeBuild :: Integer -> BinaryTree Integer
treeBuild = unfold step
  where step 0 = Nothing
        step i = Just (i - 1, i, i - 1)
|
raventid/coursera_learning
|
haskell/chapter12/binary_tree.hs
|
mit
| 710 | 0 | 11 | 232 | 216 | 120 | 96 | 11 | 2 |
module Document.Tests.UnlessExcept where
-- Modules
import Document.Tests.Suite
import Test.UnitTest
test_case :: TestCase
test_case = test

-- | Proof-obligation test cases for the unless/except clause, verified
-- against the LaTeX model in 'path0' (machines 0 and 1).
test :: TestCase
test = test_cases
            "Unless / except clause"
            [ (poCase "test 0, unless/except without indices"
                (verify path0 0) result0)
            , (poCase "test 1, unless/except with indices and free variables"
                (verify path0 1) result1)
            ]
-- | Location of the LaTeX source verified by the cases above.
path0 :: FilePath
path0 = [path|Tests/unless-except.tex|]
-- | Expected verifier output for machine m0: one line per proof
-- obligation plus the summary line, each newline-terminated.
result0 :: String
result0 = concatMap (++ "\n")
    [ "  o  m0/evt0/FIS/p@prime"
    , " xxx m0/evt0/SAF/saf1"
    , "  o  m0/evt1/FIS/p@prime"
    , "  o  m0/evt1/SAF/saf1"
    , "passed 3 / 4"
    ]
-- | Expected verifier output for machine m1, newline-terminated lines.
result1 :: String
result1 = concatMap (++ "\n")
    [ "  o  m1/evt0/FIS/f@prime"
    , " xxx m1/evt0/SAF/saf1"
    , " xxx m1/evt0/WD/ACT/m0:act0"
    , "  o  m1/evt1/FIS/f@prime"
    , "  o  m1/evt1/SAF/saf1"
    , " xxx m1/evt1/WD/ACT/m0:act0"
    , " xxx m1/saf1/SAF/WD/lhs"
    , "passed 3 / 7"
    ]
|
literate-unitb/literate-unitb
|
src/Document/Tests/UnlessExcept.hs
|
mit
| 1,003 | 0 | 10 | 290 | 165 | 99 | 66 | -1 | -1 |
{-# LANGUAGE FlexibleInstances #-}
module Pyrec.TypeCheckSpec where
import Prelude hiding (map, mapM)
import Control.Applicative
import Control.Monad hiding (mapM, forM)
import Control.Monad.Writer hiding (mapM, forM, sequence)
import Control.Monad.RWS hiding (mapM, forM, sequence)
import qualified Data.Map as M
import Data.Map (Map)
import Data.Word
import Data.Traversable hiding (for, sequence)
import Text.Parsec.Error
import Control.Monad (mzero)
import System.FilePath hiding ((<.>))
import System.Directory (getDirectoryContents)
import System.IO.Unsafe as Sin
import Test.Hspec
import Test.Hspec.QuickCheck (prop)
import Test.QuickCheck hiding ((.&.))
import Pyrec.Misc
import Pyrec.Error
import Pyrec.PrettyPrint
import Pyrec
import qualified Pyrec.AST as A
import Pyrec.IR
import qualified Pyrec.IR.Desugar as D
import Pyrec.IR.Check as C
import qualified Pyrec.IR.Core as R
import qualified Pyrec.Parse as P
import qualified Pyrec.Desugar as D
import Pyrec.ScopeCheck as S
import Pyrec.TypeCheck as T
import qualified Pyrec.Report as R
-- | Remove every 'D.Constraint' wrapper from a desugared expression,
-- recursing through the 'D.E' node's sub-expressions.
strip (D.E l t e) = D.E l t $ strip <$> e
strip (D.Constraint _ _ e) = strip e
-- | Parse-and-desugar a source string. Runs the underlying RWS with a
-- unit reader and counter 0, yielding (desugared expr, final counter,
-- accumulated error messages) on parse success.
pd :: String -> Either ParseError (D.Expr, Word, [D.ErrorMessage])
pd = (\m -> runRWS m () 0) <.> parseDesugar
-- | Type-check a program twice: once as parsed, once with all type
-- constraints stripped. Returns whether both checks agree, both checked
-- expressions, the (re-tagged) parse errors, and both checkers' reports.
testInfer :: T.Env -> String -> Either ParseError (Bool, R.Expr, R.Expr,
                                                   [R.ErrorMessage],
                                                   [R.ErrorMessage],
                                                   [R.ErrorMessage])
testInfer env src = for (pd src) $ \case
  (e, errors) -> (e1' == e2', e1', e2', errors', e1r, e2r)
    where errors'    = R.Earlier <$$> errors
          (e1', e1r) = runWriter $ checkReport env e
          (e2', e2r) = runWriter $ checkReport env $ strip e
noErrors, fillInConstraints ::
  Either ParseError (Bool, R.Expr, R.Expr,
                     [R.ErrorMessage],
                     [R.ErrorMessage],
                     [R.ErrorMessage]) -> Bool

-- | True iff parsing succeeded and all three error lists are empty.
noErrors (Right (_, _, _, [], [], [])) = True
noErrors _                             = False

-- | When parsing succeeded with no errors, return the stored agreement
-- flag (checked-with-constraints == checked-without); otherwise False.
fillInConstraints (Right (agree, _, _, [], [], [])) = agree
fillInConstraints _                                 = False
-- | Top-level spec: unifier properties followed by type-checker cases.
spec :: Spec
spec = unifySpec >> tcSpec
-- Property: unifying a type with itself is the identity (bounded at 5s
-- per case because generated types can be large).
unifySpec = describe "the type-unifier" $ do
  it "combines identical types without modification" $
    property $ within 5000000 $ \(ty :: C.Type) -> unify M.empty ty ty == ty
-- Example- and property-based cases for the type checker, plus two
-- directory-driven suites (bad programs / constraint reconstruction).
tcSpec = describe "the type checker" $ do

  it "type checks natural number literals" $
    property $ \(num :: Word) ->
      fillInConstraints $ testInfer env $ "(" ++ show num ++ " :: Number)"

  it "type checks decimal literals" $
    property $ \(n1 :: Word) (n2 :: Word) ->
      fillInConstraints $ testInfer env
      $ "(" ++ show n1 ++ "." ++ show n2 ++ " :: Number)"

  it "type checks string literals" $
    property $ \(num :: String) -> fillInConstraints $ testInfer env $ "(" ++ show num ++ " :: String)"

  it "accepts the identity function" $
    testInfer env "fun<A>(x :: A): x;" `shouldSatisfy` noErrors

  it "accepts a concrete \"infinite loop\"" $
    testInfer env "fun bob(x :: String) -> A: bob(x);" `shouldSatisfy` noErrors

  it "accepts a polymorphic \"infinite loop\"" $
    testInfer env "fun bob<A>(x :: A) -> A: bob<A>(x);" `shouldSatisfy` noErrors

  testFiles "tests/error" "catches bad program"
    (testInfer env) (not . noErrors)

  testFiles "tests/fill-in" "fills in the removed constraints"
    (testInfer env) fillInConstraints
-- hopefully an upstream change will pave the way for my atonement
-- | For every file in @directory@, generate one test that feeds the
-- file's contents to @function@ and checks @predicate@ on the result.
--
-- FIX: the directory listing previously used 'Sin.unsafePerformIO' (the
-- "hopefully an upstream change" apology above); hspec's 'runIO' performs
-- the same IO safely during spec construction, with identical test names
-- and behavior.
testFiles :: Show a => FilePath -> String -> (String -> a) -> (a -> Bool) -> Spec
testFiles directory msg function predicate = do
  files <- runIO $ filter (/= ".") . filter (/= "..")
                   <$> getDirectoryContents directory
  forM_ files $ \fileName ->
    it (msg ++ ": " ++ fileName) $ do
      contents <- readFile $ directory </> fileName
      function contents `shouldSatisfy` predicate
|
kmicklas/pyrec
|
src/Pyrec/TypeCheckSpec.hs
|
mit
| 4,392 | 0 | 16 | 1,390 | 1,239 | 690 | 549 | -1 | -1 |
module Main where
import Game
import World
import Rooms
import Actions
import Direction
import Movement
import Player
import Item.Item
import Control.Monad.Writer
import Control.Monad.State
import Data.Map
-- | Static world: nine rooms (R1..R9), two items, and the directed
-- room-connection map. Note the map is not symmetric by construction:
-- each direction is listed explicitly (and (R1,South) is commented out,
-- so the kitchen is one-way from R2 — presumably intentional; verify).
gameWorld = World
  (createRooms [
    RoomInfo R1 (RoomName "Kitchen") (RoomDesc "It is a small room with white walls. There is the faint odour of something rotten."),
    RoomInfo R2 (RoomName "Dining Room") (RoomDesc "The table has been prepared for a banquet of some kind."),
    RoomInfo R3 (RoomName "Living Room") (RoomDesc "The couch is in the centre of the room."),
    RoomInfo R4 (RoomName "Ballroom") (RoomDesc "The room looks abandoned. There must not have been many balls for quite some time."),
    RoomInfo R5 (RoomName "Rumpus Room") (RoomDesc "No-one could have any fun here."),
    RoomInfo R6 (RoomName "Foyer") (RoomDesc "There are portraits all over the walls."),
    RoomInfo R7 (RoomName "Greenhouse") (RoomDesc "The plants seem to be dying."),
    RoomInfo R8 (RoomName "Library") (RoomDesc "It would take a lifetime to read all of these books."),
    RoomInfo R9 (RoomName "Study") (RoomDesc "The room is very quiet.")
  ])
  (createItems
    [
      ItemInfo ItemKey (ItemName "Key") (Just R1) (ItemDesc "The key is oddly-shaped and blue."),
      ItemInfo ItemCrowbar (ItemName "Crowbar") Nothing (ItemDesc "The crowbar is lean and silver.")
    ]
  )
  (Data.Map.fromList
    [
-- ((R1, South), R2),
      ((R2, North), R1),
      ((R2, East), R5),
      ((R2, South), R3),
      ((R3, East), R6),
      ((R3, North), R2),
      ((R4, South), R5),
      ((R5, North), R4),
      ((R5, West), R2),
      ((R6, West), R3),
      ((R6, East), R9),
      ((R7, South), R8),
      ((R8, North), R7),
      ((R8, South), R9),
      ((R9, North), R8),
      ((R9, West), R6)
  ])
-- | Play the game for the given number of turns starting in R1 with an
-- empty inventory, then print the numbered log of everything that
-- happened. Note: the local 'log' shadows 'Prelude.log', and 'result'
-- is computed but unused.
runGame :: TurnsLeft -> IO ()
runGame turns = do
  ((result, log), (_, _)) <- (runStateT $ runWriterT (playGame turns)) (gameWorld, PlayerState R1 [])
  let actions = lines log
      counter = take (length actions) [1..]
      steps = zipWith (\n msg -> (show n) ++ ". " ++ msg) counter actions
  putStrLn $ "\nComplete Story: \n" ++ (unlines steps)
  return ()
-- | Entry point: run a 50-turn game.
main :: IO ()
main = runGame 50
|
disquieting-silence/adventure
|
src/Main.hs
|
mit
| 2,171 | 26 | 16 | 465 | 773 | 434 | 339 | 54 | 1 |
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE DeriveGeneric #-}
module Vong where
import Control.Monad (foldM)
import Data.Csv (FromRecord)
import Data.List (isPrefixOf)
import GHC.Generics (Generic)
import System.Random (randomRIO)
type Probability = Float
-- | A probabilistic rewrite rule: replace occurrences of 'search' with
-- 'replace', each occurrence independently with the given probability.
data Replacement = Replacement
    { search :: String
    , replace :: String
    , probability :: Probability
    } deriving (Generic)

-- Decoded positionally from CSV rows via the Generic instance.
instance FromRecord Replacement
-- | Flip a biased coin: True with the given probability.
conditioned :: Probability -> IO Bool
conditioned p = do
    roll <- randomRIO (0, 1)
    return (roll >= 1 - p)
-- | Scan the text left to right; at every position where 'search' is a
-- prefix, replace it with 'replace' with the rule's probability
-- (each match decided independently), otherwise keep the character.
replaceConditioned :: String -> Replacement -> IO String
replaceConditioned [] _ = return []
replaceConditioned text@(x:xs) r@Replacement {..} =
    if isPrefixOf search text
        then do
            should <- conditioned probability
            if should
                -- Replace the match and continue after it.
                then (replace ++) <$>
                     replaceConditioned (drop (length search) text) r
                -- Coin said no: keep this char and rescan from the next.
                else (x :) <$> replaceConditioned xs r
        else (x :) <$> replaceConditioned xs r
-- | Apply every replacement rule to the text, in list order.
translate :: [Replacement] -> String -> IO String
translate rules text = foldM replaceConditioned text rules
|
kmein/1generator
|
src/Vong.hs
|
mit
| 1,082 | 0 | 14 | 257 | 326 | 179 | 147 | 30 | 3 |
{-# LANGUAGE FlexibleContexts #-}
module TwelfInterface where
import Control.Monad
import Control.Monad.Catch
import Control.Monad.Except
import qualified Data.Text as T
import qualified Data.Text.IO as T
import Data.Typeable
import qualified Data.Map as M
import System.IO
import System.IO.Temp
import qualified Language.Twelf.AST as AST
import Language.Twelf.IntSyn
import Language.Twelf.Parser
import Language.Twelf.Reconstruct
import Language.Twelf.TwelfServer
-- | Load the Fitch signature into a fresh Twelf server, then feed it the
-- given declarations. Left = Twelf's error output, Right = its response.
checkDecls :: String -> String -> String -> IO (Either String String)
checkDecls twelfServer fitchPath declString = withTwelfServer twelfServer False $ do
  (fitchResp, loadSucceeded) <- runTwelfCmd' $ "loadFile " ++ fitchPath
  if not loadSucceeded then
    return . Left . concat $ [
      "An error occurred while loading Fitch definition:\n",
      fitchResp,
      "\n"]
  else do
    (resp, declSucceeded) <- runTwelfCmd' $ "readDecl\n" ++ declString
    return $ if declSucceeded then Right resp else Left resp
-- | Parse a single proof declaration against the Fitch signature and
-- reconstruct it via Twelf, returning its name, type and term.
-- NOTE(review): the @Right (ds, ps)@ let-pattern and the single-element
-- list pattern are partial; a parse failure or unexpected extraction
-- result crashes with a pattern-match error.
extractDecl :: String -> String -> String -> IO (ConstName, A, M)
extractDecl twelfServer fitchPath declString = do
  fitchSigText <- T.readFile fitchPath
  let Right (ds, ps) = parseSig initParserState fitchPath fitchSigText
  case parseDecl ps "<proof declaration>" (T.pack declString) of
    Left err -> error $ show err
    Right (d, _) -> withTwelfServer twelfServer False $ do
      _ <- reconstruct ds
      [(_, DDefn n a m)] <- M.toList <$> extract [d]
      return (n, a, m)
-- | True only for definition declarations ('AST.DDefn').
--
-- BUG FIX: the catch-all clause previously returned True as well, making
-- 'isDefn' constantly True and defeating the "Signature may only contain
-- definitions" check in check' (its @any (not . isDefn)@ could never fire).
isDefn :: AST.Decl -> Bool
isDefn (AST.DDefn _ _ _ _) = True
isDefn _                   = False
-- | Smoke test: ask the running Twelf server for its help text.
test :: (MonadIO m, MonadMask m) => TwelfMonadT m (Either String String)
test = runExceptT $ do
  lift $ runTwelfCmd "help"
-- | Name of a definition declaration. Partial: calls 'error' on any
-- other 'Decl' constructor.
defnName :: Decl -> String
defnName (DDefn n _ _) = n
defnName _ = error "Not a definition"
-- | Combine a user-written definition with Twelf's fully-explicit echo
-- of it: the name comes from the original, while the type/term are the
-- explicit versions with implicit arguments inferred back in.
-- Partial: 'error' if either argument is not a definition.
convertDefn :: AST.Decl -> AST.Decl -> (String, A, M)
convertDefn (AST.DDefn _ n ta tm) (AST.DDefn _ _ ta' tm')
  = (n
    ,inferImplicitA ta $ toType M.empty ta'
    ,inferImplicitM tm $ toTerm M.empty tm')
convertDefn _ _ = error "Not a definition"
-- | Exception carrying a human-readable proof-checking error message.
data CheckException = CheckException String
                    deriving (Show, Typeable)

instance Exception CheckException
-- | Abort checking with a 'CheckException' built from the message.
checkException :: (MonadThrow m) => String -> m a
checkException msg = throwM (CheckException msg)
-- | Ask Twelf for the fully-explicit form of a definition (with
-- Print.implicit on) and merge it with the original via 'convertDefn'.
-- Partial on non-definitions, mirroring 'convertDefn'.
getFullDefn :: (MonadThrow m, MonadIO m)
               => AST.Decl -> TwelfMonadT m (String, A, M)
getFullDefn astDecl@(AST.DDefn _ n _ _) = do
  _ <- runTwelfCmd "set Print.implicit true"
  fullDeclStr <- runTwelfCmd $ "decl " ++ n ++ "\n"
  let fullDeclResult = parseDecl initParserState "<twelf process>" (T.pack fullDeclStr)
  astFullDecl <- either (checkException . show) (return . fst) fullDeclResult
  return $ convertDefn astDecl astFullDecl
getFullDefn _ = error "Not a definition"
-- | Full checking pipeline inside a Twelf session: load the Fitch
-- signature, re-parse it locally (for fixity info), parse and validate
-- the user's declarations, load them into Twelf via a temp file, and
-- return Twelf's response plus each definition in explicit form.
-- Errors are raised as 'CheckException'.
check' :: (MonadMask m, MonadIO m) => String -> String -> TwelfMonadT m (String, [(String, A, M)])
check' fitchPath declString = do
  (fitchResp, fitchSucceeded) <- runTwelfCmd' $ "loadFile " ++ fitchPath
  when (not fitchSucceeded) $ checkException $ concat $
    [ "An error occurred while loading Fitch definition:\n"
    , fitchResp
    , "\n" ]
  -- Twelf succeeded. Assume that our own parser can parse the Fitch signature.
  -- We need the parser state to correctly parse with fixity information.
  fitchSig <- liftIO $ T.readFile fitchPath
  fitchParserState <- either (checkException . show) (\(_, ps) -> return ps)
                      $ parseSig initParserState "Fitch signature" fitchSig
  -- Parse the user-provided signature. For now, we only accept term definitions.
  let sigResult = parseSig fitchParserState "<BoxProver input>" (T.pack declString)
  astDefs <- either
             (checkException . show)
             (\(ds,_) -> if any (not . isDefn) ds then
                           checkException "Signature may only contain definitions."
                         else
                           return ds)
             sigResult
  -- Load the file into Twelf and reconstruct all types and implicit parameters.
  withSystemTempFile "twelf-input" $ \tmpPath h -> do
    liftIO $ (hPutStr h declString >> hFlush h)
    (declResp, declSucceeded) <- runTwelfCmd' $ "loadFile " ++ tmpPath
    when (not declSucceeded) $ checkException declResp
    defns <- forM astDefs getFullDefn
    return (declResp, defns)
-- | Run 'check'' in its own Twelf server, converting any
-- 'CheckException' into a Left error string.
check :: (MonadMask m, MonadIO m)
         => String -> String -> String -> m (Either String (String, [(String, A, M)]))
check twelfServer fitchPath declString =
  catch
    (withTwelfServer twelfServer False $ do
        res <- check' fitchPath declString
        return . Right $ res)
    (\(CheckException err) -> return . Left $ err)
|
ulrikrasmussen/BoxProver
|
src/TwelfInterface.hs
|
mit
| 4,782 | 0 | 16 | 1,179 | 1,420 | 735 | 685 | 97 | 3 |
module Main where
import Haste
-- Show the current URL hash in the "message" element (defaulting to
-- "hello" when the hash is empty) and keep it updated on hash changes.
-- The onHashChange callback ignores its first argument (the old hash).
main = withElem "message"
       $ \el -> do
           hash <- getHash
           let msg = if null hash then "hello"
                     else hash
           setProp el "value" msg
           onHashChange
             $ \_ msg ->
                 setProp el "value" msg
|
laser/haste-experiment
|
demos/routing/routing.hs
|
mit
| 251 | 0 | 13 | 88 | 82 | 41 | 41 | 11 | 2 |
{-# LANGUAGE PackageImports #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE OverloadedStrings #-}
-- | Internals related to reading and writing an injected executable
-- hash.
module System.Executable.Hash.Internal where
import Control.Exception (SomeException, handle)
import "cryptohash" Crypto.Hash.SHA1 (hash)
import qualified Data.ByteString as BS
import Data.FileEmbed (dummySpaceWith, injectWith)
import Language.Haskell.TH (Q, Exp)
import System.Directory (doesFileExist)
-- | This generates an expression which yields the injected SHA1 hash.
--
-- The generated expression yields a 'Just' value when the injected
-- SHA1 hash is present in the executable. This hash is usually
-- injected due to a usage of 'injectExecutableHash' /
-- 'maybeInjectExecutableHash'.
-- Reads the 20-byte dummy space reserved under the "executable-hash"
-- key; an all-'0' buffer means no hash was ever injected.
injectedExecutableHash :: Q Exp
injectedExecutableHash =
    [|
    let bs = $(dummySpaceWith "executable-hash" 20)
    in if BS.all (== toEnum (fromEnum '0')) bs
        then Nothing
        else Just bs
    |]
-- | Given the path to an executable, computes its hash and injects it
-- into the binary, such that when that program demands the value of
-- 'injectedExecutableHash', it yields a 'Just' value.
--
-- See the documentation in "System.Executable.Hash" for an example of
-- how to use this with a cabal @postBuild@ hook
-- Computes the SHA1 of the binary as currently on disk and writes it
-- into the reserved dummy space; any exception is re-thrown with the
-- file path prepended for context.
injectExecutableHash :: FilePath -> IO ()
injectExecutableHash fp = handle addPathToException $ do
    binary <- BS.readFile fp
    let sha1 = hash binary
    case injectWith "executable-hash" sha1 binary of
        Nothing -> fail "Impossible: dummy space too small for executable-hash."
        Just binary' -> do
            BS.writeFile fp binary'
            putStrLn $ "Successfully wrote " ++ fp ++ " with injected hash."
  where
    addPathToException ex = fail $
        "While injecting hash into " ++ fp ++
        ", the following exception occurred: " ++ show (ex :: SomeException)
-- | Injects an executable hash into the specified binary. If it
-- doesn't exist, then this prints a message to stdout indicating that
-- it failed to inject the hash.
-- | Inject the hash when the binary exists; otherwise just report that
-- nothing was done.
maybeInjectExecutableHash :: FilePath -> IO ()
maybeInjectExecutableHash fp = do
    exists <- doesFileExist fp
    if not exists
        then putStrLn $ concat
            [ "Not injecting executable hash into "
            , fp
            , ", as it doesn't exist."
            ]
        else injectExecutableHash fp
|
fpco/executable-hash
|
System/Executable/Hash/Internal.hs
|
mit
| 2,440 | 0 | 15 | 566 | 321 | 179 | 142 | 38 | 2 |
module Stratosphere.Helpers
( maybeField
, prefixNamer
, prefixFieldRules
, modTemplateJSONField
, NamedItem (..)
, namedItemToJSON
) where
import Control.Lens (set)
import Control.Lens.TH
import Data.Aeson
import Data.Char (isUpper, toLower)
import Data.List (stripPrefix)
import Data.Maybe (maybeToList)
import qualified Data.Text as T
import Language.Haskell.TH
-- | Might create an aeson pair from a Maybe value.
-- | Turn an optional value into an optional aeson pair under @field@.
maybeField :: ToJSON a => T.Text -> Maybe a -> Maybe (T.Text, Value)
maybeField field = fmap (\val -> field .= toJSON val)
-- | Similar to `camelCaseNamer`, except we specify the prefix exactly. We use
-- this because camelCaseNamer is terrible with names that start in all caps,
-- like EC2. We would like to start the field names with "ec2...", but
-- camelCaseNamer wants "eC2...".
-- Strips the exact prefix from the record field name, lower-cases the
-- first letter for the method name, and pairs it with a "Has<Field>"
-- class name; yields nothing when the prefix doesn't match or the
-- remainder doesn't start with an upper-case letter.
prefixNamer :: String -> Name -> [Name] -> Name -> [DefName]
prefixNamer prefix _ _ field = maybeToList $
  do
    fieldPart <- stripPrefix prefix (nameBase field)
    method <- computeMethod fieldPart
    let cls = "Has" ++ fieldPart
    return (MethodName (mkName cls) (mkName method))
  where computeMethod (x:xs) | isUpper x = Just (toLower x : xs)
        computeMethod _ = Nothing
-- | See `prefixNamer`
-- | Default lens field rules with the naming replaced by 'prefixNamer'.
prefixFieldRules :: String -> LensRules
prefixFieldRules prefix = set lensField (prefixNamer prefix) defaultFieldRules
-- | Used for the JSON instances in Template. It is put here because it must be
-- in a separate module.
-- | Map a Template record field name to its CloudFormation JSON key:
-- the version field maps to the AWS-specific key, and every other field
-- simply drops the "_template" prefix (9 characters).
modTemplateJSONField :: String -> String
modTemplateJSONField name
  | name == "_templateFormatVersion" = "AWSTemplateFormatVersion"
  | otherwise                        = drop 9 name
-- | This class defines items with names in them. It is used to extract the
-- name from JSON fields so we can get an Object with the names as keys instead
-- of just an array.
class NamedItem a where
  -- | The key under which the item appears in the JSON object.
  itemName :: a -> T.Text
  -- | The item's JSON value (without its name).
  nameToJSON :: a -> Value
-- | Encode a list of named items as a single JSON object keyed by name
-- (instead of an array of objects).
namedItemToJSON :: (NamedItem a) => [a] -> Value
namedItemToJSON xs =
  object $ fmap (\x -> itemName x .= nameToJSON x) xs
|
frontrowed/stratosphere
|
library/Stratosphere/Helpers.hs
|
mit
| 2,039 | 0 | 12 | 420 | 471 | 254 | 217 | 37 | 2 |
-- In diesem Modul werden eine Reihe von \begriff{worker}-Funktionen definiert die alle einen übergebenen Wert verändern und die geänderte
-- Variante zurückliefern.
module System.ArrowVHDL.Circuit.Workers
where
-- Zur Funktionsdefinition werden Funktionen aus folgenden Modulen benötigt.
import Data.List (nub, (\\))
import GHC.Exts (sortWith)
import System.ArrowVHDL.Circuit.Descriptor
import System.ArrowVHDL.Circuit.Sensors
import System.ArrowVHDL.Circuit.Tests
-- \subsection{CircuitDescriptor Funktionen}
-- Dieser Abschnitt befasst sich mit Funktionen, die auf \hsSource{CircuitDescriptor}en arbeiten.
-- Mit der Funktion \hsSource{alterCompIDs} lassen sich alle Komponenten IDs innerhalb eines \hsSource{CircuitDescriptor}s verändern. Der erste
-- Parameter legt dabei die kleinst mögliche ID fest.
-- | Shift every component id in the descriptor by @i@: the node itself,
-- all sub-circuits (recursively) and both endpoints of every edge
-- (Nothing endpoints, i.e. the outside world, are left untouched).
alterCompIDs :: Int -> CircuitDescriptor -> CircuitDescriptor
alterCompIDs i sg
    = sg { nodeDesc = nd { nodeId = nodeId nd + i }
         , nodes = map (alterCompIDs i) $ nodes sg
         , edges = map (\ (MkEdge (ci,pi) (co,po))
                        -> (MkEdge (maybe ci (Just.(+i)) $ ci ,pi)
                                   (maybe co (Just.(+i)) $ co ,po))
                       ) $ edges sg
         }
    where nd = nodeDesc sg
-- Die Funktion \hsSource{dropCircuit} ermöglicht es, einzelne Schaltkreis-Beschreibungen aus dem \hsSource{CircuitDescriptor} zu entfernen.
-- Hierzu wird eine match-Funktion als erster Parameter erwartet.
-- | Remove every direct sub-circuit matching the predicate: their
-- bordering edges are dropped/merged via 'dropEdgesBordering', and the
-- removal recurses into the remaining sub-circuits.
dropCircuit :: (CircuitDescriptor -> Bool) -> CircuitDescriptor -> CircuitDescriptor
dropCircuit f sg
    = sg { nodes = newNodes
         , edges = newEdges
         }
    where specific = filter f (nodes sg)
          newEdges = foldl (flip dropEdgesBordering) (edges sg) (map (nodeId.nodeDesc) specific)
          newNodes = map (dropCircuit f) $ nodes sg \\ specific
-- flatten} ist eine Funktion, welche die interne Struktur des \hsSource{CircuitDescriptor}s \begriff{glättet}. Jeder
-- CircuitDescriptor} der nicht Atomar ist, enthält weitere Unterstrukturen. Diese beschreiben, woraus dieser
-- CircuitDescriptor} aufgebaut wird. Enthält der \hsSource{CircuitDescriptor} unnötige Verschachtelungen, werden diese mittels
-- flatten} entfernt.
-- Als Sonderfall gelten die Schaltungen, die Schleifen darstellen. Hier gibt es keine überflüssigen Verschachtelungen, mindestens aber muss
-- der Algorithmus zum erkennen solcher ein anderer sein, so dass \hsSource{flatten} auf diese Teilbereiche zunächst nicht angewandt werden
-- sollte.
-- | Flatten the hierarchy to just the atomic sub-circuits, rewiring the
-- edges so they run directly between atoms (or the outside). Loop
-- circuits are returned unchanged, as flattening would break them
-- (see the comment above).
flatten :: CircuitDescriptor -> CircuitDescriptor
flatten g
    | isLoop g
    = g

    | otherwise
    = g { nodes = nub $ atomCIDs
        , edges = esBetweenAtoms
        }
    where atomCIDs       = filter isAtomic $ allCircuits g
          esFromAtoms    = concat $ map (fromCompEdges g . nodeId . nodeDesc) atomCIDs
          esFromOuter    = filter isFromOuter $ edges g
          esBetweenAtoms = zipWith MkEdge (map sourceInfo $ esFromOuter ++ esFromAtoms) (map nextAtomOrOut $ esFromOuter ++ esFromAtoms)
          -- Follow an edge to the next atomic component; reaching the
          -- top-level circuit itself means the edge leaves to the outside.
          nextAtomOrOut  = (\e -> let (c, p) = nextAtomic g e
                                  in  if c == mainID then (Nothing, p) else (Just c, p))
          mainID         = nodeId.nodeDesc $ g
-- Die Funktionen \hsSource{dropGenerated} sowie \hsSource{dropID} stellen Spezialfälle der \hsSource{dropCircuit} Funktion dar.
-- dropGenerated} löscht sämtliche \hsSource{CircuitDescriptor}en, die automatisch generiert wurden. Ebenso löscht \hsSource{dropID}
-- CircuitDescriptor}en, die den \hsSource{isID}-Test bestehen. \hsSource{isID} sowie \hsSource{isGenerated} sind im Modul
-- \ref{mod:Circuit.Tests} beschrieben.
-- | Drop all automatically generated sub-circuits.
dropGenerated :: CircuitDescriptor -> CircuitDescriptor
dropGenerated = dropCircuit isGenerated

-- | Drop all identity sub-circuits.
dropID :: CircuitDescriptor -> CircuitDescriptor
dropID = dropCircuit isID
-- Diese Funktionen arbeiten auf \hsSource{CircuitDescriptor}en, und erzeugen Kanten, oder es handelt sich um Funktionen, die aus bestehenden
-- Kanten neue generieren.
-- Mit der Funktion \hsSource{connectCID} lassen sich zwei \hsSource{CircuitDescriptor}en miteinander verbinden. Dabei werden zwei
-- \hsSource{CircuitDescriptor}en übergeben, sowie die Quell-Komponenten ID und die Ziel-Komponenten ID zusammen mit einer Ziel-Pin ID. Erzeugt
-- wird eine Kante, welche die Verbindung zwischen beiden \hsSource{CircuitDescriptor}en darstellt. Von der Quelle wird keine \hsSource{PinID}
-- benötigt, da hier auf den nächst freien Pin zurückgegriffen wird. Auf der Ziel-Seite ist es dann aber notwendig, einen Ziel-Pin zu
-- definieren.
-- | Build an edge from component @cidF@ to pin @pidT@ of @cidT@. The
-- source pin is chosen automatically: the next unused output pin of the
-- source component (counting its already-connected outgoing edges in g).
connectCID :: CircuitDescriptor -> CircuitDescriptor -> CompID -> (CompID, PinID) -> Edge
connectCID old_g g cidF (cidT,pidT)
    = MkEdge (Just cidF, nextFpin) (Just cidT, pidT)
    where nextFpin  = head $ drop cntEsFrom $ sources.nodeDesc $ getComp old_g cidF
          cntEsFrom = length $ filter (\x -> (not.isFromOuter $ x) && (srcComp x == cidF)) $ edges g
-- Zum entfernen von Kanten die an eine Komponente angrenzen, ist die Funktion \hsSource{dropEdgesBordering} da. Übergeben wird die ID der
-- Komponente, die heraus gelöst werden soll, sowie die Liste mit den betroffenen Kanten. Es wird dann eine neue List mit Kanten erstellt, die
-- nicht mehr zu der Komponente mit der besagten ID führen. Alle Kanten die nicht mehr an einer Komponente andocken, werden zusammengefügt.
-- Diese Funktion kann nur dann funktionieren, wenn die zu lösende Komponente genausoviele eingehende Pins, wie ausgehende Pins besitzt.
-- %%% TODO : Components with different InPinCount and OutPinCount have a PROBLEM
-- | Remove the edges touching component @cid@ and bridge the gaps by
-- merging each incoming edge with the outgoing edge on the same pin.
-- NOTE(review): per the comment above, this only works when the removed
-- component has equally many incoming and outgoing pins.
dropEdgesBordering :: CompID -> [Edge] -> [Edge]
dropEdgesBordering cid es
    = (es ++ mergeEdges (toIt, fromIt)) \\ (toIt ++ fromIt)
    where toIt   = filter ((== (Just cid)).fst.sinkInfo)   $ es
          fromIt = filter ((== (Just cid)).fst.sourceInfo) $ es
-- \hsSource{mergeEdges} ist eine Funktion, die zwei Listen mit Kanten entgegennimmt und diese beiden zusammenfasst. Kanten die auf einen Pin
-- enden und Kanten die vom gleichen Pin starten, werden zu einer Kante zusammengefasst.
-- | Fuse edges pairwise: an edge ending on a pin is joined with the
-- edge starting from that same pin, keeping the outer source and sink.
-- Both sides are restricted to pins present on the other side and
-- sorted so matching pins line up positionally in the zip.
mergeEdges :: ([Edge], [Edge]) -> [Edge]
mergeEdges (xs, ys)
    = zipWith (\x y -> MkEdge (sourceInfo x) (sinkInfo y)) xs' ys'
    where x_snkPins = map snkPin xs
          y_srcPins = map srcPin ys
          xs' = sortWith snkPin $ filter (\edg -> (snkPin edg) `elem` y_srcPins) xs
          ys' = sortWith srcPin $ filter (\edg -> (srcPin edg) `elem` x_snkPins) ys
-- Mit der \hsSource{fillEdgeInfoCompID} Funktion, lassen sich die Quell- und Ziel-Komponenten IDs in Kanten setzen, in denen bis dahin
-- \hsSource{Nothing} als Wert gespeichert ist. Dies ist dann notwendig, wenn eine neue Komponente in eine bestehende Struktur eingefügt wird.
-- Dies wird dann benötigt, wenn eine Komponente in eine Struktur eingefügt werden soll. Eine noch nicht integrierte Komponente bekommt ihre %% TODO : Werte ist sicher nicht das richtige wort hier
-- Werte von einer unbekannte Komponente (\hsSource{Nothing}) und liefert die Ergebnisse auch an \hsSource{Nothing}. Wird sie nun eine
-- Unterkomponente, so kann das \hsSource{Nothing} durch eine tatsächliche Komponenten ID ersetzt werden.
-- | Replace a 'Nothing' endpoint (the outside world) of an edge with
-- the given component id; edges with both endpoints known are returned
-- unchanged. Used when a free-standing component is embedded into a
-- surrounding structure.
fillEdgeInfoCompID :: CompID -> Edge -> Edge
fillEdgeInfoCompID cid (MkEdge (Nothing, srcPid) (snkInfo))      = (MkEdge (Just cid, srcPid) (snkInfo))
fillEdgeInfoCompID cid (MkEdge (srcInfo)         (Nothing, snkPid)) = (MkEdge (srcInfo) (Just cid, snkPid))
fillEdgeInfoCompID _   e                                         = e
-- Ein ähnliches Problem wie \hsSource{fillEdgeInfoCompID} wird auch von den Funktionen \hsSource{fillSrcInfoCompID} und
-- \hsSource{fillSnkInfoCompID} gelöst. Diese unterscheiden sich lediglich darin, dass diese Funktionen jeweils nur die Quell-Pins oder nur die
-- Ziel-Pins betreffen.
-- | Like 'fillEdgeInfoCompID' but only for the source side; partial on
-- edges whose source is already known.
fillSrcInfoCompID :: CompID -> Edge -> Edge
fillSrcInfoCompID cid (MkEdge (Nothing, srcPid) (snkCid, snkPid))
    = (MkEdge (Just cid, srcPid) (snkCid, snkPid))

-- | Like 'fillEdgeInfoCompID' but only for the sink side; partial on
-- edges whose sink is already known.
fillSnkInfoCompID :: CompID -> Edge -> Edge
fillSnkInfoCompID cid (MkEdge (srcCid, srcPid) (Nothing, snkPid))
    = (MkEdge (srcCid, srcPid) (Just cid, snkPid))
|
frosch03/arrowVHDL
|
src/System/ArrowVHDL/Circuit/Workers.hs
|
cc0-1.0
| 8,120 | 2 | 17 | 1,518 | 1,341 | 748 | 593 | 67 | 2 |
-- | Sum x + (x-1) + ... + 1 for 0 <= x <= 100 (i.e. x*(x+1)/2).
-- Inputs above 100 are rejected as before; negative inputs now raise an
-- error instead of recursing forever (the old guards sent any negative
-- x into unbounded recursion, since x <= 100 always held).
testFn :: (Ord a, Num a) => a -> a
testFn x
  | x < 0     = error "negative input"
  | x == 0    = 0
  | x <= 100  = x + (testFn (x-1))
  | otherwise = error "too large"
-- | Concatenate @n@ copies of @str@. Non-positive counts yield the
-- empty string; previously a negative count recursed without ever
-- reaching the n == 0 base case.
repeatString :: (Ord n, Num n) => String -> n -> String
repeatString str n
  | n <= 0    = ""
  | otherwise = str ++ (repeatString str (n-1))
-- | Keep only the even numbers of a list.
removeOdd :: Integral a => [a] -> [a]
removeOdd = filter even
-- | How many even numbers a list contains (helper inlined).
numEven nums = length (filter even nums)
-- | Same as numEven, expressed as a composition (helper inlined).
numEven' = length . filter even
-- | Naive quicksort: the head is the pivot, list comprehensions split
-- the rest into <= pivot and > pivot (stable for equal elements).
quicksort :: (Ord a) => [a] -> [a]
quicksort [] = []
quicksort (pivot:rest) =
    quicksort smaller ++ [pivot] ++ quicksort bigger
  where
    smaller = [y | y <- rest, y <= pivot]
    bigger  = [y | y <- rest, y > pivot]
-- | Combine two lists element-wise; stops at the shorter list.
zipWith' :: (a -> b -> c) -> [a] -> [b] -> [c]
zipWith' combine (x:xs) (y:ys) = combine x y : zipWith' combine xs ys
zipWith' _ _ _ = []
|
cbellone/haskell-playground
|
src/test.hs
|
gpl-3.0
| 802 | 2 | 10 | 239 | 427 | 214 | 213 | 28 | 2 |
module HobTest.Control (
runCtxActions
) where
import Hob.Context
import Hob.Control (flushEvents)
-- | Run the given App actions through the context's deferred runner,
-- then flush pending GUI events so the effects become observable.
runCtxActions :: Context -> App() -> IO()
runCtxActions ctx actions = do
    deferredRunner ctx actions
    flushEvents
|
svalaskevicius/hob
|
test/HobTest/Control.hs
|
gpl-3.0
| 225 | 0 | 8 | 41 | 70 | 36 | 34 | 8 | 1 |
-- mark each node of a binary tree with
-- (x,y) coordinates
-- x -> inorder place
-- y -> depth
data Tree a = Empty | Branch a (Tree a) (Tree a) deriving Show

-- | Number of Branch nodes in the tree.
countN t = case t of
    Empty        -> 0
    Branch _ l r -> 1 + countN l + countN r
-- | Annotate every node with (x, y): x is the node's position in the
-- inorder traversal of the WHOLE tree, y its depth (root = 1).
--
-- BUG FIX: the previous version computed x as @countN l + 1@, which is
-- the inorder position within the node's own subtree only — so every
-- right subtree restarted its numbering at 1, contradicting the
-- "x -> inorder place" comment at the top of this file. The counter is
-- now threaded through the traversal.
p65 :: Tree a -> Tree (a,(Int,Int))
p65 t = fst (go t 1 1) where
    -- go tree nextX depth -> (annotated tree, first free x after it)
    go Empty x _ = (Empty, x)
    go (Branch n l r) x h =
        let (l', xHere)  = go l x (h+1)
            (r', xAfter) = go r (xHere + 1) (h+1)
        in (Branch (n, (xHere, h)) l' r', xAfter)
|
yalpul/CENG242
|
H99/61-69/p64.hs
|
gpl-3.0
| 487 | 0 | 11 | 189 | 205 | 108 | 97 | 9 | 2 |
module FRP.Chimera.Environment.Utils
(
cont2dToDisc2d
, disc2dToCont2d
, cont2dTransDisc2d
, disc2dTransCont2d
) where
import FRP.Chimera.Environment.Continuous
import FRP.Chimera.Environment.Discrete
-- | Truncate a continuous coordinate down to the discrete cell holding it.
cont2dToDisc2d :: Continuous2dCoord -> Discrete2dCoord
cont2dToDisc2d coord = (floor (fst coord), floor (snd coord))
-- | Embed a discrete cell coordinate into continuous space.
disc2dToCont2d :: Discrete2dCoord -> Continuous2dCoord
disc2dToCont2d coord = (fromIntegral (fst coord), fromIntegral (snd coord))
-- | Map a continuous coordinate into a discrete environment: scale by
-- the ratio of the two environments' extents, truncate, and wrap into
-- the discrete environment's bounds.
cont2dTransDisc2d :: Discrete2d c -> Continuous2d o -> Continuous2dCoord -> Discrete2dCoord
cont2dTransDisc2d ed ec (xc, yc) = wrapDisc2dEnv ed $ cont2dToDisc2d (xt, yt)
  where
    (ddx, ddy) = envDisc2dDims ed
    (dcx, dcy) = envCont2dDims ec
    rx = fromIntegral ddx / dcx
    ry = fromIntegral ddy / dcy
    xt = xc * rx
    yt = yc * ry
-- | Inverse of 'cont2dTransDisc2d': map a discrete coordinate into a
-- continuous environment by dividing out the same scale ratios, then
-- wrap into the continuous environment's bounds.
disc2dTransCont2d :: Continuous2d o -> Discrete2d c -> Discrete2dCoord -> Continuous2dCoord
disc2dTransCont2d ec ed (xd, yd) = wrapCont2dEnv ec (xc, yc)
  where
    (ddx, ddy) = envDisc2dDims ed
    (dcx, dcy) = envCont2dDims ec
    rx = fromIntegral ddx / dcx
    ry = fromIntegral ddy / dcy
    xc = fromIntegral xd / rx
    yc = fromIntegral yd / ry
|
thalerjonathan/phd
|
coding/libraries/chimera/src/FRP/Chimera/Environment/Utils.hs
|
gpl-3.0
| 1,140 | 0 | 8 | 230 | 369 | 199 | 170 | 28 | 1 |
{-# LANGUAGE CPP, ScopedTypeVariables #-}
module Engine where
import Control.Applicative
import Control.Arrow
import Control.Concurrent (forkIO)
import Control.Concurrent.Chan
import Control.Concurrent.MVar
import Control.Concurrent.Process
import Control.Exception
import Control.Monad
-- import Data.Accessor
import Data.Bits
import Data.Char (ord)
import Data.Function (fix)
import qualified Data.List as List
import Data.Maybe
import Data.Version (showVersion)
import Mescaline (Time)
import qualified Mescaline.Application as App
import qualified Mescaline.Application.Desktop as App
import qualified Mescaline.Application.Logger as Log
import qualified Mescaline.Database as DB
import qualified Mescaline.Database.Process as DatabaseP
import qualified Mescaline.Pattern.Sequencer as Sequencer
import qualified Mescaline.FeatureSpace.Model as FeatureSpace
import qualified Mescaline.FeatureSpace.Process as FeatureSpaceP
import qualified Mescaline.Pattern as Pattern
import qualified Mescaline.Pattern.Environment as Pattern
import qualified Mescaline.Pattern.Event as Event
import qualified Mescaline.Pattern.Patch as Patch
import qualified Mescaline.Pattern.Process as PatternP
import qualified Mescaline.Synth.OSCServer as OSCServer
import qualified Mescaline.Synth.Sampler.Process as SynthP
import Mescaline.Util (findFiles)
import qualified Sound.OpenSoundControl as OSC
import qualified Sound.SC3.Server.State as State
import qualified Sound.SC3.Server.Process as Server
import qualified Sound.SC3.Server.Process.CommandLine as Server
import System.Directory
import qualified System.Environment as Env
import System.Environment.FindBin (getProgPath)
import System.FilePath
import System.IO
import qualified System.Random as Random
-- | Endlessly read from the input channel, transform, and write to the
-- output channel. Never returns.
pipe :: (a -> IO b) -> Chan a -> Chan b -> IO ()
pipe f ichan ochan = forever $ readChan ichan >>= f >>= writeChan ochan
-- ====================================================================
-- Logging to text view
-- chanLogger :: Log.Priority -> String -> Chan String -> IO () -> Log.GenericHandler (Chan String)
-- chanLogger prio fmt chan action =
-- Log.GenericHandler
-- prio
-- (Log.simpleLogFormatter fmt)
-- chan
-- (\chan msg -> writeChan chan msg >> action)
-- (const (return ()))
--
-- createLoggers :: MainWindow -> IO ()
-- createLoggers logWindow = do
-- textEdit <- Qt.findChild logWindow ("<QTextEdit*>", "textEdit") :: IO (Qt.QTextEdit ())
-- chan <- newChan
-- Qt.connectSlot logWindow "logMessage()" logWindow "logMessage()" $ logMessage chan textEdit
-- let fmt = "[$prio][$loggername] $msg\n"
-- action = Qt.emitSignal logWindow "logMessage()" ()
-- components <- Log.getComponents
-- -- FIXME: The log levels have to be initialized first down in main, why?
-- mapM_ (\(logger, prio) -> do
-- Log.updateGlobalLogger
-- logger
-- (Log.setHandlers [chanLogger prio fmt chan action]))
-- components
-- -- Disable stderr logger
-- Log.updateGlobalLogger Log.rootLoggerName (Log.setHandlers ([] :: [Log.GenericHandler ()]))
-- where
-- logMessage :: Chan String -> Qt.QTextEdit () -> MainWindow -> IO ()
-- logMessage chan edit _ = do
-- msg <- readChan chan
-- c <- Qt.textCursor edit ()
-- Qt.insertText c msg
-- _ <- Qt.movePosition c (Qt.eEnd :: Qt.MoveOperation)
-- Qt.setTextCursor edit c
--
-- clearLog :: MainWindow -> IO ()
-- clearLog logWindow = do
-- edit <- Qt.findChild logWindow ("<QTextEdit*>", "textEdit") :: IO (Qt.QTextEdit ())
-- Qt.setPlainText edit ""
-- ====================================================================
-- Actions
-- | Write one log line to stderr.
logStrLn :: String -> IO ()
logStrLn message = hPutStrLn stderr message
-- Debug aid: dump the program / data / resource directories to stderr.
logAppDirs = do
    d1 <- App.getProgramDirectory
    d2 <- App.getDataDirectory
    d3 <- App.getResourceDirectory
    logStrLn $ show [d1, d2, d3]
-- | Boot the audio engine: start the synth, feature-space, sequencer
-- and database processes, wire database changes into the feature space,
-- and load the database from the Documents folder. Returns the synth
-- and feature-space handles plus a shutdown action. The first
-- (FilePath) argument is currently ignored; 'pattern' is the glob used
-- when loading the database.
engine :: FilePath -> String -> IO (SynthP.Handle, FeatureSpaceP.Handle, IO ())
engine _ pattern = do
    logAppDirs
    -- Documents folder lives next to the program directory.
    docDir <- liftM (flip combine "Documents" . takeDirectory) App.getProgramDirectory
    logStrLn $ "Documents: " ++ docDir
    mapM_ (\(l,p) -> Log.updateGlobalLogger l (Log.setLevel p)) =<< Log.getComponents
    -- createLoggers logWindow
    -- Synth process
    (synthP, synthQuit) <- SynthP.new
    logStrLn "Synth started"
    -- Feature space process
    fspaceP <- FeatureSpaceP.new
    logStrLn "FeatureSpace started"
    -- Sequencer process
    patternP <- PatternP.new Patch.defaultPatchEmbedded fspaceP
    logStrLn "Sequencer started"
    -- Database process; database changes trigger a feature-space reload.
    dbP <- DatabaseP.new
    connect (\(DatabaseP.Changed path pattern) -> FeatureSpaceP.LoadDatabase path pattern) dbP fspaceP
    let dbFile = docDir </> "mescaline.db"
    sendTo dbP $ DatabaseP.Load dbFile pattern
    logStrLn "Database started"
    -- Pattern process (disabled)
    -- patternToFSpaceP <- spawn $ fix $ \loop -> do
    --     x <- recv
    --     case x of
    --         PatternP.Event time event -> do
    --             -- Event.withSynth (return ()) (sendTo synthP . SynthP.PlayUnit time) event
    --             return ()
    --         _ -> return ()
    --     loop
    -- patternToFSpaceP `listenTo` patternP
    -- fspaceToPatternP <- spawn $ fix $ \loop -> do
    --     x <- recv
    --     case x of
    --         FeatureSpaceP.RegionChanged _ -> do
    --             fspace <- query fspaceP FeatureSpaceP.GetModel
    --             sendTo patternP $ PatternP.SetFeatureSpace fspace
    --         _ -> return ()
    --     loop
    -- fspaceToPatternP `listenTo` fspaceP
    -- OSC server process (disabled)
    -- oscServer <- OSCServer.new 2010 synthP fspaceP
    -- logStrLn "OSCServer started"
    -- logStrLn "Starting event loop"
    -- The returned action signals the synth thread and waits for it to
    -- exit; otherwise stale scsynth processes linger around.
    return (synthP, fspaceP, synthQuit >> logStrLn "Bye sucker.")
|
kaoskorobase/mescaline
|
app-ios/Engine.hs
|
gpl-3.0
| 6,269 | 0 | 14 | 1,512 | 852 | 519 | 333 | 74 | 1 |
{-
Copyright 2011 Alexander Midgley
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
-}
module GrahamScan(grahamScan, Point(..), point) where
import Data.Function (on)
import Data.List (sortBy)
import Prelude hiding (Either(..))
-- | Orientation of the turn taken at the middle one of three consecutive
-- points.  Shadows the 'Prelude' constructors 'Left'/'Right' (the module
-- hides @Either@ from the Prelude for this reason).
data Turn = Left | Right | Colinear
          deriving (Show, Eq)

-- | A point in the plane, parameterised over the coordinate type.
data Point a = Point {px :: a, py :: a}
             deriving Show

-- | Build a 'Point' from an @(x, y)@ pair.  (Added the missing top-level
-- type signature.)
point :: (a, a) -> Point a
point = uncurry Point
-- | Convex hull of at least three points via the Graham scan.
-- Calls 'error' ("Degenerate") on fewer than three input points.
--
-- 'loop' is only defined for lists of length >= 2; the initial call gets
-- @firstPoint : sortedRest@ where @sortedRest@ has at least two elements
-- (guaranteed by the length guard), and each recursive call shrinks the
-- list by exactly one element, so the one-element case is unreachable.
grahamScan :: (Floating a, Ord a) => [Point a] -> [Point a]
grahamScan points
    | length points < 3 = error "Degenerate"
    | otherwise =
        -- Pivot: the lowest point, rotated to the front by 'findFirstPoint'.
        let (firstPoint:rest) = findFirstPoint points
            -- Remaining points sorted by polar angle around the pivot.
            sortedRest = sortBy (compare `on` (angle firstPoint)) rest
            -- Final pair wraps around to the pivot; keep b only on a left turn.
            loop (a:b:[]) = case turn a b firstPoint of
                              Left -> b : []
                              _ -> []
            -- Left turn: b is (tentatively) on the hull; otherwise drop b.
            loop (a:b:c:ps) = case turn a b c of
                                Left -> b : loop (b:c:ps)
                                _ -> loop (a:c:ps)
        in firstPoint : loop (firstPoint:sortedRest)
-- | Rotate the pivot to the front: return the input points with the lowest
-- point (smallest y, ties broken by smallest x) first; the order of the
-- remaining points is unspecified.  Calls 'error' on an empty list.
-- (Added the missing top-level type signature; the logic is unchanged.)
findFirstPoint :: Ord a => [Point a] -> [Point a]
findFirstPoint points
    | null points = error "Null points"
    | otherwise = loop points [] where
        -- Single-pass selection: the running minimum stays at the head of
        -- the first list, everything else is accumulated in ps.
        loop (a:[]) ps = a:ps
        loop (a:b:rest) ps =
            if (py a, px a) < (py b, px b)
            then loop (a:rest) (b:ps)
            else loop (b:rest) (a:ps)
-- | Sort key approximating the polar angle from @b@ to @a@: the cosine
-- (dx / distance) paired with the distance as a tie-breaker.
-- (Added the missing top-level type signature; the logic is unchanged.)
--
-- NOTE(review): if @a@ and @b@ coincide, @len@ is 0 and the division
-- yields NaN — callers should not pass duplicate points.
angle :: Floating a => Point a -> Point a -> (a, a)
angle a b = (dx / len, len) where
    dx = px a - px b
    dy = py a - py b
    len = sqrt (dx * dx + dy * dy)
-- | Orientation of the path a -> b -> c, decided by the sign of the 2D
-- cross product of the vectors (a->b) and (b->c): positive means a left
-- (counter-clockwise) turn, zero colinear, negative a right turn.
-- (Added the missing top-level type signature; the logic is unchanged.)
turn :: (Num a, Ord a) => Point a -> Point a -> Point a -> Turn
turn a b c = case compare cross 0 of
               GT -> Left
               EQ -> Colinear
               LT -> Right
    where
      cross = x1 * y2 - x2 * y1
      x1 = px b - px a
      y1 = py b - py a
      x2 = px c - px b
      y2 = py c - py b
|
shadwstalkr/GrahamScanDemo
|
GrahamScan.hs
|
gpl-3.0
| 2,219 | 0 | 17 | 735 | 719 | 371 | 348 | 43 | 4 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- Module : Network.AWS.OpsWorks.SetLoadBasedAutoScaling
-- Copyright : (c) 2013-2014 Brendan Hay <[email protected]>
-- License : This Source Code Form is subject to the terms of
-- the Mozilla Public License, v. 2.0.
-- A copy of the MPL can be found in the LICENSE file or
-- you can obtain it at http://mozilla.org/MPL/2.0/.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : experimental
-- Portability : non-portable (GHC extensions)
--
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- | Specify the load-based auto scaling configuration for a specified layer. For
-- more information, see <http://docs.aws.amazon.com/opsworks/latest/userguide/workinginstances-autoscaling.html Managing Load with Time-based and Load-based Instances>.
--
-- To use load-based auto scaling, you must create a set of load-based auto
-- scaling instances. Load-based auto scaling operates only on the instances
-- from that set, so you must ensure that you have created enough instances to
-- handle the maximum anticipated load.
--
-- Required Permissions: To use this action, an IAM user must have a Manage
-- permissions level for the stack, or an attached policy that explicitly grants
-- permissions. For more information on user permissions, see <http://docs.aws.amazon.com/opsworks/latest/userguide/opsworks-security-users.html Managing UserPermissions>.
--
-- <http://docs.aws.amazon.com/opsworks/latest/APIReference/API_SetLoadBasedAutoScaling.html>
module Network.AWS.OpsWorks.SetLoadBasedAutoScaling
(
-- * Request
SetLoadBasedAutoScaling
-- ** Request constructor
, setLoadBasedAutoScaling
-- ** Request lenses
, slbasDownScaling
, slbasEnable
, slbasLayerId
, slbasUpScaling
-- * Response
, SetLoadBasedAutoScalingResponse
-- ** Response constructor
, setLoadBasedAutoScalingResponse
) where
import Network.AWS.Prelude
import Network.AWS.Request.JSON
import Network.AWS.OpsWorks.Types
import qualified GHC.Exts
-- Request payload; field semantics are documented on the corresponding lenses.
data SetLoadBasedAutoScaling = SetLoadBasedAutoScaling
    { _slbasDownScaling :: Maybe AutoScalingThresholds -- ^ Downscaling threshold configuration.
    , _slbasEnable      :: Maybe Bool                  -- ^ Enable load-based auto scaling.
    , _slbasLayerId     :: Text                        -- ^ The layer ID (required).
    , _slbasUpScaling   :: Maybe AutoScalingThresholds -- ^ Upscaling threshold configuration.
    } deriving (Eq, Read, Show)
-- | 'SetLoadBasedAutoScaling' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'slbasDownScaling' @::@ 'Maybe' 'AutoScalingThresholds'
--
-- * 'slbasEnable' @::@ 'Maybe' 'Bool'
--
-- * 'slbasLayerId' @::@ 'Text'
--
-- * 'slbasUpScaling' @::@ 'Maybe' 'AutoScalingThresholds'
--
setLoadBasedAutoScaling :: Text -- ^ 'slbasLayerId'
                        -> SetLoadBasedAutoScaling
-- Only the layer ID is mandatory; every optional field starts out unset.
setLoadBasedAutoScaling layerId = SetLoadBasedAutoScaling
    { _slbasDownScaling = Nothing
    , _slbasEnable      = Nothing
    , _slbasLayerId     = layerId
    , _slbasUpScaling   = Nothing
    }
-- | An 'AutoScalingThresholds' object with the downscaling threshold configuration.
-- If the load falls below these thresholds for a specified amount of time, AWS
-- OpsWorks stops a specified number of instances.
slbasDownScaling :: Lens' SetLoadBasedAutoScaling (Maybe AutoScalingThresholds)
slbasDownScaling = lens _slbasDownScaling (\s a -> s { _slbasDownScaling = a })

-- | Enables load-based auto scaling for the layer.
slbasEnable :: Lens' SetLoadBasedAutoScaling (Maybe Bool)
slbasEnable = lens _slbasEnable (\s a -> s { _slbasEnable = a })

-- | The layer ID.
slbasLayerId :: Lens' SetLoadBasedAutoScaling Text
slbasLayerId = lens _slbasLayerId (\s a -> s { _slbasLayerId = a })

-- | An 'AutoScalingThresholds' object with the upscaling threshold configuration.
-- If the load exceeds these thresholds for a specified amount of time, AWS
-- OpsWorks starts a specified number of instances.
slbasUpScaling :: Lens' SetLoadBasedAutoScaling (Maybe AutoScalingThresholds)
slbasUpScaling = lens _slbasUpScaling (\s a -> s { _slbasUpScaling = a })
-- Empty response marker: the service returns no payload for this call
-- (see the 'nullResponse' in the 'AWSRequest' instance below).
data SetLoadBasedAutoScalingResponse = SetLoadBasedAutoScalingResponse
    deriving (Eq, Ord, Read, Show, Generic)

-- | 'SetLoadBasedAutoScalingResponse' constructor.
setLoadBasedAutoScalingResponse :: SetLoadBasedAutoScalingResponse
setLoadBasedAutoScalingResponse = SetLoadBasedAutoScalingResponse
-- All request data travels in the JSON body, so path and query are trivial.
instance ToPath SetLoadBasedAutoScaling where
    toPath = const "/"

instance ToQuery SetLoadBasedAutoScaling where
    toQuery = const mempty

instance ToHeaders SetLoadBasedAutoScaling

-- Wire names use the service's PascalCase keys, not the record field names.
instance ToJSON SetLoadBasedAutoScaling where
    toJSON SetLoadBasedAutoScaling{..} = object
        [ "LayerId"     .= _slbasLayerId
        , "Enable"      .= _slbasEnable
        , "UpScaling"   .= _slbasUpScaling
        , "DownScaling" .= _slbasDownScaling
        ]

instance AWSRequest SetLoadBasedAutoScaling where
    type Sv SetLoadBasedAutoScaling = OpsWorks
    type Rs SetLoadBasedAutoScaling = SetLoadBasedAutoScalingResponse

    request  = post "SetLoadBasedAutoScaling"
    -- The API returns an empty body; map it to the unit-like response type.
    response = nullResponse SetLoadBasedAutoScalingResponse
|
dysinger/amazonka
|
amazonka-opsworks/gen/Network/AWS/OpsWorks/SetLoadBasedAutoScaling.hs
|
mpl-2.0
| 5,500 | 0 | 9 | 1,034 | 565 | 344 | 221 | 65 | 1 |
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.MapsCoordinate.Types.Sum
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
module Network.Google.MapsCoordinate.Types.Sum where
import Network.Google.Prelude
-- | Job progress
-- Enum mirroring the wire strings accepted by the API; the round-trip
-- between constructors and strings lives in the HttpApiData instances below.
data JobsPatchProgress
    = Completed
      -- ^ @COMPLETED@
      -- Completed
    | InProgress
      -- ^ @IN_PROGRESS@
      -- In progress
    | NotAccepted
      -- ^ @NOT_ACCEPTED@
      -- Not accepted
    | NotStarted
      -- ^ @NOT_STARTED@
      -- Not started
    | Obsolete
      -- ^ @OBSOLETE@
      -- Obsolete
      deriving (Eq, Ord, Enum, Read, Show, Data, Typeable, Generic)
instance Hashable JobsPatchProgress
-- Parse the wire string back into the enum; unknown strings are reported
-- verbatim in the error message.
instance FromHttpApiData JobsPatchProgress where
    parseQueryParam txt = case txt of
        "COMPLETED"    -> Right Completed
        "IN_PROGRESS"  -> Right InProgress
        "NOT_ACCEPTED" -> Right NotAccepted
        "NOT_STARTED"  -> Right NotStarted
        "OBSOLETE"     -> Right Obsolete
        unknown -> Left ("Unable to parse JobsPatchProgress from: " <> unknown)
-- Render the enum as the exact wire string expected by the API.
instance ToHttpApiData JobsPatchProgress where
    toQueryParam progress = case progress of
        Completed   -> "COMPLETED"
        InProgress  -> "IN_PROGRESS"
        NotAccepted -> "NOT_ACCEPTED"
        NotStarted  -> "NOT_STARTED"
        Obsolete    -> "OBSOLETE"
-- JSON (de)serialisation reuses the textual HttpApiData representation.
instance FromJSON JobsPatchProgress where
    parseJSON = parseJSONText "JobsPatchProgress"

instance ToJSON JobsPatchProgress where
    toJSON = toJSONText
-- | Job progress
-- | Job progress.  Same wire values as 'JobsPatchProgress', but with
-- JUP-prefixed constructors to avoid name clashes within this module.
data JobsUpdateProgress
    = JUPCompleted
      -- ^ @COMPLETED@
      -- Completed
    | JUPInProgress
      -- ^ @IN_PROGRESS@
      -- In progress
    | JUPNotAccepted
      -- ^ @NOT_ACCEPTED@
      -- Not accepted
    | JUPNotStarted
      -- ^ @NOT_STARTED@
      -- Not started
    | JUPObsolete
      -- ^ @OBSOLETE@
      -- Obsolete
      deriving (Eq, Ord, Enum, Read, Show, Data, Typeable, Generic)

instance Hashable JobsUpdateProgress

-- Parse the wire string; unknown values are reported verbatim.
instance FromHttpApiData JobsUpdateProgress where
    parseQueryParam = \case
        "COMPLETED" -> Right JUPCompleted
        "IN_PROGRESS" -> Right JUPInProgress
        "NOT_ACCEPTED" -> Right JUPNotAccepted
        "NOT_STARTED" -> Right JUPNotStarted
        "OBSOLETE" -> Right JUPObsolete
        x -> Left ("Unable to parse JobsUpdateProgress from: " <> x)

-- Render to the exact wire string.
instance ToHttpApiData JobsUpdateProgress where
    toQueryParam = \case
        JUPCompleted -> "COMPLETED"
        JUPInProgress -> "IN_PROGRESS"
        JUPNotAccepted -> "NOT_ACCEPTED"
        JUPNotStarted -> "NOT_STARTED"
        JUPObsolete -> "OBSOLETE"

-- JSON (de)serialisation reuses the textual representation above.
instance FromJSON JobsUpdateProgress where
    parseJSON = parseJSONText "JobsUpdateProgress"

instance ToJSON JobsUpdateProgress where
    toJSON = toJSONText
|
rueshyna/gogol
|
gogol-maps-coordinate/gen/Network/Google/MapsCoordinate/Types/Sum.hs
|
mpl-2.0
| 3,079 | 0 | 11 | 763 | 473 | 261 | 212 | 62 | 0 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.Drive.Teamdrives.Get
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Deprecated use drives.get instead.
--
-- /See:/ <https://developers.google.com/drive/ Drive API Reference> for @drive.teamdrives.get@.
module Network.Google.Resource.Drive.Teamdrives.Get
(
-- * REST Resource
TeamdrivesGetResource
-- * Creating a Request
, teamdrivesGet
, TeamdrivesGet
-- * Request Lenses
, tgTeamDriveId
, tgUseDomainAdminAccess
) where
import Network.Google.Drive.Types
import Network.Google.Prelude
-- | A resource alias for @drive.teamdrives.get@ method which the
-- 'TeamdrivesGet' request conforms to.
-- Servant-style route: GET /drive/v3/teamdrives/{teamDriveId}
--   ?useDomainAdminAccess={bool}&alt=json  ->  TeamDrive
type TeamdrivesGetResource =
     "drive" :>
       "v3" :>
         "teamdrives" :>
           Capture "teamDriveId" Text :>
             QueryParam "useDomainAdminAccess" Bool :>
               QueryParam "alt" AltJSON :> Get '[JSON] TeamDrive
-- | Deprecated use drives.get instead.
--
-- /See:/ 'teamdrivesGet' smart constructor.
data TeamdrivesGet =
  TeamdrivesGet'
    { _tgTeamDriveId :: !Text          -- ^ The ID of the Team Drive.
    , _tgUseDomainAdminAccess :: !Bool -- ^ Issue the request as a domain administrator.
    }
  deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'TeamdrivesGet' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'tgTeamDriveId'
--
-- * 'tgUseDomainAdminAccess'
-- Build a request for the given Team Drive ID; domain-admin access is off
-- by default.
teamdrivesGet
    :: Text -- ^ 'tgTeamDriveId'
    -> TeamdrivesGet
teamdrivesGet driveId =
  TeamdrivesGet'
    { _tgTeamDriveId = driveId
    , _tgUseDomainAdminAccess = False
    }
-- | The ID of the Team Drive.
tgTeamDriveId :: Lens' TeamdrivesGet Text
tgTeamDriveId
  = lens _tgTeamDriveId
      (\ s a -> s{_tgTeamDriveId = a})

-- | Issue the request as a domain administrator; if set to true, then the
-- requester will be granted access if they are an administrator of the
-- domain to which the Team Drive belongs.
tgUseDomainAdminAccess :: Lens' TeamdrivesGet Bool
tgUseDomainAdminAccess
  = lens _tgUseDomainAdminAccess
      (\ s a -> s{_tgUseDomainAdminAccess = a})
instance GoogleRequest TeamdrivesGet where
        -- Response payload type and the OAuth scopes the call may use.
        type Rs TeamdrivesGet = TeamDrive
        type Scopes TeamdrivesGet =
             '["https://www.googleapis.com/auth/drive",
               "https://www.googleapis.com/auth/drive.readonly"]
        -- Thread the record fields into the route client in declaration order.
        requestClient TeamdrivesGet'{..}
          = go _tgTeamDriveId (Just _tgUseDomainAdminAccess)
              (Just AltJSON)
              driveService
          where go
                  = buildClient (Proxy :: Proxy TeamdrivesGetResource)
                      mempty
|
brendanhay/gogol
|
gogol-drive/gen/Network/Google/Resource/Drive/Teamdrives/Get.hs
|
mpl-2.0
| 3,265 | 0 | 13 | 721 | 381 | 230 | 151 | 63 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.Compute.GlobalOperations.List
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Retrieves a list of Operation resources contained within the specified
-- project.
--
-- /See:/ <https://developers.google.com/compute/docs/reference/latest/ Compute Engine API Reference> for @compute.globalOperations.list@.
module Network.Google.Resource.Compute.GlobalOperations.List
(
-- * REST Resource
GlobalOperationsListResource
-- * Creating a Request
, globalOperationsList
, GlobalOperationsList
-- * Request Lenses
, golReturnPartialSuccess
, golOrderBy
, golProject
, golFilter
, golPageToken
, golMaxResults
) where
import Network.Google.Compute.Types
import Network.Google.Prelude
-- | A resource alias for @compute.globalOperations.list@ method which the
-- 'GlobalOperationsList' request conforms to.
-- Servant-style route: GET /compute/v1/projects/{project}/global/operations
-- with the optional paging/filtering query parameters -> OperationList.
type GlobalOperationsListResource =
     "compute" :>
       "v1" :>
         "projects" :>
           Capture "project" Text :>
             "global" :>
               "operations" :>
                 QueryParam "returnPartialSuccess" Bool :>
                   QueryParam "orderBy" Text :>
                     QueryParam "filter" Text :>
                       QueryParam "pageToken" Text :>
                         QueryParam "maxResults" (Textual Word32) :>
                           QueryParam "alt" AltJSON :> Get '[JSON] OperationList
-- | Retrieves a list of Operation resources contained within the specified
-- project.
--
-- /See:/ 'globalOperationsList' smart constructor.
-- Request record; field semantics are documented on the lenses below.
data GlobalOperationsList =
  GlobalOperationsList'
    { _golReturnPartialSuccess :: !(Maybe Bool)   -- ^ Opt in to partial results on failure.
    , _golOrderBy :: !(Maybe Text)                -- ^ Sort order for the results.
    , _golProject :: !Text                        -- ^ Project ID (required).
    , _golFilter :: !(Maybe Text)                 -- ^ Filter expression.
    , _golPageToken :: !(Maybe Text)              -- ^ Continuation token from a previous page.
    , _golMaxResults :: !(Textual Word32)         -- ^ Page size (default 500).
    }
  deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'GlobalOperationsList' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'golReturnPartialSuccess'
--
-- * 'golOrderBy'
--
-- * 'golProject'
--
-- * 'golFilter'
--
-- * 'golPageToken'
--
-- * 'golMaxResults'
-- Build a request for the given project; paging/filtering options start
-- unset and the page size starts at the service default of 500.
globalOperationsList
    :: Text -- ^ 'golProject'
    -> GlobalOperationsList
globalOperationsList project = GlobalOperationsList'
    { _golProject = project
    , _golMaxResults = 500
    , _golReturnPartialSuccess = Nothing
    , _golOrderBy = Nothing
    , _golFilter = Nothing
    , _golPageToken = Nothing
    }
-- | Opt-in for partial success behavior which provides partial results in
-- case of failure. The default value is false.
golReturnPartialSuccess :: Lens' GlobalOperationsList (Maybe Bool)
golReturnPartialSuccess
  = lens _golReturnPartialSuccess
      (\ s a -> s{_golReturnPartialSuccess = a})

-- | Sorts list results by a certain order. By default, results are returned
-- in alphanumerical order based on the resource name. You can also sort
-- results in descending order based on the creation timestamp using
-- \`orderBy=\"creationTimestamp desc\"\`. This sorts results based on the
-- \`creationTimestamp\` field in reverse chronological order (newest
-- result first). Use this to sort resources like operations so that the
-- newest operation is returned first. Currently, only sorting by \`name\`
-- or \`creationTimestamp desc\` is supported.
golOrderBy :: Lens' GlobalOperationsList (Maybe Text)
golOrderBy
  = lens _golOrderBy (\ s a -> s{_golOrderBy = a})

-- | Project ID for this request.
golProject :: Lens' GlobalOperationsList Text
golProject
  = lens _golProject (\ s a -> s{_golProject = a})

-- | A filter expression that filters resources listed in the response. The
-- expression must specify the field name, a comparison operator, and the
-- value that you want to use for filtering. The value must be a string, a
-- number, or a boolean. The comparison operator must be either \`=\`,
-- \`!=\`, \`>\`, or \`\<\`. For example, if you are filtering Compute
-- Engine instances, you can exclude instances named \`example-instance\`
-- by specifying \`name != example-instance\`. You can also filter nested
-- fields. For example, you could specify \`scheduling.automaticRestart =
-- false\` to include instances only if they are not scheduled for
-- automatic restarts. You can use filtering on nested fields to filter
-- based on resource labels. To filter on multiple expressions, provide
-- each separate expression within parentheses. For example: \`\`\`
-- (scheduling.automaticRestart = true) (cpuPlatform = \"Intel Skylake\")
-- \`\`\` By default, each expression is an \`AND\` expression. However,
-- you can include \`AND\` and \`OR\` expressions explicitly. For example:
-- \`\`\` (cpuPlatform = \"Intel Skylake\") OR (cpuPlatform = \"Intel
-- Broadwell\") AND (scheduling.automaticRestart = true) \`\`\`
golFilter :: Lens' GlobalOperationsList (Maybe Text)
golFilter
  = lens _golFilter (\ s a -> s{_golFilter = a})

-- | Specifies a page token to use. Set \`pageToken\` to the
-- \`nextPageToken\` returned by a previous list request to get the next
-- page of results.
golPageToken :: Lens' GlobalOperationsList (Maybe Text)
golPageToken
  = lens _golPageToken (\ s a -> s{_golPageToken = a})

-- | The maximum number of results per page that should be returned. If the
-- number of available results is larger than \`maxResults\`, Compute
-- Engine returns a \`nextPageToken\` that can be used to get the next page
-- of results in subsequent list requests. Acceptable values are \`0\` to
-- \`500\`, inclusive. (Default: \`500\`)
-- The '_Coerce' iso exposes the plain 'Word32' instead of the 'Textual'
-- newtype wrapper used on the wire.
golMaxResults :: Lens' GlobalOperationsList Word32
golMaxResults
  = lens _golMaxResults
      (\ s a -> s{_golMaxResults = a})
      . _Coerce
instance GoogleRequest GlobalOperationsList where
        -- Response payload type and the OAuth scopes the call may use.
        type Rs GlobalOperationsList = OperationList
        type Scopes GlobalOperationsList =
             '["https://www.googleapis.com/auth/cloud-platform",
               "https://www.googleapis.com/auth/compute",
               "https://www.googleapis.com/auth/compute.readonly"]
        -- Thread the record fields into the route client in declaration order.
        requestClient GlobalOperationsList'{..}
          = go _golProject _golReturnPartialSuccess _golOrderBy
              _golFilter
              _golPageToken
              (Just _golMaxResults)
              (Just AltJSON)
              computeService
          where go
                  = buildClient
                      (Proxy :: Proxy GlobalOperationsListResource)
                      mempty
|
brendanhay/gogol
|
gogol-compute/gen/Network/Google/Resource/Compute/GlobalOperations/List.hs
|
mpl-2.0
| 7,183 | 0 | 19 | 1,521 | 758 | 454 | 304 | 108 | 1 |
{-# LANGUAGE DisambiguateRecordFields, NamedFieldPuns, RecordWildCards, PostfixOperators, LiberalTypeSynonyms, TypeOperators, OverloadedStrings, PackageImports, ScopedTypeVariables #-}
module Graphics.Diagrams.DerivationTrees (
-- * Basics
module Data.Monoid,
module Data.LabeledTree,
-- * Derivation' building
-- axiom, rule, etc, aborted,
emptyDrv, haltDrv', delayPre,
dummy, rule, Derivation, Premise, Rule(..),
-- * Links
LineStyle,defaultLink,Link(..),
-- * Engine
derivationTreeDiag, delayD
) where
-- import DerivationTrees.Basics
import Control.Monad.Writer
import Data.LabeledTree
import Data.Monoid
import Graphics.Diagrams as D hiding (label)
import qualified Data.Tree as T
import Algebra.Classes
import Prelude hiding (Num(..))
------------------
--- Basics
-- | A tweak applied to the drawing options of a path.
type LineStyle = PathOptions -> PathOptions

-- | How a premise is attached to its rule.  'steps' is the number of extra
-- layers the link spans (see 'depth' and 'toDiagPart').
data Link lab = Link {label :: lab, linkStyle :: LineStyle, steps :: Int} -- ^ Regular link
              | Delayed -- ^ automatic delaying (replaced by 'delayD')

-- | Default regular link: empty label, densely dotted black outline, no delay.
defaultLink :: Monoid lab => Link lab
defaultLink = Link mempty (denselyDotted . outline "black") 0
-------------------
-- | One inference step: style of the horizontal bar, a delimiter label, the
-- rule's name label, and the conclusion label.
data Rule lab = Rule {ruleStyle :: LineStyle, delimiter :: lab, ruleLabel :: lab, conclusion :: lab}
--  deriving Show

-- | A premise is a link together with the sub-derivation it attaches.
type Premise lab = Link lab ::> Derivation lab
-- | A derivation is a tree of rules whose edges are links.
type Derivation lab = Tree (Link lab) (Rule lab)
--------------------------------------------------
-- Delay
-- | Height (in layers) of a premise subtree, including its link's explicit
-- delay steps.  Partial: matches only 'Link' links — callers filter out
-- 'Delayed' premises first (see 'delayD').
depth :: forall lab t. Link lab ::> Tree (Link lab) t -> Int
depth (Link{steps} ::> Node _ ps) = 1 + steps + maximum (0 : map depth ps)
-- | True when a premise's link is the automatic-delay marker.
isDelayed :: Premise lab -> Bool
isDelayed premise = case premise of
  Delayed ::> _ -> True
  _             -> False
-- | Set the delay steps of a regular link, keeping its other fields.
-- Partial: matches only 'Link', not 'Delayed'.
delayPre :: forall lab a. Int -> Link lab ::> a -> Link lab ::> a
delayPre s (Link {..} ::> j) = Link {steps = s, ..} ::> j

-- | Recursively replace every 'Delayed' premise by a 'defaultLink' whose
-- delay is one more than the deepest non-delayed sibling, so the delayed
-- branch visually hangs below the rest of the tree.
delayD :: Monoid lab => Derivation lab -> Derivation lab
delayD (Node r ps0) = Node r (map delayP ps)
  where ps = fmap (fmap delayD) ps0
        -- Only non-delayed siblings count towards the reference depth.
        ps' = filter (not . isDelayed) ps
        delayP (Delayed ::> d) = defaultLink {steps = 1 + maximum (0 : map depth ps')} ::> d
        delayP p = p
----------------------------------------------------------
-- Diagramify
-- | Render a derivation tree as a diagram: builds the tree of objects, then
-- emits layout constraints (per-level horizontal separation, overall width
-- minimisation) for the underlying constraint solver.
derivationTreeDiag :: Monad m => Derivation lab -> Diagram lab m ()
derivationTreeDiag d = do
  h <- newVar "height" -- the height of a layer in the tree.
  minimize h
  h >== constant 1
  tree@(T.Node (_,n,_) _) <- toDiagram h d
  -- Keep at least 10 units between horizontally adjacent nodes of a level.
  forM_ (T.levels tree) $ \ls ->
    case ls of
      [] -> return ()
      (_:ls') -> forM_ (zip ls ls') $ \((_,_,l),(r,_,_)) ->
        (l + Point (constant 10) zero) `westOf` r
  -- 'head'/'last' are safe: nonNilLevs keeps only non-empty levels.
  let leftFringe = map head nonNilLevs
      rightFringe = map last nonNilLevs
      nonNilLevs = filter (not . null) $ T.levels tree
  -- Bound the whole tree between leftMost and rightMost, then squeeze.
  leftMost <- newVar "leftMost"; rightMost <- newVar "rightMost"
  forM_ leftFringe $ \(p,_,_) ->
    leftMost <== xpart p
  forM_ rightFringe $ \(_,_,p) ->
    xpart p <== rightMost
  tighten 10 $ minimize $ (rightMost - leftMost)
  -- Anchor the root conclusion at the origin.
  n # Center .=. zero
-- | Render one premise.  A zero-step link is just the sub-derivation; a
-- delayed link additionally draws a vertical connector spanning
-- @steps@ layers and wraps the subtree in that many dummy levels.
-- Partial: matches only 'Link' — run 'delayD' first to remove 'Delayed'.
toDiagPart :: Monad m => Expr -> Premise lab -> Diagram lab m (T.Tree (Point,Object,Point))
toDiagPart layerHeight (Link{..} ::> rul)
  | steps == 0 = toDiagram layerHeight rul
  | otherwise = do
    above@(T.Node (_,concl,_) _) <- toDiagram layerHeight rul
    ptObj <- vrule "ptObj"
    let pt = ptObj # S
    -- Keep the connector roughly centred under the conclusion.
    pt `eastOf` (concl # W)
    pt `westOf` (concl # E)
    (xpart pt) =~= (xpart (concl # Center))
    let top = ypart (concl # S)
    -- The connector spans exactly 'steps' layers.
    ypart pt + (fromIntegral steps *- layerHeight) === top
    using linkStyle $ path $ polyline [ptObj # Base,Point (xpart pt) top]
    -- Wrap the subtree in 'steps' single-child levels so level alignment
    -- in 'derivationTreeDiag' still works.
    let embedPt 1 x = T.Node (concl # W,ptObj,concl # E) [x]
        embedPt n x = T.Node (pt,ptObj,pt) [embedPt (n-1) x]
    return $ embedPt steps above
-- | @chainBases distance objects@
-- - Ensures that all the objects have the same baseline.
-- - Separates the objects by the given distance
-- - Returns an object encompassing the group, with a the baseline set correctly.
-- - Returns the average distance between the objects
chainBases :: Monad m => Expr -> [Object] -> Diagram lab m (Object,Expr)
-- Empty premise list: an empty box with zero average spacing.
chainBases _ [] = do
  o <- obj box "empty"
  return (o,zero)
chainBases spacing ls = do
  grp <- obj box "grp"
  -- Share baseline, top and bottom across the group and all members.
  forM_ [Base,N,S] $ \ anch -> do
    D.align ypart $ map (# anch) (grp:ls)
  -- Enforce the minimum gap between horizontal neighbours, collecting
  -- the actual gaps to report their average.
  dxs <- forM (zip ls (tail ls)) $ \(x,y) -> do
    let dx = xdiff (x # E) (y # W)
    dx >== spacing
    return dx
  -- 'head'/'last' are safe: this clause only handles non-empty ls.
  D.align xpart [grp # W,head ls # W]
  D.align xpart [grp # E,last ls # E]
  return (grp,avg dxs)
-- | Debug hook: currently a no-op that discards the traced action.
-- Swap in the commented definition below to actually run it.
debug :: Monad m => m a -> m ()
debug x = return ()
-- debug x = x >> return ()
-- | Put object in a box of the same vertical extent, and baseline,
-- but whose height can be bigger.
relaxHeight :: (Monad m) => Object -> Diagram lab m Object
relaxHeight o = do
  b <- obj box "relaxed"
  debug $ traceBox "green" o
  -- Same width and baseline as the original object...
  D.align xpart [b#W,o#W]
  D.align xpart [b#E,o#E]
  D.align ypart [b#Base,o#Base]
  -- ...but the box may be taller: the object only has to fit inside it.
  o `fitsVerticallyIn` b
  return b
-- | Render one rule node: lay out its premises, the separation bar, the
-- conclusion and the rule-name label; returns a tree of
-- (west point, conclusion object, east point) triples per level.
toDiagram :: Monad m => Expr -> Derivation lab -> Diagram lab m (T.Tree (Point,Object,Point))
toDiagram layerHeight (Node Rule{..} premises) = do
  ps <- mapM (toDiagPart layerHeight) premises
  concl <- relaxHeight =<< extend (constant 1.5) <$> rawLabel "concl" conclusion
  debug $ traceBox "red" concl
  lab <- rawLabel "rulename" ruleLabel
  -- Grouping: premises share a baseline, spaced at least 10 units apart.
  (psGrp,premisesDist) <- chainBases (constant 10) [p | T.Node (_,p,_) _ <- ps]
  debug $ using denselyDotted $ traceBox "blue" psGrp
  -- A rule with no premises contributes no layer height.
  height psGrp === case ps of
    [] -> zero
    _ -> layerHeight
  -- Separation rule
  separ <- hrule "separation"
  separ # N .=. psGrp # S
  align ypart [concl # N,separ # S]
  minimize $ width separ
  psGrp `fitsHorizontallyIn` separ
  concl `sloppyFitsHorizontallyIn` separ
  -- rule label, slightly right of and below the bar's east end
  lab # BaseW .=. separ # E + Point (constant 3) (constant (negate 1))
  -- layout hints (not necessary for "correctness")
  let xd = xdiff (separ # W) (psGrp # W)
  xd === xdiff (psGrp # E) (separ # E)
  relax 2 $ (2 *- xd) =~= premisesDist
  -- centering of conclusion
  (xpart (separ # Center) - xpart (concl # Center)) === zero
  -- minimize (xpart (separ # Center) - xpart (concl # Center)) -- does not produce the expected results with current z3 version
  -- draw the rule.
  using ruleStyle $ path $ polyline [separ # W,separ # E]
  return $ T.Node (separ # W, concl, lab # E) ps
-----------------------
-- | A standard rule: given name and conclusion labels, with an empty
-- delimiter and a solid black bar.
rule :: Monoid lab => lab -> lab -> Rule lab
rule ruleLabel conclusion = Rule {delimiter = mempty, ruleStyle = outline "black", ..}

-- | An invisible rule: empty labels and default (undrawn) bar style.
dummy :: Monoid lab => Rule lab
dummy = (rule mempty mempty) {ruleStyle = const defaultPathOptions}

-- | A leaf derivation containing only the invisible 'dummy' rule.
emptyDrv :: forall k lab. Monoid lab => Tree k (Rule lab)
emptyDrv = Node dummy []

-- abortDrv (Node Rule {..} _) = Node Rule {ruleStyle = Waved, ..} []

-- | Used when the rest of the derivation is known: replaces the premises
-- with a single one-step link labelled @tex@ to an empty derivation, and
-- hides the rule's bar.
haltDrv' :: forall lab. Monoid lab => lab -> Derivation lab -> Derivation lab
haltDrv' tex (Node r _) = Node r {ruleStyle = noOutline}
                               [lnk {steps = 1, label = tex} ::> emptyDrv]
  where lnk :: Link lab
        lnk = defaultLink
|
jyp/lp-diagrams
|
Graphics/Diagrams/DerivationTrees.hs
|
agpl-3.0
| 6,938 | 90 | 19 | 1,505 | 2,528 | 1,354 | 1,174 | 134 | 2 |
-- |
-- Module : DMSS.Daemon
-- License : Public Domain
--
-- Maintainer : [email protected]
-- Stability : experimental
-- Portability : untested
--
-- Dead Man Switch System daemon module
--
module DMSS.Daemon where
import DMSS.Config (createLocalDirectory)
import DMSS.Daemon.Command
import DMSS.Daemon.Memory (DaemonTVar, isPeerConnected, cleanupConnections)
import DMSS.Daemon.Network (connAttempt, incomingConnListen, lookupPeerHostPort)
import DMSS.Daemon.CLI ( Cli (Cli), daemonMain, FlagSilent (SilentOn) )
import DMSS.Storage ( StorageT, runStoragePool
, latestCheckIns, verifyPublicCheckIn
, unName, listPeers
, dbConnectionString
)
import DMSS.Storage.TH ( migrateAll )
import Paths_DMSS ( version )
import Control.Concurrent ( forkIO, threadDelay )
import Control.Monad (forever, foldM, unless, void)
import Control.Monad.IO.Class (liftIO)
import Control.Pipe.C3 ( commandReceiver )
import Control.Monad.Logger (runStdoutLoggingT)
import Control.Concurrent.STM.TVar (newTVar, readTVar)
import Control.Monad.STM (atomically)
import Data.Version ( showVersion )
import Data.Foldable ( traverse_ )
import Data.Text ( pack, unpack )
import System.Daemon
import System.IO.Silently (silence)
import System.Environment (setEnv)
import qualified Database.Persist.Sqlite as P
import qualified Control.Exception as E
type Response = String

-- | Answer one CLI command with a human-readable response.
checkerDaemon :: Command -> IO Response
checkerDaemon cmd = case cmd of
  Status  -> return "Daemon is running!"
  Version -> return $ "Daemon version: " ++ showVersion version
-- | One pass of the daemon's periodic work: verify every user's recent
-- check-ins, dump the shared connection state, and (re)connect to peers
-- that have no live connection.  Scheduled repeatedly by 'process'.
eventLoop :: Cli -> DaemonTVar -> StorageT ()
eventLoop (Cli _ _ _ s) sm = do
  -- Check checkin status of all users
  userCheckIns <- latestCheckIns
  -- A user is valid if at least one of their recent check-ins verifies;
  -- the fold short-circuits once a valid one is found.
  checkInsValid <- traverse (\(n,ps) ->
                     (,) <$> pure n
                         <*> foldM (\a p ->
                               if a
                                 then pure a
                                 else verifyPublicCheckIn n p) False ps
                   ) userCheckIns
  liftIO $ traverse_
    (\(n,v) ->
       if v
         then logMsgLn s $ unName n ++ " has a valid checkin."
         else logMsgLn s $ unName n ++ " has not checked in recently!"
    ) checkInsValid
  -- TODO: Try to connect to all peers which don't currently have connections
  -- Dump the shared connection state for debugging.
  currMem <- liftIO $ atomically $ readTVar sm
  liftIO $ do putStrLn "Global memory dump:"; print currMem
  -- TODO: Exponentially backoff peers that are not responding
  -- TODO: Get resolved HostAddress and PortNumber in order to determine what connections to attempt
  peers <- listPeers
  peers' <- liftIO $ traverse (lookupPeerHostPort . snd) peers
  -- Drop dead connections, then attempt any peer not already connected.
  liftIO $ cleanupConnections sm
  liftIO $ traverse_ (\peer -> do
    alreadyConnected <- isPeerConnected sm peer
    unless alreadyConnected (void $ connAttempt peer sm)) peers'
-- | Entry point: parse daemon CLI options and hand them to 'process'.
daemonMain :: IO ()
daemonMain = DMSS.Daemon.CLI.daemonMain process
-- | Run the daemon: set up local storage, migrate the database, start the
-- periodic event loop and the peer listener in background threads, then
-- serve CLI commands in the foreground until interrupted.
process :: Cli -> IO ()
process cli@(Cli h cp pp s) = do
  -- Make sure local directory exists for storing data
  -- (an explicit home dir, if given, overrides $HOME).
  mapM_ (setEnv "HOME") h
  createLocalDirectory
  -- Create shared memory for all threads
  sm <- atomically $ newTVar []
  -- Create shared storage pool for all processes
  c <- dbConnectionString
  runStdoutLoggingT $ P.withSqlitePool (pack c) 10 $ \pool -> do
    -- Run migrations
    liftIO $ runStoragePool pool $ P.runMigrationSilent migrateAll >>= liftIO . mapM_ (putStrLn . unpack)
    liftIO $ logMsgLn s "Starting event loop"
    _ <- liftIO $ forkIO $ forever $ do
      -- Sleep 10 seconds between event-loop passes.
      let ms = 10000
          num_ms = 1000
      threadDelay (ms * num_ms)
      -- TODO: Try to be better about catching specific errors that could occur
      err <- E.try $ runStoragePool pool $ eventLoop cli sm :: IO (Either E.SomeException ())
      either print return err
    liftIO $ logMsgLn s $ "Listening for peers on port " ++ show pp
    _ <- liftIO $ forkIO $ do
      -- TODO: Save listening thread if needed
      _ <- incomingConnListen sm pp
      return ()
    return ()
  logMsgLn s $ "Listening for CLI commands on port " ++ show cp
  logMsgLn s "== CTRL-C to quit =="
  runInForeground (fromIntegral cp) (commandReceiver checkerDaemon)
-- Log messages functions. Simply outputs to stdout for now.
-- | Write a string fragment to stdout unless silent mode is on.
logMsg :: FlagSilent -> String -> IO ()
logMsg flag = silenceIf flag . putStr

-- | Write a full line to stdout unless silent mode is on.
logMsgLn :: FlagSilent -> String -> IO ()
logMsgLn flag = silenceIf flag . putStrLn

-- | Run an action, suppressing its output when the silent flag is set.
silenceIf :: FlagSilent -> IO a -> IO a
silenceIf flag action
  | flag == SilentOn = silence action
  | otherwise        = action
|
dmp1ce/DMSS
|
src-lib/DMSS/Daemon.hs
|
unlicense
| 4,640 | 0 | 19 | 1,068 | 1,184 | 622 | 562 | 86 | 3 |
{-# OPTIONS_GHC -fno-warn-missing-signatures #-}
{-# LANGUAGE Trustworthy, NoMonomorphismRestriction, OverloadedStrings, UnicodeSyntax, LambdaCase #-}
-- | Utility functions and reexports for System.Envy, an environment variable config reader.
module Magicbane.Config (
module X
, module Magicbane.Config
) where
import qualified System.Envy
import System.Envy as X hiding ((.=), (.!=), decode)
import System.IO (stderr)
import Magicbane.Util (hPutStrLn)
-- | Re-export of Envy's 'decode' under a non-clashing name (the module
-- hides 'decode' from its Envy re-export above).
decodeEnvy = System.Envy.decode
-- | Reads an Envy configuration from the env variables and launches the given action if successful.
-- (Does environment variable reading ever fail in practice? Probably not.)
-- | Read an Envy configuration from environment variables and run the
-- given action with it; on failure, print the error to stderr instead.
withEnvConfig ∷ FromEnv α ⇒ (α → IO ()) → IO ()
withEnvConfig action = do
  result ← decodeEnv
  case result of
    Left err → hPutStrLn stderr ("error reading env: " ++ err)
    Right cfg → action cfg
|
myfreeweb/magicbane
|
library/Magicbane/Config.hs
|
unlicense
| 943 | 0 | 11 | 227 | 160 | 93 | 67 | 14 | 2 |
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<helpset version="2.0" xml:lang="id-ID">
<title>Server-Sent Events | ZAP Extension</title>
<maps>
<homeID>top</homeID>
<mapref location="map.jhm"/>
</maps>
<view>
<name>TOC</name>
<label>Contents</label>
<type>org.zaproxy.zap.extension.help.ZapTocView</type>
<data>toc.xml</data>
</view>
<view>
<name>Index</name>
<label>Index</label>
<type>javax.help.IndexView</type>
<data>index.xml</data>
</view>
<view>
<name>Search</name>
<label>Telusuri</label>
<type>javax.help.SearchView</type>
<data engine="com.sun.java.help.search.DefaultSearchEngine">
JavaHelpSearch
</data>
</view>
<view>
<name>Favorites</name>
<label>Favorites</label>
<type>javax.help.FavoritesView</type>
</view>
</helpset>
|
0xkasun/security-tools
|
src/org/zaproxy/zap/extension/sse/resources/help_id_ID/helpset_id_ID.hs
|
apache-2.0
| 982 | 85 | 53 | 160 | 402 | 212 | 190 | -1 | -1 |
module DistinctMidpoints.A285490 (a285490_list, a285490) where
import Helpers.Midpoints (nonDuplicateMidpointSequence, ArithmeticProgression(..), Injectivity(..))
-- For all distinct integers n, m, j, k such that (n, m) != (j, k) != (m, n),
-- (n + m, a(n) + a(m)) != (j + k, a(j) + a(k))
-- | One-indexed accessor into the sequence (a(1) is the first term).
a285490 n = a285490_list !! (n - 1)
-- | The full (lazy, infinite) sequence, generated by the shared helper.
a285490_list = nonDuplicateMidpointSequence Nonarithmetic Noninjective
|
peterokagey/haskellOEIS
|
src/DistinctMidpoints/A285490.hs
|
apache-2.0
| 398 | 0 | 7 | 58 | 69 | 42 | 27 | 4 | 1 |
module Config.Command.Scan
( Config(..)
, opts
, mkCfg
)
where
import Config.Store ( storeOptDescr, ParseStore )
import Config.GetOpt ( Opts, MkCfg, noArgs, contentDirDesc )
import State.Types ( State )
-- | Configuration for the scan command.
data Config =
    Config
    { contentDir :: !(FilePath)
      -- ^ Directory whose content is scanned.
    , store :: !(Maybe (IO State))
    -- ^If this is Nothing, then just print out what we find.
    }
-- | Option descriptors for the scan command: the content-directory flag
-- and a store selector built from the supplied store parsers.
opts :: [ParseStore (IO State)] -> Opts Config
opts ss =
    [ contentDirDesc $ \p -> return $ \cfg -> cfg { contentDir = p }
    , storeOptDescr ss $ \st cfg -> cfg { store = Just st }
    ]
-- | Default configuration: scan the current directory, no store
-- (print-only). Accepts no positional arguments.
mkCfg :: MkCfg Config
mkCfg = noArgs $ Config "." Nothing
|
j3h/doc-review
|
src/Config/Command/Scan.hs
|
bsd-3-clause
| 638 | 0 | 13 | 165 | 214 | 122 | 92 | 21 | 1 |
{-# LANGUAGE OverloadedStrings #-}
module Main where
import Control.Concurrent.MVar
import Control.Monad (when)
import qualified Data.ByteString.Lazy as L
import Data.IORef (IORef, modifyIORef, newIORef,
readIORef, writeIORef)
import System.IO (Handle, IOMode (WriteMode), hClose,
openFile)
import Network.SPDY.Base
import Network.SPDY.Connection
import Network.SPDY.Frame
-- | Demo driver: connect to a SPDY endpoint, send a ping, and wait for a
-- line on stdin before exiting. The download request is commented out.
main :: IO ()
main = do
  state <- newIORef []
  (_tlsctx, spdy) <- connect "dl.google.com" "443" (callbacks state)
  {-sId <- sendRequest spdy $ \streamId -> do
        handle <- openFile "google-chrome.rpm" WriteMode
        modifyIORef state ((streamId,handle):)
  -}
  submitPing spdy
  getLine
  return ()
-- | Build the SPDY session callbacks. The IORef maps open stream IDs to
-- the file handles their payloads are written to; most callbacks just
-- trace the frames they receive.
callbacks :: IORef [(StreamID,Handle)] -> Callbacks
callbacks stateRef = Callbacks
  { cb_end_of_input =
      putStrLn "eof in cb"
  , cb_recv_data_frame = \flags streamId payload -> do
      print ("data_frame"::String, flags, streamId, "payload: " ++ show (L.length payload) ++ " bytes")
      state <- readIORef stateRef
      -- NOTE(review): irrefutable 'Just' — crashes if a data frame arrives
      -- for a stream that was never registered in the map.
      let Just handle = lookup streamId state
      L.hPut handle payload
      -- flags==1 marks the final frame of the stream: close the file and
      -- drop the stream from the map.
      when (flags==1) $ do
        hClose handle
        writeIORef stateRef [(sid, h) | (sid,h) <- state, sid /= streamId]
        putStrLn "done"
  , cb_recv_syn_frame = \flags streamId associatedStreamId priority nvh ->
      print ("syn_frame"::String, flags, streamId, associatedStreamId, priority, nvh)
  , cb_recv_syn_reply_frame = \flags streamId nvh ->
      print ("syn_reply_frame"::String, flags, streamId, nvh)
  , cb_recv_ping_frame = \pingId sentTime repliedTime -> do
      -- round-trip time, printed in milliseconds
      let delta = realToFrac repliedTime - realToFrac sentTime
      print ("ping reply"::String, pingId, sentTime, repliedTime, delta*1000)
  , cb_settings_frame = \flags settings -> print ("settiongs"::String, flags, settings)
  , cb_rst_frame = \flags streamId code -> print ("rst"::String, flags, streamId, code)
  , cb_go_away = \flags streamId -> print ("go away"::String, flags, streamId)
  }
-- | Submit a request on the session and block until a stream ID is
-- assigned, running the success callback with it first.
sendRequest :: SpdySession -> (StreamID -> IO ()) -> IO StreamID
sendRequest spdy successCb = do
  streamIDMVar <- newEmptyMVar
  submitRequest spdy 0 nvh Nothing
    (\streamId -> do
        putMVar streamIDMVar streamId
        successCb streamId)
    -- NOTE(review): on failure an 'error' thunk is put in the MVar, so the
    -- caller only crashes when forcing the result below — consider
    -- surfacing 'reason' properly.
    (\reason -> putMVar streamIDMVar (error "ups..."))
  streamId <- takeMVar streamIDMVar
  return $! streamId

-- | Name/value header block for the demo GET request (SPDY uses a header
-- list instead of an HTTP request line).
nvh :: NVH
nvh =
  [ ("host", "dl.google.com:443")
  , ("method", "GET")
  , ("url", "/linux/direct/google-chrome-stable_current_x86_64.rpm")
  , ("scheme", "https")
  , ("version", "HTTP/1.1")
  , ("accept", "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8")
  , ("accept-charset", "ISO-8859-1,utf-8;q=0.7,*;q=0.3")
  , ("accept-encoding", "gzip,deflate,sdch")
  , ("accept-language", "en-US,en;q=0.8")
  , ("user-agent", "hope/0.0.0.0")
  ]
|
kolmodin/spdy
|
example/NewAPITest.hs
|
bsd-3-clause
| 2,980 | 2 | 18 | 693 | 713 | 416 | 297 | 64 | 1 |
{-# LANGUAGE ScopedTypeVariables #-}
module Snap.Loader.Dynamic.Evaluator
( HintLoadable
, protectedHintEvaluator
) where
------------------------------------------------------------------------------
import qualified Control.Exception as Ex
import Control.Monad (when)
import Control.Monad.Trans (liftIO)
import Control.Concurrent (ThreadId, forkIO, myThreadId)
import Control.Concurrent.MVar
import Snap.Core (Snap)
------------------------------------------------------------------------------
-- | A type synonym to simply talking about the type loaded by hint.
type HintLoadable = IO (Snap (), IO ())
------------------------------------------------------------------------------
-- | Convert an action to generate 'HintLoadable's into Snap and IO actions
-- that handle periodic reloading. The resulting action will share initialized
-- state until the next execution of the input action. At this time, the
-- cleanup action will be executed.
--
-- The first two arguments control when recompiles are done. The first argument
-- is an action that is executed when compilation starts. The second is a
-- function from the result of the first action to an action that determines
-- whether the value from the previous compilation is still good. This
-- abstracts out the strategy for determining when a cached result is no longer
-- valid.
--
-- If an exception is raised during the processing of the action, it will be
-- thrown to all waiting threads, and for all requests made before the
-- recompile condition is reached.
-- Implementation note: the logic below is exception-sensitive and relies on
-- precise MVar ordering; see the inline comments before changing anything.
protectedHintEvaluator :: forall a.
                          IO a
                       -> (a -> IO Bool)
                       -> IO HintLoadable
                       -> IO (Snap (), IO ())
protectedHintEvaluator start test getInternals = do
    -- The list of requesters waiting for a result. Contains the ThreadId in
    -- case of exceptions, and an empty MVar awaiting a successful result.
    readerContainer <- newReaderContainer
    -- Contains the previous result and initialization value, and the time it
    -- was stored, if a previous result has been computed. The result stored is
    -- either the actual result and initialization result, or the exception
    -- thrown by the calculation.
    resultContainer <- newResultContainer
    -- The model used for the above MVars in the returned action is "keep them
    -- full, unless updating them." In every case, when one of those MVars is
    -- emptied, the next action is to fill that same MVar. This makes
    -- deadlocking on MVar wait impossible.
    let snap = do
            let waitForNewResult :: IO (Snap ())
                waitForNewResult = do
                    -- Need to calculate a new result
                    tid <- myThreadId
                    reader <- newEmptyMVar
                    readers <- takeMVar readerContainer
                    -- Some strictness is employed to ensure the MVar
                    -- isn't holding on to a chain of unevaluated thunks.
                    let pair = (tid, reader)
                        newReaders = readers `seq` pair `seq` (pair : readers)
                    putMVar readerContainer $! newReaders
                    -- If this is the first reader to queue, clean up the
                    -- previous state, if there was any, and then begin
                    -- evaluation of the new code and state.
                    when (null readers) $ do
                        let runAndFill = Ex.mask $ \unmask -> do
                                -- run the cleanup action
                                previous <- readMVar resultContainer
                                unmask $ cleanup previous
                                -- compile the new internals and initialize
                                stateInitializer <- unmask getInternals
                                res <- unmask stateInitializer
                                let a = fst res
                                clearAndNotify unmask (Right res)
                                    (flip putMVar a . snd)
                            killWaiting :: Ex.SomeException -> IO ()
                            killWaiting e = Ex.mask $ \unmask -> do
                                clearAndNotify unmask (Left e)
                                    (flip Ex.throwTo e . fst)
                                Ex.throwIO e
                            -- Publish a result (or exception), then drain the
                            -- reader queue, notifying each waiter with f.
                            clearAndNotify unmask r f = do
                                a <- unmask start
                                _ <- swapMVar resultContainer $ Just (r, a)
                                allReaders <- swapMVar readerContainer []
                                mapM_ f allReaders
                        _ <- forkIO $ runAndFill `Ex.catch` killWaiting
                        return ()
                    -- Wait for the evaluation of the action to complete,
                    -- and return its result.
                    takeMVar reader
            existingResult <- liftIO $ readMVar resultContainer
            getResult <- liftIO $ case existingResult of
                Just (res, a) -> do
                    -- There's an existing result. Check for validity
                    valid <- test a
                    case (valid, res) of
                        (True, Right (x, _)) -> return x
                        (True, Left e) -> Ex.throwIO e
                        (False, _) -> waitForNewResult
                Nothing -> waitForNewResult
            getResult
        clean = do
            let msg = "invalid dynamic loader state. " ++
                      "The cleanup action has been executed"
            contents <- swapMVar resultContainer $ error msg
            cleanup contents
    return (snap, clean)
  where
    newReaderContainer :: IO (MVar [(ThreadId, MVar (Snap ()))])
    newReaderContainer = newMVar []
    newResultContainer :: IO (MVar (Maybe (Either Ex.SomeException
                                                  (Snap (), IO ()), a)))
    newResultContainer = newMVar Nothing
    -- Run the stored cleanup action of a successful previous result, if any.
    cleanup (Just (Right (_, clean), _)) = clean
    cleanup _ = return ()
|
snapframework/snap-loader-dynamic
|
src/Snap/Loader/Dynamic/Evaluator.hs
|
bsd-3-clause
| 6,197 | 0 | 31 | 2,332 | 952 | 493 | 459 | 73 | 5 |
{-# LANGUAGE OverloadedStrings #-}
module Network.Wai.MakeAssetsSpec where
import Control.Exception
import Control.Lens
import Data.ByteString.Lazy (isPrefixOf)
import Data.List (intercalate)
import Network.Wai
import Network.Wai.Handler.Warp
import Network.Wreq as Wreq
import System.Directory
import System.IO.Silently
import Test.Hspec
import Test.Mockery.Directory
import Network.Wai.MakeAssets
-- | Asset-serving WAI application built from the default options.
serveDef :: IO Application
serveDef = serveAssets def
-- | Behavioural tests for 'serveAssets': serving files, running @make@,
-- custom client directories, make failures, and missing-path diagnostics.
-- All tests run in a fresh temp directory with console output silenced.
spec :: Spec
spec = do
  around_ silence $ do
    describe "serverClient" $ do
      it "returns static files" $ do
        inTempDirectory $ do
          createDirectoryIfMissing True "client"
          createDirectoryIfMissing True "assets"
          writeFile "assets/foo" "bar"
          writeFile "client/Makefile" "all:\n\ttrue"
          testWithApplication serveDef $ \ port -> do
            let url = "http://localhost:" ++ show port ++ "/foo"
            response <- get url
            response ^. responseBody `shouldBe` "bar"
      it "runs 'make' in 'client/' before answering requests" $ do
        inTempDirectory $ do
          createDirectoryIfMissing True "client"
          createDirectoryIfMissing True "assets"
          writeFile "client/Makefile" "all:\n\techo bar > ../assets/foo"
          testWithApplication serveDef $ \ port -> do
            let url = "http://localhost:" ++ show port ++ "/foo"
            response <- get url
            response ^. responseBody `shouldBe` "bar\n"
      it "allows to configure the name of the 'client/' directory" $ do
        inTempDirectory $ do
          createDirectoryIfMissing True "custom"
          createDirectoryIfMissing True "assets"
          writeFile "custom/Makefile" "all:\n\techo bar > ../assets/foo"
          let options = def{ clientDir = "custom" }
          testWithApplication (serveAssets options) $ \ port -> do
            let url = "http://localhost:" ++ show port ++ "/foo"
            response <- get url
            response ^. responseBody `shouldBe` "bar\n"
      it "returns the error messages in case 'make' fails" $ do
        inTempDirectory $ do
          createDirectoryIfMissing True "client"
          createDirectoryIfMissing True "assets"
          writeFile "client/Makefile" "all:\n\t>&2 echo error message ; false"
          testWithApplication serveDef $ \ port -> do
            let url = "http://localhost:" ++ show port ++ "/foo"
            response <- getWith acceptErrors url
            let body = response ^. responseBody
            body `shouldSatisfy` ("make error:\nerror message\n" `isPrefixOf`)
      context "complains about missing files or directories" $ do
        it "missing client/" $ do
          inTempDirectory $ do
            createDirectoryIfMissing True "assets"
            let expected = intercalate "\n" $
                  "missing directory: 'client/'" :
                  "Please create 'client/'." :
                  "(You should put sources for assets in there.)" :
                  []
            testWithApplication serveDef (\ _ -> return ())
              `shouldThrow` errorCall expected
        it "missing client/Makefile" $ do
          inTempDirectory $ do
            createDirectoryIfMissing True "client"
            createDirectoryIfMissing True "assets"
            let expected = intercalate "\n" $
                  "missing file: 'client/Makefile'" :
                  "Please create 'client/Makefile'." :
                  "(Which will be invoked to build the assets. It should put compiled assets into 'assets/'.)" :
                  []
            testWithApplication serveDef (\ _ -> return ())
              `shouldThrow` errorCall expected
        it "missing assets/" $ do
          inTempDirectory $ do
            touch "client/Makefile"
            let expected = intercalate "\n" $
                  "missing directory: 'assets/'" :
                  "Please create 'assets/'." :
                  "(All files in 'assets/' will be served.)" :
                  []
            catch (testWithApplication serveDef (\ _ -> return ())) $
              \ (ErrorCall message) -> do
                message `shouldBe` expected
-- | Wreq options that disable status-code checking, so non-2xx responses
-- are returned instead of thrown.
acceptErrors :: Wreq.Options
acceptErrors = defaults &
  checkResponse .~ Just (\ _ _ -> return ())
|
soenkehahn/wai-make-assets
|
test/Network/Wai/MakeAssetsSpec.hs
|
bsd-3-clause
| 4,386 | 0 | 30 | 1,436 | 909 | 429 | 480 | 95 | 1 |
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE TypeFamilies #-}
module Control.Monad.Loop.ForEach (ForEach(..)) where
import Control.Monad (liftM)
import Control.Monad.Primitive (PrimMonad, PrimState)
import Control.Monad.Trans.Class (lift)
-- Import the vector package qualified to write the ForEach instances
import qualified Data.Vector as V
import qualified Data.Vector.Mutable as MV
import qualified Data.Vector.Generic as G
import qualified Data.Vector.Generic.Mutable as MG
import qualified Data.Vector.Primitive as P
import qualified Data.Vector.Primitive.Mutable as MP
import qualified Data.Vector.Storable as S
import qualified Data.Vector.Storable.Mutable as MS
import qualified Data.Vector.Unboxed as U
import qualified Data.Vector.Unboxed.Mutable as MU
import Control.Monad.Loop.Internal
-- | Class of containers that can be iterated over. The class is
-- parameterized over a base monad where the values of the container can be
-- read to allow iterating over mutable structures. The associated type
-- families parameterize the value and index types of the container,
-- allowing the class to be instantiated for container types (unboxed or
-- storable vectors, for example) which do not admit all types as values.
class ForEach m c where
    -- | Element type yielded by iteration over @c@.
    type ForEachValue c
    -- | Index type yielded by indexed iteration over @c@.
    type ForEachIx c
    -- | Iterate over the values in the container.
    forEach :: Unrolling n => Unroll n -> c -> m (ForEachValue c)
    -- | Iterate over the indices and the value at each index.
    iforEach :: Unrolling n => Unroll n -> c -> m (ForEachIx c, ForEachValue c)

instance (Monad m) => ForEach (LoopLike r m) [a] where
    type ForEachValue [a] = a
    type ForEachIx [a] = Int
    -- 'head' is safe here: the loop condition (not . null) guards it.
    forEach unr = \as -> liftM head $ for unr as (not . null) tail
    {-# INLINE forEach #-}
    iforEach unr = forEach unr . zip [0..]
    {-# INLINE iforEach #-}

-- The four immutable vector flavours share the same implementation; each
-- instance only fixes the element-type constraint.
instance (Monad m) => ForEach (LoopLike r m) (V.Vector a) where
    type ForEachValue (V.Vector a) = a
    type ForEachIx (V.Vector a) = Int
    forEach = forEachVector
    iforEach = iforEachVector
    {-# INLINE forEach #-}
    {-# INLINE iforEach #-}

instance (Monad m, U.Unbox a) => ForEach (LoopLike r m) (U.Vector a) where
    type ForEachValue (U.Vector a) = a
    type ForEachIx (U.Vector a) = Int
    forEach = forEachVector
    iforEach = iforEachVector
    {-# INLINE forEach #-}
    {-# INLINE iforEach #-}

instance (Monad m, P.Prim a) => ForEach (LoopLike r m) (P.Vector a) where
    type ForEachValue (P.Vector a) = a
    type ForEachIx (P.Vector a) = Int
    forEach = forEachVector
    iforEach = iforEachVector
    {-# INLINE forEach #-}
    {-# INLINE iforEach #-}

instance (Monad m, S.Storable a) => ForEach (LoopLike r m) (S.Vector a) where
    type ForEachValue (S.Vector a) = a
    type ForEachIx (S.Vector a) = Int
    forEach = forEachVector
    iforEach = iforEachVector
    {-# INLINE forEach #-}
    {-# INLINE iforEach #-}

-- | Shared implementation for immutable vectors: iterate values only.
forEachVector :: (Monad m, G.Vector v a, Unrolling n) => Unroll n -> v a -> LoopLike r m a
{-# INLINE forEachVector #-}
forEachVector unr = liftM snd . iforEachVector unr

-- | Shared implementation for immutable vectors: iterate (index, value)
-- pairs using an unsafe (bounds-checked-by-loop) read.
iforEachVector :: (Monad m, G.Vector v a, Unrolling n) => Unroll n -> v a -> LoopLike r m (Int, a)
{-# INLINE iforEachVector #-}
iforEachVector unr = \v -> do
    let len = G.length v
    i <- for unr 0 (< len) (+ 1)
    x <- G.unsafeIndexM v i
    return (i, x)
-- Mutable vector instances: same scheme as the immutable ones, but reads
-- happen in the PrimMonad so elements reflect concurrent mutation.
instance (PrimMonad m, PrimState m ~ s) => ForEach (LoopLike r m) (MV.MVector s a) where
    type ForEachValue (MV.MVector s a) = a
    type ForEachIx (MV.MVector s a) = Int
    forEach = forEachMVector
    iforEach = iforEachMVector
    {-# INLINE forEach #-}
    {-# INLINE iforEach #-}

instance (PrimMonad m, U.Unbox a, PrimState m ~ s) => ForEach (LoopLike r m) (MU.MVector s a) where
    type ForEachValue (MU.MVector s a) = a
    type ForEachIx (MU.MVector s a) = Int
    forEach = forEachMVector
    iforEach = iforEachMVector
    {-# INLINE forEach #-}
    {-# INLINE iforEach #-}

instance (PrimMonad m, P.Prim a, PrimState m ~ s) => ForEach (LoopLike r m) (MP.MVector s a) where
    type ForEachValue (MP.MVector s a) = a
    type ForEachIx (MP.MVector s a) = Int
    forEach = forEachMVector
    iforEach = iforEachMVector
    {-# INLINE forEach #-}
    {-# INLINE iforEach #-}

instance (S.Storable a, PrimMonad m, PrimState m ~ s) => ForEach (LoopLike r m) (MS.MVector s a) where
    type ForEachValue (MS.MVector s a) = a
    type ForEachIx (MS.MVector s a) = Int
    forEach = forEachMVector
    iforEach = iforEachMVector
    {-# INLINE forEach #-}
    {-# INLINE iforEach #-}

-- | Shared implementation for mutable vectors: iterate values only.
forEachMVector :: (PrimMonad m, MG.MVector v a, Unrolling n) => Unroll n -> v (PrimState m) a -> LoopLike r m a
{-# INLINE forEachMVector #-}
forEachMVector unr = liftM snd . iforEachMVector unr

-- | Shared implementation for mutable vectors: iterate (index, value)
-- pairs; the read is lifted into the loop's base monad.
iforEachMVector :: (PrimMonad m, MG.MVector v a, Unrolling n) => Unroll n -> v (PrimState m) a -> LoopLike r m (Int, a)
{-# INLINE iforEachMVector #-}
iforEachMVector unr = \v -> do
    let len = MG.length v
    i <- for unr 0 (< len) (+ 1)
    x <- lift $ MG.unsafeRead v i
    return (i, x)
|
ttuegel/loops
|
src/Control/Monad/Loop/ForEach.hs
|
bsd-3-clause
| 5,062 | 0 | 12 | 1,079 | 1,561 | 851 | 710 | 106 | 1 |
{-# LANGUAGE EmptyDataDecls, NoMonomorphismRestriction #-}
{-# LANGUAGE TypeFamilies, FlexibleInstances #-}
-- | Unifying syntax with semantics
--
module Lambda.CFG4 where
-- Phantom syntactic categories: values of these types are never built;
-- they only index the representation type 'repr'.
data S  -- clause
data NP -- noun phrase
data VP -- verb phrase
data TV -- transitive verb

-- | Tagless-final grammar: each interpretation of the grammar is an
-- instance giving meanings to the lexicon and the two combination rules.
class Symantics repr where
    john, mary :: repr NP
    like :: repr TV
    r2 :: repr TV -> repr NP -> repr VP  -- verb + object -> verb phrase
    r1 :: repr NP -> repr VP -> repr S   -- subject + verb phrase -> clause

-- | A sample derivation, polymorphic over the interpretation.
sentence = r1 john (r2 like mary)

-- | English-string interpretation: every category is rendered as a String.
data EN a = EN { unEN :: String }
instance Symantics EN where
    john = EN "John"
    mary = EN "Mary"
    like = EN "likes"
    r2 (EN f) (EN x) = EN (f ++ " " ++ x)
    r1 (EN x) (EN f) = EN (x ++ " " ++ f)
instance Show (EN a) where
    show = unEN
sentence_en = sentence :: EN S

-- | Semantic interpretation: map each category to its denotation type.
type family Tr (a :: *) :: *
type instance Tr S = Bool
type instance Tr NP = Entity
type instance Tr VP = Entity -> Bool
type instance Tr TV = Entity -> Entity -> Bool

data Sem a = Sem { unSem :: Tr a }
data Entity = John | Mary
    deriving (Eq, Show)
instance Symantics Sem where
    john = Sem John
    mary = Sem Mary
    -- 'like' holds exactly for the two listed (subject, object) pairs.
    like = Sem (\o s -> elem (s,o) [(John,Mary), (Mary,John)])
    r2 (Sem f) (Sem x) = Sem (f x)
    r1 (Sem x) (Sem f) = Sem (f x)
instance Show (Sem S) where
    show (Sem x) = show x
sentence_sem = sentence :: Sem S
|
suhailshergill/liboleg
|
Lambda/CFG4.hs
|
bsd-3-clause
| 1,484 | 0 | 11 | 538 | 547 | 295 | 252 | -1 | -1 |
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
module Guesswork.Types where
import Control.Monad.RWS.Strict
import Control.Exception
import Control.Applicative
import Data.Typeable
import Data.Serialize as S
import GHC.Generics
import qualified Data.Vector.Unboxed as V
-- | The Guesswork monad: reader config, writer log, no state, over IO.
type Guesswork a = RWST Conf Log () IO a

-- | Configuration is currently empty; kept as a type for extensibility.
data Conf = Conf
defaultConf = Conf

-- | Log entries: a name paired with its lines.
type Log = [(String,[String])]
type LogFile = (String, [String])

type FeatureVector = V.Vector Double
type Trace = String

-- | Things whose feature vector can be rewritten in place.
class Transformable a where
  transform :: (FeatureVector -> FeatureVector) -> a -> a

-- | A training/testing sample: a target value plus a feature vector.
class (Show a, Eq a, Transformable a) => Sample a where
  target :: a -> Double
  features :: a -> FeatureVector

-- | View any sample as a (target, features) pair.
toPair :: (Sample a) => a -> (Double,FeatureVector)
toPair x = (target x, features x)

-- | The canonical concrete 'Sample'.
newtype SamplePair = SP (Double,FeatureVector)
  deriving (Show,Generic,Eq)

instance Serialize SamplePair

instance Sample SamplePair where
  target (SP (x,_)) = x
  features (SP (_,f)) = f

instance Transformable SamplePair where
  transform f (SP (x,v)) = SP (x, f v)

-- NOTE(review): orphan instance — serializes a vector via its list form.
instance Serialize (V.Vector Double) where
  get = V.fromList <$> S.get
  put = S.put . V.toList

data GuessworkException = ArrangeException String
                        | ParserException String
  deriving (Show,Typeable)
instance Exception GuessworkException

-- | Trained estimators that can be persisted and used for prediction.
class (Serialize a) => GuessworkEstimator a where
  guessWith :: a -> FeatureVector -> Double
-- | Append the 'show' rendering of a value to a string.
(+!) :: Show b => String -> b -> String
lhs +! rhs = concat [lhs, show rhs]
|
deggis/guesswork
|
src/Guesswork/Types.hs
|
bsd-3-clause
| 1,500 | 0 | 9 | 298 | 504 | 281 | 223 | 43 | 1 |
-- © 2002 Peter Thiemann
module WASH.Utility.SimpleParser where
import Data.Char
-- very simple parser combinators: Parsec is too sophisticated!
-- | A minimal list-of-successes parser: running it on input of type @a@
-- yields every possible (result, remaining input) pair; the empty list
-- means failure.
newtype Parser a b = Parser (a -> [(b, a)])

-- | Unwrap a parser to its underlying function.
unParser (Parser g) = g

-- Functor and Applicative instances are mandatory superclasses of Monad
-- since GHC 7.10 (the AMP); the original Monad-only instance no longer
-- compiles there.
instance Functor (Parser a) where
  fmap f (Parser g) = Parser (\ w -> [ (f x, w') | (x, w') <- g w ])

instance Applicative (Parser a) where
  pure x = Parser (\ w -> [(x, w)])
  pf <*> px = pf >>= \ f -> fmap f px

instance Monad (Parser a) where
  return = pure
  m >>= f = let g = unParser m in
            Parser (\ w -> [ (y, w'') | (x, w') <- g w, (y, w'') <- unParser (f x) w'])

-- 'fail' was removed from Monad in base >= 4.13; failure is the empty
-- result list, exactly as in the original.
instance MonadFail (Parser a) where
  fail str = Parser (\ w -> [])

-- | Accept one token satisfying the predicate.
satisfy p = Parser (\ w -> [(x, w') | x:w' <- [w], p x])

print    = satisfy isPrint
alphaNum = satisfy isAlphaNum
alpha    = satisfy isAlpha
ascii    = satisfy isAscii
digit    = satisfy isDigit

-- | Accept exactly the given character.
char c = satisfy (==c)

-- | Accept exactly the given string.
string s = foldr (\ x p -> do { c <- char x; cs <- p; return (c:cs); }) (return "") s

oneOf cs = satisfy (`elem` cs)
noneOf cs = satisfy (not . (`elem` cs))

-- | Succeed only at the end of input.
eof = Parser (\ w -> if null w then [((),[])] else [])

-- | No-op: a backtracking parser needs no explicit lookahead control.
try parser = parser

-- | Non-deterministic choice: results of both alternatives, in order.
p1 <|> p2 = let g1 = unParser p1
                g2 = unParser p2
            in Parser (\w -> g1 w ++ g2 w)

-- | Parse with a fallback value on failure.
option :: x -> Parser a x -> Parser a x
option x parser = parser <|> return x

-- | One or more occurrences.
many1 p =
  do x <- p
     xs <- many p
     return (x : xs)

-- | Zero or more occurrences.
many p =
  option [] (many1 p)

-- | Exactly @n@ occurrences (zero for non-positive @n@).
manyn n p =
  if n <= 0
  then return []
  else do x <- p
          xs <- manyn (n-1) p
          return (x : xs)

-- | Run a parser that must consume the whole string; 'Just' the first
-- complete parse, 'Nothing' otherwise.
parseFromString :: Parser String x -> String -> Maybe x
parseFromString parser str =
  let g = unParser (parser >>= (\x -> eof >> return x)) in
  case g str of
    (x, ""): _ -> Just x
    _ -> Nothing

-- | Adapt a parser to the 'ReadS' interface.
parserToRead :: Parser String x -> ReadS x
parserToRead parser = unParser parser
|
nh2/WashNGo
|
WASH/Utility/SimpleParser.hs
|
bsd-3-clause
| 1,527 | 9 | 16 | 395 | 815 | 423 | 392 | 46 | 2 |
-- | This module defines a simple textual weather widget that polls
-- NOAA for weather data. To find your weather station, you can use
--
-- <http://www.nws.noaa.gov/tg/siteloc.php>
--
-- For example, Madison, WI is KMSN.
--
-- NOAA provides several pieces of information in each request; you
-- can control which pieces end up in your weather widget by providing
-- a _template_ that is filled in with the current information. The
-- template is just a 'String' with variables between dollar signs.
-- The variables will be substituted with real data by the widget.
-- Example:
--
-- > let wcfg = (defaultWeatherConfig "KMSN") { weatherTemplate = "$tempC$ C @ $humidity$" }
-- > weatherWidget = weatherNew wcfg 10
--
-- This example makes a new weather widget that checks the weather at
-- KMSN (Madison, WI) every 10 minutes, and displays the results in
-- Celcius.
--
-- Available variables:
--
-- [@stationPlace@] The name of the weather station
--
-- [@stationState@] The state that the weather station is in
--
-- [@year@] The year the report was generated
--
-- [@month@] The month the report was generated
--
-- [@day@] The day the report was generated
--
-- [@hour@] The hour the report was generated
--
-- [@wind@] The direction and strength of the wind
--
-- [@visibility@] Description of current visibility conditions
--
-- [@skyCondition@] ?
--
-- [@tempC@] The temperature in Celcius
--
-- [@tempF@] The temperature in Farenheit
--
-- [@dewPoint@] The current dew point
--
-- [@humidity@] The current relative humidity
--
-- [@pressure@] The current pressure
--
--
-- As an example, a template like
--
-- > "$tempF$ °F"
--
-- would yield a widget displaying the temperature in Farenheit with a
-- small label after it.
--
-- Implementation Note: the weather data parsing code is taken from
-- xmobar. This version of the code makes direct HTTP requests
-- instead of invoking a separate cURL process.
module System.Taffybar.Weather (
-- * Types
WeatherConfig(..),
WeatherInfo(..),
WeatherFormatter(WeatherFormatter),
-- * Constructor
weatherNew,
weatherCustomNew,
defaultWeatherConfig
) where
import Network.HTTP
import Network.URI
import Graphics.UI.Gtk
import Text.Parsec
import Text.Printf
import Text.StringTemplate
import System.Taffybar.Widgets.PollingLabel
-- | All fields of a decoded NOAA METAR report that the widget exposes.
data WeatherInfo =
    WI { stationPlace :: String
       , stationState :: String
       , year :: String
       , month :: String
       , day :: String
       , hour :: String
       , wind :: String
       , visibility :: String
       , skyCondition :: String
       , tempC :: Int
       , tempF :: Int
       , dewPoint :: String
       , humidity :: Int
       , pressure :: Int
       } deriving (Show)

-- Parsers stolen from xmobar
type Parser = Parsec String ()

-- | Parse the "YYYY.MM.DD HHMM" report timestamp into (y, m, d, "HH:MM").
pTime :: Parser (String, String, String, String)
pTime = do
  y <- getNumbersAsString
  _ <- char '.'
  m <- getNumbersAsString
  _ <- char '.'
  d <- getNumbersAsString
  _ <- char ' '
  -- assumes the time digits are at least "HMM" — pattern failure makes
  -- the parser fail on shorter input (TODO confirm against NOAA format)
  (h:hh:mi:mimi) <- getNumbersAsString
  _ <- char ' '
  return (y, m, d ,([h]++[hh]++":"++[mi]++mimi))

-- | Parse "F (C)" temperatures; returns (Celsius, Fahrenheit), truncated.
pTemp :: Parser (Int, Int)
pTemp = do
  let num = digit <|> char '-' <|> char '.'
  f <- manyTill num $ char ' '
  _ <- manyTill anyChar $ char '('
  c <- manyTill num $ char ' '
  _ <- skipRestOfLine
  return $ (floor (read c :: Double), floor (read f :: Double))

-- | Parse the relative-humidity percentage.
pRh :: Parser Int
pRh = do
  s <- manyTill digit $ (char '%' <|> char '.')
  return $ read s

-- | Parse the pressure value inside parentheses.
pPressure :: Parser Int
pPressure = do
  _ <- manyTill anyChar $ char '('
  s <- manyTill digit $ char ' '
  _ <- skipRestOfLine
  return $ read s

-- | Parse a full decoded METAR report into a 'WeatherInfo'.
parseData :: Parser WeatherInfo
parseData = do
  st <- getAllBut ","
  _ <- space
  ss <- getAllBut "("
  _ <- skipRestOfLine >> getAllBut "/"
  (y,m,d,h) <- pTime
  w <- getAfterString "Wind: "
  v <- getAfterString "Visibility: "
  sk <- getAfterString "Sky conditions: "
  _ <- skipTillString "Temperature: "
  (tC,tF) <- pTemp
  dp <- getAfterString "Dew Point: "
  _ <- skipTillString "Relative Humidity: "
  rh <- pRh
  _ <- skipTillString "Pressure (altimeter): "
  p <- pPressure
  _ <- manyTill skipRestOfLine eof
  return $ WI st ss y m d h w v sk tC tF dp rh p

-- | Consume characters up to (and including) the first character of @s@.
getAllBut :: String -> Parser String
getAllBut s =
    manyTill (noneOf s) (char $ head s)

-- | Return the rest of the line following the first occurrence of @s@,
-- or a placeholder message when @s@ never occurs.
getAfterString :: String -> Parser String
getAfterString s = pAfter <|> return ("<" ++ s ++ " not found!>")
    where
      pAfter = do
        _ <- try $ manyTill skipRestOfLine $ string s
        v <- manyTill anyChar $ newline
        return v

-- | Skip whole lines until the literal @s@ is consumed.
skipTillString :: String -> Parser String
skipTillString s =
    manyTill skipRestOfLine $ string s

-- | Skip leading whitespace, then read one or more digits.
getNumbersAsString :: Parser String
getNumbersAsString = skipMany space >> many1 digit >>= \n -> return n

-- | Consume the remainder of the current line including its newline.
skipRestOfLine :: Parser Char
skipRestOfLine = do
  _ <- many $ noneOf "\n\r"
  newline
-- | Simple: download the document at a URL. Taken from Real World
-- Haskell.
-- | Download the document at a URL, following at most a bounded number of
-- HTTP redirects. Returns 'Left' with a description on any failure.
-- (Adapted from Real World Haskell.)
downloadURL :: String -> IO (Either String String)
downloadURL = go (5 :: Int)
  where
    -- Bounding the redirect depth prevents non-termination when two
    -- locations redirect to each other (the original recursed forever).
    go 0 _ = return $ Left "Error: too many HTTP redirects"
    go n url =
      case parseURI url of
        -- The original bound 'Just uri' irrefutably, crashing on a
        -- malformed URL; report the problem instead.
        Nothing -> return $ Left ("Error: invalid URL " ++ show url)
        Just uri -> do
          resp <- simpleHTTP (request uri)
          case resp of
            Left x -> return $ Left ("Error connecting: " ++ show x)
            Right r ->
              case rspCode r of
                (2,_,_) -> return $ Right (rspBody r)
                (3,_,_) -> -- A HTTP redirect
                  case findHeader HdrLocation r of
                    Nothing -> return $ Left (show r)
                    Just url' -> go (n - 1) url'
                _ -> return $ Left (show r)
    request uri = Request { rqURI = uri
                          , rqMethod = GET
                          , rqHeaders = []
                          , rqBody = ""
                          }
-- | Fetch and parse the report at the given URL; any download or parse
-- failure is reported as 'Left'.
getWeather :: String -> IO (Either String WeatherInfo)
getWeather url = do
  dat <- downloadURL url
  case dat of
    Right dat' -> case parse parseData url dat' of
      Right d -> return (Right d)
      Left err -> return (Left (show err))
    Left err -> return (Left (show err))

-- | Render a report by substituting every template variable documented in
-- the module header into the given StringTemplate.
defaultFormatter :: StringTemplate String -> WeatherInfo -> String
defaultFormatter tpl wi = render tpl'
  where
    tpl' = setManyAttrib [ ("stationPlace", stationPlace wi)
                         , ("stationState", stationState wi)
                         , ("year", year wi)
                         , ("month", month wi)
                         , ("day", day wi)
                         , ("hour", hour wi)
                         , ("wind", wind wi)
                         , ("visibility", visibility wi)
                         , ("skyCondition", skyCondition wi)
                         , ("tempC", show (tempC wi))
                         , ("tempF", show (tempF wi))
                         , ("dewPoint", dewPoint wi)
                         , ("humidity", show (humidity wi))
                         , ("pressure", show (pressure wi))
                         ] tpl

-- | Run one polling step: fetch the report and format it with either the
-- custom formatter or the template; on failure log the error and show
-- "N/A" in the widget.
getCurrentWeather :: IO (Either String WeatherInfo)
                  -> StringTemplate String
                  -> WeatherFormatter
                  -> IO String
getCurrentWeather getter tpl formatter = do
  dat <- getter
  case dat of
    Right wi -> do
      case formatter of
        DefaultWeatherFormatter -> return (defaultFormatter tpl wi)
        WeatherFormatter f -> return (f wi)
    Left err -> do
      putStrLn err
      return "N/A"
-- | The NOAA URL to get data from
-- | The NOAA URL to get data from.
-- NOTE(review): plain-http endpoint; verify it is still served.
baseUrl :: String
baseUrl = "http://weather.noaa.gov/pub/data/observations/metar/decoded"

-- | A wrapper to allow users to specify a custom weather formatter.
-- The default interpolates variables into a string as described
-- above. Custom formatters can do basically anything.
data WeatherFormatter = WeatherFormatter (WeatherInfo -> String) -- ^ Specify a custom formatter for 'WeatherInfo'
                      | DefaultWeatherFormatter -- ^ Use the default StringTemplate formatter

-- | The configuration for the weather widget. You can provide a custom
-- format string through 'weatherTemplate' as described above, or you can
-- provide a custom function to turn a 'WeatherInfo' into a String via the
-- 'weatherFormatter' field.
data WeatherConfig =
  WeatherConfig { weatherStation :: String -- ^ The weather station to poll. No default
                , weatherTemplate :: String -- ^ Template string, as described above. Default: $tempF$ °F
                , weatherFormatter :: WeatherFormatter -- ^ Default: substitute in all interpolated variables (above)
                }

-- | A sensible default configuration for the weather widget that just
-- renders the temperature.
defaultWeatherConfig :: String -> WeatherConfig
defaultWeatherConfig station = WeatherConfig { weatherStation = station
                                             , weatherTemplate = "$tempF$ °F"
                                             , weatherFormatter = DefaultWeatherFormatter
                                             }

-- | Create a periodically-updating weather widget that polls NOAA.
weatherNew :: WeatherConfig -- ^ Configuration to render
           -> Double -- ^ Polling period in _minutes_
           -> IO Widget
weatherNew cfg delayMinutes = do
  let url = printf "%s/%s.TXT" baseUrl (weatherStation cfg)
      getter = getWeather url
  weatherCustomNew getter (weatherTemplate cfg) (weatherFormatter cfg) delayMinutes

-- | Create a periodically-updating weather widget using custom weather getter
weatherCustomNew :: IO (Either String WeatherInfo) -- ^ Weather querying action
                 -> String -- ^ Weather template
                 -> WeatherFormatter -- ^ Weather formatter
                 -> Double -- ^ Polling period in _minutes_
                 -> IO Widget
weatherCustomNew getter tpl formatter delayMinutes = do
  let tpl' = newSTMP tpl
  -- the label shows "N/A" until the first successful poll completes
  l <- pollingLabelNew "N/A" (delayMinutes * 60) (getCurrentWeather getter tpl' formatter)
  widgetShowAll l
  return l
|
Undeterminant/taffybar
|
src/System/Taffybar/Weather.hs
|
bsd-3-clause
| 9,810 | 0 | 19 | 2,741 | 2,135 | 1,118 | 1,017 | 185 | 5 |
module Board_Test
(tests)
where
import Test.Tasty
--import Test.Tasty.SmallCheck as SC
--import Test.Tasty.QuickCheck as QC
import Test.Tasty.HUnit
import Data.List
import Data.Ord
-- | Top-level tasty test tree for the board module.
tests :: TestTree
tests = testGroup "Board tests" [unitTests]
-- | HUnit test cases for the board; currently an empty placeholder.
unitTests :: TestTree
unitTests = testGroup "Unit tests"
  []
|
jasdennison/scrabble-solver
|
tests/Board_Test.hs
|
bsd-3-clause
| 290 | 0 | 6 | 44 | 62 | 37 | 25 | 10 | 1 |
-- | Convenience re-export module collecting the expression, type, and
-- operation classes/types of the PL0 AST; all names come from
-- "PL0.Internal".
module PL0.AST.Class (
    Expression(..)
  , ResolvedExpression(..)
  , Type(..)
  , Resolved(..)
  , ResolvedType(..)
  , OperationName(..)
  , Operation(..)
  , TypedExp(..)
  , TFunction(..)
  , checkArgs
  ) where

import PL0.Internal
|
LightAndLight/pl0-haskell
|
src/PL0/AST/Class.hs
|
bsd-3-clause
| 246 | 0 | 5 | 57 | 80 | 56 | 24 | 12 | 0 |
module Utils where
import Data.Either
import qualified Data.ByteString as B
import qualified Data.Binary.Strict.Get as S
import Test.Hspec
-- | Read the bundled minizork story file used by the specs.
minizork () = B.readFile "stories/minizork.z3"
-- | @ev e k@: run continuation @k@ on a 'Right' payload; on a 'Left'
-- message, fail the current expectation ('flip' of
-- @either expectationFailure@, so the 'Either' comes first).
ev = flip $ either expectationFailure
-- | @runGet input parser@: run a strict binary parser over @input@ and
-- keep only the result, discarding the unconsumed remainder.
runGet fi = fst . flip S.runGet fi
|
theor/zorkell
|
test/Utils.hs
|
bsd-3-clause
| 262 | 0 | 7 | 39 | 78 | 45 | 33 | 8 | 1 |
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeSynonymInstances #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# OPTIONS_GHC -F -pgmFtrhsx #-}
module Crete.Stats.Stats where
import Control.Applicative
import Control.Monad
import Control.Monad.Reader
import Data.Time (getCurrentTime)
import qualified Data.Hashable as H
import qualified Data.Map as Map
import qualified Data.ByteString.Char8 as BS
import Crypto.Hash.SHA512 (hash)
import Happstack.Server
import HSP
import Web.Routes.XMLGenT ()
import Text.Reform
import Text.Reform.Happstack
import Text.Reform.HSP.String
import Crete.Type
import Crete.Url.Url
import Crete.Store.Store
import Crete.Store.StoreTypes (LoginToken(..), ProductMap, Product(..))
import qualified Crete.Templates.Page as Page
-- | Plain-text password as entered in the login form.
type Password = String
-- | Payload of the admin login form: just the password.
newtype Login = Login Password deriving (Show)
-- | Errors the login form can produce: a wrong password, or a generic
-- common-form error wrapped by 'AppCFE'.
data LoginError = InvalidPassword
                | AppCFE (CommonFormError [Input]) deriving (Show)
-- Reform plumbing: common form errors are injected via 'AppCFE'.
instance FormError LoginError where
    type ErrorInputType LoginError = [Input]
    commonFormError = AppCFE
-- | Shorthand for a reform 'Form' over happstack 'Input's with
-- 'LoginError' errors and XML children.
type SimpleForm m =
  Form m [Input] LoginError [XMLGenT m XML] ()
-- | Name of the session cookie carrying the login token.
cretetoken :: String
cretetoken = "cretetoken"
-- | Session cookie lifetime: 10 minutes.
cookieLife :: CookieLife
cookieLife = MaxAge (60 * 10)
--checkPassword :: String -> Either LoginError String
--checkPassword "abc123" = Right "Ok, you're in!"
--checkPassword _ = Left InvalidPassword
-- newtype ProductFile = ProductFile FilePath deriving (Show)
--fileUpload :: SimpleForm RoutedServer ProductFile
--fileUpload =
-- ProductFile <$> inputFile <* inputSubmit "Submit"
-- | Login form: one password field plus a submit button.  The entered
-- string is compared against the expected password @pwd@; a mismatch
-- yields 'InvalidPassword'.
loginForm :: String -> SimpleForm RoutedServer Login
loginForm pwd =
  Login <$> label "Bitte Passwort eingeben " ++> lgin <* inputSubmit "Submit"
  where lgin = -- errors listErrors ++> label "Login:" ++>
          (inputPassword `transformEither` checkPassword)
        checkPassword str =
          if str == pwd
            then Right "Ok, you're in!"
            else Left InvalidPassword
-- | Request-body decoding policy: temp dir @/tmp/@, 0 bytes for file
-- uploads (presumably uploads disabled -- TODO confirm against the
-- 'defaultBodyPolicy' argument order), 1000 bytes each for form data
-- and headers.
myPolicy :: BodyPolicy
myPolicy = (defaultBodyPolicy "/tmp/" 0 1000 1000)
-- | Succeed only when the request carries a login cookie whose value
-- matches the token stored on the server; otherwise fall through ('mzero').
checkLogin :: RoutedServer ()
checkLogin = do
  LoginToken stored <- getLoginToken =<< ask
  presented <- lookCookieValue cretetoken
  unless (stored == Just presented) mzero
-- | Generate a fresh session token, store it server-side, set it as a
-- cookie, and render the admin page.
login :: Login -> RoutedServer XML
login (Login str) = do
  time <- liftIO $ getCurrentTime
  -- Token = SHA-512 of (current time ++ password text), folded to an Int
  -- via Data.Hashable and rendered with 'show'.
  let h = show $ H.hash $ hash $ BS.pack $ show time ++ str
  config <- ask
  setLoginToken config h
  addCookie cookieLife (mkCookie cretetoken h)
  adminTemplate (WithLang German LoginPage) "Logged in ..."
-- | Render the admin page: the action list ('todoList') plus the result
-- of the last action embedded below it.
adminTemplate ::
  EmbedAsChild RoutedServer result => Url -> result -> RoutedServer XML
adminTemplate url res =
  Page.template url "Verwaltung" $
   <div>
    <h1>Aktionen</h1>
    <% todoList %>
    <h1>Ergebnis der letzten Aktion</h1>
    <% res %>
   </div>
-- | List of admin actions, each linking to its routed URL (token
-- inspection, reloading products/markup, restart, logout).
todoList :: XMLGenT RoutedServer (XMLType RoutedServer)
todoList =
  <ul>
   <li><a href=(slashUrlToStr (WithLang German LookToken))>Look token</a></li>
   <li><a href=(slashUrlToStr (WithLang German LoadProd))>Produktliste neu laden</a></li>
   <li><a href=(slashUrlToStr (WithLang German LoadMarkup))>Seiten neu laden</a></li>
   <li><a href=(slashUrlToStr (WithLang German Restart))>Server neu starten und dabei die Konfiguration neu laden</a></li>
   <li><a href=(slashUrlToStr (WithLang German LogoutPage))>Logout</a></li>
  </ul>
-- | Handler for the login page: decode the request body, run the login
-- form, and either re-render the form (on validation failure) or perform
-- 'login' with the accepted credentials.
loginPage :: RoutedServer XML
loginPage = do
  decodeBody myPolicy
  config <- ask
  let pwd = cnfPassword $ cnf config
      nextpage = form $ slashUrlToStr (WithLang German LoginPage)
  res <- happstackEitherForm nextpage "fieldId" (loginForm pwd)
  case res of
    (Left formHtml) ->
      Page.template (WithLang German LoginPage) "Verwaltung" formHtml
    (Right l) -> login l
-- | Render validation feedback for an uploaded product list: with no
-- errors, offer a publish link; otherwise show (German) hints and the
-- list of offending columns.
productListError :: [String] -> XMLGenT RoutedServer (XMLType RoutedServer)
productListError [] =
  <div>
    <h2>Produktliste in Ordnung</h2>
    <a href=(slashUrlToStr (WithLang German PublishProd))>Veröffentlichen</a>
  </div>
-- Non-empty error list: general hints plus the per-column messages.
productListError es =
  <div>
    <h2><font color="#FF0000">Fehler in der Produktliste</font></h2>
    <ul>
      <li>Haben Sie eine csv-Datei übermittelt?</li>
      <li>Sind die Spalten durch Kommas getrennt?</li>
      <li>Steht der Text in den Zellen in Gänsefüßchen?</li>
    </ul>
    <h2><font color="#FF0000">Folgende Spalten enthalten Fehler</font></h2>
    <ul><% map (\x -> <li><% x %></li>) es %></ul>
  </div>
-- | Preview table for the parsed product list; an empty map renders
-- nothing.  Each product becomes one table row appended (via '<:') to a
-- header-only table.
productListPreview :: ProductMap -> XMLGenT RoutedServer (XMLType RoutedServer)
productListPreview pm | Map.null pm = <div></div>
productListPreview pm =
  <div>
    <h2>Vorschau der Produktliste</h2>
    <% Map.foldlWithKey f emptyTable pm %>
  </div>
  where emptyTable =
          <table class="products" rules="rows">
            <tr class="productline">
              <td><b>Name</b></td>
              <td><b>Menge</b></td>
              <td><b>Beschreibung</b></td>
              <td><b>Bild</b></td>
              <td><b>Einheit</b></td>
              <td><b>Preis</b></td> </tr>
          </table>
        f t name p = t <:
          (<tr class="productline">
             <td><% name %></td>
             <td><% show (productQuantity p) %></td>
             <td><% productDescription p %></td>
             <td> <img src=("/img/" ++ productPicture p) width="120px"/>
                  <% productPicture p %> </td>
             <td><% productUnit p %></td>
             <td><% show (productPrice p) %></td> </tr>)
-- | Combine validation feedback and preview: error report on top, table
-- preview below.
productList ::
  ([String], ProductMap) -> XMLGenT RoutedServer (XMLType RoutedServer)
productList (es, pm) =
  <div>
    <% productListError es %>
    <% productListPreview pm %>
  </div>
|
fphh/crete
|
src/Crete/Stats/Stats.hs
|
bsd-3-clause
| 5,697 | 131 | 49 | 1,206 | 1,965 | 1,006 | 959 | -1 | -1 |
{-# language CPP #-}
-- | = Name
--
-- VK_FUCHSIA_imagepipe_surface - instance extension
--
-- == VK_FUCHSIA_imagepipe_surface
--
-- [__Name String__]
-- @VK_FUCHSIA_imagepipe_surface@
--
-- [__Extension Type__]
-- Instance extension
--
-- [__Registered Extension Number__]
-- 215
--
-- [__Revision__]
-- 1
--
-- [__Extension and Version Dependencies__]
--
-- - Requires Vulkan 1.0
--
-- - Requires @VK_KHR_surface@
--
-- [__Contact__]
--
-- - Craig Stout
-- <https://github.com/KhronosGroup/Vulkan-Docs/issues/new?body=[VK_FUCHSIA_imagepipe_surface] @cdotstout%0A<<Here describe the issue or question you have about the VK_FUCHSIA_imagepipe_surface extension>> >
--
-- == Other Extension Metadata
--
-- [__Last Modified Date__]
-- 2018-07-27
--
-- [__IP Status__]
-- No known IP claims.
--
-- [__Contributors__]
--
-- - Craig Stout, Google
--
-- - Ian Elliott, Google
--
-- - Jesse Hall, Google
--
-- == Description
--
-- The @VK_FUCHSIA_imagepipe_surface@ extension is an instance extension.
-- It provides a mechanism to create a
-- 'Vulkan.Extensions.Handles.SurfaceKHR' object (defined by the
-- @VK_KHR_surface@ extension) that refers to a Fuchsia @imagePipeHandle@.
--
-- == New Commands
--
-- - 'createImagePipeSurfaceFUCHSIA'
--
-- == New Structures
--
-- - 'ImagePipeSurfaceCreateInfoFUCHSIA'
--
-- == New Bitmasks
--
-- - 'ImagePipeSurfaceCreateFlagsFUCHSIA'
--
-- == New Enum Constants
--
-- - 'FUCHSIA_IMAGEPIPE_SURFACE_EXTENSION_NAME'
--
-- - 'FUCHSIA_IMAGEPIPE_SURFACE_SPEC_VERSION'
--
-- - Extending 'Vulkan.Core10.Enums.StructureType.StructureType':
--
-- - 'Vulkan.Core10.Enums.StructureType.STRUCTURE_TYPE_IMAGEPIPE_SURFACE_CREATE_INFO_FUCHSIA'
--
-- == Version History
--
-- - Revision 1, 2018-07-27 (Craig Stout)
--
-- - Initial draft.
--
-- == See Also
--
-- 'ImagePipeSurfaceCreateFlagsFUCHSIA',
-- 'ImagePipeSurfaceCreateInfoFUCHSIA', 'createImagePipeSurfaceFUCHSIA'
--
-- == Document Notes
--
-- For more information, see the
-- <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/html/vkspec.html#VK_FUCHSIA_imagepipe_surface Vulkan Specification>
--
-- This page is a generated document. Fixes and changes should be made to
-- the generator scripts, not directly.
module Vulkan.Extensions.VK_FUCHSIA_imagepipe_surface ( createImagePipeSurfaceFUCHSIA
, ImagePipeSurfaceCreateInfoFUCHSIA(..)
, ImagePipeSurfaceCreateFlagsFUCHSIA(..)
, FUCHSIA_IMAGEPIPE_SURFACE_SPEC_VERSION
, pattern FUCHSIA_IMAGEPIPE_SURFACE_SPEC_VERSION
, FUCHSIA_IMAGEPIPE_SURFACE_EXTENSION_NAME
, pattern FUCHSIA_IMAGEPIPE_SURFACE_EXTENSION_NAME
, Zx_handle_t
, SurfaceKHR(..)
) where
import Vulkan.Internal.Utils (enumReadPrec)
import Vulkan.Internal.Utils (enumShowsPrec)
import Vulkan.Internal.Utils (traceAroundEvent)
import Control.Exception.Base (bracket)
import Control.Monad (unless)
import Control.Monad.IO.Class (liftIO)
import Foreign.Marshal.Alloc (allocaBytes)
import Foreign.Marshal.Alloc (callocBytes)
import Foreign.Marshal.Alloc (free)
import GHC.Base (when)
import GHC.IO (throwIO)
import GHC.Ptr (nullFunPtr)
import Foreign.Ptr (nullPtr)
import Foreign.Ptr (plusPtr)
import GHC.Show (showString)
import Numeric (showHex)
import Control.Monad.Trans.Class (lift)
import Control.Monad.Trans.Cont (evalContT)
import Vulkan.CStruct (FromCStruct)
import Vulkan.CStruct (FromCStruct(..))
import Vulkan.CStruct (ToCStruct)
import Vulkan.CStruct (ToCStruct(..))
import Vulkan.Zero (Zero)
import Vulkan.Zero (Zero(..))
import Control.Monad.IO.Class (MonadIO)
import Data.Bits (Bits)
import Data.Bits (FiniteBits)
import Data.String (IsString)
import Data.Typeable (Typeable)
import Foreign.Storable (Storable)
import Foreign.Storable (Storable(peek))
import Foreign.Storable (Storable(poke))
import qualified Foreign.Storable (Storable(..))
import GHC.Generics (Generic)
import GHC.IO.Exception (IOErrorType(..))
import GHC.IO.Exception (IOException(..))
import Foreign.Ptr (FunPtr)
import Foreign.Ptr (Ptr)
import GHC.Read (Read(readPrec))
import GHC.Show (Show(showsPrec))
import Data.Word (Word32)
import Data.Kind (Type)
import Control.Monad.Trans.Cont (ContT(..))
import Vulkan.NamedType ((:::))
import Vulkan.Core10.AllocationCallbacks (AllocationCallbacks)
import Vulkan.Core10.FundamentalTypes (Flags)
import Vulkan.Core10.Handles (Instance)
import Vulkan.Core10.Handles (Instance(..))
import Vulkan.Core10.Handles (Instance(Instance))
import Vulkan.Dynamic (InstanceCmds(pVkCreateImagePipeSurfaceFUCHSIA))
import Vulkan.Core10.Handles (Instance_T)
import Vulkan.Core10.Enums.Result (Result)
import Vulkan.Core10.Enums.Result (Result(..))
import Vulkan.Core10.Enums.StructureType (StructureType)
import Vulkan.Extensions.Handles (SurfaceKHR)
import Vulkan.Extensions.Handles (SurfaceKHR(..))
import Vulkan.Exception (VulkanException(..))
import Vulkan.Core10.Enums.StructureType (StructureType(STRUCTURE_TYPE_IMAGEPIPE_SURFACE_CREATE_INFO_FUCHSIA))
import Vulkan.Core10.Enums.Result (Result(SUCCESS))
import Vulkan.Extensions.Handles (SurfaceKHR(..))
-- Turn the dynamically loaded @vkCreateImagePipeSurfaceFUCHSIA@ function
-- pointer into a callable Haskell function.  CPP selects an @unsafe@
-- foreign call unless SAFE_FOREIGN_CALLS is defined.
foreign import ccall
#if !defined(SAFE_FOREIGN_CALLS)
  unsafe
#endif
  "dynamic" mkVkCreateImagePipeSurfaceFUCHSIA
  :: FunPtr (Ptr Instance_T -> Ptr ImagePipeSurfaceCreateInfoFUCHSIA -> Ptr AllocationCallbacks -> Ptr SurfaceKHR -> IO Result) -> Ptr Instance_T -> Ptr ImagePipeSurfaceCreateInfoFUCHSIA -> Ptr AllocationCallbacks -> Ptr SurfaceKHR -> IO Result
-- | vkCreateImagePipeSurfaceFUCHSIA - Create a
-- 'Vulkan.Extensions.Handles.SurfaceKHR' object for a Fuchsia ImagePipe
--
-- == Valid Usage (Implicit)
--
-- - #VUID-vkCreateImagePipeSurfaceFUCHSIA-instance-parameter# @instance@
-- /must/ be a valid 'Vulkan.Core10.Handles.Instance' handle
--
-- - #VUID-vkCreateImagePipeSurfaceFUCHSIA-pCreateInfo-parameter#
-- @pCreateInfo@ /must/ be a valid pointer to a valid
-- 'ImagePipeSurfaceCreateInfoFUCHSIA' structure
--
-- - #VUID-vkCreateImagePipeSurfaceFUCHSIA-pAllocator-parameter# If
-- @pAllocator@ is not @NULL@, @pAllocator@ /must/ be a valid pointer
-- to a valid 'Vulkan.Core10.AllocationCallbacks.AllocationCallbacks'
-- structure
--
-- - #VUID-vkCreateImagePipeSurfaceFUCHSIA-pSurface-parameter# @pSurface@
-- /must/ be a valid pointer to a
-- 'Vulkan.Extensions.Handles.SurfaceKHR' handle
--
-- == Return Codes
--
-- [<https://www.khronos.org/registry/vulkan/specs/1.2-extensions/html/vkspec.html#fundamentals-successcodes Success>]
--
-- - 'Vulkan.Core10.Enums.Result.SUCCESS'
--
-- [<https://www.khronos.org/registry/vulkan/specs/1.2-extensions/html/vkspec.html#fundamentals-errorcodes Failure>]
--
-- - 'Vulkan.Core10.Enums.Result.ERROR_OUT_OF_HOST_MEMORY'
--
-- - 'Vulkan.Core10.Enums.Result.ERROR_OUT_OF_DEVICE_MEMORY'
--
-- = See Also
--
-- <https://www.khronos.org/registry/vulkan/specs/1.2-extensions/html/vkspec.html#VK_FUCHSIA_imagepipe_surface VK_FUCHSIA_imagepipe_surface>,
-- 'Vulkan.Core10.AllocationCallbacks.AllocationCallbacks',
-- 'ImagePipeSurfaceCreateInfoFUCHSIA', 'Vulkan.Core10.Handles.Instance',
-- 'Vulkan.Extensions.Handles.SurfaceKHR'
createImagePipeSurfaceFUCHSIA :: forall io
                               . (MonadIO io)
                              => -- | @instance@ is the instance to associate with the surface.
                                 Instance
                              -> -- | @pCreateInfo@ is a pointer to a 'ImagePipeSurfaceCreateInfoFUCHSIA'
                                 -- structure containing parameters affecting the creation of the surface
                                 -- object.
                                 ImagePipeSurfaceCreateInfoFUCHSIA
                              -> -- | @pAllocator@ is the allocator used for host memory allocated for the
                                 -- surface object when there is no more specific allocator available (see
                                 -- <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/html/vkspec.html#memory-allocation Memory Allocation>).
                                 ("allocator" ::: Maybe AllocationCallbacks)
                              -> io (SurfaceKHR)
createImagePipeSurfaceFUCHSIA instance' createInfo allocator = liftIO . evalContT $ do
  let vkCreateImagePipeSurfaceFUCHSIAPtr = pVkCreateImagePipeSurfaceFUCHSIA (case instance' of Instance{instanceCmds} -> instanceCmds)
  -- Guard against a null function pointer (extension not loaded).
  lift $ unless (vkCreateImagePipeSurfaceFUCHSIAPtr /= nullFunPtr) $
    throwIO $ IOError Nothing InvalidArgument "" "The function pointer for vkCreateImagePipeSurfaceFUCHSIA is null" Nothing Nothing
  let vkCreateImagePipeSurfaceFUCHSIA' = mkVkCreateImagePipeSurfaceFUCHSIA vkCreateImagePipeSurfaceFUCHSIAPtr
  -- Marshal the create-info and (optional) allocator to C structs; ContT
  -- scopes the temporary allocations to the remainder of this block.
  pCreateInfo <- ContT $ withCStruct (createInfo)
  pAllocator <- case (allocator) of
    Nothing -> pure nullPtr
    Just j -> ContT $ withCStruct (j)
  -- 8-byte output slot for the returned surface handle.
  pPSurface <- ContT $ bracket (callocBytes @SurfaceKHR 8) free
  r <- lift $ traceAroundEvent "vkCreateImagePipeSurfaceFUCHSIA" (vkCreateImagePipeSurfaceFUCHSIA' (instanceHandle (instance')) pCreateInfo pAllocator (pPSurface))
  -- Vulkan error codes are negative; raise them as Haskell exceptions.
  lift $ when (r < SUCCESS) (throwIO (VulkanException r))
  pSurface <- lift $ peek @SurfaceKHR pPSurface
  pure $ (pSurface)
-- | VkImagePipeSurfaceCreateInfoFUCHSIA - Structure specifying parameters of
-- a newly created ImagePipe surface object
--
-- == Valid Usage (Implicit)
--
-- = See Also
--
-- <https://www.khronos.org/registry/vulkan/specs/1.2-extensions/html/vkspec.html#VK_FUCHSIA_imagepipe_surface VK_FUCHSIA_imagepipe_surface>,
-- 'ImagePipeSurfaceCreateFlagsFUCHSIA',
-- 'Vulkan.Core10.Enums.StructureType.StructureType',
-- 'createImagePipeSurfaceFUCHSIA'
-- NOTE: the C struct additionally carries @sType@/@pNext@ fields; those
-- are synthesized by the ToCStruct instance below (generated code).
data ImagePipeSurfaceCreateInfoFUCHSIA = ImagePipeSurfaceCreateInfoFUCHSIA
  { -- | @flags@ is reserved for future use.
    --
    -- #VUID-VkImagePipeSurfaceCreateInfoFUCHSIA-flags-zerobitmask# @flags@
    -- /must/ be @0@
    flags :: ImagePipeSurfaceCreateFlagsFUCHSIA
  , -- | @imagePipeHandle@ is a @zx_handle_t@ referring to the ImagePipe to
    -- associate with the surface.
    --
    -- #VUID-VkImagePipeSurfaceCreateInfoFUCHSIA-imagePipeHandle-04863#
    -- @imagePipeHandle@ /must/ be a valid @zx_handle_t@
    imagePipeHandle :: Zx_handle_t
  }
  deriving (Typeable, Eq)
#if defined(GENERIC_INSTANCES)
deriving instance Generic (ImagePipeSurfaceCreateInfoFUCHSIA)
#endif
deriving instance Show ImagePipeSurfaceCreateInfoFUCHSIA
-- C layout (24 bytes, align 8): sType at offset 0, pNext at 8,
-- flags at 16, imagePipeHandle at 20.
instance ToCStruct ImagePipeSurfaceCreateInfoFUCHSIA where
  withCStruct x f = allocaBytes 24 $ \p -> pokeCStruct p x (f p)
  pokeCStruct p ImagePipeSurfaceCreateInfoFUCHSIA{..} f = do
    poke ((p `plusPtr` 0 :: Ptr StructureType)) (STRUCTURE_TYPE_IMAGEPIPE_SURFACE_CREATE_INFO_FUCHSIA)
    poke ((p `plusPtr` 8 :: Ptr (Ptr ()))) (nullPtr)
    poke ((p `plusPtr` 16 :: Ptr ImagePipeSurfaceCreateFlagsFUCHSIA)) (flags)
    poke ((p `plusPtr` 20 :: Ptr Zx_handle_t)) (imagePipeHandle)
    f
  cStructSize = 24
  cStructAlignment = 8
  -- Zero-struct still carries the mandatory sType/pNext header.
  pokeZeroCStruct p f = do
    poke ((p `plusPtr` 0 :: Ptr StructureType)) (STRUCTURE_TYPE_IMAGEPIPE_SURFACE_CREATE_INFO_FUCHSIA)
    poke ((p `plusPtr` 8 :: Ptr (Ptr ()))) (nullPtr)
    poke ((p `plusPtr` 20 :: Ptr Zx_handle_t)) (zero)
    f
-- Read back only the payload fields; sType/pNext are implied.
instance FromCStruct ImagePipeSurfaceCreateInfoFUCHSIA where
  peekCStruct p = do
    flags <- peek @ImagePipeSurfaceCreateFlagsFUCHSIA ((p `plusPtr` 16 :: Ptr ImagePipeSurfaceCreateFlagsFUCHSIA))
    imagePipeHandle <- peek @Zx_handle_t ((p `plusPtr` 20 :: Ptr Zx_handle_t))
    pure $ ImagePipeSurfaceCreateInfoFUCHSIA
             flags imagePipeHandle
instance Storable ImagePipeSurfaceCreateInfoFUCHSIA where
  sizeOf ~_ = 24
  alignment ~_ = 8
  peek = peekCStruct
  poke ptr poked = pokeCStruct ptr poked (pure ())
instance Zero ImagePipeSurfaceCreateInfoFUCHSIA where
  zero = ImagePipeSurfaceCreateInfoFUCHSIA
           zero
           zero
-- | VkImagePipeSurfaceCreateFlagsFUCHSIA - Reserved for future use
--
-- = Description
--
-- 'ImagePipeSurfaceCreateFlagsFUCHSIA' is a bitmask type for setting a
-- mask, but is currently reserved for future use.
--
-- = See Also
--
-- <https://www.khronos.org/registry/vulkan/specs/1.2-extensions/html/vkspec.html#VK_FUCHSIA_imagepipe_surface VK_FUCHSIA_imagepipe_surface>,
-- 'ImagePipeSurfaceCreateInfoFUCHSIA'
newtype ImagePipeSurfaceCreateFlagsFUCHSIA = ImagePipeSurfaceCreateFlagsFUCHSIA Flags
  deriving newtype (Eq, Ord, Storable, Zero, Bits, FiniteBits)

-- Constructor name used by the generated Show/Read helpers.
conNameImagePipeSurfaceCreateFlagsFUCHSIA :: String
conNameImagePipeSurfaceCreateFlagsFUCHSIA = "ImagePipeSurfaceCreateFlagsFUCHSIA"

-- Common enum-value prefix stripped when showing/reading (none here).
enumPrefixImagePipeSurfaceCreateFlagsFUCHSIA :: String
enumPrefixImagePipeSurfaceCreateFlagsFUCHSIA = ""

-- No named bits are defined for this reserved bitmask.
showTableImagePipeSurfaceCreateFlagsFUCHSIA :: [(ImagePipeSurfaceCreateFlagsFUCHSIA, String)]
showTableImagePipeSurfaceCreateFlagsFUCHSIA = []

-- Unknown bit patterns render as hexadecimal ("0x…").
instance Show ImagePipeSurfaceCreateFlagsFUCHSIA where
  showsPrec = enumShowsPrec enumPrefixImagePipeSurfaceCreateFlagsFUCHSIA
                            showTableImagePipeSurfaceCreateFlagsFUCHSIA
                            conNameImagePipeSurfaceCreateFlagsFUCHSIA
                            (\(ImagePipeSurfaceCreateFlagsFUCHSIA x) -> x)
                            (\x -> showString "0x" . showHex x)

instance Read ImagePipeSurfaceCreateFlagsFUCHSIA where
  readPrec = enumReadPrec enumPrefixImagePipeSurfaceCreateFlagsFUCHSIA
                          showTableImagePipeSurfaceCreateFlagsFUCHSIA
                          conNameImagePipeSurfaceCreateFlagsFUCHSIA
                          ImagePipeSurfaceCreateFlagsFUCHSIA
-- Type- and value-level spec version of this extension (revision 1).
type FUCHSIA_IMAGEPIPE_SURFACE_SPEC_VERSION = 1

-- No documentation found for TopLevel "VK_FUCHSIA_IMAGEPIPE_SURFACE_SPEC_VERSION"
pattern FUCHSIA_IMAGEPIPE_SURFACE_SPEC_VERSION :: forall a . Integral a => a
pattern FUCHSIA_IMAGEPIPE_SURFACE_SPEC_VERSION = 1

-- Type- and value-level extension name string.
type FUCHSIA_IMAGEPIPE_SURFACE_EXTENSION_NAME = "VK_FUCHSIA_imagepipe_surface"

-- No documentation found for TopLevel "VK_FUCHSIA_IMAGEPIPE_SURFACE_EXTENSION_NAME"
pattern FUCHSIA_IMAGEPIPE_SURFACE_EXTENSION_NAME :: forall a . (Eq a, IsString a) => a
pattern FUCHSIA_IMAGEPIPE_SURFACE_EXTENSION_NAME = "VK_FUCHSIA_imagepipe_surface"

-- Fuchsia kernel object handle, a 32-bit value (matches @zx_handle_t@).
type Zx_handle_t = Word32
|
expipiplus1/vulkan
|
src/Vulkan/Extensions/VK_FUCHSIA_imagepipe_surface.hs
|
bsd-3-clause
| 14,622 | 58 | 15 | 2,707 | 2,074 | 1,265 | 809 | -1 | -1 |
-- Copyright (c) 2016-present, Facebook, Inc.
-- All rights reserved.
--
-- This source code is licensed under the BSD-style license found in the
-- LICENSE file in the root directory of this source tree.
{-# LANGUAGE GADTs #-}
{-# LANGUAGE OverloadedStrings #-}
module Duckling.TimeGrain.PL.Rules
( rules ) where
import Data.Text (Text)
import Prelude
import Data.String
import Duckling.Dimensions.Types
import qualified Duckling.TimeGrain.Types as TG
import Duckling.Types
-- | Table of (rule name, regex over the Polish inflections of the grain
-- word, grain).  Each regex deliberately covers declension endings
-- (genitive/locative/etc.); 'rules' below turns every row into a 'Rule'.
-- Do not reformat the patterns -- they are matched verbatim at runtime.
grains :: [(Text, String, TG.Grain)]
grains = [ ("second (grain)", "sekund(y|zie|(e|ę)|om|ami|ach|o|a)?|s", TG.Second)
         , ("minute (grain)", "minut(y|cie|(e|ę)|om|o|ami|ach|(a|ą))?|m", TG.Minute)
         , ("hour (grain)", "h|godzin(y|(e|ę)|ie|om|o|ami|ach|(a|ą))?", TG.Hour)
         , ("day (grain)", "dzie(n|ń|ni(a|ą))|dni(owi|ach|a|ą)?", TG.Day)
         , ("week (grain)", "tydzie(n|ń|)|tygod(ni(owi|u|a|em))|tygodn(iach|iami|iom|ie|i)|tyg\\.?", TG.Week)
         , ("month (grain)", "miesi(a|ą)c(owi|em|u|e|om|ami|ach|a)?", TG.Month)
         , ("quarter (grain)", "kwarta(l|ł)(u|owi|em|e|(o|ó)w|om|ach|ami|y)?", TG.Quarter)
         , ("year (grain)", "rok(u|owi|iem)?|lat(ami|ach|a|om)?", TG.Year)
         ]
-- | One 'Rule' per 'grains' entry: match the inflection regex and emit
-- the corresponding time-grain token.
rules :: [Rule]
rules =
  [ Rule { name    = ruleName
         , pattern = [regex pat]
         , prod    = const . Just $ Token TimeGrain grain
         }
  | (ruleName, pat, grain) <- grains
  ]
|
facebookincubator/duckling
|
Duckling/TimeGrain/PL/Rules.hs
|
bsd-3-clause
| 1,427 | 0 | 11 | 259 | 271 | 172 | 99 | 25 | 1 |
{-# Language BangPatterns #-}
module Histo (
readSalida
) where
import Control.Concurrent.Async
import Control.Lens
import Control.Monad
import qualified Data.ByteString.Char8 as C
import Data.List as DL
import Data.List.Split (chunksOf)
import qualified Data.Vector.Unboxed as VU
import Text.Parsec
import Text.Parsec.ByteString (parseFromFile)
-- Internal modules
-- =======> <===========
import APIparser
import CommonTypes
import Constants
import ParsecNumbers
import ParsecText
-- ===========> <============
-- | Parse all energy gaps out of an output file, aborting with the
-- parser's error message if the file is malformed.
readSalida :: FilePath -> IO [Double]
readSalida path =
  parseFromFile parseGaps path >>= either (error . show) return
-- | One or more energy-gap records, parsed until input runs out.
parseGaps :: MyParser () [Double]
parseGaps = many1 parseEnergies
-- | Parse a single record: skip two lines, consume up to the first ':',
-- read the first two real numbers, skip two trailing lines, and return
-- their difference scaled by 'ehKcal' (presumably Hartree -> kcal/mol;
-- TODO confirm against the Constants module).
parseEnergies :: MyParser () Double
parseEnergies = do
  count 2 anyLine
  manyTill anyChar (char ':')
  spaces
  -- 'count 2' always yields two elements, so this pattern is total.
  (e0:e1:_) <- count 2 (spaces >> realNumber )
  count 2 anyLine
  let gap = (e0 - e1) * ehKcal
  return gap
|
felipeZ/Dynamics
|
src/Histo.hs
|
bsd-3-clause
| 1,112 | 0 | 12 | 304 | 296 | 160 | 136 | 34 | 2 |
-- |
-- Module : Crypto.Hash.SHA1
-- License : BSD-style
-- Maintainer : Herbert Valerio Riedel <[email protected]>
-- Stability : stable
-- Portability : unknown
--
-- A module containing <https://en.wikipedia.org/wiki/SHA-1 SHA-1> bindings
--
module Crypto.Hash.SHA1
(
-- * Incremental API
--
-- | This API is based on 4 different functions, similar to the
-- lowlevel operations of a typical hash:
--
-- - 'init': create a new hash context
-- - 'update': update non-destructively a new hash context with a strict bytestring
-- - 'updates': same as update, except that it takes a list of strict bytestrings
-- - 'finalize': finalize the context and returns a digest bytestring.
--
-- all those operations are completely pure, and instead of
-- changing the context as usual in others language, it
-- re-allocates a new context each time.
--
-- Example:
--
-- > import qualified Data.ByteString
-- > import qualified Crypto.Hash.SHA1 as SHA1
-- >
-- > main = print digest
-- > where
-- > digest = SHA1.finalize ctx
-- > ctx = foldl SHA1.update ctx0 (map Data.ByteString.pack [ [1,2,3], [4,5,6] ])
-- > ctx0 = SHA1.init
Ctx(..)
, init -- :: Ctx
, update -- :: Ctx -> ByteString -> Ctx
, updates -- :: Ctx -> [ByteString] -> Ctx
, finalize -- :: Ctx -> ByteString
, start -- :: ByteString -> Ct
, startlazy -- :: L.ByteString -> Ctx
-- * Single Pass API
--
-- | This API use the incremental API under the hood to provide
-- the common all-in-one operations to create digests out of a
-- 'ByteString' and lazy 'L.ByteString'.
--
-- - 'hash': create a digest ('init' + 'update' + 'finalize') from a strict 'ByteString'
-- - 'hashlazy': create a digest ('init' + 'update' + 'finalize') from a lazy 'L.ByteString'
--
-- Example:
--
-- > import qualified Data.ByteString
-- > import qualified Crypto.Hash.SHA1 as SHA1
-- >
-- > main = print $ SHA1.hash (Data.ByteString.pack [0..255])
--
-- __NOTE__: The returned digest is a binary 'ByteString'. For
-- converting to a base16/hex encoded digest the
-- <https://hackage.haskell.org/package/base16-bytestring base16-bytestring>
-- package is recommended.
, hash -- :: ByteString -> ByteString
, hashlazy -- :: L.ByteString -> ByteString
-- ** HMAC-SHA1
--
-- | <https://tools.ietf.org/html/rfc2104 RFC2104>-compatible
-- <https://en.wikipedia.org/wiki/HMAC HMAC>-SHA1 digests
, hmac -- :: ByteString -> ByteString -> ByteString
, hmaclazy -- :: ByteString -> L.ByteString -> ByteString
) where
import Prelude hiding (init)
import Foreign.C.Types
import Foreign.Ptr
import Foreign.ForeignPtr (withForeignPtr)
import Foreign.Marshal.Alloc
import qualified Data.ByteString.Lazy as L
import qualified Data.ByteString as B
import Data.ByteString (ByteString)
import Data.ByteString.Unsafe (unsafeUseAsCStringLen)
import Data.ByteString.Internal (create, toForeignPtr, memcpy)
import Data.Bits (xor)
import Data.Word
import System.IO.Unsafe (unsafeDupablePerformIO)
-- | perform IO for hashes that do allocation and ffi.
-- unsafeDupablePerformIO is used when possible as the
-- computation is pure and the output is directly linked
-- to the input. we also do not modify anything after it has
-- been returned to the user.
unsafeDoIO :: IO a -> a
unsafeDoIO = unsafeDupablePerformIO  -- named alias: see the rationale above
-- | SHA-1 Context
--
-- The context data is exactly 92 bytes long, however
-- the data in the context is stored in host-endianness.
--
-- The context data is made up of
--
-- * a 'Word64' representing the number of bytes already feed to hash algorithm so far,
--
-- * a 64-element 'Word8' buffer holding partial input-chunks, and finally
--
-- * a 5-element 'Word32' array holding the current work-in-progress digest-value.
--
-- Consequently, a SHA-1 digest as produced by 'hash', 'hashlazy', or 'finalize' is 20 bytes long.
newtype Ctx = Ctx ByteString
    deriving (Eq)
-- keep this synchronised with cbits/sha1.h
{-# INLINE digestSize #-}
-- | Byte length of a finalized SHA-1 digest.
digestSize :: Int
digestSize = 20
{-# INLINE sizeCtx #-}
-- | Byte length of the serialized hash context (must match cbits/sha1.h).
sizeCtx :: Int
sizeCtx = 92
{-# INLINE withByteStringPtr #-}
-- | Run @f@ on a pointer to the bytestring's payload, with the
-- bytestring's internal offset already applied.
withByteStringPtr :: ByteString -> (Ptr Word8 -> IO a) -> IO a
withByteStringPtr b f =
    withForeignPtr fptr $ \ptr -> f (ptr `plusPtr` off)
    where (fptr, off, _) = toForeignPtr b
-- | Copy the full 'sizeCtx'-byte context from @src@ to @dst@.
copyCtx :: Ptr Ctx -> Ptr Ctx -> IO ()
copyCtx dst src = memcpy (castPtr dst) (castPtr src) (fromIntegral sizeCtx)
-- | Duplicate the context into a fresh bytestring and mutate the copy --
-- this copy-on-write step is what keeps the public API pure.
withCtxCopy :: Ctx -> (Ptr Ctx -> IO ()) -> IO Ctx
withCtxCopy (Ctx ctxB) f = Ctx `fmap` createCtx
  where
    createCtx = create sizeCtx $ \dstPtr ->
                withByteStringPtr ctxB $ \srcPtr -> do
                    copyCtx (castPtr dstPtr) (castPtr srcPtr)
                    f (castPtr dstPtr)
-- | Like 'withCtxCopy' but the copy lives in transient alloca memory;
-- @f@'s result is returned and the mutated copy is discarded.
withCtxThrow :: Ctx -> (Ptr Ctx -> IO a) -> IO a
withCtxThrow (Ctx ctxB) f =
    allocaBytes sizeCtx $ \dstPtr ->
    withByteStringPtr ctxB $ \srcPtr -> do
        copyCtx (castPtr dstPtr) (castPtr srcPtr)
        f (castPtr dstPtr)
-- | Allocate a brand-new context bytestring, initialized by @f@.
withCtxNew :: (Ptr Ctx -> IO ()) -> IO Ctx
withCtxNew f = Ctx `fmap` create sizeCtx (f . castPtr)
-- | Allocate a transient context (alloca) and return @f@'s result.
withCtxNewThrow :: (Ptr Ctx -> IO a) -> IO a
withCtxNewThrow f = allocaBytes sizeCtx (f . castPtr)
foreign import ccall unsafe "sha1.h hs_cryptohash_sha1_init"
  c_sha1_init :: Ptr Ctx -> IO ()
foreign import ccall unsafe "sha1.h hs_cryptohash_sha1_update"
  c_sha1_update_unsafe :: Ptr Ctx -> Ptr Word8 -> CSize -> IO ()
foreign import ccall safe "sha1.h hs_cryptohash_sha1_update"
  c_sha1_update_safe :: Ptr Ctx -> Ptr Word8 -> CSize -> IO ()
-- 'safe' call overhead is negligible for 8KiB and more
-- | Dispatch on chunk size: small chunks take the cheaper @unsafe@ call,
-- larger ones the @safe@ one (per the note above).
c_sha1_update :: Ptr Ctx -> Ptr Word8 -> CSize -> IO ()
c_sha1_update pctx pbuf sz
  | sz < 8192 = c_sha1_update_unsafe pctx pbuf sz
  | otherwise = c_sha1_update_safe pctx pbuf sz
foreign import ccall unsafe "sha1.h hs_cryptohash_sha1_finalize"
  c_sha1_finalize :: Ptr Ctx -> Ptr Word8 -> IO ()
-- | Feed one strict bytestring chunk to the C update routine.
updateInternalIO :: Ptr Ctx -> ByteString -> IO ()
updateInternalIO ptr d =
    unsafeUseAsCStringLen d (\(cs, len) -> c_sha1_update ptr (castPtr cs) (fromIntegral len))
-- | Extract the 'digestSize'-byte digest from the context.
finalizeInternalIO :: Ptr Ctx -> IO ByteString
finalizeInternalIO ptr = create digestSize (c_sha1_finalize ptr)
{-# NOINLINE init #-}
-- | create a new hash context
init :: Ctx
init = unsafeDoIO $ withCtxNew $ c_sha1_init
-- | Sanity check: a context bytestring must be exactly 'sizeCtx' bytes.
validCtx :: Ctx -> Bool
validCtx (Ctx b) = B.length b == sizeCtx
{-# NOINLINE update #-}
-- | update a context with a bytestring
update :: Ctx -> ByteString -> Ctx
update ctx d
  | validCtx ctx = unsafeDoIO $ withCtxCopy ctx $ \ptr -> updateInternalIO ptr d
  | otherwise    = error "SHA1.update: invalid Ctx"
{-# NOINLINE updates #-}
-- | updates a context with multiple bytestrings
updates :: Ctx -> [ByteString] -> Ctx
updates ctx d
  | validCtx ctx = unsafeDoIO $ withCtxCopy ctx $ \ptr -> mapM_ (updateInternalIO ptr) d
  | otherwise    = error "SHA1.updates: invalid Ctx"
{-# NOINLINE finalize #-}
-- | finalize the context into a digest bytestring (20 bytes)
finalize :: Ctx -> ByteString
finalize ctx
  | validCtx ctx = unsafeDoIO $ withCtxThrow ctx finalizeInternalIO
  | otherwise    = error "SHA1.finalize: invalid Ctx"
{-# NOINLINE hash #-}
-- | hash a strict bytestring into a digest bytestring (20 bytes)
-- (single pass: init, absorb, finalize -- no 'Ctx' escapes)
hash :: ByteString -> ByteString
hash d = unsafeDoIO $ withCtxNewThrow $ \ptr -> do
    c_sha1_init ptr >> updateInternalIO ptr d >> finalizeInternalIO ptr
{-# NOINLINE start #-}
-- | hash a strict bytestring into a Ctx
start :: ByteString -> Ctx
start d = unsafeDoIO $ withCtxNew $ \ptr -> do
    c_sha1_init ptr >> updateInternalIO ptr d
{-# NOINLINE hashlazy #-}
-- | hash a lazy bytestring into a digest bytestring (20 bytes)
-- (each lazy chunk is fed to the C update routine in turn)
hashlazy :: L.ByteString -> ByteString
hashlazy l = unsafeDoIO $ withCtxNewThrow $ \ptr -> do
    c_sha1_init ptr >> mapM_ (updateInternalIO ptr) (L.toChunks l) >> finalizeInternalIO ptr
{-# NOINLINE startlazy #-}
-- | hash a lazy bytestring into a Ctx
startlazy :: L.ByteString -> Ctx
startlazy l = unsafeDoIO $ withCtxNew $ \ptr -> do
    c_sha1_init ptr >> mapM_ (updateInternalIO ptr) (L.toChunks l)
{-# NOINLINE hmac #-}
-- | Compute 20-byte <https://tools.ietf.org/html/rfc2104 RFC2104>-compatible
-- HMAC-SHA1 digest for a strict bytestring message
--
-- @since 0.11.100.0
hmac :: ByteString -- ^ secret
     -> ByteString -- ^ message
     -> ByteString
hmac secret msg = hash $ B.append opad (hash $ B.append ipad msg)
  where
    opad = B.map (xor 0x5c) k'   -- outer pad: key XOR 0x5c
    ipad = B.map (xor 0x36) k'   -- inner pad: key XOR 0x36
    k'   = B.append kt pad       -- key zero-padded to SHA-1's 64-byte block
    kt  = if B.length secret > 64 then hash secret else secret -- long keys are pre-hashed (RFC2104)
    pad = B.replicate (64 - B.length kt) 0
{-# NOINLINE hmaclazy #-}
-- | Compute 20-byte <https://tools.ietf.org/html/rfc2104 RFC2104>-compatible
-- HMAC-SHA1 digest for a lazy bytestring message
--
-- @since 0.11.100.0
hmaclazy :: ByteString -- ^ secret
         -> L.ByteString -- ^ message
         -> ByteString
hmaclazy secret msg = hash $ B.append opad (hashlazy $ L.append ipad msg)
  where
    opad = B.map (xor 0x5c) k'                 -- outer pad: key XOR 0x5c
    ipad = L.fromChunks [B.map (xor 0x36) k']  -- inner pad, lifted to lazy
    k'   = B.append kt pad                     -- key zero-padded to 64 bytes
    kt  = if B.length secret > 64 then hash secret else secret -- long keys are pre-hashed
    pad = B.replicate (64 - B.length kt) 0
|
hvr/cryptohash-sha1
|
src/Crypto/Hash/SHA1.hs
|
bsd-3-clause
| 9,305 | 0 | 15 | 2,006 | 1,868 | 1,007 | 861 | 132 | 2 |
module Main where
import Database.Persist.Sqlite
main :: IO ()
-- 'print' on a String emits Show-escaped output ("hello world!" with
-- quotes); 'putStrLn' prints the text itself, which is what a
-- hello-world is meant to do.
main = putStrLn "hello world!"
|
rubenmoor/persistent-sqlite-ld-fail
|
app/Main.hs
|
bsd-3-clause
| 93 | 0 | 6 | 16 | 28 | 16 | 12 | 4 | 1 |
module Main (main) where
import Prelude
import Criterion.Main
import Data.ByteString (ByteString)
import Urbit.Atom (Atom)
import qualified Urbit.Atom.Fast as Fast
import qualified Urbit.Atom.Slow as Slow
-- Examples --------------------------------------------------------------------
a64, a32768 :: Atom
-- Largest 64-bit and 32768-bit values: one below the next power of two.
a64    = 2 ^ 64 - 1
a32768 = 2 ^ 32768 - 1
bDog, bBigDog :: ByteString
-- Small (45-byte) and large (~36 KB) byte-decoding fixtures.
bDog = "The quick brown fox jumps over the lazy dog."
bBigDog = mconcat (replicate 800 bDog)
-- Benchmarks ------------------------------------------------------------------
-- Implementations under test: @mai@ is this package's fast codec, @slo@ the
-- reference implementation, and @gmp@ presumably goes through GMP's
-- import/export routines (names suggest so — confirm in Urbit.Atom.Fast).
maiDump = Fast.atomBytes
maiLoad = Fast.bytesAtom
sloDump = Slow.atomBytes
sloLoad = Slow.bytesAtom
gmpDump = Fast.exportBytes
gmpLoad = Fast.importBytes
main :: IO ()
-- Four benchmark groups: dump (atom -> bytes) and load (bytes -> atom),
-- each at a small ("lit") and a large ("big") input size.
main = defaultMain
  [ bgroup "lit-dump"
      [ bench "slo" (whnf sloDump a64)
      , bench "gmp" (whnf gmpDump a64)
      , bench "mai" (whnf maiDump a64)
      ]
  , bgroup "big-dump"
      [ bench "gmp" (whnf gmpDump a32768)
      , bench "mai" (whnf maiDump a32768)
      ]
  , bgroup "lit-load"
      [ bench "slo" (whnf sloLoad bDog)
      , bench "gmp" (whnf gmpLoad bDog)
      , bench "mai" (whnf maiLoad bDog)
      ]
  , bgroup "big-load"
      [ bench "gmp" (whnf gmpLoad bBigDog)
      , bench "mai" (whnf maiLoad bBigDog)
      ]
  ]
|
jfranklin9000/urbit
|
pkg/hs/urbit-atom/bench/Main.hs
|
mit
| 1,412 | 0 | 10 | 427 | 356 | 192 | 164 | 30 | 1 |
data X = X (Int -> Int) deriving (Eq, Show) -- intentionally invalid: function types have no Eq/Show instances (static-error test fixture)
data Y = Y Ordering deriving Eq -- presumably also expected to fail in this compiler's test suite — confirm against expected output
|
roberth/uu-helium
|
test/staticerrors/NonDerivableEq.hs
|
gpl-3.0
| 77 | 0 | 8 | 18 | 40 | 22 | 18 | 2 | 0 |
{-# OPTIONS_GHC -fno-warn-unused-do-bind #-}
-- Improved Bash parser for Aura, built with Parsec.
{-
Copyright 2012, 2013, 2014 Colin Woodbury <[email protected]>
This file is part of Aura.
Aura is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
Aura is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with Aura. If not, see <http://www.gnu.org/licenses/>.
-}
module Bash.Parser ( parseBash ) where
import Text.ParserCombinators.Parsec
import Data.Maybe (catMaybes)
import Bash.Base
---
-- | Parse the given Bash source into fields; the first argument only names
-- the input for error messages.
parseBash :: String -> String -> Either ParseError [Field]
parseBash p = parse bashFile ("(" ++ p ++ ")")
-- | A Bash file could have many fields, or none.
bashFile :: Parser [Field]
bashFile = between spaces spaces (many field)
-- | There are many kinds of fields. Commands need to be parsed last,
-- because the command parser is greedy enough to swallow the others.
field :: Parser Field
field = choice (map try [comment, variable, function, ifBlock, command])
        <* spaces <?> "valid field"
-- | A comment looks like: # blah blah blah
comment :: Parser Field
comment = Comment <$> text <?> "valid comment"
  where text = spaces >> char '#' >> many (noneOf "\n")
-- | A command looks like: name -flags target
-- Arguments are optional.
-- In its current form, this parser gets too zealous, and happily parses
-- over other fields it shouldn't. Making it last in `field` avoids this.
-- The culprit is `option`, which returns [] as if it parsed no args,
-- even when it's actually parsing a function or a variable.
-- Note: `args` is a bit of a hack.
command :: Parser Field
command = spaces *> (Command <$> many1 commandChar <*> option [] (try args))
    where commandChar = alphaNum <|> oneOf "./"
          -- Collect the (backslash-continued) rest of the line, then re-parse
          -- that text as a sequence of Bash strings in a nested 'parse' run.
          args = char ' ' >> unwords <$> line >>= \ls ->
                 case parse (many1 single) "(command)" ls of
                   Left _ -> fail "Failed parsing strings in a command"
                   Right bs -> return $ concat bs
          -- One physical line; 'next' follows a trailing backslash onto the
          -- next line, so continued commands are flattened into one list.
          line = (:) <$> many (noneOf "\n\\") <*> next
          next = ([] <$ char '\n') <|> (char '\\' *> spaces *> line)
-- | A function looks like: name() { ... \n} and is filled with fields.
function :: Parser Field
function = Function <$> name <*> body <?> "valid function definition"
  where name = spaces >> many1 (noneOf " =(}\n")
        body = string "() {" >> spaces >> manyTill field (char '}')
-- | A variable looks like: `name=string`, `name=(string string string)`
-- or even `name=`
variable :: Parser Field
variable = Variable <$> name <*> value <?> "valid var definition"
  where name  = spaces >> many1 (alphaNum <|> char '_') <* char '='
        value = blank <|> array <|> single
        blank = [] <$ space
-- | A parenthesised Bash array. Comments and whitespace/backslash runs
-- inside the parens parse to 'Nothing' and are dropped by 'catMaybes'.
array :: Parser [BashString]
array = concat . catMaybes <$> array' <?> "valid array"
    where array' = char '(' *> spaces *> manyTill single' (char ')')
          -- Order matters: comments first, then separator runs, then elements.
          single' = choice [ Nothing <$ comment <* spaces
                           , Nothing <$ many1 (space <|> char '\\')
                           , Just <$> single <* many (space <|> char '\\') ]
-- | Strings can be surrounded by single quotes, double quotes, backticks,
-- or nothing.
single :: Parser [BashString]
single = choice [singleQuoted, doubleQuoted, backticked, try unQuoted]
         <* spaces <?> "valid Bash string"
-- | Literal string. ${...} comes out as-is. No string extrapolation.
singleQuoted :: Parser [BashString]
singleQuoted = between quote quote (wrap <$> many1 (noneOf "\n'"))
               <?> "single quoted string"
  where quote  = char '\''
        wrap s = [SingleQ s]
-- | Replaces ${...}. No string extrapolation.
doubleQuoted :: Parser [BashString]
doubleQuoted = between quote quote (wrap <$> many1 (noneOf "\n\""))
               <?> "double quoted string"
  where quote  = char '"'
        wrap s = [DoubleQ s]
-- | Contains commands.
backticked :: Parser [BashString]
backticked = between tick tick (wrap <$> command) <?> "backticked string"
  where tick   = char '`'
        wrap c = [Backtic c]
-- | Replaces ${...}. Strings can be extrapolated!
unQuoted :: Parser [BashString]
unQuoted = fmap (map NoQuote) (extrapolated [])
-- | Bash strings are extrapolated when they contain a brace pair
-- with two or more substrings separated by commas within them.
-- Example: sandwiches-are-{beautiful,fine}
-- Note that strings like: empty-{} or lamp-{shade}
-- will not be expanded and will retain their braces.
extrapolated :: [Char] -> Parser [String]
extrapolated stops = do
  heads <- plain <|> bracePair
  tails <- option [""] (try (extrapolated stops))
  -- Cartesian product: every prefix paired with every suffix.
  return [ h ++ t | h <- heads, t <- tails ]
  where plain = fmap (: []) (many1 (noneOf (" \n{}()" ++ stops)))
-- | The {a,b,...} part of an extrapolated string. An empty pair and a
-- single-element pair keep their literal braces (see 'extrapolated').
bracePair :: Parser [String]
bracePair = between (char '{') (char '}') innards <?> "valid {...} string"
    where innards = concatInnards <$> (extrapolated ",}" `sepBy` char ',')
          concatInnards [] = ["{}"]
          concatInnards [xs] = map (\s -> "{" ++ s ++ "}") xs
          concatInnards xss = concat xss
------------------
-- `IF` STATEMENTS
------------------
-- | Either a real @if ... fi@ block or a one-line @[ ... ] &&@ statement.
ifBlock :: Parser Field
ifBlock = fmap IfBlock (realIfBlock <|> andStatement)
realIfBlock :: Parser BashIf
realIfBlock = realIfBlock' "if " fiElifElse

-- | Shared worker for @if@ and @elif@: the keyword differs, the shape is
-- identical (condition, body up to a separator, then fi/elif/else).
realIfBlock' :: String -> Parser sep -> Parser BashIf
realIfBlock' word sep = do
    spaces
    string word
    If <$> ifCond <*> ifBody sep <*> rest
  where rest = fi <|> try elif <|> elys
-- Inefficient?
fiElifElse :: Parser (Maybe BashIf)
fiElifElse = choice [ try (lookAhead p) | p <- [fi, elif, elys] ]
fi, elif, elys :: Parser (Maybe BashIf)
fi   = string "fi" <* space >> return Nothing
elif = fmap Just (realIfBlock' "elif " fiElifElse)
elys = fmap Just (string "else" *> space *> (Else <$> ifBody fi))
ifCond :: Parser Comparison
ifCond = do
    c <- comparison
    string "; then"
    return c

ifBody :: Parser sep -> Parser [Field]
ifBody = manyTill field
-- Note: Don't write Bash like this:
-- [ some comparison ] && normal bash code
andStatement :: Parser BashIf
andStatement = do
    spaces
    cond <- comparison
    string " && "
    body <- field
    return (If cond [body] Nothing)
-- | A bracketed test such as @[ "$x" = "$y" ]@, yielding both operands and
-- the operator as a 'Comparison'.
comparison :: Parser Comparison
comparison = do
    spaces >> leftBs >> spaces
    -- 'head' is partial; presumably safe because every branch of 'single'
    -- yields a non-empty list (many1 / non-empty brace expansion) — confirm.
    left <- head `fmap` single
    compOp <- comparisonOp
    right <- head `fmap` single
    rightBs
    -- NOTE(review): the <?> label is attached to 'return', which never
    -- fails, so the label is effectively unused here.
    return (compOp left right) <?> "valid comparison"
  where leftBs = skipMany1 $ char '['
        rightBs = skipMany1 $ char ']'
-- | Parse one comparison operator, in either symbolic or test(1) spelling.
comparisonOp :: Parser (BashString -> BashString -> Comparison)
comparisonOp = choice [eq, ne, gt, ge, lt, le]
  -- "= " must be wrapped in 'try' so a failed match can backtrack and
  -- still attempt "== "; the remaining alternatives share no prefixes.
  where eq = CompEq <$ (try (string "= ") <|> string "== " <|> string "-eq ")
        ne = CompNe <$ (string "!= " <|> string "-ne ")
        gt = CompGt <$ (string "> " <|> string "-gt ")
        ge = CompGe <$ string "-ge "
        lt = CompLt <$ (string "< " <|> string "-lt ")
        le = CompLe <$ string "-le "
|
joehillen/aura
|
src/Bash/Parser.hs
|
gpl-3.0
| 7,193 | 0 | 13 | 1,671 | 1,851 | 965 | 886 | 110 | 3 |
{-# LANGUAGE DeriveDataTypeable, CPP, BangPatterns, RankNTypes,
ForeignFunctionInterface, MagicHash, UnboxedTuples,
UnliftedFFITypes #-}
{-# OPTIONS_GHC -fno-warn-name-shadowing #-}
#if __GLASGOW_HASKELL__ >= 701
{-# LANGUAGE Unsafe #-}
#endif
{-# OPTIONS_HADDOCK hide #-}
-- |
-- Module : Data.ByteString.Short.Internal
-- Copyright : (c) Duncan Coutts 2012-2013
-- License : BSD-style
--
-- Maintainer : [email protected]
-- Stability : stable
-- Portability : ghc only
--
-- Internal representation of ShortByteString
--
module Data.ByteString.Short.Internal (
-- * The @ShortByteString@ type and representation
ShortByteString(..),
-- * Conversions
toShort,
fromShort,
pack,
unpack,
-- * Other operations
empty, null, length, index, unsafeIndex,
-- * Low level operations
createFromPtr, copyToPtr
) where
import Data.ByteString.Internal (ByteString(..), inlinePerformIO)
import Data.Typeable (Typeable)
import Data.Data (Data(..), mkNoRepType)
import Data.Monoid (Monoid(..))
import Data.String (IsString(..))
import Control.DeepSeq (NFData(..))
import qualified Data.List as List (length)
#if MIN_VERSION_base(4,7,0)
import Foreign.C.Types (CSize(..), CInt(..))
#elif MIN_VERSION_base(4,4,0)
import Foreign.C.Types (CSize(..), CInt(..), CLong(..))
#else
import Foreign.C.Types (CSize, CInt, CLong)
#endif
import Foreign.Ptr
import Foreign.ForeignPtr (touchForeignPtr)
#if MIN_VERSION_base(4,5,0)
import Foreign.ForeignPtr.Unsafe (unsafeForeignPtrToPtr)
#else
import Foreign.ForeignPtr (unsafeForeignPtrToPtr)
#endif
#if MIN_VERSION_base(4,5,0)
import qualified GHC.Exts
#endif
import GHC.Exts ( Int(I#), Int#, Ptr(Ptr), Addr#, Char(C#)
, State#, RealWorld
, ByteArray#, MutableByteArray#
, newByteArray#
#if MIN_VERSION_base(4,6,0)
, newPinnedByteArray#
, byteArrayContents#
, unsafeCoerce#
#endif
#if MIN_VERSION_base(4,3,0)
, sizeofByteArray#
#endif
, indexWord8Array#, indexCharArray#
, writeWord8Array#, writeCharArray#
, unsafeFreezeByteArray# )
import GHC.IO
#if MIN_VERSION_base(4,6,0)
import GHC.ForeignPtr (ForeignPtr(ForeignPtr), ForeignPtrContents(PlainPtr))
#else
import GHC.ForeignPtr (mallocPlainForeignPtrBytes)
#endif
import GHC.ST (ST(ST), runST)
import GHC.Word
import Prelude ( Eq(..), Ord(..), Ordering(..), Read(..), Show(..)
, ($), error, (++)
, Bool(..), (&&), otherwise
, (+), (-), fromIntegral
, return )
-- | A compact representation of a 'Word8' vector.
--
-- It has a lower memory overhead than a 'ByteString' and does not
-- contribute to heap fragmentation. It can be converted to or from a
-- 'ByteString' (at the cost of copying the string data). It supports very few
-- other operations.
--
-- It is suitable for use as an internal representation for code that needs
-- to keep many short strings in memory, but it /should not/ be used as an
-- interchange type. That is, it should not generally be used in public APIs.
-- The 'ByteString' type is usually more suitable for use in interfaces; it is
-- more flexible and it supports a wide range of operations.
--
data ShortByteString = SBS ByteArray#
#if !(MIN_VERSION_base(4,3,0))
Int -- ^ Prior to ghc-7.0.x, 'ByteArray#'s reported
-- their length rounded up to the nearest word.
-- This means we have to store the true length
-- separately, wasting a word.
#define LEN(x) (x)
#else
#define _len /* empty */
#define LEN(x) /* empty */
#endif
deriving Typeable
-- The ByteArray# representation is always word sized and aligned but with a
-- known byte length. Our representation choice for ShortByteString is to leave
-- the 0--3 trailing bytes undefined. This means we can use word-sized writes,
-- but we have to be careful with reads, see equateBytes and compareBytes below.
-- Eq/Ord go through byte-wise memcmp on the underlying arrays (see
-- equateBytes / compareBytes below), which is safe despite the undefined
-- trailing bytes because both only compare the true byte length.
instance Eq ShortByteString where
    (==)    = equateBytes
instance Ord ShortByteString where
    compare = compareBytes
instance Monoid ShortByteString where
    mempty  = empty
    mappend = append
    mconcat = concat
-- A ShortByteString holds an unlifted ByteArray#, so WHNF already is NF
-- and the default 'rnf' suffices.
instance NFData ShortByteString
instance Show ShortByteString where
    showsPrec p ps r = showsPrec p (unpackChars ps) r
instance Read ShortByteString where
    readsPrec p str = [ (packChars x, y) | (x, y) <- readsPrec p str ]
instance IsString ShortByteString where
    fromString = packChars
-- Data instance mirrors the one for ByteString: folds/builds via lists,
-- but provides no real constructor representation.
instance Data ShortByteString where
  gfoldl f z txt = z packBytes `f` (unpackBytes txt)
  toConstr _     = error "Data.ByteString.Short.ShortByteString.toConstr"
  gunfold _ _    = error "Data.ByteString.Short.ShortByteString.gunfold"
#if MIN_VERSION_base(4,2,0)
  dataTypeOf _   = mkNoRepType "Data.ByteString.Short.ShortByteString"
#else
  dataTypeOf _   = mkNorepType "Data.ByteString.Short.ShortByteString"
#endif
------------------------------------------------------------------------
-- Simple operations
-- | /O(1)/. The empty 'ShortByteString'.
empty :: ShortByteString
-- Allocates a zero-length ByteArray# via 'create' with a no-op filler.
empty = create 0 (\_ -> return ())
-- | /O(1)/ The length of a 'ShortByteString'.
length :: ShortByteString -> Int
#if MIN_VERSION_base(4,3,0)
-- From ghc-7.0 on, sizeofByteArray# reports the exact requested size.
length (SBS barr#) = I# (sizeofByteArray# barr#)
#else
-- Older GHCs round the array size up to a word, so the true length is
-- carried in the extra field of the SBS constructor.
length (SBS _ len) = len
#endif
-- | /O(1)/ Test whether a 'ShortByteString' is empty.
null :: ShortByteString -> Bool
null sbs = case length sbs of
             0 -> True
             _ -> False
-- | /O(1)/ 'ShortByteString' index (subscript) operator, starting from 0.
index :: ShortByteString -> Int -> Word8
index sbs i
  | 0 <= i && i < length sbs = unsafeIndex sbs i
  | otherwise                = indexError sbs i
-- Unchecked subscript: the caller must guarantee the index is in range.
unsafeIndex :: ShortByteString -> Int -> Word8
unsafeIndex sbs i = indexWord8Array (asBA sbs) i
-- Shared out-of-range error for 'index'.
indexError :: ShortByteString -> Int -> a
indexError sbs i = error msg
  where msg = "Data.ByteString.Short.index: error in array index; "
              ++ show i ++ " not in range [0.." ++ show (length sbs) ++ ")"
------------------------------------------------------------------------
-- Internal utils
-- View the payload as a boxed BA wrapper. Note: @_len@ is a CPP macro that
-- expands to nothing on base >= 4.3 (see the definition of ShortByteString).
asBA :: ShortByteString -> BA
asBA (SBS ba# _len) = BA# ba#
-- Allocate a mutable array of the given length, run the filler, then
-- freeze it in place. The rank-2 type keeps the mutable array from
-- escaping the ST computation.
create :: Int -> (forall s. MBA s -> ST s ()) -> ShortByteString
create len fill =
    runST (do
      mba <- newByteArray len
      fill mba
      BA# ba# <- unsafeFreezeByteArray mba
      return (SBS ba# LEN(len)))
{-# INLINE create #-}
------------------------------------------------------------------------
-- Conversion to and from ByteString
-- | /O(n)/. Convert a 'ByteString' into a 'ShortByteString'.
--
-- This makes a copy, so does not retain the input string.
--
toShort :: ByteString -> ShortByteString
-- Duplicable: the IO action is a pure copy, so re-running it is harmless.
toShort !bs = unsafeDupablePerformIO (toShortIO bs)
toShortIO :: ByteString -> IO ShortByteString
toShortIO (PS fptr off len) = do
    mba <- stToIO (newByteArray len)
    let ptr = unsafeForeignPtrToPtr fptr
    stToIO (copyAddrToByteArray (ptr `plusPtr` off) mba 0 len)
    -- Keep the foreign pointer alive until after the copy completes.
    touchForeignPtr fptr
    BA# ba# <- stToIO (unsafeFreezeByteArray mba)
    return (SBS ba# LEN(len))
-- | /O(n)/. Convert a 'ShortByteString' into a 'ByteString'.
--
fromShort :: ShortByteString -> ByteString
fromShort !sbs = unsafeDupablePerformIO (fromShortIO sbs)
fromShortIO :: ShortByteString -> IO ByteString
fromShortIO sbs = do
#if MIN_VERSION_base(4,6,0)
    -- Copy into a pinned array and wrap its address directly in a
    -- ForeignPtr, avoiding a second allocation.
    let len = length sbs
    mba@(MBA# mba#) <- stToIO (newPinnedByteArray len)
    stToIO (copyByteArray (asBA sbs) 0 mba 0 len)
    let fp = ForeignPtr (byteArrayContents# (unsafeCoerce# mba#))
                        (PlainPtr mba#)
    return (PS fp 0 len)
#else
    -- Before base 4.6 ForeignPtrContents is not exported from GHC.ForeignPtr
    -- so we cannot get direct access to the mbarr#
    let len = length sbs
    fptr <- mallocPlainForeignPtrBytes len
    let ptr = unsafeForeignPtrToPtr fptr
    stToIO (copyByteArrayToAddr (asBA sbs) 0 ptr len)
    touchForeignPtr fptr
    return (PS fptr 0 len)
#endif
------------------------------------------------------------------------
-- Packing and unpacking from lists
-- | /O(n)/. Convert a list into a 'ShortByteString'
pack :: [Word8] -> ShortByteString
pack = packBytes
-- | /O(n)/. Convert a 'ShortByteString' into a list.
unpack :: ShortByteString -> [Word8]
unpack = unpackBytes
-- Pack entry points: measure the list once, then fill exactly that many
-- cells via the length-indexed workers.
packChars :: [Char] -> ShortByteString
packChars cs = let n = List.length cs in packLenChars n cs

packBytes :: [Word8] -> ShortByteString
packBytes ws = let n = List.length ws in packLenBytes n ws
-- Fill a fresh array of known size with the chars of the list, in order.
packLenChars :: Int -> [Char] -> ShortByteString
packLenChars len cs0 =
    create len (\mba -> fill mba 0 cs0)
  where
    fill :: MBA s -> Int -> [Char] -> ST s ()
    fill !_   !_ []     = return ()
    fill !mba !i (c:cs) = do
        writeCharArray mba i c
        fill mba (i+1) cs
-- Byte-level twin of 'packLenChars'.
packLenBytes :: Int -> [Word8] -> ShortByteString
packLenBytes len ws0 =
    create len (\mba -> fill mba 0 ws0)
  where
    fill :: MBA s -> Int -> [Word8] -> ST s ()
    fill !_   !_ []     = return ()
    fill !mba !i (w:ws) = do
        writeWord8Array mba i w
        fill mba (i+1) ws
-- Unpacking bytestrings into lists efficiently is a tradeoff: on the one hand
-- we would like to write a tight loop that just blats the list into memory, on
-- the other hand we want it to be unpacked lazily so we don't end up with a
-- massive list data structure in memory.
--
-- Our strategy is to combine both: we will unpack lazily in reasonable sized
-- chunks, where each chunk is unpacked strictly.
--
-- unpackChars does the lazy loop, while unpackAppendBytes and
-- unpackAppendChars do the chunks strictly.
-- Lazy unpack entry points; see the strategy comment above.
unpackChars :: ShortByteString -> [Char]
unpackChars bs = unpackAppendCharsLazy bs []
unpackBytes :: ShortByteString -> [Word8]
unpackBytes bs = unpackAppendBytesLazy bs []
-- Why 100 bytes you ask? Because on a 64bit machine the list we allocate
-- takes just shy of 4k which seems like a reasonable amount.
-- (5 words per list element, 8 bytes per word, 100 elements = 4000 bytes)
unpackAppendCharsLazy :: ShortByteString -> [Char] -> [Char]
unpackAppendCharsLazy sbs cs0 = chunk 0 (length sbs) cs0
  where
    chunkSize = 100
    -- Unpack 'chunkSize' elements strictly, leave the rest as a thunk.
    chunk off len cs
      | len <= chunkSize = unpackAppendCharsStrict sbs off len cs
      | otherwise        = unpackAppendCharsStrict sbs off chunkSize rest
      where rest = chunk (off+chunkSize) (len-chunkSize) cs
-- Byte-level twin of 'unpackAppendCharsLazy'.
unpackAppendBytesLazy :: ShortByteString -> [Word8] -> [Word8]
unpackAppendBytesLazy sbs ws0 = chunk 0 (length sbs) ws0
  where
    chunkSize = 100
    chunk off len ws
      | len <= chunkSize = unpackAppendBytesStrict sbs off len ws
      | otherwise        = unpackAppendBytesStrict sbs off chunkSize rest
      where rest = chunk (off+chunkSize) (len-chunkSize) ws
-- For these unpack functions, since we're unpacking the whole list strictly we
-- build up the result list in an accumulator. This means we have to build up
-- the list starting at the end. So our traversal starts at the end of the
-- buffer and loops down until we hit the sentinel:
-- Strictly prepend the chars in [off, off+len) to the accumulator,
-- walking backwards so the list comes out in order.
unpackAppendCharsStrict :: ShortByteString -> Int -> Int -> [Char] -> [Char]
unpackAppendCharsStrict !sbs off len cs0 =
    walk (off-1) (off-1 + len) cs0
  where
    walk !sentinel !i !acc
      | i == sentinel = acc
      | otherwise     =
          let !c = indexCharArray (asBA sbs) i
          in walk sentinel (i-1) (c:acc)
-- Byte-level twin of 'unpackAppendCharsStrict'.
unpackAppendBytesStrict :: ShortByteString -> Int -> Int -> [Word8] -> [Word8]
unpackAppendBytesStrict !sbs off len ws0 =
    walk (off-1) (off-1 + len) ws0
  where
    walk !sentinel !i !acc
      | i == sentinel = acc
      | otherwise     =
          let !w = indexWord8Array (asBA sbs) i
          in walk sentinel (i-1) (w:acc)
------------------------------------------------------------------------
-- Eq and Ord implementations
-- Equality: same length and memcmp over exactly that many bytes, so the
-- undefined trailing bytes of the arrays are never consulted.
equateBytes :: ShortByteString -> ShortByteString -> Bool
equateBytes a b
  | length a == length b =
      0 == inlinePerformIO (memcmp_ByteArray (asBA a) (asBA b) (length a))
  | otherwise = False
-- Lexicographic order: memcmp over the common prefix decides first; ties
-- are broken by length (the shorter string sorts earlier).
compareBytes :: ShortByteString -> ShortByteString -> Ordering
compareBytes a b =
    case inlinePerformIO (memcmp_ByteArray (asBA a) (asBA b) n) of
      r | r < 0     -> LT
        | r > 0     -> GT
        | otherwise -> compare la lb
  where
    !la = length a
    !lb = length b
    !n  = min la lb
------------------------------------------------------------------------
-- Appending and concatenation
-- Concatenate two strings into a freshly allocated array.
append :: ShortByteString -> ShortByteString -> ShortByteString
append a b = create (la + lb) fill
  where
    !la = length a
    !lb = length b
    fill dst = do
        copyByteArray (asBA a) 0 dst 0  la
        copyByteArray (asBA b) 0 dst la lb
-- Concatenate many strings: one pass to size the result, one to copy.
concat :: [ShortByteString] -> ShortByteString
concat sbss =
    create (sumLen 0 sbss) (\dst -> pour dst 0 sbss)
  where
    sumLen !acc []       = acc
    sumLen !acc (x : xs) = sumLen (acc + length x) xs

    pour :: MBA s -> Int -> [ShortByteString] -> ST s ()
    pour !_   !_   []       = return ()
    pour !dst !off (x : xs) = do
        let !n = length x
        copyByteArray (asBA x) 0 dst off n
        pour dst (off + n) xs
------------------------------------------------------------------------
-- Exported low level operations
-- Copy 'len' bytes starting at 'off' in the source into the destination
-- address. The caller must ensure the destination has room.
copyToPtr :: ShortByteString -- ^ source data
          -> Int             -- ^ offset into source
          -> Ptr a           -- ^ destination
          -> Int             -- ^ number of bytes to copy
          -> IO ()
copyToPtr src off dst len =
    stToIO $
      copyByteArrayToAddr (asBA src) off dst len
-- Build a ShortByteString by copying 'len' bytes from the given address.
-- The caller must keep the memory behind 'ptr' alive across the call.
createFromPtr :: Ptr a -- ^ source data
              -> Int   -- ^ number of bytes to copy
              -> IO ShortByteString
createFromPtr !ptr len =
    stToIO $ do
      mba <- newByteArray len
      copyAddrToByteArray ptr mba 0 len
      BA# ba# <- unsafeFreezeByteArray mba
      return (SBS ba# LEN(len))
------------------------------------------------------------------------
-- Primop wrappers
-- Boxed wrappers so the unlifted (mutable) byte arrays can be passed
-- through ordinary polymorphic code.
data BA = BA# ByteArray#
data MBA s = MBA# (MutableByteArray# s)
-- Read one Char / Word8 out of a frozen array (unchecked).
indexCharArray :: BA -> Int -> Char
indexCharArray (BA# ba#) (I# i#) = C# (indexCharArray# ba# i#)
indexWord8Array :: BA -> Int -> Word8
indexWord8Array (BA# ba#) (I# i#) = W8# (indexWord8Array# ba# i#)
-- Allocate an unpinned mutable array of the given byte length.
newByteArray :: Int -> ST s (MBA s)
newByteArray (I# len#) =
    ST $ \s -> case newByteArray# len# s of
                 (# s, mba# #) -> (# s, MBA# mba# #)
#if MIN_VERSION_base(4,6,0)
newPinnedByteArray :: Int -> ST s (MBA s)
newPinnedByteArray (I# len#) =
ST $ \s -> case newPinnedByteArray# len# s of
(# s, mba# #) -> (# s, MBA# mba# #)
#endif
-- Freeze in place: the caller must not write to the array afterwards.
unsafeFreezeByteArray :: MBA s -> ST s BA
unsafeFreezeByteArray (MBA# mba#) =
    ST $ \s -> case unsafeFreezeByteArray# mba# s of
                 (# s, ba# #) -> (# s, BA# ba# #)
-- Write one Char / Word8 into a mutable array (unchecked).
writeCharArray :: MBA s -> Int -> Char -> ST s ()
writeCharArray (MBA# mba#) (I# i#) (C# c#) =
    ST $ \s -> case writeCharArray# mba# i# c# s of
                 s -> (# s, () #)
writeWord8Array :: MBA s -> Int -> Word8 -> ST s ()
writeWord8Array (MBA# mba#) (I# i#) (W8# w#) =
    ST $ \s -> case writeWord8Array# mba# i# w# s of
                 s -> (# s, () #)
-- Bulk-copy wrappers over the (possibly emulated) copy primops below.
copyAddrToByteArray :: Ptr a -> MBA RealWorld -> Int -> Int -> ST RealWorld ()
copyAddrToByteArray (Ptr src#) (MBA# dst#) (I# dst_off#) (I# len#) =
    ST $ \s -> case copyAddrToByteArray# src# dst# dst_off# len# s of
                 s -> (# s, () #)
copyByteArrayToAddr :: BA -> Int -> Ptr a -> Int -> ST RealWorld ()
copyByteArrayToAddr (BA# src#) (I# src_off#) (Ptr dst#) (I# len#) =
    ST $ \s -> case copyByteArrayToAddr# src# src_off# dst# len# s of
                 s -> (# s, () #)
copyByteArray :: BA -> Int -> MBA s -> Int -> Int -> ST s ()
copyByteArray (BA# src#) (I# src_off#) (MBA# dst#) (I# dst_off#) (I# len#) =
    ST $ \s -> case copyByteArray# src# src_off# dst# dst_off# len# s of
                 s -> (# s, () #)
------------------------------------------------------------------------
-- FFI imports
-- Byte-wise comparison via C memcmp; 'unsafe' is fine since memcmp
-- neither blocks nor calls back into Haskell.
memcmp_ByteArray :: BA -> BA -> Int -> IO CInt
memcmp_ByteArray (BA# ba1#) (BA# ba2#) len =
  c_memcmp_ByteArray ba1# ba2# (fromIntegral len)
foreign import ccall unsafe "string.h memcmp"
  c_memcmp_ByteArray :: ByteArray# -> ByteArray# -> CSize -> IO CInt
------------------------------------------------------------------------
-- Primop replacements
copyAddrToByteArray# :: Addr#
-> MutableByteArray# RealWorld -> Int#
-> Int#
-> State# RealWorld -> State# RealWorld
copyByteArrayToAddr# :: ByteArray# -> Int#
-> Addr#
-> Int#
-> State# RealWorld -> State# RealWorld
copyByteArray# :: ByteArray# -> Int#
-> MutableByteArray# s -> Int#
-> Int#
-> State# s -> State# s
#if MIN_VERSION_base(4,7,0)
-- These exist as real primops in ghc-7.8, and for before that we use
-- FFI to C memcpy.
copyAddrToByteArray# = GHC.Exts.copyAddrToByteArray#
copyByteArrayToAddr# = GHC.Exts.copyByteArrayToAddr#
#else
copyAddrToByteArray# src dst dst_off len s =
unIO_ (memcpy_AddrToByteArray dst (clong dst_off) src 0 (csize len)) s
copyAddrToByteArray0 :: Addr# -> MutableByteArray# s -> Int#
-> State# RealWorld -> State# RealWorld
copyAddrToByteArray0 src dst len s =
unIO_ (memcpy_AddrToByteArray0 dst src (csize len)) s
{-# INLINE [0] copyAddrToByteArray# #-}
{-# RULES "copyAddrToByteArray# dst_off=0"
forall src dst len s.
copyAddrToByteArray# src dst 0# len s
= copyAddrToByteArray0 src dst len s #-}
foreign import ccall unsafe "fpstring.h fps_memcpy_offsets"
memcpy_AddrToByteArray :: MutableByteArray# s -> CLong -> Addr# -> CLong -> CSize -> IO ()
foreign import ccall unsafe "string.h memcpy"
memcpy_AddrToByteArray0 :: MutableByteArray# s -> Addr# -> CSize -> IO ()
copyByteArrayToAddr# src src_off dst len s =
unIO_ (memcpy_ByteArrayToAddr dst 0 src (clong src_off) (csize len)) s
copyByteArrayToAddr0 :: ByteArray# -> Addr# -> Int#
-> State# RealWorld -> State# RealWorld
copyByteArrayToAddr0 src dst len s =
unIO_ (memcpy_ByteArrayToAddr0 dst src (csize len)) s
{-# INLINE [0] copyByteArrayToAddr# #-}
{-# RULES "copyByteArrayToAddr# src_off=0"
forall src dst len s.
copyByteArrayToAddr# src 0# dst len s
= copyByteArrayToAddr0 src dst len s #-}
foreign import ccall unsafe "fpstring.h fps_memcpy_offsets"
memcpy_ByteArrayToAddr :: Addr# -> CLong -> ByteArray# -> CLong -> CSize -> IO ()
foreign import ccall unsafe "string.h memcpy"
memcpy_ByteArrayToAddr0 :: Addr# -> ByteArray# -> CSize -> IO ()
unIO_ :: IO () -> State# RealWorld -> State# RealWorld
unIO_ io s = case unIO io s of (# s, _ #) -> s
clong :: Int# -> CLong
clong i# = fromIntegral (I# i#)
csize :: Int# -> CSize
csize i# = fromIntegral (I# i#)
#endif
#if MIN_VERSION_base(4,5,0)
copyByteArray# = GHC.Exts.copyByteArray#
#else
copyByteArray# src src_off dst dst_off len s =
unST_ (unsafeIOToST
(memcpy_ByteArray dst (clong dst_off) src (clong src_off) (csize len))) s
where
unST (ST st) = st
unST_ st s = case unST st s of (# s, _ #) -> s
foreign import ccall unsafe "fpstring.h fps_memcpy_offsets"
memcpy_ByteArray :: MutableByteArray# s -> CLong
-> ByteArray# -> CLong -> CSize -> IO ()
#endif
|
jwiegley/ghc-release
|
libraries/bytestring/Data/ByteString/Short/Internal.hs
|
gpl-3.0
| 19,603 | 0 | 14 | 4,689 | 4,230 | 2,214 | 2,016 | 314 | 3 |
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-matches #-}
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- |
-- Module : Network.AWS.EC2.TerminateInstances
-- Copyright : (c) 2013-2015 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Shuts down one or more instances. This operation is idempotent; if you
-- terminate an instance more than once, each call succeeds.
--
-- Terminated instances remain visible after termination (for approximately
-- one hour).
--
-- By default, Amazon EC2 deletes all EBS volumes that were attached when
-- the instance launched. Volumes attached after instance launch continue
-- running.
--
-- You can stop, start, and terminate EBS-backed instances. You can only
-- terminate instance store-backed instances. What happens to an instance
-- differs if you stop it or terminate it. For example, when you stop an
-- instance, the root device and any other devices attached to the instance
-- persist. When you terminate an instance, any attached EBS volumes with
-- the 'DeleteOnTermination' block device mapping parameter set to 'true'
-- are automatically deleted. For more information about the differences
-- between stopping and terminating instances, see
-- <http://docs.aws.amazon.com/AWSEC2/latest/UserGuide/ec2-instance-lifecycle.html Instance Lifecycle>
-- in the /Amazon Elastic Compute Cloud User Guide/.
--
-- For more information about troubleshooting, see
-- <http://docs.aws.amazon.com/AWSEC2/latest/UserGuide/TroubleshootingInstancesShuttingDown.html Troubleshooting Terminating Your Instance>
-- in the /Amazon Elastic Compute Cloud User Guide/.
--
-- /See:/ <http://docs.aws.amazon.com/AWSEC2/latest/APIReference/ApiReference-query-TerminateInstances.html AWS API Reference> for TerminateInstances.
module Network.AWS.EC2.TerminateInstances
(
-- * Creating a Request
terminateInstances
, TerminateInstances
-- * Request Lenses
, tiDryRun
, tiInstanceIds
-- * Destructuring the Response
, terminateInstancesResponse
, TerminateInstancesResponse
-- * Response Lenses
, tirsTerminatingInstances
, tirsResponseStatus
) where
import Network.AWS.EC2.Types
import Network.AWS.EC2.Types.Product
import Network.AWS.Prelude
import Network.AWS.Request
import Network.AWS.Response
-- | /See:/ 'terminateInstances' smart constructor.
data TerminateInstances = TerminateInstances'
    { _tiDryRun      :: !(Maybe Bool) -- ^ dry-run flag; see 'tiDryRun'
    , _tiInstanceIds :: ![Text]       -- ^ instance IDs; see 'tiInstanceIds'
    } deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Creates a value of 'TerminateInstances' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'tiDryRun'
--
-- * 'tiInstanceIds'
terminateInstances
    :: TerminateInstances
terminateInstances = TerminateInstances' { _tiDryRun = Nothing, _tiInstanceIds = mempty }
-- | Checks whether you have the required permissions for the action, without
-- actually making the request, and provides an error response. If you have
-- the required permissions, the error response is 'DryRunOperation'.
-- Otherwise, it is 'UnauthorizedOperation'.
tiDryRun :: Lens' TerminateInstances (Maybe Bool)
tiDryRun = lens _tiDryRun (\s v -> s { _tiDryRun = v })
-- | One or more instance IDs.
tiInstanceIds :: Lens' TerminateInstances [Text]
tiInstanceIds = lens _tiInstanceIds (\s v -> s { _tiInstanceIds = v }) . _Coerce
-- Wire the request through a POST query call and decode the XML response:
-- the "instancesSet" element (possibly absent, defaulting to empty) becomes
-- the list of state changes, and the HTTP status is recorded verbatim.
instance AWSRequest TerminateInstances where
        type Rs TerminateInstances =
             TerminateInstancesResponse
        request = postQuery eC2
        response
          = receiveXML
              (\ s h x ->
                 TerminateInstancesResponse' <$>
                   (x .@? "instancesSet" .!@ mempty >>=
                      may (parseXMLList "item"))
                     <*> (pure (fromEnum s)))
-- No custom headers are required for this action.
instance ToHeaders TerminateInstances where
        toHeaders _ = mempty
-- EC2 query API actions are all rooted at "/".
instance ToPath TerminateInstances where
        toPath _ = "/"
-- Serialise the request as EC2 query parameters; Action/Version identify
-- the API call, InstanceId.N carries the id list.
instance ToQuery TerminateInstances where
        toQuery TerminateInstances'{..}
          = mconcat
              ["Action" =: ("TerminateInstances" :: ByteString),
               "Version" =: ("2015-04-15" :: ByteString),
               "DryRun" =: _tiDryRun,
               toQueryList "InstanceId" _tiInstanceIds]
-- | /See:/ 'terminateInstancesResponse' smart constructor.
data TerminateInstancesResponse = TerminateInstancesResponse'
    { _tirsTerminatingInstances :: !(Maybe [InstanceStateChange]) -- ^ see 'tirsTerminatingInstances'
    , _tirsResponseStatus       :: !Int                           -- ^ see 'tirsResponseStatus'
    } deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Creates a value of 'TerminateInstancesResponse' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'tirsTerminatingInstances'
--
-- * 'tirsResponseStatus'
terminateInstancesResponse
    :: Int -- ^ 'tirsResponseStatus'
    -> TerminateInstancesResponse
terminateInstancesResponse status = TerminateInstancesResponse'
    { _tirsTerminatingInstances = Nothing
    , _tirsResponseStatus       = status
    }
-- | Information about one or more terminated instances.
tirsTerminatingInstances :: Lens' TerminateInstancesResponse [InstanceStateChange]
tirsTerminatingInstances = lens _tirsTerminatingInstances (\s v -> s { _tirsTerminatingInstances = v }) . _Default . _Coerce
-- | The response status code.
tirsResponseStatus :: Lens' TerminateInstancesResponse Int
tirsResponseStatus = lens _tirsResponseStatus (\s v -> s { _tirsResponseStatus = v })
|
fmapfmapfmap/amazonka
|
amazonka-ec2/gen/Network/AWS/EC2/TerminateInstances.hs
|
mpl-2.0
| 6,017 | 0 | 15 | 1,158 | 678 | 413 | 265 | 82 | 1 |
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TupleSections #-}
-- Module : Khan.CLI.Group
-- Copyright : (c) 2013 Brendan Hay <[email protected]>
-- License : This Source Code Form is subject to the terms of
-- the Mozilla Public License, v. 2.0.
-- A copy of the MPL can be found in the LICENSE file or
-- you can obtain it at http://mozilla.org/MPL/2.0/.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : experimental
-- Portability : non-portable (GHC extensions)
module Khan.CLI.SSH (commands) where
import Data.List (isPrefixOf)
import qualified Filesystem.Path.CurrentOS as Path
import Khan.Internal
import qualified Khan.Model.Host as Host
import qualified Khan.Model.Key as Key
import Khan.Model.SSH (Mode(..))
import qualified Khan.Model.SSH as SSH
import Khan.Prelude
import Network.AWS.EC2
import System.Environment
-- | Options accepted by the @ssh@ sub-command.
data SSH = SSH
    { sshRKeys :: !RKeysBucket    -- ^ bucket holding remote key material
    , sshRole  :: !Role           -- ^ role used to select matching hosts
    , sshEnv   :: !Env            -- ^ target environment (required; see 'validate')
    , sshKey   :: Maybe FilePath  -- ^ explicit key file; fetched via 'Key.path' when absent
    , sshUser  :: !Text           -- ^ remote user name
    , sshArgs  :: [String]        -- ^ extra arguments passed through to @ssh@
    } deriving (Show)
-- | Command-line parser for 'SSH'. Fields are parsed in record order.
sshParser :: EnvMap -> Parser SSH
sshParser env = SSH
    <$> rKeysOption env
    <*> roleOption
    <*> envOption env
    <*> keyOption
    <*> userOption
    -- Fixed user-facing typo: "arugments" -> "arguments".
    <*> argsOption str mempty "Pass through arguments to ssh."
instance Options SSH where
    validate SSH{..} =
        check sshEnv "--env must be specified."
instance Naming SSH where
    names SSH{..} = unversioned sshRole sshEnv
-- | Options accepted by the @scp upload@/@scp download@ sub-commands.
data SCP = SCP
    { scpRKeys :: !RKeysBucket    -- ^ bucket holding remote key material
    , scpRole  :: !Role           -- ^ role used to select matching hosts
    , scpEnv   :: !Env            -- ^ target environment (required; see 'validate')
    , scpKey   :: Maybe FilePath  -- ^ explicit key file; fetched via 'Key.path' when absent
    , scpUser  :: !Text           -- ^ remote user name
    , scpMode  :: !Mode           -- ^ 'Upload' or 'Download' with source/destination paths
    , scpArgs  :: [String]        -- ^ extra arguments passed through to @scp@
    } deriving (Show)
-- | Command-line parser for 'SCP'. The caller supplies the 'Mode'
-- constructor ('Upload' or 'Download') which is applied to the parsed
-- source and destination paths.
scpParser :: EnvMap -> (FilePath -> FilePath -> Mode) -> Parser SCP
scpParser env mode = SCP
    <$> rKeysOption env
    <*> roleOption
    <*> envOption env
    <*> keyOption
    <*> userOption
    <*> modeParser
    -- Fixed user-facing typo: "arugments" -> "arguments".
    <*> argsOption str mempty "Pass through arguments to scp."
  where
    modeParser = mode
        <$> pathOption "source" (short 's' <> action "file")
            "Source path."
        <*> pathOption "destination" (short 'd' <> action "file")
            "Destination path."
instance Options SCP where
    validate SCP{..} =
        check scpEnv "--env must be specified."
instance Naming SCP where
    names SCP{..} = unversioned scpRole scpEnv
-- | All sub-commands exposed by this module: @ssh@ plus the @scp@ group
-- with @upload@ and @download@.
commands :: EnvMap -> Mod CommandFields Command
commands env = mconcat
    [ command "ssh" ssh (sshParser env)
        "Display a multiple choice list of matching hosts to SSH into."
    , group "scp" "Manage Artifacts over SCP." $ mconcat
        [ command "upload" scp (scpParser env Upload)
            "Upload."
        , command "download" scp (scpParser env Download)
            "Download."
        ]
    ]
-- | Resolve the key file (explicit @--key@ or fetched via 'Key.path'),
-- let the user choose one matching host, then exec @ssh@ against it.
ssh :: Common -> SSH -> AWS ()
ssh Common{..} s@SSH{..} = do
    key <- maybe (Key.path sshRKeys s cLKeys) return sshKey
    Host.choose cVPN sshEnv sshRole $ \x ->
        SSH.execSSH (Host.address x) sshUser key sshArgs
-- | Copy files to or from every host matching the env/role, running the
-- per-host transfers concurrently.
scp :: Common -> SCP -> AWS ()
scp Common{..} s@SCP{..} = do
    verify
    key <- maybe (Key.path scpRKeys s cLKeys) return scpKey
    Host.findAll cVPN scpEnv scpRole >>= go key
  where
    go _ [] = log_ "No hosts found."
    -- Fan out one scp per host and wait for all of them.
    go k xs = mapM (async . exec k) xs >>= mapM_ wait_
    exec k x = SSH.execSCP scpMode (Host.address x) scpUser k scpArgs
    -- Guard against accidental local shell expansion of a remote $HOME
    -- path. Previously an unset/empty $HOME made the isPrefixOf test
    -- vacuously true and rejected every transfer; an empty prefix is
    -- now ignored.
    verify = do
        h <- fromMaybe "" <$> liftIO (lookupEnv "HOME")
        when (not (null h) && h `isPrefixOf` path scpMode) $
            throwAWS "Unexpected bash expansion of $HOME: {}" [show scpMode]
    path (Upload _ p) = Path.encodeString p
    path (Download p _) = Path.encodeString p
|
zinfra/khan
|
khan-cli/src/Khan/CLI/SSH.hs
|
mpl-2.0
| 4,006 | 0 | 13 | 1,129 | 1,019 | 530 | 489 | 110 | 3 |
{-# LANGUAGE GeneralizedNewtypeDeriving, CPP, TypeFamilies, FlexibleInstances, FlexibleContexts, DeriveDataTypeable, TypeOperators #-}
import Development.Shake
import qualified Development.Shake.Core as Core
import Control.DeepSeq
import Control.Monad.IO.Class
import Data.Binary
import Data.Binary.Get
import Data.Binary.Put
import Data.Typeable
#include "MyOracle.inc"
-- The question carries no information, so serialize nothing.
instance Binary (Question MyOracle) where
    get = return (MOQ ())
    put (MOQ ()) = return ()
-- Make the answer shorter (tests that we check that all input is consumed
-- by the deserializer): the answer is written as a 16-bit word here, so a
-- database produced with a wider encoding will leave trailing bytes.
instance Binary (Answer MyOracle) where
    get = fmap (MOA . fromIntegral) getWord16le
    put (MOA i) = putWord16le (fromIntegral i)
main :: IO ()
main = (Core.shake :: Shake (Question MyOracle :+: CanonicalFilePath) () -> IO ()) $ do
    installOracle (MO 1)
    "examplefile" *> \x -> do
        -- Pattern-match forces the oracle to answer exactly MOA 1.
        MOA 1 <- query $ MOQ ()
        liftIO $ writeFile "examplefile" "OK2"
    want ["examplefile"]
|
beni55/openshake
|
tests/deserialization-changes/Shakefile-2.hs
|
bsd-3-clause
| 978 | 0 | 15 | 186 | 274 | 142 | 132 | 22 | 1 |
{-# LANGUAGE PackageImports #-}
{-# LANGUAGE FlexibleContexts #-}
module Propellor.Property where
import System.Directory
import System.FilePath
import Control.Monad
import Data.Monoid
import Control.Monad.IfElse
import "mtl" Control.Monad.RWS.Strict
import Propellor.Types
import Propellor.Info
import Utility.Monad
-- | Constructs a Property, from a description and an action to run to
-- ensure the Property is met.
property :: Desc -> Propellor Result -> Property NoInfo
property d s = simpleProperty d s mempty
-- | Makes a perhaps non-idempotent Property be idempotent by using a flag
-- file to indicate whether it has run before.
-- Use with caution.
flagFile :: Property i -> FilePath -> Property i
flagFile p = flagFile' p . return
flagFile' :: Property i -> IO FilePath -> Property i
flagFile' p getflagfile = adjustPropertySatisfy p $ \satisfy -> do
	flagfile <- liftIO getflagfile
	-- Skip the property entirely when the flag file already exists.
	go satisfy flagfile =<< liftIO (doesFileExist flagfile)
  where
	go _ _ True = return NoChange
	go satisfy flagfile False = do
		r <- satisfy
		-- Only write the flag after the property actually made a change,
		-- and only if something else has not created it meanwhile.
		when (r == MadeChange) $ liftIO $
			unlessM (doesFileExist flagfile) $ do
				createDirectoryIfMissing True (takeDirectory flagfile)
				writeFile flagfile ""
		return r
-- | Whenever a change has to be made for a Property, causes a hook
-- Property to also be run, but not otherwise.
onChange
:: (Combines (Property x) (Property y))
=> Property x
-> Property y
-> CombinedType (Property x) (Property y)
onChange = combineWith $ \p hook -> do
r <- p
case r of
MadeChange -> do
r' <- hook
return $ r <> r'
_ -> return r
-- | Alias for @flip describe@
(==>) :: IsProp (Property i) => Desc -> Property i -> Property i
(==>) = flip describe
infixl 1 ==>
-- | Makes a Property only need to do anything when a test succeeds.
check :: IO Bool -> Property i -> Property i
check c p = adjustPropertySatisfy p $ \satisfy -> ifM (liftIO c)
( satisfy
, return NoChange
)
-- | Tries the first property, but if it fails to work, instead uses
-- the second.
fallback :: (Combines (Property p1) (Property p2)) => Property p1 -> Property p2 -> Property (CInfo p1 p2)
fallback = combineWith $ \a1 a2 -> do
r <- a1
if r == FailedChange
then a2
else return r
-- | Marks a Property as trivial: any 'MadeChange' result is reported as
-- 'NoChange', so it can only return FailedChange or NoChange.
--
-- Useful when it's just as expensive to check if a change needs
-- to be made as it is to just idempotently assure the property is
-- satisfied. For example, chmodding a file.
trivial :: Property i -> Property i
trivial p = adjustPropertySatisfy p $ \satisfy -> do
	result <- satisfy
	return $ case result of
		MadeChange -> NoChange
		other -> other
-- | Makes a property that is satisfied differently depending on the host's
-- operating system.
--
-- Note that the operating system may not be declared for some hosts.
withOS :: Desc -> (Maybe System -> Propellor Result) -> Property NoInfo
withOS desc a = property desc $ a =<< getOS
-- | Undoes the effect of a property by swapping the two sides of a
-- 'RevertableProperty'.
revert :: RevertableProperty -> RevertableProperty
revert (RevertableProperty p1 p2) = RevertableProperty p2 p1
-- | Runs an IO action and reports that a change was made.
makeChange :: IO () -> Propellor Result
makeChange a = liftIO a >> return MadeChange
-- | Result for a property that was already satisfied.
noChange :: Propellor Result
noChange = return NoChange
-- | A property that does nothing and always reports 'NoChange'.
doNothing :: Property NoInfo
doNothing = property "noop property" noChange
-- | Registers an action that should be run at the very end. The action
-- receives a 'Result'; it is collected via the writer as an 'EndAction'
-- (presumably executed after all other properties — confirm in the
-- Propellor core loop).
endAction :: Desc -> (Result -> Propellor Result) -> Propellor ()
endAction desc a = tell [EndAction desc a]
|
avengerpenguin/propellor
|
src/Propellor/Property.hs
|
bsd-2-clause
| 3,484 | 18 | 15 | 678 | 931 | 469 | 462 | 71 | 2 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE KindSignatures #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE TypeOperators #-}
module Kind00003 where
{-
-- FIXME: Waiting on https://github.com/haskell-suite/haskell-src-exts/issues/125
data HList :: [*] -> * where
HNil :: HList '[]
HCons :: a -> HList t -> HList (a ': t)
-- FIXME: Waiting on https://github.com/haskell-suite/haskell-src-exts/issues/124
data Tuple :: (*,*) -> * where
Tuple :: a -> b -> Tuple '(a,b)
-}
class C1 (a :: Bool {- 1 -}) where
c :: proxy a -> Int
class C2 (a :: [ * ])
class C3 (a :: [(Int, Double)])
class C4 (a :: ( * ))
data X (a :: [*])
x1 = undefined :: X '[Int]
x2 = undefined :: X '[Int, Double]
data Y (a :: (*, Bool))
y1 = undefined :: Y '(Double, True)
y2 = undefined :: Y '(Double, 'False {-comment-})
|
charleso/intellij-haskforce
|
tests/gold/parser/Kind00003.hs
|
apache-2.0
| 800 | 0 | 9 | 165 | 196 | 118 | 78 | -1 | -1 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
---------------------------------------------------------
--
-- Module : Yesod.Sitemap
-- Copyright : Michael Snoyman
-- License : BSD3
--
-- Maintainer : Michael Snoyman <[email protected]>
-- Stability : Stable
-- Portability : portable
--
-- Generating Google sitemap files.
--
---------------------------------------------------------
-- | Generates XML sitemap files.
--
-- See <http://www.sitemaps.org/>.
module Yesod.Sitemap
( sitemap
, sitemapList
, sitemapConduit
, robots
, SitemapUrl (..)
, SitemapChangeFreq (..)
) where
import Yesod.Core
import Data.Time (UTCTime)
import Text.XML.Stream.Render (renderBuilder)
import Data.Text (Text, pack)
import Data.XML.Types
import Data.Conduit
import qualified Data.Conduit.List as CL
import Data.Default (def)
import qualified Data.Text as T
-- | Valid values for the @changefreq@ element of a sitemap URL.
data SitemapChangeFreq
    = Always
    | Hourly
    | Daily
    | Weekly
    | Monthly
    | Yearly
    | Never

-- | Render a 'SitemapChangeFreq' as the lower-case keyword the sitemap
-- schema expects.
showFreq :: SitemapChangeFreq -> Text
showFreq freq = pack $ case freq of
    Always  -> "always"
    Hourly  -> "hourly"
    Daily   -> "daily"
    Weekly  -> "weekly"
    Monthly -> "monthly"
    Yearly  -> "yearly"
    Never   -> "never"
-- | One entry of a sitemap; optional fields are omitted from the XML
-- when 'Nothing' (see 'sitemapConduit').
data SitemapUrl url = SitemapUrl
    { sitemapLoc :: url                           -- ^ page location, rendered via the URL renderer
    , sitemapLastMod :: Maybe UTCTime             -- ^ optional @lastmod@, formatted with 'formatW3'
    , sitemapChangeFreq :: Maybe SitemapChangeFreq -- ^ optional @changefreq@
    , sitemapPriority :: Maybe Double             -- ^ optional @priority@
    }
-- | A basic robots file which just lists the "Sitemap: " line.
robots :: MonadHandler m
=> Route (HandlerSite m) -- ^ sitemap url
-> m Text
robots smurl = do
ur <- getUrlRender
return $ T.unlines
[ "Sitemap: " `T.append` ur smurl
, "User-agent: *"
]
-- | Serve a stream of @SitemapUrl@s as a sitemap.
--
-- Since 1.2.0
sitemap :: ConduitT () (SitemapUrl (Route site)) (HandlerFor site) ()
-> HandlerFor site TypedContent
sitemap urls = do
render <- getUrlRender
respondSource typeXml $ do
yield Flush
urls .| sitemapConduit render .| renderBuilder def .| CL.map Chunk
-- | Convenience wrapper for @sitemap@ for the case when the input is an
-- in-memory list.
--
-- Since 1.2.0
sitemapList :: [SitemapUrl (Route site)] -> HandlerFor site TypedContent
sitemapList = sitemap . mapM_ yield
-- | Convert a stream of @SitemapUrl@s to XML @Event@s using the given URL
-- renderer.
--
-- This function is fully general for usage outside of Yesod.
--
-- Since 1.2.0
sitemapConduit :: Monad m
               => (a -> Text)
               -> ConduitT (SitemapUrl a) Event m ()
sitemapConduit render = do
    yield EventBeginDocument
    -- One <urlset> root wrapping a <url> element per incoming SitemapUrl.
    element "urlset" [] $ awaitForever goUrl
    yield EventEndDocument
  where
    namespace = "http://www.sitemaps.org/schemas/sitemap/0.9"
    -- Emit begin/content/end events for one namespaced element.
    element name' attrs inside = do
        yield $ EventBeginElement name attrs
        () <- inside
        yield $ EventEndElement name
      where
        name = Name name' (Just namespace) Nothing
    -- Optional fields produce no element at all when Nothing.
    goUrl SitemapUrl {..} = element "url" [] $ do
        element "loc" [] $ yield $ EventContent $ ContentText $ render sitemapLoc
        case sitemapLastMod of
            Nothing -> return ()
            Just lm -> element "lastmod" [] $ yield $ EventContent $ ContentText $ formatW3 lm
        case sitemapChangeFreq of
            Nothing -> return ()
            Just scf -> element "changefreq" [] $ yield $ EventContent $ ContentText $ showFreq scf
        case sitemapPriority of
            Nothing -> return ()
            Just p -> element "priority" [] $ yield $ EventContent $ ContentText $ pack $ show p
|
s9gf4ult/yesod
|
yesod-sitemap/Yesod/Sitemap.hs
|
mit
| 3,741 | 0 | 19 | 998 | 854 | 448 | 406 | 79 | 4 |
<?xml version="1.0" encoding="UTF-8"?><!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<helpset version="2.0" xml:lang="ko-KR">
<title>AMF Support</title>
<maps>
<homeID>amf</homeID>
<mapref location="map.jhm"/>
</maps>
<view>
<name>TOC</name>
<label>Contents</label>
<type>org.zaproxy.zap.extension.help.ZapTocView</type>
<data>toc.xml</data>
</view>
<view>
<name>Index</name>
<label>Index</label>
<type>javax.help.IndexView</type>
<data>index.xml</data>
</view>
<view>
<name>Search</name>
<label>Search</label>
<type>javax.help.SearchView</type>
<data engine="com.sun.java.help.search.DefaultSearchEngine">
JavaHelpSearch
</data>
</view>
<view>
<name>Favorites</name>
<label>Favorites</label>
<type>javax.help.FavoritesView</type>
</view>
</helpset>
|
thc202/zap-extensions
|
addOns/amf/src/main/javahelp/help_ko_KR/helpset_ko_KR.hs
|
apache-2.0
| 956 | 77 | 66 | 156 | 407 | 206 | 201 | -1 | -1 |
module InfixIn3 where
data Inf a b = Nil | a :* b
data Try = C1 Try2
data Try2 = C2 [Int]
data T = forall a. MkT [a]
f :: Inf [Int] (Either Int Int) -> [Int]
f Nil = []
f (a :* b) = a
h :: Inf [Int] (Either Int Int) -> [Int]
h Nil = []
h (a@[] :* b) = a
h (a@(x:xs) :* b) = a
j :: Int -> Try -> [Int]
j v x@(C1 b_1@(C2 b_2)) = []
j v x@(C1 b_1) = []
j v x = []
p :: [a] -> a
p (x:xs) = x
|
SAdams601/HaRe
|
old/testing/subIntroPattern/InfixIn3.hs
|
bsd-3-clause
| 398 | 0 | 11 | 118 | 300 | 165 | 135 | -1 | -1 |
{-# LANGUAGE TypeFamilies, PolyKinds #-}
-- See #10982
module UnusedTyVarWarnings where
type family C a b where
C a b = a -- should warn
type family C2 a b
type instance C2 a b = a -- should warn
type family D a b where
D a _b = a -- should not warn
type family D2 a b
type instance D2 a _b = a -- should not warn
type family E a b where
E a _ = a -- should not warn
type family E2 a b
type instance E2 a _ = a -- should not warn
type family X a b where
X a a = Int -- a is considered used, do not warn
X a Int = Bool -- here a is unused
type family Y a b c where
Y a b b = a -- b is used, do no warn
data family I a b c
data instance I a b c = IDC1 a | IDC2 c -- should warn
data family J a b
data instance J a _b = JDC a -- should not warn
data family K a b
data instance K a _ = KDC a -- should not warn
|
sdiehl/ghc
|
testsuite/tests/indexed-types/should_compile/UnusedTyVarWarnings.hs
|
bsd-3-clause
| 930 | 0 | 6 | 329 | 252 | 159 | 93 | 25 | 0 |
{-# LANGUAGE QuasiQuotes, TemplateHaskell, TypeFamilies, MultiParamTypeClasses, OverloadedStrings #-}
module YesodCoreTest.Redirect (specs, Widget) where
import YesodCoreTest.YesodTest
import Yesod.Core.Handler (redirectWith, setEtag)
import qualified Network.HTTP.Types as H
data Y = Y
mkYesod "Y" [parseRoutes|
/ RootR GET POST
/r301 R301 GET
/r303 R303 GET
/r307 R307 GET
/rregular RRegular GET
/etag EtagR GET
|]
instance Yesod Y where approot = ApprootStatic "http://test"
-- | Run a test session against the Y application.
app :: Session () -> IO ()
app = yesod Y
-- Root accepts both GET and POST and returns an empty 200 body.
getRootR :: Handler ()
getRootR = return ()
postRootR :: Handler ()
postRootR = return ()
getR301, getR303, getR307, getRRegular, getEtagR :: Handler ()
-- Each handler redirects to the root with the status its route name says.
getR301 = redirectWith H.status301 RootR
getR303 = redirectWith H.status303 RootR
getR307 = redirectWith H.status307 RootR
-- Plain redirect: status depends on the client's HTTP version (see specs).
getRRegular = redirect RootR
getEtagR = setEtag "hello world"
specs :: Spec
specs = describe "Redirect" $ do
it "no redirect" $ app $ do
res <- request defaultRequest { pathInfo = [], requestMethod = "POST" }
assertStatus 200 res
assertBodyContains "" res
it "301 redirect" $ app $ do
res <- request defaultRequest { pathInfo = ["r301"] }
assertStatus 301 res
assertBodyContains "" res
it "303 redirect" $ app $ do
res <- request defaultRequest { pathInfo = ["r303"] }
assertStatus 303 res
assertBodyContains "" res
it "307 redirect" $ app $ do
res <- request defaultRequest { pathInfo = ["r307"] }
assertStatus 307 res
assertBodyContains "" res
it "303 redirect for regular, HTTP 1.1" $ app $ do
res <- request defaultRequest {
pathInfo = ["rregular"],
httpVersion = H.http11
}
assertStatus 303 res
assertBodyContains "" res
it "302 redirect for regular, HTTP 1.0" $ app $ do
res <- request defaultRequest {
pathInfo = ["rregular"]
, httpVersion = H.http10
}
assertStatus 302 res
assertBodyContains "" res
describe "etag" $ do
it "no if-none-match" $ app $ do
res <- request defaultRequest { pathInfo = ["etag"] }
assertStatus 200 res
assertHeader "etag" "\"hello world\"" res
it "single, unquoted if-none-match" $ app $ do
res <- request defaultRequest
{ pathInfo = ["etag"]
, requestHeaders = [("if-none-match", "hello world")]
}
assertStatus 304 res
it "different if-none-match" $ app $ do
res <- request defaultRequest
{ pathInfo = ["etag"]
, requestHeaders = [("if-none-match", "hello world!")]
}
assertStatus 200 res
assertHeader "etag" "\"hello world\"" res
it "single, quoted if-none-match" $ app $ do
res <- request defaultRequest
{ pathInfo = ["etag"]
, requestHeaders = [("if-none-match", "\"hello world\"")]
}
assertStatus 304 res
it "multiple quoted if-none-match" $ app $ do
res <- request defaultRequest
{ pathInfo = ["etag"]
, requestHeaders = [("if-none-match", "\"foo\", \"hello world\"")]
}
assertStatus 304 res
it "ignore weak" $ app $ do
res <- request defaultRequest
{ pathInfo = ["etag"]
, requestHeaders = [("if-none-match", "\"foo\", W/\"hello world\"")]
}
assertStatus 200 res
|
ygale/yesod
|
yesod-core/test/YesodCoreTest/Redirect.hs
|
mit
| 3,416 | 0 | 19 | 957 | 922 | 462 | 460 | 81 | 1 |
{-# LANGUAGE ScopedTypeVariables #-}
module Main where
{-
- This is a test framework for Arrays, using QuickCheck
-
-}
import qualified Data.Array as Array
import Data.List
import Control.Monad ( liftM2, liftM3, liftM4 )
import System.Random
import Data.Ix
import Data.List( (\\) )
infixl 9 !, //
infixr 0 ==>
infix 1 `classify`
prop_array =
forAll genBounds $ \ (b :: (Int,Int)) ->
forAll (genIVPs b 10) $ \ (vs :: [(Int,Int)]) ->
Array.array b vs
`same_arr`
array b vs
prop_listArray =
forAll genBounds $ \ (b :: (Int,Int)) ->
forAll (vector (length [fst b..snd b]))
$ \ (vs :: [Bool]) ->
Array.listArray b vs == Array.array b (zipWith (\ a b -> (a,b))
(Array.range b) vs)
prop_indices =
forAll genBounds $ \ (b :: (Int,Int)) ->
forAll (genIVPs b 10) $ \ (vs :: [(Int,Int)]) ->
let arr = Array.array b vs
in Array.indices arr == ((Array.range . Array.bounds) arr)
prop_elems =
forAll genBounds $ \ (b :: (Int,Int)) ->
forAll (genIVPs b 10) $ \ (vs :: [(Int,Int)]) ->
let arr = Array.array b vs
in Array.elems arr == [arr Array.! i | i <- Array.indices arr]
prop_assocs =
forAll genBounds $ \ (b :: (Int,Int)) ->
forAll (genIVPs b 10) $ \ (vs :: [(Int,Int)]) ->
let arr = Array.array b vs
in Array.assocs arr == [(i, arr Array.! i) | i <- Array.indices arr]
prop_slashslash =
forAll genBounds $ \ (b :: (Int,Int)) ->
forAll (genIVPs b 10) $ \ (vs :: [(Int,Int)]) ->
let arr = Array.array b vs
us = []
in arr Array.// us == Array.array (Array.bounds arr)
([(i,arr Array.! i)
| i <- Array.indices arr \\ [i | (i,_) <- us]]
++ us)
prop_accum =
forAll genBounds $ \ (b :: (Int,Int)) ->
forAll (genIVPs b 10) $ \ (vs :: [(Int,Int)]) ->
forAll (genIVPs b 10) $ \ (us :: [(Int,Int)]) ->
forAll (choose (0,length us))
$ \ n ->
let us' = take n us in
forAll arbitrary $ \ (fn :: Int -> Int -> Int) ->
let arr = Array.array b vs
in Array.accum fn arr us'
== foldl (\a (i,v) -> a Array.// [(i,fn (a Array.! i) v)]) arr us'
prop_accumArray =
forAll arbitrary $ \ (f :: Int -> Int -> Int) ->
forAll arbitrary $ \ (z :: Int) ->
forAll genBounds $ \ (b :: (Int,Int)) ->
forAll (genIVPs b 10) $ \ (vs :: [(Int,Int)]) ->
Array.accumArray f z b vs == Array.accum f
(Array.array b [(i,z) | i <- Array.range b]) vs
same_arr :: (Eq b) => Array.Array Int b -> Array Int b -> Bool
same_arr a1 a2 = a == c && b == d
&& all (\ n -> (a1 Array.! n) == (a2 ! n)) [a..b]
where (a,b) = Array.bounds a1 :: (Int,Int)
(c,d) = bounds a2 :: (Int,Int)
genBounds :: Gen (Int,Int)
genBounds = do m <- choose (0,20)
n <- choose (minBound,maxBound-m)
return (n,n+m-1)
genIVP :: Arbitrary a => (Int,Int) -> Gen (Int,a)
genIVP b = do { i <- choose b
; v <- arbitrary
; return (i,v)
}
genIVPs :: Arbitrary a => (Int,Int) -> Int -> Gen [(Int,a)]
genIVPs b@(low,high) s
= do { let is = [low..high]
; vs <- vector (length is)
; shuffle s (zip is vs)
}
prop_id = forAll genBounds $ \ (b :: (Int,Int)) ->
forAll (genIVPs b 10) $ \ (ivps :: [(Int,Int)]) ->
label (show (ivps :: [(Int,Int)])) True
-- | One riffle of a "deck" of elements: cut the list after position @n@
-- and interleave the two halves, starting with the bottom (dropped) half.
-- Think: riffling a pack of cards.
rift :: Int -> [a] -> [a]
rift n xs = weave bottom top
  where
    (top, bottom) = splitAt n xs
    weave (a:as) (b:bs) = a : b : weave as bs
    weave as [] = as
    weave [] bs = bs
-- shuffle makes n random rifts. Typically after
-- log n rifts, the list is in a pretty random order.
-- (where n is the number of elements in the list)
shuffle :: Int -> [a] -> Gen [a]
shuffle 0 m = return m
shuffle n m = do { r <- choose (1,length m)
                 ; shuffle (n-1) (rift r m)
                 }
prop_shuffle =
forAll (shuffle 10 [1..10::Int]) $ \ lst ->
label (show lst) True
------------------------------------------------------------------------------
main = do test prop_array
test prop_listArray
test prop_indices
test prop_elems
test prop_assocs
test prop_slashslash
test prop_accum
test prop_accumArray
instance Show (a -> b) where { show _ = "<FN>" }
------------------------------------------------------------------------------
-- Reference implementation of immutable arrays (a bounds pair plus a
-- lookup function), used as the oracle the Data.Array properties above
-- compare against.
data Array a b = MkArray (a,a) (a -> b) deriving ()
-- | Build an array from bounds and index/value pairs; out-of-range,
-- missing, or duplicated indices yield errors on construction/lookup.
array :: (Ix a) => (a,a) -> [(a,b)] -> Array a b
array b ivs =
    if and [inRange b i | (i,_) <- ivs]
        then MkArray b
                     (\j -> case [v | (i,v) <- ivs, i == j] of
                            [v] -> v
                            [] -> error "Array.!: \
                                        \undefined array element"
                            _ -> error "Array.!: \
                                       \multiply defined array element")
        else error "Array.array: out-of-range array association"
-- | Build from bounds and a list of values in index order.
listArray :: (Ix a) => (a,a) -> [b] -> Array a b
listArray b vs = array b (zipWith (\ a b -> (a,b)) (range b) vs)
-- | Index lookup: just apply the stored function.
(!) :: (Ix a) => Array a b -> a -> b
(!) (MkArray _ f) = f
bounds :: (Ix a) => Array a b -> (a,a)
bounds (MkArray b _) = b
indices :: (Ix a) => Array a b -> [a]
indices = range . bounds
elems :: (Ix a) => Array a b -> [b]
elems a = [a!i | i <- indices a]
assocs :: (Ix a) => Array a b -> [(a,b)]
assocs a = [(i, a!i) | i <- indices a]
-- | Bulk update: rebuild keeping old values for untouched indices.
(//) :: (Ix a) => Array a b -> [(a,b)] -> Array a b
a // us = array (bounds a)
              ([(i,a!i) | i <- indices a \\ [i | (i,_) <- us]]
               ++ us)
-- | Left-fold each update into the existing element with f.
accum :: (Ix a) => (b -> c -> b) -> Array a b -> [(a,c)]
                   -> Array a b
accum f = foldl (\a (i,v) -> a // [(i,f (a!i) v)])
accumArray :: (Ix a) => (b -> c -> b) -> b -> (a,a) -> [(a,c)]
                        -> Array a b
accumArray f z b = accum f (array b [(i,z) | i <- range b])
-- | Reindex an array through an index-mapping function.
ixmap :: (Ix a, Ix b) => (a,a) -> (a -> b) -> Array b c
                         -> Array a c
ixmap b f a = array b [(i, a ! f i) | i <- range b]
instance (Ix a) => Functor (Array a) where
fmap fn (MkArray b f) = MkArray b (fn . f)
instance (Ix a, Eq b) => Eq (Array a b) where
a == a' = assocs a == assocs a'
instance (Ix a, Ord b) => Ord (Array a b) where
a <= a' = assocs a <= assocs a'
instance (Ix a, Show a, Show b) => Show (Array a b) where
showsPrec p a = showParen (p > 9) (
showString "array " .
shows (bounds a) . showChar ' ' .
shows (assocs a) )
instance (Ix a, Read a, Read b) => Read (Array a b) where
readsPrec p = readParen (p > 9)
(\r -> [(array b as, u) | ("array",s) <- lex r,
(b,t) <- reads s,
(as,u) <- reads t ])
--------------------------------------------------------------------
-- QuickCheck v.0.2
-- DRAFT implementation; last update 000104.
-- Koen Claessen, John Hughes.
-- This file represents work in progress, and might change at a later date.
--------------------------------------------------------------------
-- Generator
newtype Gen a
= Gen (Int -> StdGen -> a)
sized :: (Int -> Gen a) -> Gen a
sized fgen = Gen (\n r -> let Gen m = fgen n in m n r)
resize :: Int -> Gen a -> Gen a
resize n (Gen m) = Gen (\_ r -> m n r)
rand :: Gen StdGen
rand = Gen (\n r -> r)
promote :: (a -> Gen b) -> Gen (a -> b)
promote f = Gen (\n r -> \a -> let Gen m = f a in m n r)
variant :: Int -> Gen a -> Gen a
variant v (Gen m) = Gen (\n r -> m n (rands r !! (v+1)))
where
rands r0 = r1 : rands r2 where (r1, r2) = split r0
generate :: Int -> StdGen -> Gen a -> a
generate n rnd (Gen m) = m size rnd'
where
(size, rnd') = randomR (0, n) rnd
instance Functor Gen where
fmap f m = m >>= return . f
instance Applicative Gen where
pure = return
(<*>) = liftM2 id
instance Monad Gen where
return a = Gen (\n r -> a)
Gen m >>= k =
Gen (\n r0 -> let (r1,r2) = split r0
Gen m' = k (m n r1)
in m' n r2)
-- derived
--choose :: Random a => (a, a) -> Gen a
choose bounds = ((fst . randomR bounds) `fmap` rand)
elements :: [a] -> Gen a
elements xs = (xs !!) `fmap` choose (0, length xs - 1)
vector :: Arbitrary a => Int -> Gen [a]
vector n = sequence [ arbitrary | i <- [1..n] ]
oneof :: [Gen a] -> Gen a
oneof gens = elements gens >>= id
frequency :: [(Int, Gen a)] -> Gen a
frequency xs = choose (1, tot) >>= (`pick` xs)
where
tot = sum (map fst xs)
pick n ((k,x):xs)
| n <= k = x
| otherwise = pick (n-k) xs
-- Generic monadic helpers: run an action repeatedly, left to right, and
-- tuple up the results.
two :: Monad m => m a -> m (a, a)
two m = do
    x <- m
    y <- m
    return (x, y)
three :: Monad m => m a -> m (a, a, a)
three m = do
    x <- m
    y <- m
    z <- m
    return (x, y, z)
four :: Monad m => m a -> m (a, a, a, a)
four m = do
    w <- m
    x <- m
    y <- m
    z <- m
    return (w, x, y, z)
--------------------------------------------------------------------
-- Arbitrary
class Arbitrary a where
arbitrary :: Gen a
coarbitrary :: a -> Gen b -> Gen b
instance Arbitrary () where
arbitrary = return ()
coarbitrary _ = variant 0
instance Arbitrary Bool where
arbitrary = elements [True, False]
coarbitrary b = if b then variant 0 else variant 1
instance Arbitrary Int where
arbitrary = sized $ \n -> choose (-n,n)
coarbitrary n = variant (if n >= 0 then 2*n else 2*(-n) + 1)
instance Arbitrary Integer where
arbitrary = sized $ \n -> choose (-fromIntegral n,fromIntegral n)
coarbitrary n = variant (fromInteger (if n >= 0 then 2*n else 2*(-n) + 1))
instance Arbitrary Float where
arbitrary = liftM3 fraction arbitrary arbitrary arbitrary
coarbitrary x = coarbitrary (decodeFloat x)
instance Arbitrary Double where
arbitrary = liftM3 fraction arbitrary arbitrary arbitrary
coarbitrary x = coarbitrary (decodeFloat x)
fraction a b c = fromInteger a + (fromInteger b / (abs (fromInteger c) + 1))
instance (Arbitrary a, Arbitrary b) => Arbitrary (a, b) where
arbitrary = liftM2 (,) arbitrary arbitrary
coarbitrary (a, b) = coarbitrary a . coarbitrary b
instance (Arbitrary a, Arbitrary b, Arbitrary c) => Arbitrary (a, b, c) where
arbitrary = liftM3 (,,) arbitrary arbitrary arbitrary
coarbitrary (a, b, c) = coarbitrary a . coarbitrary b . coarbitrary c
instance (Arbitrary a, Arbitrary b, Arbitrary c, Arbitrary d)
=> Arbitrary (a, b, c, d)
where
arbitrary = liftM4 (,,,) arbitrary arbitrary arbitrary arbitrary
coarbitrary (a, b, c, d) =
coarbitrary a . coarbitrary b . coarbitrary c . coarbitrary d
instance Arbitrary a => Arbitrary [a] where
arbitrary = sized (\n -> choose (0,n) >>= vector)
coarbitrary [] = variant 0
coarbitrary (a:as) = coarbitrary a . variant 1 . coarbitrary as
instance (Arbitrary a, Arbitrary b) => Arbitrary (a -> b) where
arbitrary = promote (`coarbitrary` arbitrary)
coarbitrary f gen = arbitrary >>= ((`coarbitrary` gen) . f)
--------------------------------------------------------------------
-- Testable
data Result
= Result { ok :: Maybe Bool, stamp :: [String], arguments :: [String] }
nothing :: Result
nothing = Result{ ok = Nothing, stamp = [], arguments = [] }
newtype Property
= Prop (Gen Result)
result :: Result -> Property
result res = Prop (return res)
evaluate :: Testable a => a -> Gen Result
evaluate a = gen where Prop gen = property a
class Testable a where
property :: a -> Property
instance Testable () where
property _ = result nothing
instance Testable Bool where
property b = result (nothing{ ok = Just b })
instance Testable Result where
property res = result res
instance Testable Property where
property prop = prop
instance (Arbitrary a, Show a, Testable b) => Testable (a -> b) where
property f = forAll arbitrary f
forAll :: (Show a, Testable b) => Gen a -> (a -> b) -> Property
forAll gen body = Prop $
do a <- gen
res <- evaluate (body a)
return (argument a res)
where
argument a res = res{ arguments = show a : arguments res }
(==>) :: Testable a => Bool -> a -> Property
True ==> a = property a
False ==> a = property ()
label :: Testable a => String -> a -> Property
label s a = Prop (add `fmap` evaluate a)
where
add res = res{ stamp = s : stamp res }
classify :: Testable a => Bool -> String -> a -> Property
classify True name = label name
classify False _ = property
trivial :: Testable a => Bool -> a -> Property
trivial = (`classify` "trivial")
collect :: (Show a, Testable b) => a -> b -> Property
collect v = label (show v)
--------------------------------------------------------------------
-- Testing
data Config = Config
{ configMaxTest :: Int
, configMaxFail :: Int
, configSize :: Int -> Int
, configEvery :: Int -> [String] -> String
}
quick :: Config
quick = Config
{ configMaxTest = 100
, configMaxFail = 1000
, configSize = (+ 3) . (`div` 2)
, configEvery = \n args -> let s = show n in s ++ ","
}
verbose :: Config
verbose = quick
{ configEvery = \n args -> show n ++ ":\n" ++ unlines args
}
test, quickCheck, verboseCheck :: Testable a => a -> IO ()
test = check quick
quickCheck = check quick
verboseCheck = check verbose
check :: Testable a => Config -> a -> IO ()
check config a =
do rnd <- newStdGen
tests config (evaluate a) rnd 0 0 []
tests :: Config -> Gen Result -> StdGen -> Int -> Int -> [[String]] -> IO ()
tests config gen rnd0 ntest nfail stamps
| ntest == configMaxTest config = do done "OK, passed" ntest stamps
| nfail == configMaxFail config = do done "Arguments exhausted after" ntest stamps
| otherwise =
do putStr (configEvery config ntest (arguments result))
case ok result of
Nothing ->
tests config gen rnd1 ntest (nfail+1) stamps
Just True ->
tests config gen rnd1 (ntest+1) nfail (stamp result:stamps)
Just False ->
putStr ( "Falsifiable, after "
++ show ntest
++ " tests:\n"
++ unlines (arguments result)
)
where
result = generate (configSize config ntest) rnd2 gen
(rnd1,rnd2) = split rnd0
done :: String -> Int -> [[String]] -> IO ()
done mesg ntest stamps =
do putStr ( mesg ++ " " ++ show ntest ++ " tests" ++ table )
where
table = display
. map entry
. reverse
. sort
. map pairLength
. group
. sort
. filter (not . null)
$ stamps
display [] = ".\n"
display [x] = " (" ++ x ++ ").\n"
display xs = ".\n" ++ unlines (map (++ ".") xs)
pairLength xss@(xs:_) = (length xss, xs)
entry (n, xs) = percentage n ntest
++ " "
++ concat (intersperse ", " xs)
percentage n m = show ((100 * n) `div` m) ++ "%"
--------------------------------------------------------------------
-- the end.
{-
instance Observable StdGen where { observer = observeBase }
instance Observable a => Observable (Gen a) where
observer (Gen a) = send "Gen" (return (Gen) << a)
-}
|
urbanslug/ghc
|
testsuite/tests/array/should_run/arr016.hs
|
bsd-3-clause
| 15,850 | 25 | 28 | 5,030 | 6,805 | 3,574 | 3,231 | -1 | -1 |
-- Kind error message test
module ShouldFail where
type IntMap a = [a]
data SymDict a = SymDict {idcounter:: Int, itot::IntMap a}
data SymTable = SymTable { dict::SymDict }
|
ezyang/ghc
|
testsuite/tests/typecheck/should_fail/tcfail136.hs
|
bsd-3-clause
| 180 | 0 | 9 | 36 | 55 | 35 | 20 | 4 | 0 |
module Vehicles where
--data Price = Price Integer deriving (Eq, Show)
-- | A price in whole currency units.
newtype Price = Price Integer deriving (Eq, Show)

-- | Things that can be judged as costing too much.
class TooExpensive a where
  tooExpensive :: a -> Bool

-- Anything strictly above 15000 is too expensive.
instance TooExpensive Price where
  tooExpensive (Price cost) = cost > 15000

data Manufacturer
  = Mini
  | Mazda
  | Tata
  deriving (Eq, Show)

data Airline
  = PapuAir
  | CatapultsR'Us
  | TakeYourChancesUnited
  deriving (Eq, Show)

-- | Either a car (with maker and price) or a plane (with airline).
data Vehicle
  = Car Manufacturer Price
  | Plane Airline
  deriving (Eq, Show)

isCar :: Vehicle -> Bool
isCar v = case v of
  Car _ _ -> True
  _       -> False

isPlane :: Vehicle -> Bool
isPlane v = case v of
  Plane _ -> True
  _       -> False

areCars :: [Vehicle] -> [Bool]
areCars = fmap isCar

areAllCars :: [Vehicle] -> Bool
areAllCars vs = and (map isCar vs)

-- | Manufacturer of a car; undefined for planes (matches the original
-- partial behavior).
getManu :: Vehicle -> Manufacturer
getManu v = case v of
  Car m _ -> m
  _       -> undefined

myCar = Car Mini (Price 14000)
urCar = Car Mazda (Price 20000)
clownCar = Car Tata (Price 7000)
doge = Plane PapuAir
JoshuaGross/haskell-learning-log
|
Code/Haskellbook/Vehicles.hs
|
mit
| 950 | 0 | 8 | 211 | 339 | 182 | 157 | 37 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE KindSignatures #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TypeApplications #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fplugin GHC.TypeLits.Normalise #-}
{-# OPTIONS_GHC -fplugin GHC.TypeLits.KnownNat.Solver #-}
{-# OPTIONS_GHC -fconstraint-solver-iterations=20 #-}
module AI.Funn.CL.Batched.GLOW (
Invertible(..),
invert
) where
import Control.Applicative
import Control.Category
import Control.Exception
import Control.Monad
import Control.Monad.IO.Class
import Data.Foldable
import Data.Monoid
import Data.Proxy
import Data.Random
import Data.Traversable
import GHC.TypeLits
import Prelude hiding (id)
import System.IO.Unsafe
import AI.Funn.CL.Batched.BTensor (BTensor(..))
import qualified AI.Funn.CL.Batched.BTensor as BT
import AI.Funn.CL.Batched.Param (Param(..))
import qualified AI.Funn.CL.Batched.Param as Param
import AI.Funn.CL.Batched.Network (Network(..))
import qualified AI.Funn.CL.Batched.Network as Network
import AI.Funn.CL.Tensor (Tensor)
import qualified AI.Funn.CL.Tensor as T
import qualified AI.Funn.CL.TensorLazy as TL
import AI.Funn.Diff.Diff (Diff(..), Derivable(..))
import qualified AI.Funn.Diff.Diff as Diff
import qualified AI.Funn.Flat.Blob as Blob
import AI.Funn.Indexed.Indexed
import AI.Funn.Space
data Invertible m (ω :: Nat) (p :: Nat) a b = Invertible {
invForward :: Network m ω p a b,
invBackward :: Network m ω p b a
}
params :: Invertible m ω p a b -> Proxy p
params _ = Proxy
batchSize :: Invertible m ω p a b -> Proxy ω
batchSize _ = Proxy
invert :: Invertible m ω p a b -> Invertible m ω p b a
invert (Invertible ab ba) = Invertible ba ab
-- Note: connect has asymmetry in the ordering of parameters.
-- one ~>> two != invert (invert two ~>> invert one)
-- They compute the same mapping, but the former takes 'one's
-- parameters first, followed by 'two's parameters. The latter takes
-- the parameters in the opposite order.
-- This is so that 'invert one' takes the same parameters as 'one'.
connect :: (KnownDimsF [i,j,ω], Monad m) => Invertible m ω i a b -> Invertible m ω j b c -> Invertible m ω (i+j) a c
connect (Invertible ab ba) (Invertible bc cb) = Invertible ac ca
where
ac = ab ~>> bc
ca = Network (Diff runBack) (netInit ac)
runBack (par, c) = do
let (p1, p2) = Param.split par
(b, k2) <- runDiff (netDiff cb) (p2, c)
(a, k1) <- runDiff (netDiff ba) (p1, b)
let back da = do
(dp1, db) <- k1 da
(dp2, dc) <- k2 db
return (Param.appendD dp1 dp2, dc)
return (a, back)
instance (KnownNat ω, Monad m) => Indexed (Invertible m ω) where
iid = Invertible iid iid
(~>>) = connect
pfirst :: (KnownNat ω, Monad m) => Invertible m ω p a b -> Invertible m ω p (a,c) (b,c)
pfirst (Invertible ab ba) = Invertible (first ab) (first ba)
psecond :: (KnownNat ω, Monad m) => Invertible m ω p a b -> Invertible m ω p (c,a) (c,b)
psecond (Invertible ab ba) = Invertible (second ab) (second ba)
pswap :: (KnownNat ω, Monad m) => Invertible m ω 0 (a,b) (b,a)
pswap = Invertible swap swap
passocL :: (KnownNat ω, Monad m) => Invertible m ω 0 (a,(b,c)) ((a,b),c)
passocL = Invertible assocL assocR
passocR :: (KnownNat ω, Monad m) => Invertible m ω 0 ((a,b),c) (a,(b,c))
passocR = Invertible assocR assocL
instance (KnownNat ω, Monad m) => Assoc (Invertible m ω) where
first = pfirst
second = psecond
swap = pswap
assocL = passocL
assocR = passocR
|
nshepperd/funn
|
AI/Funn/CL/Batched/GLOW.hs
|
mit
| 3,757 | 0 | 17 | 865 | 1,235 | 707 | 528 | -1 | -1 |
-- lsim - läpsy simulator - Juho Rinta-Paavola 2015
import System.Random
import Data.List
(??) :: [a] -> [Int] -> [a]
xs ?? [] = []
xs ?? (i:is) = (xs !! i):(xs ?? is)
replaceIndex :: [a] -> Int -> a -> [a]
replaceIndex (x:xs) 0 new = new:xs
replaceIndex (x:xs) n new = x:(replaceIndex xs (n-1) new)
data Suit = Club | Diamond | Heart | Spade
deriving (Read, Show, Enum, Eq, Ord)
data CardValue = Two | Three | Four | Five | Six | Seven | Eight | Nine | Ten | Jack | Queen | King | Ace
deriving (Read, Show, Enum, Eq, Ord)
data Card = Card {value :: CardValue,
suit :: Suit}
deriving (Read, Show, Eq)
instance Ord Card where
compare c1 c2 = compare (value c1, suit c1) (value c2, suit c2)
instance Enum Card where
toEnum n = let (v,s) = n `quotRem` 4 in Card (toEnum v) (toEnum s)
fromEnum c = fromEnum (value c) * 4 + fromEnum (suit c)
isFaceCard :: Card -> Bool
isFaceCard Card {value=Jack} = True
isFaceCard Card {value=Queen} = True
isFaceCard Card {value=King} = True
isFaceCard Card {value=Ace} = True
isFaceCard _ = False
isNotFaceCard = not.isFaceCard
type Deck = [Card]
deck :: Deck
deck = [Card value suit | value <- [Two .. Ace], suit <- [Club .. Spade]]
mixedDeck :: Int -> Deck
mixedDeck seed = map toEnum random52
where random52 = take 52 $ randomRs (0,51) gen
gen = mkStdGen seed
deal :: Deck -> Int -> [Deck]
deal deck n
| n == 1 = [deck]
| n < 1 = []
| otherwise = thisHand:(deal (deck \\ thisHand) (n-1))
where thisHand = deck ?? [0,n .. (length deck)-1]
checkFaceCardFall :: Deck -> Bool
checkFaceCardFall table
| (length table >= 2) && value (table !! 1) == Jack && first1NotFace = True
| (length table >= 3) && value (table !! 2) == Queen && first2NotFace = True
| (length table >= 4) && value (table !! 3) == King && first3NotFace = True
| (length table >= 5) && value (table !! 4) == Ace && first4NotFace = True
| otherwise = False
where first1NotFace = isNotFaceCard $ head table
first2NotFace = first1NotFace && (isNotFaceCard $ table !! 1)
first3NotFace = first2NotFace && (isNotFaceCard $ table !! 2)
first4NotFace = first3NotFace && (isNotFaceCard $ table !! 3)
play :: Deck -> Int -> Int -> Integer
play deck playerCount seed = playRound hands [] g 0 0
where hands = deal deck playerCount
g = mkStdGen seed
playRound :: [Deck] -> Deck -> StdGen -> Integer -> Int -> Integer
playRound hands table g roundNo playerNo
| length (filter (not.null) hands) == 1 =
roundNo
| (length table) >= 2 && value (head table) == value (table !! 1) =
playRound (appendTabletoPlayer rand) [] nextG nextRound rand
| checkFaceCardFall table == True =
playRound (appendTabletoPlayer prevPlayer) [] g nextRound prevPlayer
| null $ hands !! playerNo =
playRound hands table g roundNo nextPlayer
| (not.null) (filter isFaceCard $ take 4 table) && isNotFaceCard cardToPlay =
playRound (removePlayerCard playerNo) (cardToPlay:table) g nextRound playerNo
| otherwise =
playRound (removePlayerCard playerNo) (cardToPlay:table) g nextRound nextPlayer
where n = length hands
(rand,nextG) = randomR (0,n-1) g
appendTabletoPlayer pl = replaceIndex hands pl $ (hands !! pl) ++ (reverse table)
removePlayerCard pl = replaceIndex hands pl $ tail $ hands !! pl
cardToPlay = head $ hands !! playerNo
prevPlayer = (playerNo + n-1) `rem` n
nextRound = roundNo + 1
nextPlayer = (playerNo + 1) `rem` n
|
jrp6/lsim
|
main.hs
|
mit
| 3,709 | 0 | 13 | 1,012 | 1,548 | 804 | 744 | 77 | 1 |
{-# LANGUAGE TemplateHaskell, TypeSynonymInstances, FlexibleInstances #-}
module Asm.QQ (asm, asmf) where
import Language.Haskell.TH
import Language.Haskell.TH.Quote
import qualified Language.Haskell.Meta.Parse as P
import Data.Generics
import Asm.Preprocess
import Asm.Parser
import Asm.Expr
-- Quotes assembly code
asm :: QuasiQuoter
asm = QuasiQuoter {
quoteExp = quoteAsmExp,
quotePat = notImplemented,
quoteType = notImplemented,
quoteDec = notImplemented
}
-- Reads an assembly file and handles includes as well
asmf :: QuasiQuoter
asmf = QuasiQuoter {
quoteExp = quoteAsmFile,
quotePat = notImplemented,
quoteType = notImplemented,
quoteDec = notImplemented
}
notImplemented = fail "Feature not implemented"
quoteAsmExp :: String -> Q Exp
quoteAsmExp x = do
file <- loc_filename `fmap` location
let tree = parseText file . preprocess $ x
case tree of
Left err -> fail err
Right x -> exprsToExp x
quoteAsmFile :: String -> Q Exp
quoteAsmFile fname = do
contents <- runIO $ preprocessFile fname
let tree = parseText fname contents
case tree of
Left err -> fail err
Right x -> exprsToExp x
exprsToExp :: [Expr] -> Q Exp
exprsToExp = listE . map conv
where conv (AntiQuote s) = aqExpr s
conv (AntiQuoteStr s) = aqStr s
conv x = dataToExpQ (const Nothing `extQ` convArg) x
-- All valid expression results that we can feed to the parser
-- String is special cased so string results will be parsed as code
class (Show a) => ExpShow a where
expShow :: a -> String
expShow = show
instance ExpShow String where
expShow = id
instance ExpShow Int
instance ExpShow Integer
instance ExpShow Char where
expShow x = ['\'', x, '\'']
-- Antiquoter
aqExpr :: String -> Q Exp
aqExpr = parseExp [|stmnt . expShow|]
aqStr :: String -> Q Exp
aqStr = parseExp [|String . expShow|]
convArg :: Expr -> Maybe (Q Exp)
convArg (AntiQuoteStr s) = Just $ aqStr s
convArg (AntiQuote s) = Just $ parseExp [|arg . expShow|] s
convArg _ = Nothing
parseExp f s = case P.parseExp s of
Left err -> fail err
Right x -> f `appE` return x
parseOrErr :: (String -> Either String Expr) -> String -> Expr
parseOrErr f x = case f x of
Left e -> error e
Right xpr -> xpr
stmnt = parseOrErr parseStatement
arg = parseOrErr parseArg
|
unknownloner/calccomp
|
Asm/QQ.hs
|
mit
| 2,409 | 0 | 12 | 586 | 725 | 380 | 345 | 67 | 3 |
module ExpressionProblem1b
(
perimeter
)
where
import ExpressionProblem1a
-- Extension to API
-- New operation: perimeter
-- Cannot define new shape since Shape is closed
perimeter :: Shape -> Double
perimeter s =
case s of
Square side -> 4.0 * side
Circle radius -> 2 * pi * radius
|
rcook/expression-problem
|
src/ExpressionProblem1b.hs
|
mit
| 306 | 0 | 9 | 73 | 66 | 36 | 30 | 9 | 2 |
{-# LANGUAGE RecordWildCards #-}
import Stackage.Types
import Stackage.Build (build, defaultBuildSettings)
import Stackage.Init (stackageInit)
import Stackage.Util (allowPermissive)
import System.Environment (getArgs, getProgName)
import Data.Set (fromList)
import System.IO (hFlush, stdout)
data BuildArgs = BuildArgs
{ noClean :: Bool
, excluded :: [String]
, noPlatform :: Bool
, onlyPermissive :: Bool
, allowed :: [String]
}
parseBuildArgs :: [String] -> IO BuildArgs
parseBuildArgs =
loop BuildArgs
{ noClean = False
, excluded = []
, noPlatform = False
, onlyPermissive = False
, allowed = []
}
where
loop x [] = return x
loop x ("--no-clean":rest) = loop x { noClean = True } rest
loop x ("--exclude":y:rest) = loop x { excluded = y : excluded x } rest
loop x ("--no-platform":rest) = loop x { noPlatform = True } rest
loop x ("--only-permissive":rest) = loop x { onlyPermissive = True } rest
loop x ("--allow":y:rest) = loop x { allowed = y : allowed x } rest
loop _ (y:_) = error $ "Did not understand argument: " ++ y
main :: IO ()
main = do
args <- getArgs
case args of
"build":rest -> do
BuildArgs {..} <- parseBuildArgs rest
build defaultBuildSettings
{ cleanBeforeBuild = not noClean
, excludedPackages = fromList $ map PackageName excluded
, requireHaskellPlatform = not noPlatform
, allowedPackage =
if onlyPermissive
then allowPermissive allowed
else const $ Right ()
}
["init"] -> do
putStrLn "Note: init isn't really ready for prime time use."
putStrLn "Using it may make it impossible to build stackage."
putStr "Are you sure you want continue (y/n)? "
hFlush stdout
x <- getLine
case x of
c:_ | c `elem` "yY" -> stackageInit
_ -> putStrLn "Probably a good decision, exiting."
["update"] -> stackageInit >> error "FIXME update"
_ -> do
pn <- getProgName
putStrLn $ "Usage: " ++ pn ++ " <command>"
putStrLn "Available commands:"
putStrLn " update Download updated Stackage databases. Automatically calls init."
putStrLn " init Initialize your cabal file to use Stackage"
putStrLn " build [--no-clean] [--no-platform] [--exclude package...] [--only-permissive] [--allow package]"
putStrLn " Build the package databases (maintainers only)"
|
ekmett/stackage
|
app/stackage.hs
|
mit
| 2,830 | 0 | 18 | 1,014 | 668 | 347 | 321 | 61 | 7 |
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
module Handler.RepoSpec (spec) where
import TestImport
spec :: Spec
spec = withApp $ do
describe "valid request" $ do
it "gives a 200" $ do
get ReposR
statusIs 200
|
rgrempel/frelm.org
|
test/Handler/RepoSpec.hs
|
mit
| 272 | 0 | 14 | 75 | 62 | 31 | 31 | 10 | 1 |
a `blurb` b = c
|
chreekat/vim-haskell-syntax
|
test/golden/toplevel/infix-backtick.hs
|
mit
| 16 | 0 | 5 | 5 | 14 | 7 | 7 | 1 | 1 |
{-# htermination (gtMyInt :: MyInt -> MyInt -> MyBool) #-}
import qualified Prelude
data MyBool = MyTrue | MyFalse
data List a = Cons a (List a) | Nil
data MyInt = Pos Nat | Neg Nat ;
data Nat = Succ Nat | Zero ;
data Ordering = LT | EQ | GT ;
primCmpNat :: Nat -> Nat -> Ordering;
primCmpNat Zero Zero = EQ;
primCmpNat Zero (Succ y) = LT;
primCmpNat (Succ x) Zero = GT;
primCmpNat (Succ x) (Succ y) = primCmpNat x y;
primCmpInt :: MyInt -> MyInt -> Ordering;
primCmpInt (Pos Zero) (Pos Zero) = EQ;
primCmpInt (Pos Zero) (Neg Zero) = EQ;
primCmpInt (Neg Zero) (Pos Zero) = EQ;
primCmpInt (Neg Zero) (Neg Zero) = EQ;
primCmpInt (Pos x) (Pos y) = primCmpNat x y;
primCmpInt (Pos x) (Neg y) = GT;
primCmpInt (Neg x) (Pos y) = LT;
primCmpInt (Neg x) (Neg y) = primCmpNat y x;
compareMyInt :: MyInt -> MyInt -> Ordering
compareMyInt = primCmpInt;
esEsOrdering :: Ordering -> Ordering -> MyBool
esEsOrdering LT LT = MyTrue;
esEsOrdering LT EQ = MyFalse;
esEsOrdering LT GT = MyFalse;
esEsOrdering EQ LT = MyFalse;
esEsOrdering EQ EQ = MyTrue;
esEsOrdering EQ GT = MyFalse;
esEsOrdering GT LT = MyFalse;
esEsOrdering GT EQ = MyFalse;
esEsOrdering GT GT = MyTrue;
gtMyInt :: MyInt -> MyInt -> MyBool
gtMyInt x y = esEsOrdering (compareMyInt x y) GT;
|
ComputationWithBoundedResources/ara-inference
|
doc/tpdb_trs/Haskell/basic_haskell/GT_6.hs
|
mit
| 1,282 | 0 | 8 | 273 | 542 | 293 | 249 | 34 | 1 |
import Control.Monad
import Data.List
import Data.Array
limit :: Int
limit = 28123
main = print $ sum
[ n
| n <- [1..limit]
, not $ or [ abounds ! (n - a1) | a1 <- takeWhile (< n) abundants ]
]
abundants = filter (abounds !) [1..limit]
-- Memoized abounds
abounds = listArray (1, limit)
[ n < sum ( init
. foldl (liftM2 (*)) [1]
. map (scanl (*) 1)
. group
$ reduce primes n
)
| n <- [1..limit]
]
reduce _ 1 = []
reduce y@(x:xs) n = case n `mod` x of
0 -> x : reduce y (div n x)
_ -> reduce xs n
-- Faster than reducing from all numbers
primes = sieve [2..]
sieve (p:xs) = p : sieve [ x | x <- xs, x `mod` p > 0 ]
-- Slow abounds
-- abounds n = n < aux (n - 1)
-- where
-- aux 0 = 0
-- aux d = case n `mod` d of
-- 0 -> aux (d - 1) + d
-- _ -> aux (d - 1)
-- lazyDiff [] _ = []
-- lazyDiff a [] = a
-- lazyDiff a@(x:xs) b@(y:ys)
-- | x > y = lazyDiff a ys
-- | x < y = x : lazyDiff xs b
-- | otherwise = lazyDiff xs ys
|
nickspinale/euler
|
complete/023.hs
|
mit
| 1,100 | 0 | 16 | 410 | 356 | 197 | 159 | 23 | 2 |
-----------------------------------------------------------------------------
--
-- Module : Language.PureScript.Parser.Kinds
-- Copyright : (c) Phil Freeman 2013
-- License : MIT
--
-- Maintainer : Phil Freeman <[email protected]>
-- Stability : experimental
-- Portability :
--
-- |
-- A parser for kinds
--
-----------------------------------------------------------------------------
module Language.PureScript.Parser.Kinds (
parseKind
) where
import Prelude ()
import Prelude.Compat
import Language.PureScript.Kinds
import Language.PureScript.Parser.Common
import Language.PureScript.Parser.Lexer
import qualified Text.Parsec as P
import qualified Text.Parsec.Expr as P
parseStar :: TokenParser Kind
parseStar = const Star <$> symbol' "*"
parseBang :: TokenParser Kind
parseBang = const Bang <$> symbol' "!"
parseTypeAtom :: TokenParser Kind
parseTypeAtom = indented *> P.choice (map P.try
[ parseStar
, parseBang
, parens parseKind ])
-- |
-- Parse a kind
--
parseKind :: TokenParser Kind
parseKind = P.buildExpressionParser operators parseTypeAtom P.<?> "kind"
where
operators = [ [ P.Prefix (symbol' "#" >> return Row) ]
, [ P.Infix (P.try rarrow >> return FunKind) P.AssocRight ] ]
|
michaelficarra/purescript
|
src/Language/PureScript/Parser/Kinds.hs
|
mit
| 1,273 | 0 | 13 | 233 | 255 | 149 | 106 | 22 | 1 |
import Test.HUnit
import Q04
test1 = TestCase (assertEqual "mylength [] should be 0. " (0) (mylength [] ))
test2 = TestCase (assertEqual "mylength [1] should be 1. " (1) (mylength [1] ))
test3 = TestCase (assertEqual "mylength [1,2] should be 2. " (2) (mylength [1,2]))
main = runTestTT $ TestList [test1,test2,test3]
|
cshung/MiscLab
|
Haskell99/q04.test.hs
|
mit
| 327 | 0 | 10 | 60 | 122 | 66 | 56 | 6 | 1 |
module Main where
import Types
import System.Random
import Control.Monad.State
randomF _ = state random
randomStrat :: Strategy
randomStrat = S ("random", randomF)
main = dilemmaMain randomStrat
|
barkmadley/etd-retreat-2014-hteam
|
src/Random/Main.hs
|
mit
| 200 | 0 | 6 | 31 | 56 | 32 | 24 | 8 | 1 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TemplateHaskell #-}
module Main where
import Control.Arrow ((&&&))
import Data.ByteString (ByteString)
import qualified Data.ByteString.Char8 as BC
import Data.Char (chr, isDigit, isPunctuation, ord)
import Data.FileEmbed (embedFile)
import Data.Foldable (traverse_)
import Data.List (sortBy)
import Data.Maybe (fromJust)
data Room = Room
{ _name :: ByteString
, _sectorID :: Int
, _checkSum :: ByteString
} deriving (Eq, Show)
mkRoom :: ByteString -> Room
mkRoom bs = Room n s c
where
n = BC.init . fst $ split1
s = fst . fromJust . BC.readInt . fst $ split2
c = BC.filter (not . isPunctuation) . snd $ split2
split1 = BC.span (not . isDigit) bs
split2 = BC.span (not . isPunctuation) . snd $ split1
myOrder :: (Int, Char) -> (Int, Char) -> Ordering
(i1, c1) `myOrder` (i2, c2) | i1 == i2 && c1 == c2 = EQ
| i1 < i2 = GT
| i1 == i2 && c1 < c2 = LT
| otherwise = LT
isValidRoom :: Room -> Bool
isValidRoom r = c == _checkSum r
where
g = BC.group . BC.sort . BC.filter (not . isPunctuation) . _name $ r
m = map (BC.length &&& BC.head) g
s = sortBy myOrder m
c = BC.pack . take 5 . map snd $ s
input :: ByteString
input = $(embedFile "input.txt")
rooms :: [Room]
rooms = filter isValidRoom . map mkRoom . BC.lines $ input
decrypt :: Int -> ByteString -> ByteString
decrypt n = BC.map d
where
d '-' = ' '
d c = chr $ oa + (((oc - oa) + n') `mod` 26)
where
oa = ord 'a'
oc = ord c
n' = n `mod` 26
decryptRoom :: Room -> ByteString
decryptRoom r = decrypt (_sectorID r) (_name r)
isNorthPoleRoom :: Room -> Bool
isNorthPoleRoom =
(not . BC.null) . snd . BC.breakSubstring "northpole" . decryptRoom
part1 :: Int
part1 = sum . map _sectorID $ rooms
part2 :: Int
part2 = _sectorID . head . filter isNorthPoleRoom $ rooms
main :: IO ()
main = traverse_ print [part1, part2]
|
genos/online_problems
|
advent_of_code_2016/day4/src/Main.hs
|
mit
| 2,149 | 0 | 13 | 679 | 797 | 430 | 367 | 56 | 2 |
-- making string literals polymorphic over the 'IsString' type class
{-# LANGUAGE OverloadedStrings #-}
module Main where
import qualified IG.AuthenticationApi as A
import qualified IG.LightstreamerApi as L
-- Postgres handle
import qualified IG.PostgresStreamHandle as P
-- For creating efficient string
import Data.ByteString (intercalate, append)
import qualified Data.ByteString.Internal as B
-- C printf-like string formatting
import Text.Printf
import Lightstreamer (ConnectionSettings (..),
StreamHandler, StreamRequest,
SubscriptionMode (..),
TableOperation (..),
TlsSettings (..), streamClosed,
streamData)
import Data.List (isPrefixOf)
import System.Environment (getArgs)
-- To handle child threads
import Control.Concurrent.MVar
import Control.Monad
-- Generate UUID
import Data.UUID
import Data.UUID.V4
-- Postgresql
import Database.HDBC.PostgreSQL
-- Timestamp
import Data.Time.Clock
import Data.Time.Calendar
-- This encapsulates a 'MVar' for communicating with the thread
-- receiving lightStreamer subscription
newtype StatefulHandler = StatefulHandler { notice :: MVar() }
-- 'StatefulHandler' implements 'StreamHandler' such that when stream is
-- being closed, the internal 'MVar' is supplied with an empty value to notify
-- that the program can terminate
instance StreamHandler StatefulHandler where
streamData _ = print
streamClosed handler = putMVar (notice handler) ()
instance P.StatefulStreamHandler StatefulHandler where
wait handler = void ((takeMVar.notice) handler)
-- Connect to lightStreamer server and subscribe according to 'Subscription'
-- this function uses 'StatefulHandler' (an implementation of 'StreamHandler')
-- to wait and get notification about 'streamClosed'.
lightStreamer :: P.StatefulStreamHandler h => h -> L.Subscription -> L.LSSetting -> IO()
lightStreamer handler sub stg = L.connectAndSubscribe stg handler sub >> P.wait handler
authenticate :: [String] -> IO A.AuthenticationResponse
authenticate args =
maybe (error "invalid param") return (A.parse args)
>>= \(ev,ak,ar) -> A.authenticate ev ak ar
-- Convert IG authentication response to IG LightStreamer connecting setting
setting :: A.AuthenticationResponse -> L.LSSetting
setting (A.AuthenticationResponse un tk ip pn tls) = L.LSSetting ip pn tls "DEFAULT" (Just un) (Just tk)
-- epic, env, key, user, pwd
stream :: [String] -> IO()
stream [] =
examplePostgresHandler
>>= \h -> lightStreamer h exampleSubscription exampleLightStreamerSetting
stream (epic:args) =
fmap setting (authenticate args)
>>= \lss -> createPrintHandler
>>= \h -> lightStreamer h (createIgSubscription epic) lss
-- Example Connection setting to a local Lightstreamer server according to
-- the Lightstreamer Network Protocol Tutorial
exampleLightStreamerSetting :: L.LSSetting
exampleLightStreamerSetting = L.LSSetting
{ L.lsIP = "192.168.99.100"
, L.lsPN = 80
, L.lsTLS = False
, L.lsASN = "WELCOME"
, L.lsUsername = Nothing
, L.lsPassword = Nothing
}
-- Example Subscription described in the Lightstreamer Network Protocol Tutorial
exampleSubscription :: L.Subscription
exampleSubscription = L.Subscription
{ L.lsItemNames = ["item2"]
-- , L.lsFieldNames = ["bid", "ask", "min", "max", "time"]
, L.lsFieldNames = ["bid", "ask", "time"]
, L.lsTableId = "2"
, L.lsDataAdapter = Just "STOCKS"
}
-- Connects a local database example
examplePostgresHandler :: IO P.PostgresStreamHandler
examplePostgresHandler =
newEmptyMVar
>>= \var -> connectPostgreSQL "dbname=example"
>>= \con -> return (hr var con)
where hr var con = P.PostgresStreamHandler
{ P.notice = var
, P.conn = con
, P.update = exampleInsert
}
-- Today's date
todayDate :: IO Day
todayDate = fmap utctDay getCurrentTime
-- Expects ["bid", "ask", "min", "max", "time"]
-- Generates an insert statement to table 'public.exampleentry'
exampleInsert :: [B.ByteString] -> IO P.SqlQuery
exampleInsert vs = nextUid >>= \uid -> time >>= \tt -> return (printf format uid values tt)
where format = "INSERT INTO public.exampleentry2 (itemId, bid, ask, entryTime) VALUES (%s,%s,%s)"
-- where format = "INSERT INTO public.exampleentry2 (itemId, bid, ask, min, max, entryTime) VALUES (%s,%s,%s)"
-- generate UUID
nextUid = fmap (quote.toString) nextRandom
-- generate timestamp without time zone
time = fmap (quote.(++ (B.unpackChars.last) vs).(++ " ").show) todayDate
-- generate printf argument
values = B.unpackChars (intercalate "," (init vs))
-- quote SQL value
quote :: String -> String
quote s = "\'" ++ s ++ "\'"
createPrintHandler :: IO StatefulHandler
createPrintHandler = fmap StatefulHandler newEmptyMVar
-- Example IG Lightstreamer Subscription
createIgSubscription :: String -> L.Subscription
createIgSubscription epic = L.Subscription
{ L.lsItemNames = [epic]
, L.lsFieldNames = ["BID", "OFFER", "HIGH", "LOW"]
, L.lsTableId = "1"
, L.lsDataAdapter = Nothing
}
main :: IO()
main = getArgs >>= stream
|
peteryhwong/ig-haskell-client
|
src/Main.hs
|
mit
| 5,498 | 2 | 14 | 1,295 | 1,092 | 623 | 469 | 88 | 1 |
module ProjectRosalind.Lcsm.ToListFull where
import Data.Vector as V
import Data.List as L
import Data.Hashable (hash)
import ProjectRosalind.Fasta_Types
import ProjectRosalind.FileFasta (filePathToFastas)
import Data.Time
import Data.List (maximumBy)
import Data.Function (on)
lengthToStartRunList :: Int -> [(Int, Int)]
lengthToStartRunList len =
Prelude.concat
[[ (l, (r - l) + 1)
| l <- [0..(len - 1)], l <= r ]
| r <- [(len - 1), (len - 2)..0] ]
startRunListForLength1000 = lengthToStartRunList 1000
substringsForLength :: Int -> Int
substringsForLength n = (n * (n + 1)) `div` 2
substringsForLength1000 = substringsForLength 1000
--Observed:
-- Ss count for a 1k string is ~500k
-- Intersecting two 500k sets is slow
-- Intersection of two DNA’s results in ~1% the size
--
--Hypothesis:
-- If from after this “seed” intersection we can create very tiny sets but guarantee we get all ss of length equal or less than the longest item in the intersection we may have a path forward on improving the performance
--
--Pseudo code:
--
--Dna1 all ss into set
--DNA 2 all ss into set
--Intersect
--Get longest string
--Next dna: filter all ss slices to be <= longest string in intersection
--Build vector of string based on the length of this filtered slice list
--Add to set
--Intersect
--Repeat
-- Pass along progressively filtered slice list
slicesToList :: String -> [(Int, Int)] -> [Vector Char]
slicesToList str sliceInstructions =
Prelude.foldr f [] sliceInstructions
where
f :: (Int, Int) -> [Vector Char] -> [Vector Char]
f (start, run) acc = (V.slice start run vstr) : acc
vstr :: Vector Char
vstr = V.fromList str
drawShrinkingList :: [String] -> [Vector Char] -> [Vector Char]
drawShrinkingList dnas startList = Prelude.foldr f startList dnas
where
f :: String -> [Vector Char] -> [Vector Char]
f dna prevList = L.intersect prevList thisList
where
filteredSlices :: [(Int, Int)]
filteredSlices = lengthToStartRunList $ lengthLongest prevList
thisList :: [Vector Char]
thisList = slicesToList dna filteredSlices
lengthLongest :: [Vector Char] -> Int
lengthLongest = Prelude.length . L.maximumBy (compare `on` Prelude.length)
fileName = "/Users/brodyberg/Documents/GitHub/Notes/ProjectRosalind.hsproj/LearnHaskell/FindingASharedMotif/rosalind_lcsm_2.txt"
mainToList :: IO ()
mainToList = do
now <- getZonedTime
putStrLn "START: just one "
putStrLn $ show now
fastas <- filePathToFastas fileName
putStrLn "fasta count: "
putStrLn $ show $ Prelude.length fastas
now <- getZonedTime
putStrLn "START: all substrings on 2"
putStrLn $ show now
-- let twoFastas = L.take 2 fastas
let dnas = fmap fastaSeq fastas
let allSubs1 = slicesToList (dnas !! 0) startRunListForLength1000
let allSubs2 = slicesToList (dnas !! 1) startRunListForLength1000
putStrLn "size 1: "
putStrLn $ show $ L.length allSubs1
putStrLn "size 2: "
putStrLn $ show $ L.length allSubs2
now <- getZonedTime
putStrLn "START intersection of 2"
putStrLn $ show now
let isection = intersect allSubs1 allSubs2
now <- getZonedTime
putStrLn "END intersection of 2"
putStrLn $ show now
putStrLn "Intersection size: "
-- let size = S.size isection
-- putStrLn $ show size
putStrLn $ show $ L.length isection
now <- getZonedTime
putStrLn "END: all substrings on 2"
putStrLn $ show now
now <- getZonedTime
putStrLn "START toList: "
putStrLn $ show now
-- let top10 = L.take 10 $ S.toDescList isection
-- let lst = S.toList isection
let top = L.maximumBy (compare `on` Prelude.length) isection
putStrLn "top: "
putStrLn $ show top
now <- getZonedTime
putStrLn "END toList: "
putStrLn $ show now
-- do this next:
-- now <- getZonedTime
-- putStrLn "START draw 2: "
-- putStrLn $ show now
--
-- let result = drawShrinkingList (L.drop 96 dnas) isection
--
-- putStrLn "result size: "
-- putStrLn $ show $ Prelude.length result
--
-- putStrLn "result: "
-- putStrLn $ show $ result
--
-- now <- getZonedTime
-- putStrLn "END draw 2: "
-- putStrLn $ show now
-- property:
-- if we filter a list to all runs <= given run
-- that list is == in length to the theoretical number
-- of substrings for a list of the given run length
-- honestly, who cares, we can just call
-- lengthToStartRunList $ length of longest item
putStrLn "Done"
|
brodyberg/Notes
|
ProjectRosalind.hsproj/LearnHaskell/lib/ProjectRosalind/Lcsm/ToListFull.hs
|
mit
| 4,664 | 0 | 13 | 1,137 | 995 | 518 | 477 | 77 | 1 |
{-# LANGUAGE OverloadedStrings #-}
module Stock.Router.API where
import Control.Applicative
import Control.Monad.IO.Class
import Data.List.Split
import Data.Maybe
import Web.Scotty
--
import Stock.Article
import Stock.Config
import Stock.MongoDB
import Stock.Scotty
import Stock.Types
import Stock.User
-- debug
import Debug.Trace
japiUser conf = do
post "/user" $ do
userid <- param "userid"
password <- param "password"
name <- param "name"
res <- liftIO . runMongo conf $ addUser conf userid password name
maybe (json $ toStatus DataDuplicated "") (\u -> do
token <- liftIO . runMongo conf $ generateToken conf userid
json $ toStatus Token token ) res
post "/user/login" $ do
userid <- param "userid"
password <- param "password"
res <- liftIO . runMongo conf $ authorizeUser conf userid password
if res
then do token <- liftIO . runMongo conf $ generateToken conf userid
json $ toStatus Token token
else json $ toStatus Unauthorized ""
post "/user/token" $ do
userid <- param "userid"
token <- param "token"
res <- liftIO . runMongo conf $ authorizeToken userid token
if res
then json $ toStatus Success ""
else json $ toStatus Unauthorized ""
japiArticle conf = do
post "/article" $ do
userid <- param "userid"
token <- param "token"
region <- read <$> param "region"
title <- htmlEscape <$> param "title"
tags <- splitOn "," <$> htmlEscape <$> param "tags"
body <- htmlEscape <$> param "body"
auth <- liftIO . runMongo conf $ authorizeToken userid token
if auth
then do user <- liftIO . runMongo conf $ (fromJust <$> findUser userid)
a <- liftIO $ runMongo conf $ postArticle region title userid (userName user) tags body
json $ toStatus Success (articleId a)
else json $ toStatus Unauthorized ""
post "/article/:articleid" $ do
articleid <- param "articleid"
userid <- param "userid"
token <- param "token"
region <- read <$> param "region"
title <- htmlEscape <$> param "title"
tags <- splitOn "," <$> htmlEscape <$> param "tags"
body <- htmlEscape <$> param "body"
auth <- liftIO . runMongo conf $ authorizeToken userid token
if auth
then do user <- liftIO . runMongo conf $ (fromJust <$> findUser userid)
a <- liftIO $ runMongo conf $ updateArticle region articleid title tags body
json $ maybe (toStatus Failed "") (\ar -> toStatus Success (articleId ar)) a
else json $ toStatus Unauthorized ""
|
tattsun/stock
|
src/Stock/Router/API.hs
|
mit
| 2,779 | 0 | 19 | 844 | 876 | 403 | 473 | 66 | 3 |
{-# LANGUAGE TemplateHaskell #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
{-| Unittests for mutli-maps
-}
{-
Copyright (C) 2014 Google Inc.
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.
-}
module Test.Ganeti.Utils.MultiMap
( testUtils_MultiMap
) where
import Control.Applicative
import qualified Data.Set as S
import qualified Data.Map as M
import Test.QuickCheck
import Test.Ganeti.TestHelper
import Test.Ganeti.TestCommon
import Ganeti.Utils.MultiMap as MM
instance (Arbitrary k, Ord k, Arbitrary v, Ord v)
=> Arbitrary (MultiMap k v) where
arbitrary = frequency
[ (1, (multiMap . M.fromList)
<$> listOf ((,) <$> arbitrary
<*> (S.fromList <$> listOf arbitrary)))
, (4, MM.insert <$> arbitrary <*> arbitrary <*> arbitrary)
, (1, MM.fromList <$> listOf ((,) <$> arbitrary <*> arbitrary))
, (3, MM.delete <$> arbitrary <*> arbitrary <*> arbitrary)
, (1, MM.deleteAll <$> arbitrary <*> arbitrary)
]
-- | A data type for testing extensional equality.
data Three = One | Two | Three
deriving (Eq, Ord, Show, Enum, Bounded)
instance Arbitrary Three where
arbitrary = elements [minBound..maxBound]
-- | Tests the extensional equality of multi-maps: the '==' instance must
-- agree with comparing the lookup result of every possible key of the
-- finite key type 'Three'.
prop_MultiMap_equality
  :: MultiMap Three Three -> MultiMap Three Three -> Property
prop_MultiMap_equality m1 m2 =
  let testKey k = MM.lookup k m1 == MM.lookup k m2
  -- Fix: the failure message was missing the closing quote after the
  -- second map ("... and 'M2 doesn't ..." -> "... and 'M2' doesn't ...").
  in printTestCase ("Extensional equality of '" ++ show m1
                    ++ "' and '" ++ show m2 ++ "' doesn't match '=='.")
     $ all testKey [minBound..maxBound] ==? (m1 == m2)
-- | Tests that multi-maps survive a serialisation round-trip
-- (delegates to the generic 'testSerialisation' helper).
prop_MultiMap_serialisation :: MultiMap Int Int -> Property
prop_MultiMap_serialisation = testSerialisation
-- | Template Haskell splice collecting the properties above into the
-- exported test suite (generates 'testUtils_MultiMap').
testSuite "Utils/MultiMap"
  [ 'prop_MultiMap_equality
  , 'prop_MultiMap_serialisation
  ]
|
ribag/ganeti-experiments
|
test/hs/Test/Ganeti/Utils/MultiMap.hs
|
gpl-2.0
| 2,426 | 0 | 15 | 480 | 474 | 262 | 212 | 37 | 1 |
{- |
Module : $Header$
Description : Central datastructures for development graphs
Copyright : (c) Till Mossakowski, Uni Bremen 2002-2006
License : GPLv2 or higher, see LICENSE.txt
Maintainer : [email protected]
Stability : provisional
Portability : non-portable(Logic)
Fixed CASL axioms needed for translation of CommonLogic to CASL
-}
module CommonLogic.PredefinedCASLAxioms where
import Common.AS_Annotation
import Common.GlobalAnnotations
import Common.Id
import qualified Common.Lib.MapSet as MapSet
import qualified Common.Lib.Rel as Rel
import CASL.AS_Basic_CASL
import CASL.Sign
import qualified Data.Set as Set
import qualified Data.Map as Map
-- | Identifier of the list sort.
list :: Id
list = stringToId "list"

-- | Identifier of the list concatenation operation.
append :: Id
append = stringToId "append"

-- | Identifier of the list constructor (head-prepend) operation.
cons :: Id
cons = stringToId "cons"

-- | Identifier of the empty-list constant.
nil :: Id
nil = stringToId "nil"

-- | Identifier of the element sort of lists.
individual :: Id
individual = stringToId "individual"

-- Variable tokens used by the quantified axioms below
-- (X* range over individuals or lists depending on the declaration).
x1 :: Token
x1 = mkSimpleId "X1"

x2 :: Token
x2 = mkSimpleId "X2"

y1 :: Token
y1 = mkSimpleId "Y1"

y2 :: Token
y2 = mkSimpleId "Y2"
-- | Profile of 'nil': a total constant of sort @list@.
nilTypeS :: OpType
nilTypeS = mkTotOpType [] list

-- | Profile of 'cons': total operation @individual x list -> list@.
consTypeS :: OpType
consTypeS = mkTotOpType [individual, list] list

-- | Profile of 'append': total operation @list x list -> list@.
appendTypeS :: OpType
appendTypeS = mkTotOpType [list, list] list

-- Abstract-syntax (OP_TYPE) versions of the profiles above,
-- as needed when building qualified operation symbols.
nilType :: OP_TYPE
nilType = toOP_TYPE nilTypeS

consType :: OP_TYPE
consType = toOP_TYPE consTypeS

appendType :: OP_TYPE
appendType = toOP_TYPE appendTypeS
-- | The fixed CASL axioms characterising lists: injectivity of 'cons',
-- disjointness of 'nil' and 'cons', generatedness of the list sort, and
-- the two defining equations of 'append'.
baseListAxioms :: [Named CASLFORMULA]
baseListAxioms =
  [ ga_injective_cons
  , ga_disjoint_nil_cons
  , ga_generated_list
  , ga_nil_append
  , ga_cons_append ]
-- currently a list annotation is needed in the .het file %list [__], nil, cons
-- | Identifier of the mixfix list bracket notation @[__]@
-- (an opening bracket, a place holder, and a closing bracket).
brId :: Id
brId = mkId [mkSimpleId "[", placeTok, mkSimpleId "]"]
-- | setting casl sign: sorts, cons, nil, append
-- The global annotations additionally register the @[x, y, ...]@
-- literal syntax, mapping it to the 'nil' / 'cons' constructors.
listSig :: CASLSign
listSig = (emptySign ())
  { sortRel = Rel.fromKeysSet
      $ Set.fromList [list, individual]
  , opMap = MapSet.fromList
      [ (cons, [consTypeS])
      , (nil, [nilTypeS])
      , (append, [appendTypeS])
      ]
  , globAnnos = emptyGlobalAnnos
      { literal_annos = emptyLiteralAnnos
          { list_lit = Map.singleton brId (nil, cons) }
      , literal_map = Map.fromList
          [ (cons, ListCons brId nil)
          , (nil, ListNull brId)]}
  }
-- Variable declarations used inside the quantified axioms:
-- X2/Y2 range over lists, Y1 over individuals.
vx2 :: VAR_DECL
vx2 = mkVarDecl x2 list

vy1 :: VAR_DECL
vy1 = mkVarDecl y1 individual

vy2 :: VAR_DECL
vy2 = mkVarDecl y2 list

-- Terms referring to the variables above.
tx1, tx2, ty1, ty2 :: TERM f
tx1 = mkVarTerm x1 individual
tx2 = mkVarTerm x2 list
ty1 = mkVarTerm y1 individual
ty2 = mkVarTerm y2 list
-- | Qualified operation symbol for 'cons'.
consOp :: OP_SYMB
consOp = mkQualOp cons consType

-- | Qualified operation symbol for 'nil'.
nilOp :: OP_SYMB
nilOp = mkQualOp nil nilType

-- | Build the application term @cons(t1, t2)@.
mkCons :: TERM f -> TERM f -> TERM f
mkCons t1 t2 = mkAppl consOp [t1, t2]

-- | The empty-list term @nil@.
mkNil :: TERM f
mkNil = mkAppl nilOp []

-- | Build the application term @append(l1, l2)@.
mkAppend :: TERM f -> TERM f -> TERM f
mkAppend l1 l2 = mkAppl (mkQualOp append appendType) [l1, l2]
-- | Injectivity of cons:
-- forall x1, y1, x2, y2 .
--   cons(x1, x2) = cons(y1, y2) <=> x1 = y1 /\ x2 = y2
ga_injective_cons :: Named CASLFORMULA
ga_injective_cons = makeNamed "ga_injective_cons" $
  mkForall
    [ mkVarDecl x1 individual
    , vy1
    , vx2
    , vy2
    ]
    $ mkEqv
        (mkStEq
           (mkCons tx1 tx2)
           $ mkCons ty1 ty2
        )
        $ conjunct
            [ mkStEq tx1 ty1
            , mkStEq tx2 ty2
            ]
-- | Disjointness of the constructors:
-- forall y1, y2 . not (nil = cons(y1, y2))
ga_disjoint_nil_cons :: Named CASLFORMULA
ga_disjoint_nil_cons = makeNamed "ga_disjoint_nil_cons" $
  mkForall [vy1, vy2] $ mkNeg $ mkStEq mkNil $ mkCons ty1 ty2
-- | nil is a left unit of append:
-- forall x2 . append(nil, x2) = x2
ga_nil_append :: Named CASLFORMULA
ga_nil_append = makeNamed "ga_nil_append"
  $ mkForall [vx2]
  $ mkStEq (mkAppend mkNil tx2) tx2
-- | Recursive equation of append on a cons cell:
-- forall y1, y2, x2 . append(cons(y1, y2), x2) = cons(y1, append(y2, x2))
ga_cons_append :: Named CASLFORMULA
ga_cons_append = makeNamed "ga_cons_append"
  $ mkForall [vy1, vy2, vx2]
  $ mkStEq (mkAppend (mkCons ty1 ty2) tx2)
  $ mkCons ty1 $ mkAppend ty2 tx2
-- | The list sort is freely generated by nil and cons
-- (a sort generation constraint, i.e. an induction principle).
ga_generated_list :: Named CASLFORMULA
ga_generated_list = makeNamed "ga_generated_list" $
  Sort_gen_ax
    [ Constraint
        { newSort = list
          -- The index lists describe the constructor argument sorts
          -- relative to the generated sorts (per CASL.AS_Basic_CASL);
          -- NOTE(review): assumed [-1, 0] marks the non-generated
          -- individual argument followed by the list argument -- confirm.
        , opSymbs =
            [ (consOp, [-1, 0] )
            , (nilOp, [])
            ]
        , origSort = list
        }
    ] True
|
nevrenato/HetsAlloy
|
CommonLogic/PredefinedCASLAxioms.hs
|
gpl-2.0
| 4,131 | 0 | 13 | 1,106 | 1,079 | 592 | 487 | 120 | 1 |
-- | XOR every element of the input list with the given mask value.
xorArray :: Int -> [Int] -> [Int]
xorArray mask = map (mask `xor`)
|
rdnetto/H2V
|
docs/Design Presentation/src/lst1.hs
|
gpl-2.0
| 77 | 0 | 7 | 15 | 41 | 21 | 20 | 2 | 1 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE QuasiQuotes #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE TypeFamilies #-}
module Nightwatch.Webapp (startWebapp) where
import Yesod
data Nightwatch = Nightwatch
mkYesod "Nightwatch" [parseRoutes|
/ HomeR GET
|]
instance Yesod Nightwatch
-- | Handler for the root route ('HomeR'): renders a static greeting
-- inside the default site layout.
getHomeR :: Handler Html
getHomeR = defaultLayout [whamlet|Hello World!|]
-- | Run the Nightwatch web application with Warp on port 3000.
-- This call blocks for the lifetime of the server.
-- (Removed the redundant single-statement 'do' block.)
startWebapp :: IO ()
startWebapp = warp 3000 Nightwatch
|
vacationlabs/nightwatch
|
haskell/Nightwatch/Webapp.hs
|
gpl-2.0
| 460 | 0 | 7 | 92 | 87 | 50 | 37 | 14 | 1 |
{- MSigDBRDataConvert
Gregory W. Schwartz
Collections the functions pertaining to converting certain annotations into
pathways using the MSigDB rdata files (tested with
http://bioinf.wehi.edu.au/software/MSigDB/).
-}
{-# LANGUAGE BangPatterns #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE QuasiQuotes #-}
module MSigDBRDataConvert
( getRData
, toMSigDBPathways
, toMSigDBPathwaysMultiple
) where
-- Standard
-- Cabal
import qualified Data.Text as T
import qualified Foreign.R as R
import Foreign.R (SEXP, SEXPTYPE)
import Language.R.Instance as R
import Language.R.QQ
import Language.R.Literal as R
import H.Prelude
-- Local
import Types
import RGeneConvert
-- | Get the RData object: load the given .RData file into the embedded
-- R session and return the R object bound to the given variable name.
getRData :: File -> String -> R s (RData s)
getRData (File file) object = fmap RData
  $ [r| load(file_hs)
        res = get(object_hs)
        res
    |]
-- | Get the pathways that contain this entrez gene id.
-- Returns the matching pathway names joined with "/".
-- NOTE(review): @null . drop 1@ means a gene matching exactly one
-- pathway also yields Nothing -- confirm this is intended rather than
-- a plain emptiness check.
getPathway :: RData s -> Ann -> IO (Maybe Desc)
getPathway (RData object) (Ann entrezText) = R.runRegion $ do
  let entrez = T.unpack entrezText
  -- Names of the MSigDB list entries whose gene sets contain the id.
  res <- [r| pathNames = names(object_hs[unlist(lapply(object_hs, function(x) (entrez_hs %in% unlist(x))))]) |]
  let pathNames = R.fromSomeSEXP res :: [String]
  if null . drop 1 $ pathNames
    then return Nothing
    else return . Just . Desc . T.intercalate "/" . fmap T.pack $ pathNames
-- | Get the R mapping of a gene to its pathways.
-- Empty annotations short-circuit to Nothing; otherwise the annotation
-- is first converted to an entrez gene id via the biomaRt mart, then
-- looked up in the MSigDB RData object.
toMSigDBPathways
  :: RData s
  -> RMart s
  -> MSigDBType
  -> UnknownAnn
  -> IO (Maybe Desc)
toMSigDBPathways _ _ _ (UnknownAnn "") = return Nothing
toMSigDBPathways rData rMart (MSigDBType (_, _, !from)) query = R.runRegion $ do
  -- Translate the incoming annotation type into an entrez gene id.
  entrez <- io
            . toRGeneAnn
                rMart
                (RType ("hsapiens_gene_ensembl", from, "entrezgene"))
            $ query
  maybe (return Nothing) (io . getPathway rData) $ entrez
-- | Get the R mapping of multiple genes to pathways.
-- Converts all annotations to entrez ids in a single biomaRt query,
-- then looks up each gene's pathways individually.
toMSigDBPathwaysMultiple
  :: RData s
  -> RMart s
  -> MSigDBType
  -> [UnknownAnn]
  -> IO [Maybe Desc]
toMSigDBPathwaysMultiple rData rMart (MSigDBType (_, _, !from)) queries = do
  entrez <- toRGeneAnnMultiple
              rMart
              (RType ("hsapiens_gene_ensembl", from, "entrezgene"))
            $ queries
  mapM (maybe (return Nothing) (getPathway rData)) entrez
|
GregorySchwartz/convert-annotation
|
src/MSigDBRDataConvert.hs
|
gpl-3.0
| 2,476 | 0 | 14 | 682 | 578 | 308 | 270 | 53 | 2 |
{-# LANGUAGE OverloadedStrings #-}
import Web.Scotty
import Data.String
import RouteEulerUC
-- | Serve the Euler use-case REST API with Scotty on port 3000.
main = scotty 3000 $ do
  -- Root route: static greeting.
  get "/" $ do
    html "Hello! This is the root-URL for Haskell REST service"
  -- /uc/<id>: run the Euler use case selected by the numeric path id.
  get "/uc/:id" $ do
    ucid <- param "id" :: ActionM Int
    html $ (fromString (route ucid))
|
tedhag/teuler
|
haskell/rest-euler/src/Main.hs
|
gpl-3.0
| 297 | 0 | 15 | 68 | 88 | 41 | 47 | 10 | 1 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.