code (string, length 5 to 1.03M) | repo_name (string, length 5 to 90) | path (string, length 4 to 158) | license (string, 15 classes) | size (int64, 5 to 1.03M) | n_ast_errors (int64, 0 to 53.9k) | ast_max_depth (int64, 2 to 4.17k) | n_whitespaces (int64, 0 to 365k) | n_ast_nodes (int64, 3 to 317k) | n_ast_terminals (int64, 1 to 171k) | n_ast_nonterminals (int64, 1 to 146k) | loc (int64, -1 to 37.3k) | cycloplexity (int64, -1 to 1.31k)
---|---|---|---|---|---|---|---|---|---|---|---|---|
-- E1.hs
{-# OPTIONS_GHC -Wall #-}
module Euler.E001( e001 ) where
e001 :: Integer
e001 = sum $ filter (\x -> divisible x 5 || divisible x 3) [1..999]
where
divisible :: (Integral a) => a -> a -> Bool
divisible a b = mod a b == 0
| ghorn/euler | Euler/E001.hs | bsd-3-clause | 267 | 0 | 10 | 67 | 110 | 60 | 50 | 6 | 2 |
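A minimal driver for the row above (a sketch, not part of the dataset row; it assumes Euler/E001.hs is on the source path). The expected value, the sum of the multiples of 3 or 5 below 1000, is 233168.

-- Hypothetical Main module exercising Euler.E001 (not from the ghorn/euler repository).
import Euler.E001 (e001)

main :: IO ()
main = print e001  -- prints 233168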
{-# LANGUAGE TemplateHaskell #-}
-- check properties using QuickCheck
module QuickSpec where
import BooleanFormula (eval)
import Test.QuickCheck
-- two properties of toDigits that should hold for any valid input n >= 0
-- eval (toDigits n) == n
-- all (\d -> d >= 0 && d < 10) (toDigits n)
prop_toDigits n = n >= 0 ==> eval (toDigits n) == n
prop_toDigits2 n = n >= 0 ==> all (\d -> d >= 0 && d < 10) (toDigits n)
eval xs = foldl (\x y -> y + x*10) 0 xs
-- test in GHCi and should return 'OK, passed 100 tests'
-- quickCheck prop_toDigits
-- quickCheck prop_toDigits2
| ltfschoen/HelloHaskell | src/Chapter2/Section2/QuickSpec.hs | mit | 590 | 0 | 11 | 136 | 134 | 74 | 60 | -1 | -1 |
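The spec in the row above uses a toDigits function that the file neither defines nor imports (and the imported BooleanFormula.eval would clash with the local eval at its use sites). A hypothetical toDigits consistent with both stated properties, most-significant digit first and an empty list for 0, could look like this sketch:

-- Hypothetical definition, not taken from ltfschoen/HelloHaskell.
toDigits :: Integer -> [Integer]
toDigits n
  | n <= 0    = []
  | otherwise = toDigits (n `div` 10) ++ [n `mod` 10]
-- e.g. toDigits 1234 == [1,2,3,4], and eval (toDigits 1234) == 1234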
-- (c) 2000-2005 by Martin Erwig [see file COPYRIGHT]
-- | Inward directed trees as lists of paths.
module Data.Graph.Inductive.Internal.RootPath (
-- * Types
RTree,LRTree,
-- * Operations
getPath,getLPath,
getDistance,
getLPathNodes
) where
import Data.Graph.Inductive.Graph
type LRTree a = [LPath a]
type RTree = [Path]
first :: ([a] -> Bool) -> [[a]] -> [a]
first p xss = case filter p xss of
[] -> []
x:_ -> x
-- | Find the first path in a tree that starts with the given node.
--
-- Returns an empty list if there is no such path.
findP :: Node -> LRTree a -> [LNode a]
findP _ [] = []
findP v (LP []:ps) = findP v ps
findP v (LP (p@((w,_):_)):ps) | v==w = p
| otherwise = findP v ps
getPath :: Node -> RTree -> Path
getPath v = reverse . first (\(w:_)->w==v)
getLPath :: Node -> LRTree a -> LPath a
getLPath v = LP . reverse . findP v
-- | Return the distance to the given node in the given tree.
--
-- Returns 'Nothing' if the given node is not reachable.
getDistance :: Node -> LRTree a -> Maybe a
getDistance v t = case findP v t of
[] -> Nothing
(_,d):_ -> Just d
getLPathNodes :: Node -> LRTree a -> Path
getLPathNodes v = (\(LP p)->map fst p) . getLPath v
| antalsz/hs-to-coq | examples/graph/graph/Data/Graph/Inductive/Internal/RootPath.hs | mit | 1,340 | 0 | 13 | 403 | 465 | 250 | 215 | 27 | 2 |
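A small usage sketch for the module in the row above, assuming the standard fgl layout in which Data.Graph.Inductive.Query.SP.spTree produces the LRTree these functions consume; the graph and node numbers are invented for illustration:

import Data.Graph.Inductive.Graph (mkGraph)
import Data.Graph.Inductive.PatriciaTree (Gr)
import Data.Graph.Inductive.Query.SP (spTree)
import Data.Graph.Inductive.Internal.RootPath (getDistance, getLPathNodes)

-- A tiny weighted graph: 1 -5-> 2 -2-> 3 plus a direct edge 1 -9-> 3.
g :: Gr () Int
g = mkGraph [(1, ()), (2, ()), (3, ())] [(1, 2, 5), (2, 3, 2), (1, 3, 9)]

main :: IO ()
main = do
  let t = spTree 1 g           -- shortest-path tree rooted at node 1
  print (getDistance 3 t)      -- Just 7, via 1 -> 2 -> 3
  print (getLPathNodes 3 t)    -- [1,2,3]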
{-- snippet all --}
import System.IO
import Data.Char(toUpper)
main :: IO ()
main = do
inh <- openFile "input.txt" ReadMode
outh <- openFile "output.txt" WriteMode
inpStr <- hGetContents inh
let result = processData inpStr
hPutStr outh result
hClose inh
hClose outh
processData :: String -> String
processData = map toUpper
{-- /snippet all --}
| binesiyu/ifl | examples/ch07/toupper-lazy1.hs | mit | 398 | 0 | 10 | 107 | 116 | 54 | 62 | 13 | 1 |
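For comparison, the same transformation written against lazy readFile/writeFile instead of explicit handles; this variant is an illustrative sketch, not code from the book chapter:

import Data.Char (toUpper)

main :: IO ()
main = readFile "input.txt" >>= writeFile "output.txt" . map toUpper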
{- |
Module : ./HasCASL/AsToLe.hs
Description : final static analysis
Copyright : (c) Christian Maeder and Uni Bremen 2003-2005
License : GPLv2 or higher, see LICENSE.txt
Maintainer : [email protected]
Stability : experimental
Portability : portable
conversion from As to Le
-}
module HasCASL.AsToLe where
import HasCASL.As
import HasCASL.Le
import HasCASL.ClassAna
import HasCASL.VarDecl
import HasCASL.Unify
import HasCASL.OpDecl
import HasCASL.TypeAna
import HasCASL.TypeDecl
import HasCASL.Builtin
import HasCASL.PrintLe
import HasCASL.Merge
import Common.AS_Annotation
import Common.GlobalAnnotations
import Common.Id
import Common.Result
import Common.ExtSign
import Common.Prec
import Common.Lib.State
import qualified Data.Map as Map
import qualified Data.Set as Set
import Data.Maybe
-- * extract predicate ids from As for mixfix analysis
type Ids = Set.Set Id
unite :: [Ids] -> Ids
unite = Set.unions
idsOfBasicSpec :: BasicSpec -> Ids
idsOfBasicSpec (BasicSpec l) = unite $ map (idsOfBasicItem . item) l
idsOfBasicItem :: BasicItem -> Ids
idsOfBasicItem bi = case bi of
SigItems i -> idsOfSigItems i
ClassItems _ l _ -> unite $ map (idsOfClassItem . item) l
GenItems l _ -> unite $ map (idsOfSigItems . item) l
Internal l _ -> unite $ map (idsOfBasicItem . item) l
_ -> Set.empty
idsOfClassItem :: ClassItem -> Ids
idsOfClassItem (ClassItem _ l _) = unite $ map (idsOfBasicItem . item) l
idsOfSigItems :: SigItems -> Ids
idsOfSigItems si = case si of
TypeItems {} -> Set.empty
OpItems b l _ -> unite $ map (idsOfOpItem b . item) l
idsOfOpItem :: OpBrand -> OpItem -> Ids
idsOfOpItem b oi = let
stripCompound (PolyId (Id ts _ ps) _ _) = Id ts [] ps
getPolyId (PolyId i _ _) = i
in case oi of
OpDecl os _ _ _ -> case b of
Pred -> Set.union (Set.fromList $ map getPolyId os) $ Set.fromList
$ map stripCompound os
_ -> Set.empty
OpDefn p _ _ _ _ -> case b of
Pred -> Set.fromList [getPolyId p, stripCompound p]
_ -> Set.empty
-- * basic analysis
-- | basic analysis
basicAnalysis :: (BasicSpec, Env, GlobalAnnos) ->
Result (BasicSpec, ExtSign Env Symbol, [Named Sentence])
basicAnalysis (b, e, ga) =
let (nb, ne) = runState (anaBasicSpec ga b) e
in Result (reverse $ envDiags ne) $
Just (nb, ExtSign (cleanEnv ne) $ declSymbs ne,
reverse $ sentences ne)
-- | is the first argument a subsignature of the second?
isSubEnv :: Env -> Env -> Bool
isSubEnv e1 e2 =
let c2 = classMap e2
cm = addCpoMap c2
t2 = typeMap e2
tm = addUnit cm t2
expTy = expand tm . opType
in Map.isSubmapOfBy (\ (ClassInfo _ k1) (ClassInfo _ k2) ->
Set.null $ Set.filter
(\ k -> Set.null $ Set.filter (flip (lesserKind cm) k) k2) k1)
(classMap e1) c2
&& Map.isSubmapOfBy (\ ti1 ti2 -> let
k1 = otherTypeKinds ti1
k2 = otherTypeKinds ti2
in Set.null $ Set.filter (\ k -> Set.null $
Set.filter (flip (lesserKind cm) k) k2) k1)
(typeMap e1) (typeMap e2)
&& Map.isSubmapOfBy (\ s1 s2 ->
all (\ t -> any (instScheme tm 1 (expTy t) . expTy)
$ Set.toList s2)
$ Set.toList s1) (assumps e1) (assumps e2)
-- | compute difference of signatures
diffEnv :: Env -> Env -> Env
diffEnv e1 e2 = let
tm = typeMap e2
cm = diffClassMap (classMap e1) $ classMap e2
acm = addClassMap (classMap e1) $ classMap e2
in initialEnv
{ classMap = cm
, typeMap = diffTypeMap acm (typeMap e1) tm
, assumps = Map.differenceWith diffAss (assumps e1) $ assumps e2
, binders = Map.differenceWith
(\ i1 i2 -> if i1 == i2 then Nothing else Just i1)
(binders e1) $ binders e2 }
-- | compute difference of operations
diffAss :: Set.Set OpInfo -> Set.Set OpInfo
-> Maybe (Set.Set OpInfo)
diffAss s1 s2 =
let s3 = Set.difference s1 s2 in
if Set.null s3 then Nothing else Just s3
-- | clean up finally accumulated environment
cleanEnv :: Env -> Env
cleanEnv e = delPreDefs initialEnv
{ classMap = classMap e
, typeMap = typeMap e
, assumps = assumps e
, binders = binders e }
-- | analyse basic spec
anaBasicSpec :: GlobalAnnos -> BasicSpec -> State Env BasicSpec
anaBasicSpec ga b@(BasicSpec l) = do
e <- get
let newAs = assumps e
preds = Map.keysSet $ Map.filter (not . Set.null . Set.filter ( \ oi ->
case opDefn oi of
NoOpDefn Pred -> True
Definition Pred _ -> True
_ -> False)) newAs
newPreds = idsOfBasicSpec b
rels = Set.union preds newPreds
newGa = addBuiltins ga
precs = mkPrecIntMap $ prec_annos newGa
Result _ (Just ne) = merge preEnv e
put ne { preIds = (precs, rels), globAnnos = newGa }
ul <- mapAnM anaBasicItem l
return $ BasicSpec ul
-- | analyse basic item
anaBasicItem :: BasicItem -> State Env BasicItem
anaBasicItem bi = case bi of
SigItems i -> fmap SigItems $ anaSigItems Loose i
ClassItems inst l ps -> do
ul <- mapAnM (anaClassItem inst) l
return $ ClassItems inst ul ps
GenVarItems l ps -> do
ul <- mapM (anaddGenVarDecl True) l
return $ GenVarItems (catMaybes ul) ps
ProgItems l ps -> do
ul <- mapAnMaybe anaProgEq l
return $ ProgItems ul ps
FreeDatatype l ps -> do
al <- mapAnMaybe ana1Datatype l
tys <- mapM (dataPatToType . item) al
ul <- mapAnMaybe (anaDatatype Free tys) al
addDataSen tys
return $ FreeDatatype ul ps
GenItems l ps -> do
ul <- mapAnM (anaSigItems Generated) l
return $ GenItems ul ps
AxiomItems decls fs ps -> do
tm <- gets localTypeVars -- save type map
vs <- gets localVars -- save vars
ds <- mapM (anaddGenVarDecl True) decls
ts <- mapM anaFormula fs
e <- get
putLocalVars vs -- restore
putLocalTypeVars tm -- restore
let newFs = catMaybes ts
newDs = catMaybes ds
sens = map ( \ (_, f) -> makeNamedSen $ replaceAnnoted (Formula
$ mkEnvForall e (item f) ps) f) newFs
appendSentences sens
return $ AxiomItems newDs (map fst newFs) ps
Internal l ps -> do
ul <- mapAnM anaBasicItem l
return $ Internal ul ps
-- | analyse sig items
anaSigItems :: GenKind -> SigItems -> State Env SigItems
anaSigItems gk si = case si of
TypeItems inst l ps -> do
ul <- anaTypeItems gk l
return $ TypeItems inst ul ps
OpItems b l ps -> do
ul <- mapM (anaOpItem b) l
let al = foldr (\ i -> case item i of
Nothing -> id
Just v -> (replaceAnnoted v i :)) [] ul
return $ OpItems b al ps
-- | analyse a class item
anaClassItem :: Instance -> ClassItem -> State Env ClassItem
anaClassItem _ (ClassItem d l ps) = do
cd <- anaClassDecls d
ul <- mapAnM anaBasicItem l
return $ ClassItem cd ul ps
| spechub/Hets | HasCASL/AsToLe.hs | gpl-2.0 | 7,195 | 0 | 23 | 2,144 | 2,461 | 1,215 | 1,246 | 174 | 8 |
module Lamdu.Editor.Exports (exportActions) where
import GUI.Momentu.ModKey (ModKey)
import Lamdu.Config (Config)
import qualified Lamdu.Config as Config
import Lamdu.Data.Db.Layout (ViewM)
import Lamdu.Data.Export.JS (exportFancy)
import qualified Lamdu.Data.Export.JSON as Export
import Lamdu.Eval.Results (EvalResults)
import qualified Lamdu.GUI.IOTrans as IOTrans
import qualified Lamdu.GUI.Main as GUIMain
import Lamdu.Prelude
exportActions :: Config ModKey -> EvalResults -> IO () -> GUIMain.ExportActions ViewM
exportActions config evalResults executeIOProcess =
GUIMain.ExportActions
{ GUIMain.exportReplActions =
GUIMain.ExportRepl
{ GUIMain.exportRepl = fileExport Export.fileExportRepl
, GUIMain.exportFancy = exportFancy evalResults & IOTrans.liftTIO
, GUIMain.executeIOProcess = executeIOProcess
}
, GUIMain.exportAll = fileExport Export.fileExportAll
, GUIMain.exportDef = fileExport . Export.fileExportDef
, GUIMain.exportTag = fileExport . Export.fileExportTag
, GUIMain.exportNominal = fileExport . Export.fileExportNominal
, GUIMain.importAll = importAll
}
where
exportPath = config ^. Config.export . Config.exportPath
fileExport exporter = exporter exportPath & IOTrans.liftTIO
importAll path = Export.fileImportAll path <&> snd & IOTrans.liftIOT
| lamdu/lamdu | src/Lamdu/Editor/Exports.hs | gpl-3.0 | 1,430 | 0 | 10 | 295 | 327 | 190 | 137 | -1 | -1 |
module Recovery where
test = (test "text"
-- Must recover here
main = putStrLn "Hello world!!!"
| Atsky/haskell-idea-plugin | data/recoveryTests/Recovery.hs | apache-2.0 | 98 | 1 | 6 | 18 | 23 | 13 | 10 | -1 | -1 |
module Main where
import Options.Applicative
import System.FilePath (replaceExtension)
import Haskus.Utils.List (isSuffixOf)
import Haskus.Utils.Flow (forM_,when)
import qualified Haskus.Format.Compression.GZip as GZip
import Haskus.Format.Binary.Buffer
import qualified Haskus.Format.Text as Text
main :: IO ()
main = do
opts <- getOptions
bs <- bufferReadFile (optpath opts)
let ms = GZip.decompress bs
forM_ ms $ \m -> do
let fname = case (Text.unpack (GZip.memberName m), optpath opts) of
("",p) | ".tgz" `isSuffixOf` p -> replaceExtension p ".tar"
(s,_) -> s
putStrLn $ "File: " ++ fname
when (fname /= "") $ do
bufferWriteFile fname (GZip.memberContent m)
data Options = Options
{ optpath :: String
}
options :: Parser Options
options = Options
<$> argument str (
metavar "PATH"
<> help "Path to gzipped file"
)
getOptions :: IO Options
getOptions = execParser opts
where
opts = info (helper <*> options)
( fullDesc
<> progDesc "Unzip a gzip archive"
<> header "GUnzip" )
| hsyl20/ViperVM | haskus-system-tools/src/gunzip/Main.hs | bsd-3-clause | 1,156 | 0 | 20 | 324 | 359 | 190 | 169 | 33 | 2 |
{-|
Module : IRTS.Compiler
Description : Coordinates the compilation process.
Copyright :
License : BSD3
Maintainer : The Idris Community.
-}
{-# LANGUAGE PatternGuards, TypeSynonymInstances, CPP #-}
module IRTS.Compiler(compile, generate) where
import IRTS.Lang
import IRTS.LangOpts
import IRTS.Defunctionalise
import IRTS.Simplified
import IRTS.CodegenCommon
import IRTS.CodegenC
import IRTS.DumpBC
import IRTS.CodegenJavaScript
import IRTS.Inliner
import IRTS.Exports
import IRTS.Portable
import Idris.AbsSyntax
import Idris.AbsSyntaxTree
import Idris.ASTUtils
import Idris.Erasure
import Idris.Error
import Idris.Output
import Debug.Trace
import Idris.Core.TT
import Idris.Core.Evaluate
import Idris.Core.CaseTree
import Control.Category
import Prelude hiding (id, (.))
import Control.Applicative
import Control.Monad.State
import Data.Maybe
import Data.List
import Data.Ord
import Data.IntSet (IntSet)
import qualified Data.IntSet as IS
import qualified Data.Map as M
import qualified Data.Set as S
import System.Process
import System.IO
import System.Exit
import System.Directory
import System.Environment
import System.FilePath ((</>), addTrailingPathSeparator)
-- | Compile to simplified forms and return CodegenInfo
compile :: Codegen -> FilePath -> Maybe Term -> Idris CodegenInfo
compile codegen f mtm
= do checkMVs -- check for undefined metavariables
checkTotality -- refuse to compile if there are totality problems
exports <- findExports
let rootNames = case mtm of
Nothing -> []
Just t -> freeNames t
reachableNames <- performUsageAnalysis
(rootNames ++ getExpNames exports)
maindef <- case mtm of
Nothing -> return []
Just tm -> do md <- irMain tm
logCodeGen 1 $ "MAIN: " ++ show md
return [(sMN 0 "runMain", md)]
objs <- getObjectFiles codegen
libs <- getLibs codegen
flags <- getFlags codegen
hdrs <- getHdrs codegen
impdirs <- allImportDirs
defsIn <- mkDecls reachableNames
-- if no 'main term' given, generate interface files
let iface = case mtm of
Nothing -> True
Just _ -> False
let defs = defsIn ++ maindef
-- Inlined top level LDecl made here
let defsInlined = inlineAll defs
let defsUniq = map (allocUnique (addAlist defsInlined emptyContext))
defsInlined
let (nexttag, tagged) = addTags 65536 (liftAll defsUniq)
let ctxtIn = addAlist tagged emptyContext
logCodeGen 1 "Defunctionalising"
let defuns_in = defunctionalise nexttag ctxtIn
logCodeGen 5 $ show defuns_in
logCodeGen 1 "Inlining"
let defuns = inline defuns_in
logCodeGen 5 $ show defuns
logCodeGen 1 "Resolving variables for CG"
let checked = simplifyDefs defuns (toAlist defuns)
outty <- outputTy
dumpCases <- getDumpCases
dumpDefun <- getDumpDefun
case dumpCases of
Nothing -> return ()
Just f -> runIO $ writeFile f (showCaseTrees defs)
case dumpDefun of
Nothing -> return ()
Just f -> runIO $ writeFile f (dumpDefuns defuns)
triple <- Idris.AbsSyntax.targetTriple
cpu <- Idris.AbsSyntax.targetCPU
logCodeGen 1 "Building output"
case checked of
OK c -> do return $ CodegenInfo f outty triple cpu
hdrs impdirs objs libs flags
NONE c (toAlist defuns)
tagged iface exports
Error e -> ierror e
where checkMVs = do i <- getIState
case map fst (idris_metavars i) \\ primDefs of
[] -> return ()
ms -> do iputStrLn $ "WARNING: There are incomplete holes:\n " ++ show ms
iputStrLn "\nEvaluation of any of these will crash at run time."
return ()
checkTotality = do i <- getIState
case idris_totcheckfail i of
[] -> return ()
((fc, msg):fs) -> ierror . At fc . Msg $ "Cannot compile:\n " ++ msg
generate :: Codegen -> FilePath -> CodegenInfo -> IO ()
generate codegen mainmod ir
= case codegen of
-- Built-in code generators (FIXME: lift these out!)
Via _ "c" -> codegenC ir
-- Any external code generator
Via fm cg -> do input <- case fm of
IBCFormat -> return mainmod
JSONFormat -> do
tempdir <- getTemporaryDirectory
(fn, h) <- openTempFile tempdir "idris-cg.json"
writePortable h ir
hClose h
return fn
let cmd = "idris-codegen-" ++ cg
args = [input, "-o", outputFile ir] ++ compilerFlags ir
exit <- rawSystem cmd args
when (exit /= ExitSuccess) $
putStrLn ("FAILURE: " ++ show cmd ++ " " ++ show args)
Bytecode -> dumpBC (simpleDecls ir) (outputFile ir)
irMain :: TT Name -> Idris LDecl
irMain tm = do
i <- irTerm (sMN 0 "runMain") M.empty [] tm
return $ LFun [] (sMN 0 "runMain") [] (LForce i)
mkDecls :: [Name] -> Idris [(Name, LDecl)]
mkDecls used
= do i <- getIState
let ds = filter (\(n, d) -> n `elem` used || isCon d) $ ctxtAlist (tt_ctxt i)
decls <- mapM build ds
return decls
showCaseTrees :: [(Name, LDecl)] -> String
showCaseTrees = showSep "\n\n" . map showCT . sortBy (comparing defnRank)
where
showCT (n, LFun _ f args lexp)
= show n ++ " " ++ showSep " " (map show args) ++ " =\n\t"
++ show lexp
showCT (n, LConstructor c t a) = "data " ++ show n ++ " " ++ show a
defnRank :: (Name, LDecl) -> String
defnRank (n, LFun _ _ _ _) = "1" ++ nameRank n
defnRank (n, LConstructor _ _ _) = "2" ++ nameRank n
nameRank :: Name -> String
nameRank (UN s) = "1" ++ show s
nameRank (MN i s) = "2" ++ show s ++ show i
nameRank (NS n ns) = "3" ++ concatMap show (reverse ns) ++ nameRank n
nameRank (SN sn) = "4" ++ snRank sn
nameRank n = "5" ++ show n
snRank :: SpecialName -> String
snRank (WhereN i n n') = "1" ++ nameRank n' ++ nameRank n ++ show i
snRank (InstanceN n args) = "2" ++ nameRank n ++ concatMap show args
snRank (ParentN n s) = "3" ++ nameRank n ++ show s
snRank (MethodN n) = "4" ++ nameRank n
snRank (CaseN _ n) = "5" ++ nameRank n
snRank (ElimN n) = "6" ++ nameRank n
snRank (InstanceCtorN n) = "7" ++ nameRank n
snRank (WithN i n) = "8" ++ nameRank n ++ show i
isCon (TyDecl _ _) = True
isCon _ = False
build :: (Name, Def) -> Idris (Name, LDecl)
build (n, d)
= do i <- getIState
case getPrim n i of
Just (ar, op) ->
let args = map (\x -> sMN x "op") [0..] in
return (n, (LFun [] n (take ar args)
(LOp op (map (LV . Glob) (take ar args)))))
_ -> do def <- mkLDecl n d
logCodeGen 3 $ "Compiled " ++ show n ++ " =\n\t" ++ show def
return (n, def)
where getPrim n i
| Just (ar, op) <- lookup n (idris_scprims i)
= Just (ar, op)
| Just ar <- lookup n (S.toList (idris_externs i))
= Just (ar, LExternal n)
getPrim n i = Nothing
declArgs args inl n (LLam xs x) = declArgs (args ++ xs) inl n x
declArgs args inl n x = LFun (if inl then [Inline] else []) n args x
mkLDecl n (Function tm _)
= declArgs [] True n <$> irTerm n M.empty [] tm
mkLDecl n (CaseOp ci _ _ _ pats cd)
= declArgs [] (case_inlinable ci || caseName n) n <$> irTree n args sc
where
(args, sc) = cases_runtime cd
-- Always attempt to inline functions arising from 'case' expressions
caseName (SN (CaseN _ _)) = True
caseName (SN (WithN _ _)) = True
caseName (NS n _) = caseName n
caseName _ = False
mkLDecl n (TyDecl (DCon tag arity _) _) =
LConstructor n tag . length <$> fgetState (cg_usedpos . ist_callgraph n)
mkLDecl n (TyDecl (TCon t a) _) = return $ LConstructor n (-1) a
mkLDecl n _ = return $ (declArgs [] True n LNothing) -- postulate, never run
data VarInfo = VI
{ viMethod :: Maybe Name
}
deriving Show
type Vars = M.Map Name VarInfo
irTerm :: Name -> Vars -> [Name] -> Term -> Idris LExp
irTerm top vs env tm@(App _ f a) = do
ist <- getIState
case unApply tm of
(P _ n _, args)
| n `elem` map fst (idris_metavars ist) \\ primDefs
-> return $ LError $ "ABORT: Attempt to evaluate hole " ++ show n
(P _ (UN m) _, args)
| m == txt "mkForeignPrim"
-> doForeign vs env (reverse (drop 4 args)) -- drop implicits
(P _ (UN u) _, [_, arg])
| u == txt "unsafePerformPrimIO"
-> irTerm top vs env arg
(P _ (UN u) _, _)
| u == txt "assert_unreachable"
-> return $ LError $ "ABORT: Reached an unreachable case in " ++ show top
-- TMP HACK - until we get inlining.
(P _ (UN r) _, [_, _, _, _, _, arg])
| r == txt "replace"
-> irTerm top vs env arg
-- 'void' doesn't have any pattern clauses and only gets called on
-- erased things in higher order contexts (also a TMP HACK...)
(P _ (UN r) _, _)
| r == txt "void"
-> return LNothing
-- Laziness, the old way
(P _ (UN l) _, [_, arg])
| l == txt "lazy"
-> error "lazy has crept in somehow"
(P _ (UN l) _, [_, arg])
| l == txt "force"
-> LForce <$> irTerm top vs env arg
-- Laziness, the new way
(P _ (UN l) _, [_, _, arg])
| l == txt "Delay"
-> LLazyExp <$> irTerm top vs env arg
(P _ (UN l) _, [_, _, arg])
| l == txt "Force"
-> LForce <$> irTerm top vs env arg
(P _ (UN a) _, [_, _, _, arg])
| a == txt "assert_smaller"
-> irTerm top vs env arg
(P _ (UN a) _, [_, arg])
| a == txt "assert_total"
-> irTerm top vs env arg
(P _ (UN p) _, [_, arg])
| p == txt "par"
-> do arg' <- irTerm top vs env arg
return $ LOp LPar [LLazyExp arg']
(P _ (UN pf) _, [arg])
| pf == txt "prim_fork"
-> do arg' <- irTerm top vs env arg
return $ LOp LFork [LLazyExp arg']
(P _ (UN m) _, [_,size,t])
| m == txt "malloc"
-> irTerm top vs env t
(P _ (UN tm) _, [_,t])
| tm == txt "trace_malloc"
-> irTerm top vs env t -- TODO
-- This case is here until we get more general inlining. It's just
-- a really common case, and the laziness hurts...
(P _ (NS (UN be) [b,p]) _, [_,x,(App _ (App _ (App _ (P _ (UN d) _) _) _) t),
(App _ (App _ (App _ (P _ (UN d') _) _) _) e)])
| be == txt "ifThenElse"
, d == txt "Delay"
, d' == txt "Delay"
, b == txt "Bool"
, p == txt "Prelude"
-> do
x' <- irTerm top vs env x
t' <- irTerm top vs env t
e' <- irTerm top vs env e
return (LCase Shared x'
[LConCase 0 (sNS (sUN "False") ["Bool","Prelude"]) [] e'
,LConCase 1 (sNS (sUN "True" ) ["Bool","Prelude"]) [] t'
])
-- data constructor
(P (DCon t arity _) n _, args) -> do
detag <- fgetState (opt_detaggable . ist_optimisation n)
used <- map fst <$> fgetState (cg_usedpos . ist_callgraph n)
let isNewtype = length used == 1 && detag
let argsPruned = [a | (i,a) <- zip [0..] args, i `elem` used]
-- The following code removes fields from data constructors
-- and performs the newtype optimisation.
--
-- The general rule here is:
-- Everything we get as input is not touched by erasure,
-- so it conforms to the official arities and types
-- and we can reason about it like it's plain TT.
--
-- It's only the data that leaves this point that's erased
-- and possibly no longer typed as the original TT version.
--
-- Especially, underapplied constructors must yield functions
-- even if all the remaining arguments are erased
-- (the resulting function *will* be applied, to NULLs).
--
-- This will probably need rethinking when we get erasure from functions.
-- "padLams" will wrap our term in LLam-bdas and give us
-- the "list of future unerased args" coming from these lambdas.
--
-- We can do whatever we like with the list of unerased args,
-- hence it takes a lambda: \unerased_argname_list -> resulting_LExp.
let padLams = padLambdas used (length args) arity
case compare (length args) arity of
-- overapplied
GT -> ifail ("overapplied data constructor: " ++ show tm ++
"\nDEBUG INFO:\n" ++
"Arity: " ++ show arity ++ "\n" ++
"Arguments: " ++ show args ++ "\n" ++
"Pruned arguments: " ++ show argsPruned)
-- exactly saturated
EQ | isNewtype
-> irTerm top vs env (head argsPruned)
| otherwise -- not newtype, plain data ctor
-> buildApp (LV $ Glob n) argsPruned
-- not saturated, underapplied
LT | isNewtype -- newtype
, length argsPruned == 1 -- and we already have the value
-> padLams . (\tm [] -> tm) -- the [] asserts there are no unerased args
<$> irTerm top vs env (head argsPruned)
| isNewtype -- newtype but the value is not among args yet
-> return . padLams $ \[vn] -> LApp False (LV $ Glob n) [LV $ Glob vn]
-- not a newtype, just apply to a constructor
| otherwise
-> padLams . applyToNames <$> buildApp (LV $ Glob n) argsPruned
-- type constructor
(P (TCon t a) n _, args) -> return LNothing
-- an external name applied to arguments
(P _ n _, args) | S.member (n, length args) (idris_externs ist) -> do
LOp (LExternal n) <$> mapM (irTerm top vs env) args
-- a name applied to arguments
(P _ n _, args) -> do
case lookup n (idris_scprims ist) of
-- if it's a primitive that is already saturated,
-- compile to the corresponding op here already to save work
Just (arity, op) | length args == arity
-> LOp op <$> mapM (irTerm top vs env) args
-- otherwise, just apply the name
_ -> applyName n ist args
-- turn de bruijn vars into regular named references and try again
(V i, args) -> irTerm top vs env $ mkApp (P Bound (env !! i) Erased) args
(f, args)
-> LApp False
<$> irTerm top vs env f
<*> mapM (irTerm top vs env) args
where
buildApp :: LExp -> [Term] -> Idris LExp
buildApp e [] = return e
buildApp e xs = LApp False e <$> mapM (irTerm top vs env) xs
applyToNames :: LExp -> [Name] -> LExp
applyToNames tm [] = tm
applyToNames tm ns = LApp False tm $ map (LV . Glob) ns
padLambdas :: [Int] -> Int -> Int -> ([Name] -> LExp) -> LExp
padLambdas used startIdx endSIdx mkTerm
= LLam allNames $ mkTerm nonerasedNames
where
allNames = [sMN i "sat" | i <- [startIdx .. endSIdx-1]]
nonerasedNames = [sMN i "sat" | i <- [startIdx .. endSIdx-1], i `elem` used]
applyName :: Name -> IState -> [Term] -> Idris LExp
applyName n ist args =
LApp False (LV $ Glob n) <$> mapM (irTerm top vs env . erase) (zip [0..] args)
where
erase (i, x)
| i >= arity || i `elem` used = x
| otherwise = Erased
arity = case fst4 <$> lookupCtxtExact n (definitions . tt_ctxt $ ist) of
Just (CaseOp ci ty tys def tot cdefs) -> length tys
Just (TyDecl (DCon tag ar _) _) -> ar
Just (TyDecl Ref ty) -> length $ getArgTys ty
Just (Operator ty ar op) -> ar
Just def -> error $ "unknown arity: " ++ show (n, def)
Nothing -> 0 -- no definition, probably local name => can't erase anything
-- name for purposes of usage info lookup
uName
| Just n' <- viMethod =<< M.lookup n vs = n'
| otherwise = n
used = maybe [] (map fst . usedpos) $ lookupCtxtExact uName (idris_callgraph ist)
fst4 (x,_,_,_,_) = x
irTerm top vs env (P _ n _) = return $ LV (Glob n)
irTerm top vs env (V i)
| i >= 0 && i < length env = return $ LV (Glob (env!!i))
| otherwise = ifail $ "bad de bruijn index: " ++ show i
irTerm top vs env (Bind n (Lam _) sc) = LLam [n'] <$> irTerm top vs (n':env) sc
where
n' = uniqueName n env
irTerm top vs env (Bind n (Let _ v) sc)
= LLet n <$> irTerm top vs env v <*> irTerm top vs (n : env) sc
irTerm top vs env (Bind _ _ _) = return $ LNothing
irTerm top vs env (Proj t (-1)) = do
t' <- irTerm top vs env t
return $ LOp (LMinus (ATInt ITBig))
[t', LConst (BI 1)]
irTerm top vs env (Proj t i) = LProj <$> irTerm top vs env t <*> pure i
irTerm top vs env (Constant TheWorld) = return LNothing
irTerm top vs env (Constant c) = return (LConst c)
irTerm top vs env (TType _) = return LNothing
irTerm top vs env Erased = return LNothing
irTerm top vs env Impossible = return LNothing
doForeign :: Vars -> [Name] -> [Term] -> Idris LExp
doForeign vs env (ret : fname : world : args)
= do args' <- mapM splitArg args
let fname' = toFDesc fname
let ret' = toFDesc ret
return $ LForeign ret' fname' args'
where
splitArg tm | (_, [_,_,l,r]) <- unApply tm -- pair, two implicits
= do let l' = toFDesc l
r' <- irTerm (sMN 0 "__foreignCall") vs env r
return (l', r')
splitArg _ = ifail "Badly formed foreign function call"
toFDesc (Constant (Str str)) = FStr str
toFDesc tm
| (P _ n _, []) <- unApply tm = FCon (deNS n)
| (P _ n _, as) <- unApply tm = FApp (deNS n) (map toFDesc as)
toFDesc _ = FUnknown
deNS (NS n _) = n
deNS n = n
doForeign vs env xs = ifail "Badly formed foreign function call"
irTree :: Name -> [Name] -> SC -> Idris LExp
irTree top args tree = do
logCodeGen 3 $ "Compiling " ++ show args ++ "\n" ++ show tree
LLam args <$> irSC top M.empty tree
irSC :: Name -> Vars -> SC -> Idris LExp
irSC top vs (STerm t) = irTerm top vs [] t
irSC top vs (UnmatchedCase str) = return $ LError str
irSC top vs (ProjCase tm alts) = do
tm' <- irTerm top vs [] tm
alts' <- mapM (irAlt top vs tm') alts
return $ LCase Shared tm' alts'
-- Transform matching on Delay to applications of Force.
irSC top vs (Case up n [ConCase (UN delay) i [_, _, n'] sc])
| delay == txt "Delay"
= do sc' <- irSC top vs $ mkForce n' n sc
return $ LLet n' (LForce (LV (Glob n))) sc'
-- There are two transformations in this case:
--
-- 1. Newtype-case elimination:
-- case {e0} of
-- wrap({e1}) -> P({e1}) ==> P({e0})
--
-- This is important because newtyped constructors are compiled away entirely
-- and we need to do that everywhere.
--
-- 2. Unused-case elimination (only valid for singleton branches):
-- case {e0} of ==> P
-- C(x,y) -> P[... x,y not used ...]
--
-- This is important for runtime because sometimes we case on irrelevant data:
--
-- In the example above, {e0} will most probably have been erased
-- so this vain projection would make the resulting program segfault
-- because the code generator still emits a PROJECT(...) G-machine instruction.
--
-- Hence, we check whether the variables are used at all
-- and erase the casesplit if they are not.
--
irSC top vs (Case up n [alt]) = do
replacement <- case alt of
ConCase cn a ns sc -> do
detag <- fgetState (opt_detaggable . ist_optimisation cn)
used <- map fst <$> fgetState (cg_usedpos . ist_callgraph cn)
if detag && length used == 1
then return . Just $ substSC (ns !! head used) n sc
else return Nothing
_ -> return Nothing
case replacement of
Just sc -> irSC top vs sc
_ -> do
alt' <- irAlt top vs (LV (Glob n)) alt
return $ case namesBoundIn alt' `usedIn` subexpr alt' of
[] -> subexpr alt' -- strip the unused top-most case
_ -> LCase up (LV (Glob n)) [alt']
where
namesBoundIn :: LAlt -> [Name]
namesBoundIn (LConCase cn i ns sc) = ns
namesBoundIn (LConstCase c sc) = []
namesBoundIn (LDefaultCase sc) = []
subexpr :: LAlt -> LExp
subexpr (LConCase _ _ _ e) = e
subexpr (LConstCase _ e) = e
subexpr (LDefaultCase e) = e
-- FIXME: When we have a non-singleton case-tree of the form
--
-- case {e0} of
-- C(x) => ...
-- ... => ...
--
-- and C is detaggable (the only constructor of the family), we can be sure
-- that the first branch will be always taken -- so we add special handling
-- to remove the dead default branch.
--
-- If we don't do so and C is newtype-optimisable, we will miss this newtype
-- transformation and the resulting code will probably segfault.
--
-- This work-around is not entirely optimal; the best approach would be
-- to ensure that such case trees don't arise in the first place.
--
irSC top vs (Case up n alts@[ConCase cn a ns sc, DefaultCase sc']) = do
detag <- fgetState (opt_detaggable . ist_optimisation cn)
if detag
then irSC top vs (Case up n [ConCase cn a ns sc])
else LCase up (LV (Glob n)) <$> mapM (irAlt top vs (LV (Glob n))) alts
irSC top vs sc@(Case up n alts) = do
-- check that neither alternative needs the newtype optimisation,
-- see comment above
goneWrong <- or <$> mapM isDetaggable alts
when goneWrong
$ ifail ("irSC: non-trivial case-match on detaggable data: " ++ show sc)
-- everything okay
LCase up (LV (Glob n)) <$> mapM (irAlt top vs (LV (Glob n))) alts
where
isDetaggable (ConCase cn _ _ _) = fgetState $ opt_detaggable . ist_optimisation cn
isDetaggable _ = return False
irSC top vs ImpossibleCase = return LNothing
irAlt :: Name -> Vars -> LExp -> CaseAlt -> Idris LAlt
-- this leaves out all unused arguments of the constructor
irAlt top vs _ (ConCase n t args sc) = do
used <- map fst <$> fgetState (cg_usedpos . ist_callgraph n)
let usedArgs = [a | (i,a) <- zip [0..] args, i `elem` used]
LConCase (-1) n usedArgs <$> irSC top (methodVars `M.union` vs) sc
where
methodVars = case n of
SN (InstanceCtorN className)
-> M.fromList [(v, VI
{ viMethod = Just $ mkFieldName n i
}) | (v,i) <- zip args [0..]]
_
-> M.empty -- not an instance constructor
irAlt top vs _ (ConstCase x rhs)
| matchable x = LConstCase x <$> irSC top vs rhs
| matchableTy x = LDefaultCase <$> irSC top vs rhs
where
matchable (I _) = True
matchable (BI _) = True
matchable (Ch _) = True
matchable (Str _) = True
matchable (B8 _) = True
matchable (B16 _) = True
matchable (B32 _) = True
matchable (B64 _) = True
matchable _ = False
matchableTy (AType (ATInt ITNative)) = True
matchableTy (AType (ATInt ITBig)) = True
matchableTy (AType (ATInt ITChar)) = True
matchableTy StrType = True
matchableTy (AType (ATInt (ITFixed IT8))) = True
matchableTy (AType (ATInt (ITFixed IT16))) = True
matchableTy (AType (ATInt (ITFixed IT32))) = True
matchableTy (AType (ATInt (ITFixed IT64))) = True
matchableTy _ = False
irAlt top vs tm (SucCase n rhs) = do
rhs' <- irSC top vs rhs
return $ LDefaultCase (LLet n (LOp (LMinus (ATInt ITBig))
[tm,
LConst (BI 1)]) rhs')
irAlt top vs _ (ConstCase c rhs)
= ifail $ "Can't match on (" ++ show c ++ ")"
irAlt top vs _ (DefaultCase rhs)
= LDefaultCase <$> irSC top vs rhs
| tpsinnem/Idris-dev | src/IRTS/Compiler.hs | bsd-3-clause | 25,102 | 0 | 27 | 8,567 | 8,413 | 4,163 | 4,250 | 462 | 33 |
{-# LANGUAGE OverloadedStrings #-}
module GitHub.CommitsSpec where
import GitHub.Auth (Auth (..))
import GitHub.Endpoints.Repos.Commits (commitSha, commitsForR, diffR, mkCommitName, FetchCount (..))
import GitHub.Request (github)
import Control.Monad (forM_)
import Data.Either.Compat (isRight)
import Data.List (nub, sort)
import Data.String (fromString)
import System.Environment (lookupEnv)
import Test.Hspec (Spec, describe, it, pendingWith, shouldBe,
shouldSatisfy)
import qualified Data.Vector as V
fromRightS :: Show a => Either a b -> b
fromRightS (Right b) = b
fromRightS (Left a) = error $ "Expected a Right and got a Left: " ++ show a
withAuth :: (Auth -> IO ()) -> IO ()
withAuth action = do
mtoken <- lookupEnv "GITHUB_TOKEN"
case mtoken of
Nothing -> pendingWith "no GITHUB_TOKEN"
Just token -> action (OAuth $ fromString token)
spec :: Spec
spec = do
describe "commitsFor" $ do
it "works" $ withAuth $ \auth -> do
cs <- github auth commitsForR "phadej" "github" FetchAll
cs `shouldSatisfy` isRight
V.length (fromRightS cs) `shouldSatisfy` (> 300)
-- Page size is 30, so we get 60 commits
it "limits the response" $ withAuth $ \auth -> do
cs <- github auth commitsForR "phadej" "github" (FetchAtLeast 40)
cs `shouldSatisfy` isRight
let cs' = fromRightS cs
V.length cs' `shouldSatisfy` (< 70)
let hashes = sort $ map commitSha $ V.toList cs'
hashes `shouldBe` nub hashes
describe "diff" $ do
it "works" $ withAuth $ \auth -> do
cs <- github auth commitsForR "phadej" "github" (FetchAtLeast 30)
cs `shouldSatisfy` isRight
let commits = take 10 . V.toList . fromRightS $ cs
let pairs = zip commits $ drop 1 commits
forM_ pairs $ \(a, b) -> do
d <- github auth diffR "phadej" "github" (commitSha a) (commitSha b)
d `shouldSatisfy` isRight
it "issue #155" $ withAuth $ \auth -> do
d <- github auth diffR "nomeata" "codespeed" (mkCommitName "ghc") (mkCommitName "tobami:master")
d `shouldSatisfy` isRight
-- diff that includes a commit where a submodule is removed
it "issue #339" $ withAuth $ \auth -> do
d <- github auth diffR "scott-fleischman" "repo-remove-submodule" "d03c152482169d809be9b1eab71dcf64d7405f76" "42cfd732b20cd093534f246e630b309186eb485d"
d `shouldSatisfy` isRight
| jwiegley/github | spec/GitHub/CommitsSpec.hs | bsd-3-clause | 2,462 | 0 | 21 | 598 | 778 | 398 | 380 | 51 | 2 |
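A hypothetical test driver for the spec above (not part of the repository excerpt); it simply hands the exported spec to hspec, and the individual tests still skip themselves unless GITHUB_TOKEN is set:

import Test.Hspec (hspec)
import qualified GitHub.CommitsSpec as CommitsSpec

main :: IO ()
main = hspec CommitsSpec.spec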
{- | Module : $Header$
- Description : Implementation of logic instance Graded Modal Logic
- Copyright : (c) Daniel Hausmann & Georgel Calin & Lutz Schroeder, DFKI Lab Bremen,
- Rob Myers & Dirk Pattinson, Department of Computing, ICL
- License : GPLv2 or higher, see LICENSE.txt
- Maintainer : [email protected]
- Stability : provisional
- Portability : portable
-
- Provides the implementation of the matching functions of graded modal logic.
-}
{-# LANGUAGE FlexibleInstances, MultiParamTypeClasses, FlexibleContexts #-}
module GMP.Logics.G where
import Data.List
import Data.Ratio
import Data.Maybe
import Debug.Trace
import Text.ParserCombinators.Parsec
import GMP.Logics.Generic
import GMP.Parser
import GMP.Logics.IneqSolver
{- ------------------------------------------------------------------------------
instance of feature for graded modal logic
------------------------------------------------------------------------------ -}
data G a = G Int [Formula a] deriving (Eq, Show)
instance (SigFeature b c d, Eq (b (c d)), Eq (c d)) => NonEmptyFeature G b c d where
nefMatch flags seq =
let poslits = keep_poslits seq
neglits = keep_neglits seq
-- take all combinations of positive and negative modal literals
all_combinations = [ (pos, neg) |
pos <- powerList poslits, neg <- powerList neglits] \\ [([], []) ]
multiplicities = map (\ (Mod (G k _)) -> k)
strip_neg (Neg phi) = phi
side_condition_tuples (p, n) =
let switch = map (\ (x, y) -> (y, map negate x))
in switch $ ineqSolver (Coeffs (map (1 +)
(multiplicities (map strip_neg n))) (multiplicities p))
(gml_bound (multiplicities p,
multiplicities (map strip_neg n)))
gml_match (ps, ns) = map (gml_build_matches (ps, ns))
(gml_filter_tuples (side_condition_tuples (ps, ns)) [] )
in map gml_match all_combinations
nefPretty d = case d of
G i [] -> "[G]" ++ show i ++ "nothing contained"
G i e -> "[G]" ++ show i ++ pretty (head e)
nefFeatureFromSignature sig = G 1
nefFeatureFromFormula phi = G 1
nefStripFeature (G i phis) = phis
nefDisj2Conj (Mod (G i phi)) = Mod (G i [disj2conj (head phi)])
nefNegNorm (Mod (G i phi)) = Mod (G i [negNorm (head phi)])
nefParser sig = do n <- natural
return $ G (fromInteger n)
{- ------------------------------------------------------------------------------
additional functions for the matching function of this logic
------------------------------------------------------------------------------ -}
gml_build_matches :: (SigFeature a b c, Eq (a (b c))) =>
([Formula (G (a (b c)))], [Formula (G (a (b c)))]) ->
([Int], [Int]) -> [Sequent]
gml_build_matches (poslits, neglits) (prs, nrs) =
let (pos_inds, neg_inds) = (to_inds prs, to_inds nrs)
all_inds = [(pos, neg) | pos <- powerList pos_inds,
neg <- powerList neg_inds]
(sposlits, sneglits) = ([phi | Mod (G k [phi]) <- poslits],
[phi | Neg (Mod (G k [phi])) <- neglits])
relevant_inds = filter (\ (pos, neg) -> sum (imgInt pos prs) <
sum (imgInt neg nrs)) all_inds
in [Sequent (map (\ (ps, ns) -> (Neg (andify (map nneg
(img (pos_inds \\ ps) sposlits ++ img (neg_inds \\ ns) sneglits) ++
(img ps sposlits ++ img ns sneglits)))) ) relevant_inds)]
-- GML bound on integer magnitude
gml_bound :: ([Int], [Int]) -> Int
gml_bound (kps, kns) =
let n = length kps + length kns
logint k x = ceiling $ logBase 2 (k + x)
logsum ls k = sum $ map (logint k . fromIntegral) ls
in 12 * n * (1 + n) + 6 * n * (logsum kps 1 + logsum kns 2)
gml_filter_tuples :: [([Int], [Int])] -> [([Int], [Int])] -> [([Int], [Int])]
gml_filter_tuples [] bs = bs
gml_filter_tuples (a : as) bs
| any (`gml_geq` a) bs = gml_filter_tuples as bs
| otherwise = a : filter (\ x -> not (gml_leq x a)) bs
gml_leq :: ([Int], [Int]) -> ([Int], [Int]) -> Bool
gml_leq (p1, n1) (p2, n2) = all (uncurry (<=)) (zip p1 p2 ++ zip n1 n2)
gml_geq :: ([Int], [Int]) -> ([Int], [Int]) -> Bool
gml_geq (p1, n1) (p2, n2) = all (uncurry (>=)) (zip p1 p2 ++ zip n1 n2)
{- ------------------------------------------------------------------------------
instance of sigFeature for graded modal logic
------------------------------------------------------------------------------ -}
instance (SigFeature b c d, Eq (c d), Eq (b (c d))) => NonEmptySigFeature G b c d where
neGoOn = genericPGoOn
| mariefarrell/Hets | GMP/GMP-CoLoSS/GMP/Logics/G.hs | gpl-2.0 | 4,750 | 0 | 24 | 1,224 | 1,650 | 876 | 774 | 69 | 1 |
{-# LANGUAGE RankNTypes #-}
module T11514 where
foo :: forall a. (Show a => a -> a) -> ()
foo = undefined
| sdiehl/ghc | testsuite/tests/typecheck/should_fail/T11514.hs | bsd-3-clause | 108 | 0 | 9 | 24 | 40 | 23 | 17 | 4 | 1 |
{-# LANGUAGE Rank2Types #-}
module LiftToTopLevel.Signature2 where
{- Lifting baz to the top level should bring in xx and a as parameters,
and update the signature to include these.
The refactoring can be completed as the Rank2Types extension is enabled
-}
foo a = (baz xx a)
where
xx :: (Num t) => t -> t -> t
xx p1 p2 = p1 + p2
baz:: (forall t. Num t => t -> t -> t) -> Int ->Int
baz xx a= xx 1 a
| RefactoringTools/HaRe | test/testdata/LiftToToplevel/Signature2r.expected.hs | bsd-3-clause | 422 | 0 | 10 | 108 | 113 | 60 | 53 | 7 | 1 |
{-# LANGUAGE NoImplicitPrelude, OverloadedStrings #-}
module IHaskell.Eval.Parser (
parseString,
CodeBlock(..),
StringLoc(..),
DirectiveType(..),
LineNumber,
ColumnNumber,
ErrMsg,
layoutChunks,
parseDirective,
getModuleName,
Located(..),
PragmaType(..),
) where
import IHaskellPrelude
import qualified Data.Text as T
import qualified Data.Text.Lazy as LT
import qualified Data.ByteString as BS
import qualified Data.ByteString.Lazy as LBS
import qualified Data.ByteString.Char8 as CBS
import Data.List (maximumBy, inits)
import Prelude (head, tail)
import Control.Monad (msum)
import GHC hiding (Located)
import Language.Haskell.GHC.Parser
import IHaskell.Eval.Util
import StringUtils (strip, split)
-- | A block of code to be evaluated. Each block contains a single element - one declaration,
-- statement, expression, etc. If parsing of the block failed, the block is instead a ParseError,
-- which has the error location and error message.
data CodeBlock = Expression String -- ^ A Haskell expression.
| Declaration String -- ^ A data type or function declaration.
| Statement String -- ^ A Haskell statement (as if in a `do` block).
| Import String -- ^ An import statement.
| TypeSignature String -- ^ A lonely type signature (not above a function
-- declaration).
| Directive DirectiveType String -- ^ An IHaskell directive.
| Module String -- ^ A full Haskell module, to be compiled and loaded.
| ParseError StringLoc ErrMsg -- ^ An error indicating that parsing the code block
-- failed.
| Pragma PragmaType [String] -- ^ A list of GHC pragmas (from a {-# LANGUAGE ... #-}
-- block)
deriving (Show, Eq)
-- | Directive types. Each directive is associated with a string in the directive code block.
data DirectiveType = GetType -- ^ Get the type of an expression via ':type' (or unique prefixes)
| GetInfo -- ^ Get info about the identifier via ':info' (or unique prefixes)
| SetDynFlag -- ^ Enable or disable extensions, packages etc. via `:set`.
-- Emulates GHCi's `:set`
| LoadFile -- ^ Load a Haskell module.
| SetOption -- ^ Set IHaskell kernel option `:option`.
| SetExtension -- ^ `:extension Foo` is a shortcut for `:set -XFoo`
| ShellCmd -- ^ Execute a shell command.
| GetHelp -- ^ General help via ':?' or ':help'.
| SearchHoogle -- ^ Search for something via Hoogle.
| GetDoc -- ^ Get documentation for an identifier via Hoogle.
| GetKind -- ^ Get the kind of a type via ':kind'.
| LoadModule -- ^ Load and unload modules via ':module'.
deriving (Show, Eq)
-- | Pragma types. Only LANGUAGE pragmas are currently supported. Other pragma types are kept around
-- as a string for error reporting.
data PragmaType = PragmaLanguage
| PragmaUnsupported String
deriving (Show, Eq)
-- | Parse a string into code blocks.
parseString :: String -> Ghc [Located CodeBlock]
parseString codeString = do
-- Try to parse this as a single module.
flags <- getSessionDynFlags
let output = runParser flags parserModule codeString
case output of
Parsed mod
| Just _ <- hsmodName (unLoc mod) -> return [Located 1 $ Module codeString]
_ -> do
-- Split input into chunks based on indentation.
let chunks = layoutChunks $ removeComments codeString
result <- joinFunctions <$> processChunks [] chunks
-- Return to previous flags. When parsing, flags can be set to make sure parsing works properly. But
-- we don't want those flags to be set during evaluation until the right time.
_ <- setSessionDynFlags flags
return result
where
parseChunk :: GhcMonad m => String -> LineNumber -> m (Located CodeBlock)
parseChunk chunk line = Located line <$> handleChunk chunk line
where
handleChunk chunk line
| isDirective chunk = return $ parseDirective chunk line
| isPragma chunk = return $ parsePragma chunk line
| otherwise = parseCodeChunk chunk line
processChunks :: GhcMonad m => [Located CodeBlock] -> [Located String] -> m [Located CodeBlock]
processChunks accum remaining =
case remaining of
-- If we have no more remaining lines, return the accumulated results.
[] -> return $ reverse accum
-- If we have more remaining, parse the current chunk and recurse.
Located line chunk:remaining -> do
block <- parseChunk chunk line
activateExtensions $ unloc block
processChunks (block : accum) remaining
-- Test whether a given chunk is a directive.
isDirective :: String -> Bool
isDirective = isPrefixOf ":" . strip
-- Test if a chunk is a pragma.
isPragma :: String -> Bool
isPragma = isPrefixOf "{-#" . strip
activateExtensions :: GhcMonad m => CodeBlock -> m ()
activateExtensions (Directive SetExtension ext) = void $ setExtension ext
activateExtensions (Directive SetDynFlag flags) =
case stripPrefix "-X" flags of
Just ext -> void $ setExtension ext
Nothing -> return ()
activateExtensions (Pragma PragmaLanguage extensions) = void $ setAll extensions
where
setAll :: GhcMonad m => [String] -> m (Maybe String)
setAll exts = do
errs <- mapM setExtension exts
return $ msum errs
activateExtensions _ = return ()
-- | Parse a single chunk of code, as indicated by the layout of the code.
parseCodeChunk :: GhcMonad m => String -> LineNumber -> m CodeBlock
parseCodeChunk code startLine = do
flags <- getSessionDynFlags
let
-- Try each parser in turn.
rawResults = map (tryParser code) (parsers flags)
-- Convert statements into expressions where we can
results = map (statementToExpression flags) rawResults
case successes results of
-- If none of them succeeded, choose the best error message to display. Only one of the error
-- messages is actually relevant.
[] -> return $ bestError $ failures results
-- If one of the parsers succeeded
result:_ -> return result
where
successes :: [ParseOutput a] -> [a]
successes [] = []
successes (Parsed a:rest) = a : successes rest
successes (_:rest) = successes rest
failures :: [ParseOutput a] -> [(ErrMsg, LineNumber, ColumnNumber)]
failures [] = []
failures (Failure msg (Loc line col):rest) = (msg, line, col) : failures rest
failures (_:rest) = failures rest
bestError :: [(ErrMsg, LineNumber, ColumnNumber)] -> CodeBlock
bestError errors = ParseError (Loc (line + startLine - 1) col) msg
where
(msg, line, col) = maximumBy compareLoc errors
compareLoc (_, line1, col1) (_, line2, col2) = compare line1 line2 <> compare col1 col2
statementToExpression :: DynFlags -> ParseOutput CodeBlock -> ParseOutput CodeBlock
statementToExpression flags (Parsed (Statement stmt)) = Parsed result
where
result = if isExpr flags stmt
then Expression stmt
else Statement stmt
statementToExpression _ other = other
-- Check whether a string is a valid expression.
isExpr :: DynFlags -> String -> Bool
isExpr flags str =
case runParser flags parserExpression str of
Parsed{} -> True
_ -> False
tryParser :: String -> (String -> CodeBlock, String -> ParseOutput String) -> ParseOutput CodeBlock
tryParser string (blockType, parser) =
case parser string of
Parsed res -> Parsed (blockType res)
Failure err loc -> Failure err loc
otherwise -> error "tryParser failed, output was neither Parsed nor Failure"
parsers :: DynFlags -> [(String -> CodeBlock, String -> ParseOutput String)]
parsers flags =
[ (Import, unparser parserImport)
, (TypeSignature, unparser parserTypeSignature)
, (Statement, unparser parserStatement)
, (Declaration, unparser parserDeclaration)
]
where
unparser :: Parser a -> String -> ParseOutput String
unparser parser code =
case runParser flags parser code of
Parsed out -> Parsed code
Partial out strs -> Partial code strs
Failure err loc -> Failure err loc
-- | Find consecutive declarations of the same function and join them into a single declaration.
-- These declarations may also include a type signature, which is also joined with the subsequent
-- declarations.
joinFunctions :: [Located CodeBlock] -> [Located CodeBlock]
joinFunctions [] = []
joinFunctions blocks =
if signatureOrDecl $ unloc $ head blocks
then Located lnum (conjoin $ map unloc decls) : joinFunctions rest
else head blocks : joinFunctions (tail blocks)
where
decls = takeWhile (signatureOrDecl . unloc) blocks
rest = drop (length decls) blocks
lnum = line $ head decls
signatureOrDecl (Declaration _) = True
signatureOrDecl (TypeSignature _) = True
signatureOrDecl _ = False
str (Declaration s) = s
str (TypeSignature s) = s
str _ = error "Expected declaration or signature"
conjoin :: [CodeBlock] -> CodeBlock
conjoin = Declaration . intercalate "\n" . map str
-- | Parse a pragma of the form {-# LANGUAGE ... #-}
parsePragma :: String -- ^ Pragma string.
-> Int -- ^ Line number at which the directive appears.
-> CodeBlock -- ^ Pragma code block or a parse error.
parsePragma ('{':'-':'#':pragma) line =
let commaToSpace :: Char -> Char
commaToSpace ',' = ' '
commaToSpace x = x
pragmas = words $ takeWhile (/= '#') $ map commaToSpace pragma
in case pragmas of
--empty string pragmas are unsupported
[] -> Pragma (PragmaUnsupported "") []
"LANGUAGE":xs -> Pragma PragmaLanguage xs
x:xs -> Pragma (PragmaUnsupported x) xs
-- | Parse a directive of the form :directiveName.
parseDirective :: String -- ^ Directive string.
-> Int -- ^ Line number at which the directive appears.
-> CodeBlock -- ^ Directive code block or a parse error.
parseDirective (':':'!':directive) line = Directive ShellCmd $ '!' : directive
parseDirective (':':directive) line =
case find rightDirective directives of
Just (directiveType, _) -> Directive directiveType arg
where arg = unwords restLine
_:restLine = words directive
Nothing ->
let directiveStart =
case words directive of
[] -> ""
first:_ -> first
in ParseError (Loc line 1) $ "Unknown directive: '" ++ directiveStart ++ "'."
where
rightDirective (_, dirname) =
case words directive of
[] -> False
dir:_ -> dir `elem` tail (inits dirname)
directives =
[ (LoadModule, "module")
, (GetType, "type")
, (GetKind, "kind")
, (GetInfo, "info")
, (SearchHoogle, "hoogle")
, (GetDoc, "documentation")
, (SetDynFlag, "set")
, (LoadFile, "load")
, (SetOption, "option")
, (SetExtension, "extension")
, (GetHelp, "?")
, (GetHelp, "help")
]
parseDirective _ _ = error "Directive must start with colon!"
-- | Parse a module and return the name declared in the 'module X where' line. That line is
-- required, and if it does not exist, this will error. Names with periods in them are returned
-- piece by piece.
getModuleName :: GhcMonad m => String -> m [String]
getModuleName moduleSrc = do
flags <- getSessionDynFlags
let output = runParser flags parserModule moduleSrc
case output of
Failure{} -> error "Module parsing failed."
Parsed mod ->
case unLoc <$> hsmodName (unLoc mod) of
Nothing -> error "Module must have a name."
Just name -> return $ split "." $ moduleNameString name
otherwise -> error "getModuleName failed, output was neither Parsed nor Failure"
| artuuge/IHaskell | src/IHaskell/Eval/Parser.hs | mit | 12,547 | 0 | 16 | 3,617 | 2,744 | 1,442 | 1,302 | 218 | 13 |
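A few illustrative calls to the exported parseDirective shown above; the expected results follow directly from its definition (the snippet assumes the module is importable as-is):

import IHaskell.Eval.Parser (parseDirective, CodeBlock (..), DirectiveType (..))

directiveExamples :: [CodeBlock]
directiveExamples =
  [ parseDirective ":type words" 1       -- Directive GetType "words"
  , parseDirective ":!ls" 1              -- Directive ShellCmd "!ls"
  , parseDirective ":set -XDataKinds" 1  -- Directive SetDynFlag "-XDataKinds"
  ]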
--------------------------------------------------------------------------------
-- | The LLVM abstract syntax.
--
module Llvm.AbsSyn where
import Llvm.MetaData
import Llvm.Types
import Unique
-- | Block labels
type LlvmBlockId = Unique
-- | A block of LLVM code.
data LlvmBlock = LlvmBlock {
-- | The code label for this block
blockLabel :: LlvmBlockId,
-- | A list of LlvmStatement's representing the code for this block.
-- This list must end with a control flow statement.
blockStmts :: [LlvmStatement]
}
type LlvmBlocks = [LlvmBlock]
-- | An LLVM Module. This is a top level container in LLVM.
data LlvmModule = LlvmModule {
-- | Comments to include at the start of the module.
modComments :: [LMString],
-- | LLVM Alias type definitions.
modAliases :: [LlvmAlias],
-- | LLVM meta data.
modMeta :: [MetaDecl],
-- | Global variables to include in the module.
modGlobals :: [LMGlobal],
-- | LLVM Functions used in this module but defined in other modules.
modFwdDecls :: LlvmFunctionDecls,
-- | LLVM Functions defined in this module.
modFuncs :: LlvmFunctions
}
-- | An LLVM Function
data LlvmFunction = LlvmFunction {
-- | The signature of this declared function.
funcDecl :: LlvmFunctionDecl,
-- | The functions arguments
funcArgs :: [LMString],
-- | The function attributes.
funcAttrs :: [LlvmFuncAttr],
-- | The section to put the function into,
funcSect :: LMSection,
-- | Prefix data
funcPrefix :: Maybe LlvmStatic,
-- | The body of the functions.
funcBody :: LlvmBlocks
}
type LlvmFunctions = [LlvmFunction]
type SingleThreaded = Bool
-- | LLVM ordering types for synchronization purposes. (Introduced in LLVM
-- 3.0). Please see the LLVM documentation for a better description.
data LlvmSyncOrdering
-- | Some partial order of operations exists.
= SyncUnord
-- | A single total order for operations at a single address exists.
| SyncMonotonic
-- | Acquire synchronization operation.
| SyncAcquire
-- | Release synchronization operation.
| SyncRelease
-- | Acquire + Release synchronization operation.
| SyncAcqRel
-- | Full sequential Consistency operation.
| SyncSeqCst
deriving (Show, Eq)
-- | Llvm Statements
data LlvmStatement
{- |
Assign an expression to a variable:
* dest: Variable to assign to
* source: Source expression
-}
= Assignment LlvmVar LlvmExpression
{- |
Memory fence operation
-}
| Fence Bool LlvmSyncOrdering
{- |
Always branch to the target label
-}
| Branch LlvmVar
{- |
Branch to label targetTrue if cond is true otherwise to label targetFalse
* cond: condition that will be tested, must be of type i1
* targetTrue: label to branch to if cond is true
* targetFalse: label to branch to if cond is false
-}
| BranchIf LlvmVar LlvmVar LlvmVar
{- |
Comment
Plain comment.
-}
| Comment [LMString]
{- |
Set a label on this position.
* name: Identifier of this label, unique for this module
-}
| MkLabel LlvmBlockId
{- |
Store variable value in pointer ptr. If value is of type t then ptr must
be of type t*.
* value: Variable/Constant to store.
* ptr: Location to store the value in
-}
| Store LlvmVar LlvmVar
{- |
Multiway branch
* scrutinee: Variable or constant of integer type that determines which
arm is chosen.
* def: The default label if there is no match in target.
* target: A list of (value,label) where the value is an integer
constant and label the corresponding label to jump to if the
scrutinee matches the value.
-}
| Switch LlvmVar LlvmVar [(LlvmVar, LlvmVar)]
{- |
Return a result.
* result: The variable or constant to return
-}
| Return (Maybe LlvmVar)
{- |
An instruction for the optimizer that the code following is not reachable
-}
| Unreachable
{- |
Raise an expression to a statement (if you don't want the result or want
to use LLVM unnamed values).
-}
| Expr LlvmExpression
{- |
A no-op LLVM statement. Useful as it's often more efficient to use this
than to wrap an LlvmStatement in a Just or [].
-}
| Nop
{- |
A LLVM statement with metadata attached to it.
-}
| MetaStmt [MetaAnnot] LlvmStatement
deriving (Eq)
-- | Llvm Expressions
data LlvmExpression
{- |
Allocate amount * sizeof(tp) bytes on the stack
* tp: LlvmType to reserve room for
* amount: The nr of tp's which must be allocated
-}
= Alloca LlvmType Int
{- |
Perform the machine operator op on the operands left and right
* op: operator
* left: left operand
* right: right operand
-}
| LlvmOp LlvmMachOp LlvmVar LlvmVar
{- |
Perform a compare operation on the operands left and right
* op: operator
* left: left operand
* right: right operand
-}
| Compare LlvmCmpOp LlvmVar LlvmVar
{- |
Extract a scalar element from a vector
* val: The vector
* idx: The index of the scalar within the vector
-}
| Extract LlvmVar LlvmVar
{- |
Extract a scalar element from a structure
* val: The structure
* idx: The index of the scalar within the structure
Corresponds to "extractvalue" instruction.
-}
| ExtractV LlvmVar Int
{- |
Insert a scalar element into a vector
* val: The source vector
* elt: The scalar to insert
* index: The index at which to insert the scalar
-}
| Insert LlvmVar LlvmVar LlvmVar
{- |
Allocate amount * sizeof(tp) bytes on the heap
* tp: LlvmType to reserve room for
* amount: The nr of tp's which must be allocated
-}
| Malloc LlvmType Int
{- |
Load the value at location ptr
-}
| Load LlvmVar
{- |
Atomic load of the value at location ptr
-}
| ALoad LlvmSyncOrdering SingleThreaded LlvmVar
{- |
Navigate in a structure, selecting elements
* inbound: Is the pointer inbounds? (computed pointer doesn't overflow)
* ptr: Location of the structure
* indexes: A list of indexes to select the correct value.
-}
| GetElemPtr Bool LlvmVar [LlvmVar]
{- |
Cast the 'from' variable to the 'to' type. This is an abstraction of three
cast operators in LLVM: inttoptr, ptrtoint and bitcast.
* cast: Cast type
* from: Variable to cast
* to: type to cast to
-}
| Cast LlvmCastOp LlvmVar LlvmType
{- |
Call a function. The result is the value of the expression.
* tailJumps: CallType to signal if the function should be tail called
* fnptrval: An LLVM value containing a pointer to a function to be
invoked. Can be indirect. Should be LMFunction type.
* args: Concrete arguments for the parameters
* attrs: A list of function attributes for the call. Only NoReturn,
NoUnwind, ReadOnly and ReadNone are valid here.
-}
| Call LlvmCallType LlvmVar [LlvmVar] [LlvmFuncAttr]
{- |
Call a function as above but potentially taking metadata as arguments.
* tailJumps: CallType to signal if the function should be tail called
* fnptrval: An LLVM value containing a pointer to a function to be
invoked. Can be indirect. Should be LMFunction type.
* args: Arguments that may include metadata.
* attrs: A list of function attributes for the call. Only NoReturn,
NoUnwind, ReadOnly and ReadNone are valid here.
-}
| CallM LlvmCallType LlvmVar [MetaExpr] [LlvmFuncAttr]
{- |
Merge variables from different basic blocks which are predecessors of this
basic block in a new variable of type tp.
* tp: type of the merged variable, must match the types of the
predecessor variables.
    * predecessors: A list of variables and the basic block that they originate
from.
-}
| Phi LlvmType [(LlvmVar,LlvmVar)]
{- |
Inline assembly expression. Syntax is very similar to the style used by GCC.
* assembly: Actual inline assembly code.
* constraints: Operand constraints.
* return ty: Return type of function.
* vars: Any variables involved in the assembly code.
* sideeffect: Does the expression have side effects not visible from the
constraints list.
* alignstack: Should the stack be conservatively aligned before this
expression is executed.
-}
| Asm LMString LMString LlvmType [LlvmVar] Bool Bool
{- |
A LLVM expression with metadata attached to it.
-}
| MExpr [MetaAnnot] LlvmExpression
deriving (Eq)
|
urbanslug/ghc
|
compiler/llvmGen/Llvm/AbsSyn.hs
|
bsd-3-clause
| 8,875 | 0 | 9 | 2,499 | 566 | 366 | 200 | 66 | 0 |
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE FlexibleContexts #-}
module Yesod.Routes.Class
( RenderRoute (..)
, ParseRoute (..)
, RouteAttrs (..)
) where
import Data.Text (Text)
import Data.Set (Set)
class Eq (Route a) => RenderRoute a where
-- | The <http://www.yesodweb.com/book/routing-and-handlers type-safe URLs> associated with a site argument.
data Route a
renderRoute :: Route a
-> ([Text], [(Text, Text)]) -- ^ The path of the URL split on forward slashes, and a list of query parameters with their associated value.
class RenderRoute a => ParseRoute a where
parseRoute :: ([Text], [(Text, Text)]) -- ^ The path of the URL split on forward slashes, and a list of query parameters with their associated value.
-> Maybe (Route a)
class RenderRoute a => RouteAttrs a where
routeAttrs :: Route a
-> Set Text -- ^ A set of <http://www.yesodweb.com/book/route-attributes attributes associated with the route>.
|
geraldus/yesod
|
yesod-core/src/Yesod/Routes/Class.hs
|
mit
| 998 | 0 | 10 | 226 | 195 | 112 | 83 | 18 | 0 |
{-# OPTIONS_GHC -fwarn-unsafe #-}
{-# LANGUAGE FlexibleInstances #-}
module SH_Overlap6_A (
C(..)
) where
import SH_Overlap6_B
instance
{-# OVERLAPS #-}
C [Int] where
f _ = "[Int]"
|
urbanslug/ghc
|
testsuite/tests/safeHaskell/overlapping/SH_Overlap6_A.hs
|
bsd-3-clause
| 198 | 0 | 6 | 44 | 38 | 24 | 14 | 9 | 0 |
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE TypeFamilies #-}
-- | An unstructured grab-bag of various tools and inspection
-- functions that didn't really fit anywhere else.
module Futhark.Tools
( module Futhark.Construct,
redomapToMapAndReduce,
dissectScrema,
sequentialStreamWholeArray,
partitionChunkedFoldParameters,
-- * Primitive expressions
module Futhark.Analysis.PrimExp.Convert,
)
where
import Control.Monad.Identity
import Futhark.Analysis.PrimExp.Convert
import Futhark.Construct
import Futhark.IR
import Futhark.IR.SOACS.SOAC
import Futhark.Util
-- | Turns a binding of a @redomap@ into two separate bindings, a
-- @map@ binding and a @reduce@ binding (returned in that order).
--
-- Reuses the original pattern for the @reduce@, and creates a new
-- pattern with new 'Ident's for the result of the @map@.
redomapToMapAndReduce ::
( MonadFreshNames m,
Buildable rep,
ExpDec rep ~ (),
Op rep ~ SOAC rep
) =>
Pat (LetDec rep) ->
( SubExp,
[Reduce rep],
Lambda rep,
[VName]
) ->
m (Stm rep, Stm rep)
redomapToMapAndReduce (Pat pes) (w, reds, map_lam, arrs) = do
(map_pat, red_pat, red_arrs) <-
splitScanOrRedomap pes w map_lam $ map redNeutral reds
let map_stm = mkLet map_pat $ Op $ Screma w arrs (mapSOAC map_lam)
red_stm <-
Let red_pat (defAux ()) . Op
<$> (Screma w red_arrs <$> reduceSOAC reds)
return (map_stm, red_stm)
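-- Sketched intuition (hypothetical names): for a redomap over @xs@ with map
-- function @f@ and reduction @op@/@ne@, the result is roughly
--
--   let ys = map f xs          -- bound to a fresh pattern
--   in  reduce op ne ys        -- bound to the original pattern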
splitScanOrRedomap ::
(Typed dec, MonadFreshNames m) =>
[PatElem dec] ->
SubExp ->
Lambda rep ->
[[SubExp]] ->
m ([Ident], Pat dec, [VName])
splitScanOrRedomap pes w map_lam nes = do
let (acc_pes, arr_pes) =
splitAt (length $ concat nes) pes
(acc_ts, _arr_ts) =
splitAt (length (concat nes)) $ lambdaReturnType map_lam
map_accpat <- zipWithM accMapPatElem acc_pes acc_ts
map_arrpat <- mapM arrMapPatElem arr_pes
let map_pat = map_accpat ++ map_arrpat
return (map_pat, Pat acc_pes, map identName map_accpat)
where
accMapPatElem pe acc_t =
newIdent (baseString (patElemName pe) ++ "_map_acc") $ acc_t `arrayOfRow` w
arrMapPatElem = return . patElemIdent
-- | Turn a Screma into a Scanomap (possibly with mapout parts) and a
-- Redomap. This is used to handle Scremas that are so complicated
-- that we cannot directly generate efficient parallel code for them.
-- In essence, what happens is the opposite of horizontal fusion.
dissectScrema ::
( MonadBuilder m,
Op (Rep m) ~ SOAC (Rep m),
Buildable (Rep m)
) =>
Pat (LetDec (Rep m)) ->
SubExp ->
ScremaForm (Rep m) ->
[VName] ->
m ()
dissectScrema pat w (ScremaForm scans reds map_lam) arrs = do
let num_reds = redResults reds
num_scans = scanResults scans
(scan_res, red_res, map_res) =
splitAt3 num_scans num_reds $ patNames pat
to_red <- replicateM num_reds $ newVName "to_red"
let scanomap = scanomapSOAC scans map_lam
letBindNames (scan_res <> to_red <> map_res) $
Op $ Screma w arrs scanomap
reduce <- reduceSOAC reds
letBindNames red_res $ Op $ Screma w to_red reduce
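-- Informally: the original @screma w arrs (scans, reds, f)@ is split into a
-- scanomap producing the scan results plus the inputs of the reduction,
-- followed by a separate reduction over those intermediate arrays.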
-- | Turn a stream SOAC into statements that apply the stream lambda
-- to the entire input.
sequentialStreamWholeArray ::
(MonadBuilder m, Buildable (Rep m)) =>
Pat (LetDec (Rep m)) ->
SubExp ->
[SubExp] ->
Lambda (Rep m) ->
[VName] ->
m ()
sequentialStreamWholeArray pat w nes lam arrs = do
-- We just set the chunksize to w and inline the lambda body. There
-- is no difference between parallel and sequential streams here.
let (chunk_size_param, fold_params, arr_params) =
partitionChunkedFoldParameters (length nes) $ lambdaParams lam
-- The chunk size is the full size of the array.
letBindNames [paramName chunk_size_param] $ BasicOp $ SubExp w
-- The accumulator parameters are initialised to the neutral element.
forM_ (zip fold_params nes) $ \(p, ne) ->
letBindNames [paramName p] $ BasicOp $ SubExp ne
-- Finally, the array parameters are set to the arrays (but reshaped
-- to make the types work out; this will be simplified rapidly).
forM_ (zip arr_params arrs) $ \(p, arr) ->
letBindNames [paramName p] $
BasicOp $
Reshape (map DimCoercion $ arrayDims $ paramType p) arr
-- Then we just inline the lambda body.
mapM_ addStm $ bodyStms $ lambdaBody lam
-- The number of results in the body matches exactly the size (and
-- order) of 'pat', so we bind them up here, again with a reshape to
-- make the types work out.
forM_ (zip (patElems pat) $ bodyResult $ lambdaBody lam) $ \(pe, SubExpRes cs se) ->
certifying cs $ case (arrayDims $ patElemType pe, se) of
(dims, Var v)
| not $ null dims ->
letBindNames [patElemName pe] $ BasicOp $ Reshape (map DimCoercion dims) v
_ -> letBindNames [patElemName pe] $ BasicOp $ SubExp se
-- | Split the parameters of a stream reduction lambda into the chunk
-- size parameter, the accumulator parameters, and the input chunk
-- parameters. The integer argument is how many accumulators are
-- used.
partitionChunkedFoldParameters ::
Int ->
[Param dec] ->
(Param dec, [Param dec], [Param dec])
partitionChunkedFoldParameters _ [] =
error "partitionChunkedFoldParameters: lambda takes no parameters"
partitionChunkedFoldParameters num_accs (chunk_param : params) =
let (acc_params, arr_params) = splitAt num_accs params
in (chunk_param, acc_params, arr_params)
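-- For example (with hypothetical parameter names), splitting off two
-- accumulators behaves like:
--
--   partitionChunkedFoldParameters 2 [chunk, acc1, acc2, xs, ys]
--     == (chunk, [acc1, acc2], [xs, ys])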
|
diku-dk/futhark
|
src/Futhark/Tools.hs
|
isc
| 5,418 | 0 | 18 | 1,135 | 1,432 | 730 | 702 | 107 | 2 |
module CardGame where
import Prelude
-- 6.53
data Suit = Spades | Hearts |
Diamonds | Clubs
deriving (Eq, Show)
-- 2..9,j,q,k,* (Ace)
type Value = Char
type Card = (Suit,Value)
type Deck = [Card]
-- 6.55
data Player = North | South |
East | West
deriving (Eq,Show)
-- 6.56
-- Head leads
type Trick = [(Player,Card)]
noTrumpTrick :: Trick
noTrumpTrick = [(East,(Spades,'3')),(South,(Spades,'k')),(West,(Spades,'6')),(North,(Spades,'*'))]
trickWithTrump :: Trick
trickWithTrump = [ (North, (Diamonds, '3')), (South,(Clubs,'*')), (East,(Diamonds,'7')), (West,(Diamonds,'9')) ]
-- 6.57
-- Assumes no trump
winNT :: Trick -> Player
winNT trick = head [ p | (p,v) <- player_values, v == max_value]
where
max_value = maximum [ v | (p,(s,v)) <- trick ]
player_values = [ (p,v) | (p,(s,v)) <- trick ]
-- 6.58
-- Assumes at least one trump exists
winT' :: Trick -> Player
winT' trick = head [ p | (p,(s,v)) <- trumps, v == max_trump_value ]
where
(p,(lead_suit,v)) = head trick
trumps = [ (p,(s,v)) | (p,(s,v)) <- trick, s /= lead_suit ]
max_trump_value = maximum [ v | (p,(s,v)) <- trumps ]
winT :: Suit -> Trick -> Player
winT trump trick = winNT trumps
where
trumps = [ (p,(s,v)) | (p,(s,v)) <- trick, s == trump ]
-- 6.59
type Hand = (Player,[Card])
-- 6.60
type Hands = [Hand]
north :: Hand
north = (North, [(Spades,'*'),(Hearts,'k'),(Hearts,'7'),(Diamonds,'3')])
south :: Hand
south = (South, [(Spades,'k'),(Spades,'j'),(Spades,'q'),(Clubs,'*'),(Hearts,'2')])
east :: Hand
east = (East,[(Spades,'3'),(Hearts,'j'),(Diamonds,'7'),(Diamonds,'2'),(Clubs,'9')])
west :: Hand
west = (West,[(Spades,'6'),(Diamonds,'9'),(Diamonds,'6'),(Clubs,'k'),(Clubs,'6')])
sampleHands :: Hands
sampleHands = [north,south,east,west]
-- 6.61
-- Given the players' hands, and a trick
-- it checks whether the trick is both possible and legal
-- A trick is possible when the card played by each player is in their hand
-- A trick is legal when each player follows the leading suit if they can
--
playerHasCard :: Player -> Hands -> Card -> Bool
playerHasCard p hs c = and [elem c h | (player,h) <- hs, player == p]
playerHasSuit :: Player -> Hands -> Suit -> Bool
playerHasSuit p hs s = elem s player_suits
where
player_suits = fst $ unzip $ concat [h | (player,h) <- hs, player == p]
checkPlay :: Hands -> Trick -> Bool
checkPlay hs t = isPossible && isLegal
where
isPossible = and [ True | (p,c) <- t, playerHasCard p hs c ]
isLegal = length (can_follow ++ cant_follow) == 4
(lead_player,(lead_suit,_)) = head t
can_follow = [ p | (p,(s,v)) <- t, s == lead_suit, playerHasCard p hs (s,v) ]
cant_follow = [ p | (p,(s,v)) <- t, s /= lead_suit, not (playerHasSuit p hs lead_suit)]
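-- For instance, both sample tricks above are possible and legal with respect
-- to the sample hands:
--
--   checkPlay sampleHands noTrumpTrick == True
--   checkPlay sampleHands trickWithTrump == True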
-- 6.62
data Team = NorthSouth | EastWest
deriving (Eq)
teamFromPlayer :: Player -> Team
teamFromPlayer player
| elem player [North,South] = NorthSouth
| otherwise = EastWest
scoreFromTeam :: Team -> Int
scoreFromTeam team
| team == NorthSouth = 1
| otherwise = -1
winnerTeamFromScore :: [Int] -> Team
winnerTeamFromScore scores
| (sum scores) > 0 = NorthSouth
| otherwise = EastWest
winnerNT :: [Trick] -> Team
winnerNT tricks = winnerTeamFromScore scores
where
scores = [ scoreFromTeam $ teamFromPlayer $ winNT t | t <- tricks ]
winnerT :: Suit -> [Trick] -> Team
winnerT trump tricks = winnerTeamFromScore scores
where
scores = [ scoreFromTeam $ teamFromPlayer $ winT trump t | t <- tricks ]
|
CarmineM74/haskell_craft3e
|
cardgame.hs
|
mit
| 3,605 | 0 | 12 | 802 | 1,488 | 870 | 618 | 72 | 1 |
{-# LANGUAGE TupleSections, ViewPatterns, MultiWayIf, OverloadedLists #-}
{-# LANGUAGE ScopedTypeVariables, OverloadedStrings, PatternGuards, QuasiQuotes #-}
module Control.OperationalTransformation.JSON.Transform where
import qualified Control.OperationalTransformation as C
import Control.OperationalTransformation.JSON.Affects
import qualified Control.OperationalTransformation.JSON.Apply as Ap
-- import Control.OperationalTransformation.JSON.QuasiQuote
import Control.OperationalTransformation.JSON.Types
import Control.OperationalTransformation.JSON.Util
import Control.OperationalTransformation.Text0
import Data.List
import Data.String.Interpolate.IsString
invertOperation = error "invertOperation not implemented"
getfst (JSONOperation [x]) = x
op1 = JSONOperation [StringInsert [Pos 2] 0 "bbb"]
op2 = JSONOperation [StringInsert [Pos 3] 0 "cccc"]
foo = affects -- Just to avoid warning that the import is unused
unsafeIndex msg l i | i < length l = l !! i
unsafeIndex msg l index | otherwise = error $ [i|UNSAFE INDEX ERROR, IT WAS #{msg}. l = #{l}. i = #{index}|]
----------------------------------------------------------------------------------
-- Transform right
-- In transformRight, the left operation affects the right operation.
-- So, transform the right operation properly and return it
----------------------------------------------------------------------------------
transformRight :: JSONOp -> JSONOp -> Either String JSONOp
-- ListDelete/ListMove on the same list
transformRight op1@(ListDelete {}) op2@(ListMove path2 index21 index22) | path1 == path2 =
if -- Delete of the thing being moved makes the move a no-op
| index1 == index21 -> Right Identity
-- Delete in the middle of the range causes the top index to go down
| index21 <= index1 && index1 <= index22 -> Right $ ListMove path2 index21 (index22 - 1)
| index22 <= index1 && index1 <= index21 -> Right $ ListMove path2 (index21 - 1) index22
-- Delete before the range causes both indices to go down
| index1 < bottom -> Right $ ListMove path2 (index21 - 1) (index22 - 1)
-- Otherwise, no change
| otherwise -> Right op2
where bottom = min index21 index22
top = max index21 index22
path1 = getPath op1
path2 = getPath op2
Pos index1 = last (getFullPath op1)
-- ListInsert/ListMove on the same list
transformRight op1@(ListInsert path1 index1 _) op2@(ListMove path2 index21 index22) | path1 == path2 =
if -- Insert in the middle of the range causes the top index to go up
| path1 == path2 && bottom < index1 && index1 <= top && index22 > index21 -> Right $ ListMove path2 index21 (index22 + 1)
| path1 == path2 && bottom < index1 && index1 <= top && index22 < index21 -> Right $ ListMove path2 (index21 + 1) index22
-- Insert before the range causes both indices to go up
| path1 == path2 && index1 <= bottom -> Right $ ListMove path2 (index21 + 1) (index22 + 1)
-- Otherwise, no change
| otherwise -> Right op2
where bottom = min index21 index22
top = max index21 index22
-- ListMove/ListInsert on the same list
transformRight op1@(ListMove path1 index11 index12) op2@(ListInsert path2 index2 _) | path1 == path2 =
if -- on same index when the ListMove moves it to earlier: the ListInsert gets bumped up by 1
| index11 == index2 && index12 < index11 -> Right $ replaceIndex op2 (length path1) (index2 + 1)
-- in between
| index11 < index2 && index2 <= index12 -> Right $ replaceIndex op2 (length path1) (index2 - 1)
| index12 < index2 && index2 <= index11 -> Right $ replaceIndex op2 (length path1) (index2 + 1)
-- If the ListInsert is at or before the smaller index of the ListMove, it's not affected. TODO: cover this in `affects`
| index2 <= (min index11 index12) -> Right op2
-- If the ListInsert is at or after the larger index of the ListMove, it's not affected. TODO: cover this in `affects`
| index2 >= (max index11 index12) -> Right op2
-- ListMove/Anything on same list or child thereof
transformRight (ListMove listPath1 listIndex1 listIndex2) op2 | listPath1 `isStrictPrefixOf` (getFullPath op2)
, Pos i <- unsafeIndex "B" (getFullPath op2) (length listPath1) =
if | i == listIndex1 -> Right $ replaceIndex op2 (length listPath1) listIndex2
-- in between
| listIndex1 <= i && i <= listIndex2 -> Right $ replaceIndex op2 (length listPath1) (i - 1)
| listIndex2 <= i && i <= listIndex1 -> Right $ replaceIndex op2 (length listPath1) (i + 1)
| otherwise -> Right op2
-- ListDelete/ListReplace on the same list: a delete affecting a replace turns into an insert.
transformRight op1@(ListDelete path1 index1 value1) op2@(ListReplace path2 index2 old new)
| path1 == path2 && index1 == index2 = Right $ ListInsert path2 index2 new
-- ListInsert/Anything
transformRight op1@(ListInsert listPath _ val) op2
| listPath `isStrictPrefixOf` (getFullPath op2) = Right $ replaceIndexFn op2 (length listPath) (+ 1)
-- ListDelete/ListInsert at same index: ListInsert is unchanged
transformRight op1@(ListDelete path1 index1 val1) op2@(ListInsert path2 index2 val2)
| getFullPath op1 == getFullPath op2 = Right op2
-- ListReplace/ListDelete: a replace affecting a delete turns the delete into a no-op
transformRight op1@(ListReplace {}) op2@(ListDelete {})
| (getFullPath op1) == (getFullPath op2) = Right Identity
-- ListReplace/ListDelete: a replace affecting a delete turns the delete into a no-op
transformRight op1@(ListDelete {}) op2@(ListDelete {})
| (getFullPath op1) == (getFullPath op2) = Right Identity
-- ListReplace/ListMove: a list replace on the same index as a list move leaves the move unchanged (since the replace will move instead)
transformRight op1@(ListReplace {}) op2@(ListMove {})
| (getFullPath op1) == (getFullPath op2) = Right op2
-- ListReplace/Anything: a list replace on the same index turns the other thing into a no-op
transformRight op1@(ListReplace {}) (getFullPath -> fullPath2)
| (getFullPath op1) `isPrefixOf` fullPath2 = Right Identity
-- ListDelete/Anything
transformRight op1@(ListDelete listPath i1 val) op2@(((\x -> (safeIndex x (length listPath))) . getFullPath) -> Just (Pos i2))
= if | i1 == i2 -> Right Identity
| i1 < i2 -> Right $ replaceIndex op2 (length listPath) (i2 - 1)
| True -> Right op2
-- ObjectDelete/ObjectDelete: a delete on the same key creates a no-op
transformRight op1@(ObjectDelete path1 key1 value1) op2@(ObjectDelete path2 key2 value2)
| path1 == path2 && key1 == key2 = Right Identity
-- ObjectDelete/ObjectReplace: a delete affecting a replace turns the replace into an insert
transformRight op1@(ObjectDelete path1 key1 value1) op2@(ObjectReplace path2 key2 old2 new2)
| getFullPath op1 == getFullPath op2 = Right $ ObjectInsert path1 key1 new2
-- ObjectReplace/ObjectDelete: a replace affecting a delete turns the delete into a no-op
transformRight op1@(ObjectReplace path1 key1 old1 new1) op2@(ObjectDelete path2 key2 value2)
| (getFullPath op1) `isPrefixOf` (getFullPath op2) = Right Identity
-- ObjectDelete/Anything: a delete affecting any operation inside the delete turns the thing into a no-op
transformRight op1@(ObjectDelete {}) op2
| getFullPath op1 `isPrefixOf` getFullPath op2 = Right $ Identity
-- ObjectReplace/Anything: a delete affecting any operation inside the replace turns the thing into a no-op
transformRight op1@(ObjectReplace {}) op2
| getFullPath op1 `isPrefixOf` getFullPath op2 = Right $ Identity
-- (ObjectReplace,ObjectDelete,ListDelete)/Anything: an operation that affects a replace or delete means we need to change what's removed
transformRight op1 op2@(ObjectReplace path key old new) =
(\old' -> ObjectReplace path key old' new) <$> (Ap.apply (setPath (drop (length $ getFullPath op2) (getPath op1)) op1) old)
transformRight op1 op2@(ObjectDelete path key old) =
(\old' -> ObjectDelete path key old') <$> (Ap.apply (setPath (drop (length $ getFullPath op2) (getPath op1)) op1) old)
transformRight op1 op2@(ListDelete path i2 value) =
(\value' -> ListDelete path i2 value') <$> Ap.apply (setPath (drop (length $ getFullPath op2) (getPath op1)) op1) value
-- A delete or replace turns the other operation into a no-op
transformRight op1@(ObjectDelete {}) op2 = Right Identity
transformRight op1@(ObjectReplace {}) op2 = Right Identity
transformRight op1@(ListDelete {}) op2 = Right Identity
-- transformRight x y = Left [i|transformRight not handled: #{x} affecting #{y}|]
-- Unhandled transformRight case. Just leave the operation unchanged.
-- This will allow us to run quickcheck tests without crashing here all the time.
transformRight x y = Right y
----------------------------------------------------------------------------------
--- Transform double
----------------------------------------------------------------------------------
-- |In transformDouble, both operations affect the other
transformDouble :: JSONOp -> JSONOp -> Either String (JSONOp, JSONOp)
transformDouble op1@(ListInsert path1 i1 value1) op2@(ListInsert path2 i2 value2)
| (path2 == path1) && (i1 > i2) = error "Problem with transformDouble ListInsert/ListInsert" -- TODO rev <$> transform op2 op1 -- WLOG
| (path2 == path1) && (i1 <= i2) = Right (op1, ListInsert path2 (succ i2) value2)
| (path2 `isPrefixOf` path1) = undefined -- TODO rev <$> transform op2 op1 -- WLOG
| (path1 `isPrefixOf` path2) = Right (op1, op2) -- TODO: increment the appropriate part of path2
| otherwise = Right (op1, op2)
-- For dueling subtype operations, defer to the operation's transform function
transformDouble (ApplySubtypeOperation path1 typ1 op1) (ApplySubtypeOperation path2 typ2 op2) = case (C.transform op1 op2) of
Left err -> Left err
Right (T0 [], T0 []) -> Right (Identity, Identity)
Right (T0 [], op2') -> Right (Identity, ApplySubtypeOperation path2 typ2 op2')
Right (op1', T0 []) -> Right (ApplySubtypeOperation path1 typ1 op1', Identity)
Right (op1', op2') -> Right (ApplySubtypeOperation path1 typ1 op1', ApplySubtypeOperation path2 typ2 op2')
-- On simultaneous inserts, the left one wins
transformDouble op1@(ObjectInsert path1 key1 value1) op2@(ObjectInsert path2 key2 value2) |
path1 == path2 && key1 == key2 = Right (ObjectReplace path1 key1 value2 value1, Identity)
-- On simultaneous list replaces, the left one wins
transformDouble op1@(ListReplace path1 key1 old1 new1) op2@(ListReplace path2 key2 old2 new2)
| (path1 == path2) && (key1 == key2) = Right (ListReplace path1 key1 new2 new1, Identity)
-- On simultaneous object replaces, the left one wins
transformDouble op1@(ObjectReplace path1 key1 old1 new1) op2@(ObjectReplace path2 key2 old2 new2)
| (path1 == path2) && (key1 == key2) = Right (ObjectReplace path1 key1 new2 new1, Identity)
-- ListMove/ListMove: fall back to special logic
transformDouble op1@(ListMove path1 otherFrom otherTo) op2@(ListMove path2 from to) | path1 == path2
= Right (transformListMove LeftSide op2 op1, transformListMove RightSide op1 op2)
-- The right default behavior for transformDouble is to transform the two sides independently,
-- for the cases where the transformations don't depend on each other
transformDouble x y = (, ) <$> transformRight y x <*> transformRight x y
----------------------------------------------------------------------------------
--- Transform ListMove/ListMove (special case)
----------------------------------------------------------------------------------
data Side = LeftSide | RightSide deriving (Show, Eq)
-- ListMove/ListMove, where we're transforming the right one
-- Made by directly copying the logic in json0.js
transformListMove side op1@(ListMove path1 otherFrom otherTo) op2@(ListMove path2 from to) | otherFrom == otherTo = op2
-- Where did my thing go? Someone already moved it and we're the right: tiebreak to a no-op
transformListMove side (ListMove path1 otherFrom otherTo) (ListMove path2 from to) | ((from == otherFrom) && (side == RightSide)) = Identity
-- Where did my thing go? Someone already moved it and we're the left: tiebreak to a valid op
transformListMove side (ListMove path1 otherFrom otherTo) (ListMove path2 from to) | ((from == otherFrom) && (side == LeftSide) && (from == to)) = ListMove path2 otherTo otherTo -- Ugh special case
transformListMove side (ListMove path1 otherFrom otherTo) (ListMove path2 from to) | ((from == otherFrom) && (side == LeftSide)) = ListMove path2 otherTo to
-- Mimic the imperative JS code from json0.js exactly
-- (This was too tricky to do otherwise)
-- TODO: use ST monad or something to make this less error-prone
transformListMove side (ListMove path1 otherFrom otherTo) (ListMove path2 from to) = ListMove path2 newFrom newTo where
newFrom' = if (from > otherFrom) then from - 1 else from
newFrom'' = if (from > otherTo) then newFrom' + 1 else newFrom'
newFrom''' = if ((from == otherTo) && (otherFrom > otherTo)) then newFrom'' + 1 else newFrom''
newFrom = newFrom'''
newTo1 = if ((from == otherTo) && (otherFrom > otherTo) && (from == to)) then to + 1 else to
newTo2 = if (to > otherFrom) then newTo1 - 1 else newTo1
newTo3 = if ((to == otherFrom) && (to > from)) then newTo2 - 1 else newTo2
newTo4 = if (to > otherTo) then newTo3 + 1 else newTo3
newTo5 = if ((to == otherTo) && ((otherTo > otherFrom && to > from) || (otherTo < otherFrom && to < from)) && (side == RightSide)) then newTo4 + 1 else newTo4
newTo6 = if ((to == otherTo) && (not ((otherTo > otherFrom && to > from) || (otherTo < otherFrom && to < from))) && (to > from)) then newTo5 + 1 else newTo5
newTo7 = if ((to == otherTo) && (not ((otherTo > otherFrom && to > from) || (otherTo < otherFrom && to < from))) && (not (to > from)) && (to == otherFrom)) then newTo6 - 1 else newTo6
newTo = newTo7
transformListMove side op1 op2 = error [i|Invalid arguments to transformListMove: #{side}, #{op1}, #{op2}|]
safeIndex l i | i < length l = Just $ l !! i
safeIndex l i | otherwise = Nothing
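-- Illustration: safeIndex [10, 20, 30] 1 == Just 20, safeIndex [10, 20, 30] 5 == Nothing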
|
thomasjm/ot.hs
|
src/Control/OperationalTransformation/JSON/Transform.hs
|
mit
| 14,053 | 24 | 19 | 2,461 | 3,825 | 2,010 | 1,815 | 133 | 17 |
module HMenu.Select (
select
) where
import Data.List (sort)
select :: (String -> String -> Integer) -> String -> [String] -> [String]
select metric input xs = map snd $ sort scoreAndElem
where d = metric input
scores = map d xs
scoreAndElem = zip scores xs
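-- Usage sketch (the metric is supplied by the caller; editDistance is a
-- hypothetical example):
--
--   select editDistance "hsk" ["haskell", "bash", "zsh"]
--
-- returns the candidates ordered from best match (lowest score) to worst.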
|
thelastnode/hmenu
|
HMenu/Select.hs
|
mit
| 280 | 0 | 8 | 69 | 107 | 57 | 50 | 8 | 1 |
{-# LANGUAGE GADTs #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE KindSignatures #-}
{-# LANGUAGE DeriveFunctor #-}
{-# LANGUAGE Rank2Types #-}
{-# LANGUAGE RecursiveDo #-}
{-# LANGUAGE TypeSynonymInstances #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE StandaloneDeriving #-}
module CleanFRP
( BoomWorld(..)
, bInput
, bRotations
, bwSendMoveEvent
, bwLookAtEvent
, MoveEvent(..)
, LookEvent(..)
, bwSendFire
, Game(..)
, EntityData(..)
, newBoomWorld
, gameWorlds
, gamePushWorld
, gameRenderer
, gameEntityData
, FireEvent(..)
, EntityManager(..)
, Player(..)
, deltaId
, bwSendTick
, flattenDelta
, gameEntityManager
, collisionSystem
, applyDelta
, bPendingActions
, gameRenderManager
, merge
, mergeAll
, bwSendLookAtEvent
-- Gameplay
, MoveEvents(..)
, F, WrapB(..), WrapR(..)
, spawnEntity
, deltaRotate
, deltaMove
, accum
, BoomWorldDelta
, Behavior
, EntityId
, bwFire
, bwTick
, deltaRender
, deltaRenderAdd
, merge3
, merge4
, bPositions
, removeEntity
, bwMoveEvents
) where
import Prelude
import System.Exit
import Linear
import GHC.Float
import Data.Octree (Vector3 (..))
import Data.Typeable
import Control.Monad.Free
import Control.Applicative
import Control.Lens hiding (both, coerce)
import Control.Monad.RWS.Strict hiding (listen)
import qualified Control.Monad.RWS.Strict as State
import Data.IORef
import Debug.Trace
import qualified Data.Map.Strict as Map
import qualified Data.Set as Set
import Data.Maybe
import Collision
import FRP.Sodium hiding (merge, accum)
import qualified FRP.Sodium as S
import qualified FRP.Sodium.Context as SC
import Control.Monad.State.Strict
import Render.Halo
import qualified Render.Halo as H
import qualified Language.Haskell.Interpreter as I
import Language.Haskell.Interpreter hiding (lift)
import Language.Haskell.Interpreter.Unsafe
accum :: a -> Event (a -> a) -> Reactive (Behavior a)
accum z efa = S.accum z . split . coalesce mappend . fmap (:[]) $ efa
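-- A hedged reading of the above: wrapping each update in a singleton list,
-- coalescing simultaneous firings with 'mappend' and then 'split'-ting again
-- ensures that updates occurring in the same transaction are all applied
-- rather than one silently overwriting the others.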
type EntityId = Int
type Component a = Map.Map EntityId a
data MoveEvents =
SetPosition
| Move
data MoveEvent :: MoveEvents -> * where
SetPositionEvent :: (Float, Float) -> MoveEvent SetPosition
MoveEvent :: (Float, Float) -> MoveEvent Move
StopMoveEvent :: MoveEvent Move
data FireEvent where
FireEvent :: FireEvent
data Player =
PlayerOne
| PlayerTwo
deriving (Eq, Ord, Show)
data LookEvent =
LookAt (Float, Float)
| StopLookAt
-- type WorldWire a b = Sys.WorldWire BoomWorld a b
data BoomWorldInput = BoomWorldInput
{ _bwMoveEvents :: Event (MoveEvent Move)
, _bwSendMoveEvent :: MoveEvent Move -> Reactive ()
, _bwLookAtEvent :: Event LookEvent
, _bwSendLookAtEvent :: LookEvent -> Reactive ()
, _bwFire :: Event FireEvent
, _bwSendFire :: FireEvent -> Reactive ()
, _bwTick :: Event Float
, _bwSendTick :: Float -> Reactive ()
}
newBoomWorldInput :: IO BoomWorldInput
newBoomWorldInput = sync $ do
(me, sme) <- newEvent
(fe, sfe) <- newEvent
(te, ste) <- newEvent
(la, sla) <- newEvent
return $ BoomWorldInput me sme la sla fe sfe te ste
-- type RenderInfo = (SpriteRenderUnit,
data BoomWorld = BoomWorld
{ _bPositions :: Component (Float, Float)
, _bRotations :: Component Float
, _bBoundarySizes :: Component (Float, Float)
, _bOctree :: GameOctree
, _bCollision :: Event (EntityId, EntityId)
, _bInput :: BoomWorldInput
, _bNextEntityId :: EntityId
, _bPendingActions :: [Reactive ()]
} deriving (Typeable)
deriving instance Typeable Plain
deriving instance Typeable SC.Behavior
deriving instance Typeable SC.Event
deriving instance Typeable SC.Reactive
data BoomWorldDelta' next =
DeltaMove EntityId (Float, Float) next
| DeltaRotate EntityId Float next
| DeltaBoundary EntityId (Float, Float) next
| DeltaSpawn (EntityId -> Reactive ()) (EntityId -> next)
| DeltaRemoveEntity EntityId next
| DeltaId next
| DeltaRenderAdd EntityId ((EntityId, SpriteInstanceId) -> Reactive ()) (RenderControl SpriteInstanceId) (SpriteInstanceId -> next)
| DeltaRender (RenderControl ()) next
deriving (Functor, Typeable)
data BoomWorldDeltaApplied =
DeltaMove' EntityId (Float, Float)
| DeltaRotate' EntityId Float
| DeltaBoundary' EntityId (Float, Float)
| DeltaSpawn' EntityId (EntityId -> Reactive ())
| DeltaRemoveEntity' EntityId
| DeltaRenderControl' (RenderControl ())
| DeltaNewSpriteId' (EntityId, SpriteInstanceId) ((EntityId, SpriteInstanceId) -> Reactive ())
instance Show BoomWorldDeltaApplied where
show (DeltaMove' eId pos) = show ("DeltaMove'", eId, pos)
show (DeltaRotate' eId pos) = show ("DeltaRotate'", eId, pos)
show (DeltaBoundary' eId pos) = show ("DeltaBoundary'", eId, pos)
show (DeltaSpawn' eId _) = show ("DeltaSpawn'", eId)
show (DeltaRemoveEntity' eId) = show ("DeltaRemoveEntity'", eId)
show _ = "Delta"
instance Show a => Show (BoomWorldDelta' a) where
show ((DeltaMove eId pos n)) = "DeltaMove " ++ show (eId, pos) ++ "\n" ++ show n
show ((DeltaRotate eId pos n)) = "DeltaRotate " ++ show (eId, pos) ++ "\n" ++ show n
show ((DeltaBoundary eId pos n)) = "DeltaBoundary " ++ show (eId, pos) ++ "\n" ++ show n
show ((DeltaSpawn _ g)) = "DeltaSpawn\n" ++ show (g 0)
show ((DeltaRemoveEntity eId n)) = "DeltaRemoveEntity " ++ show (eId) ++ "\n" ++ show n
show ((DeltaId n)) = show n
show _ = "Delta"
data EntityData = EntityData
{ _p1Sprites :: (SpriteInstanceId, SpriteInstanceId, SpriteInstanceId)
, _p2Sprites :: (SpriteInstanceId, SpriteInstanceId, SpriteInstanceId)
, _edBullets :: Map.Map EntityId SpriteInstanceId
}
data EntityManager = EntityManager
{ _emNextEntityId :: EntityId
} deriving Show
data Game = Game
{ _gameWorlds :: !(Map.Map Player (Behavior BoomWorld))
, _gamePushWorld :: Map.Map Player (BoomWorld -> Reactive ())
-- , _gameWires :: !(Map.Map Player (WorldWire () ()))
, _gameRenderManager :: !RenderManager
, _gameRenderer :: !Renderer
, _gameEntityData :: !(Maybe EntityData)
, _gameEntityManager :: !(EntityManager)
}
makeLenses ''EntityManager
makeLenses ''Game
makeLenses ''EntityData
instance Show Game where
show g = show "Game" -- (g^.gameWorlds)
deltaId :: BoomWorldDelta ()
deltaId = liftF (DeltaId ())
type BoomWorldDelta = Free BoomWorldDelta'
deltaMove :: EntityId -> (Float, Float) -> BoomWorldDelta ()
deltaMove eId pos = liftF (DeltaMove eId pos ())
deltaRotate :: EntityId -> Float -> BoomWorldDelta ()
deltaRotate eId rot = liftF (DeltaRotate eId rot ())
spawnEntity :: (EntityId -> Reactive ()) -> BoomWorldDelta EntityId
spawnEntity pushId = liftF (DeltaSpawn pushId id)
removeEntity :: EntityId -> BoomWorldDelta ()
removeEntity eId = liftF (DeltaRemoveEntity eId ())
deltaRenderAdd eId pushId rc = liftF (DeltaRenderAdd eId pushId rc id)
deltaRender rc = liftF (DeltaRender rc ())
makeLenses ''BoomWorld
makeLenses ''BoomWorldInput
merge a b = split . coalesce mappend . fmap (:[]) $ S.merge a b
merge3 a b c = split . coalesce mappend . fmap (:[]) $ S.merge (S.merge a b) c
merge4 a b c d = split . coalesce mappend . fmap (:[]) $ S.merge (S.merge (S.merge a b) c) d
add (x, y) (x0, y0) = (x + x0, y + y0)
flattenDelta :: BoomWorldDelta a -> State (EntityManager, Renderer) [BoomWorldDeltaApplied]
flattenDelta d@(Free (DeltaMove eId pos n)) = do
rc <- flattenDelta n
return $ (DeltaMove' eId pos) : rc
flattenDelta d@(Free (DeltaRotate eId rot n)) = do
rc <- flattenDelta n
return $ (DeltaRotate' eId rot) : rc
flattenDelta d@(Free (DeltaBoundary eId boundary n)) = do
rc <- flattenDelta n
return $ (DeltaBoundary' eId boundary) : rc
flattenDelta d@(Free (DeltaSpawn r g)) = do
eId <- use $ _1.emNextEntityId
_1.emNextEntityId += 1
rc <- flattenDelta (g eId)
return $ (DeltaSpawn' eId r) : rc
flattenDelta d@(Free (DeltaRemoveEntity eId n)) = do
rc <- flattenDelta n
return $ (DeltaRemoveEntity' eId) : rc
flattenDelta d@(Free (DeltaId n)) = do
rc <- flattenDelta n
return $ rc
flattenDelta d@(Free (DeltaRenderAdd eId pushRea rc f))= do
renderer <- use _2
let (rcResult, renderer') = runState (getRenderControlResult rc) renderer
_2 .= renderer'
res <- flattenDelta (f rcResult)
return $ (DeltaNewSpriteId' (eId, rcResult) pushRea):(DeltaRenderControl' (void rc)):res
flattenDelta (Free (DeltaRender rc n)) = do
res <- flattenDelta n
return $ (DeltaRenderControl' rc):res
flattenDelta d@(Pure _) = do
return []
applyDelta :: [BoomWorldDeltaApplied] -> [BoomWorldDeltaApplied]
-> BoomWorld -> BoomWorld
-> (BoomWorld, BoomWorld, RenderControl ()) -- (BoomWorld, BoomWorld)
applyDelta changeList1 changeList2 bw1 bw2 = (bw1', bw2',
--handleEvents changeList1 changeList2
foldr (>>) (Pure ()) . catMaybes . map collectRC $ changeList1 ++ changeList2
)
--return ())
where
collectRC (DeltaRenderControl' rc) = Just rc
collectRC _ = Nothing
(_, bw1') = runState (mapM_ applyDelta' changeList1) bw1
(_, bw2') = runState (mapM_ applyDelta' changeList2) bw2
applyDelta' :: BoomWorldDeltaApplied -> State BoomWorld ()
applyDelta' d@((DeltaMove' id pos)) = do
let mod (Just oldPos) = Just $ add oldPos pos
mod (Nothing) = Just pos
bPositions %= Map.alter mod id
applyDelta' d@((DeltaRotate' eId rot)) = do
let mod (Just oldRot) = Just $ oldRot + rot
mod (Nothing) = Just rot
bRotations %= Map.alter mod eId
applyDelta' d@((DeltaBoundary' eId boundary)) = do
return ()
applyDelta' d@((DeltaSpawn' eId r)) = do
bPendingActions %= mappend [(r eId)]
applyDelta' (DeltaNewSpriteId' sId r) = do
bPendingActions %= mappend [(r sId)]
applyDelta' (DeltaRenderControl' _) = do
return ()
applyDelta' d@((DeltaRemoveEntity' eId)) = do
return ()
collisionSystem :: [BoomWorldDeltaApplied] -> RWS () [BoomWorldDeltaApplied] GameOctree ()
collisionSystem deltas = mapM_ collisionSystem' deltas
where
collisionSystem' :: BoomWorldDeltaApplied -> RWS () [BoomWorldDeltaApplied] GameOctree ()
collisionSystem' f@((DeltaMove' id (dx, dy))) = do
mObj <- use $ octreeObject id
case mObj of
Just obj -> do
let transaction = do
octreeUpdate [obj & ooPosition %~ (\(x, y) -> (x + dx, y + dy))]
octreeQueryObject id
octree <- State.get
let (collisions, newOctree) = runState transaction octree
if collisions == [] then do
put newOctree
tell $ [(DeltaMove' id (dx, dy))]
else
-- lift $ liftF (DeltaCollision collisions)
return ()
Nothing -> tell [(DeltaMove' id (dx, dy))]
collisionSystem' s@(DeltaSpawn' eId _) = do
modify (execState $ octreeUpdateInsert [(eId, (0, 0), [(0, 5), (5, 5), (5, 0), (0, 0)])])
tell [s]
return ()
collisionSystem' delta = tell [delta]
run :: IO F
run = do
-- r <- runInterpreter initModule
r <- unsafeRunInterpreterWithArgs ["-package-db .cabal-sandbox/x86_64-linux-ghc-7.8.2-packages.conf.d/"] initModule
print "it worked"
case r of
Left err -> do
print err
exitFailure
return $ const (WrapR $ return (never, const (return ())))
Right func ->
return $ func
newtype WrapB = WrapB { unWrapB :: Behavior BoomWorld } deriving (Typeable)
newtype WrapR = WrapR { unWrapR :: Reactive (Event (BoomWorldDelta ()), () -> Reactive ()) } deriving (Typeable)
type F = WrapB -> WrapR
initModule :: Interpreter F
initModule = do
I.set [languageExtensions :=
[RecursiveDo
]]
setImports ["Prelude"]
loadModules ["Gameplay"]
setTopLevelModules ["Gameplay"]
-- fun <- interpret "enterTheGame" (as :: SC.Behavior Plain BoomWorld -> Reactive (Event (BoomWorldDelta ())))
t <- interpret "enterTheGame" (as :: F)
-- let fun = const (return never) :: F
return t
newBoomWorld :: IO (Behavior BoomWorld, BoomWorld -> Reactive ())
newBoomWorld = do
input <- newBoomWorldInput
sync . newBehavior $ BoomWorld (Map.fromList [(1, (0, 0))]) Map.empty Map.empty newOctree never input 2 []
where
initialCommands = do
eId <- spawnEntity (const (return ()))
_ <- deltaMove eId (20, 20)
return ()
mergeAll :: [Event a] -> Event a
mergeAll events = S.split $ fmap (:[]) $ foldr (S.merge) never events
|
mfpi/boom
|
src/CleanFRP.hs
|
mit
| 12,964 | 0 | 25 | 3,009 | 4,277 | 2,278 | 1,999 | -1 | -1 |
module Main where
import Hunch.Options.Data
import Hunch.Options.CommandLine
import Hunch.Runner
import Data.Maybe (isNothing)
import Control.Monad (when)
main :: IO ()
main = withOptions $ \opts -> do
let versionOpt = version opts
simulateOpt = simulate opts
noInput = isNothing . input $ opts
-- Priority on version flag, then simulation flag.
-- If no input is given for simulation or default mode, an error
-- is raised and helptext is shown.
when versionOpt $ printVersion opts
when noInput showHelpText
when simulateOpt $ runSimulation opts
runCreation opts
|
loganbraga/hunch
|
app/Main.hs
|
mit
| 612 | 0 | 13 | 133 | 141 | 73 | 68 | 15 | 1 |
{-# LANGUAGE GeneralizedNewtypeDeriving #-} -- haskell-mode wants this
{-# LANGUAGE LambdaCase #-}
module PPL2.CodeGen.Builder where
import PPL2.Prelude
import PPL2.VM.Types
-- ----------------------------------------
newtype Builder a = BU {unBU :: ([a] -> [a])}
instance Semigroup (Builder a) where
BU x <> BU y = BU $ x . y
instance Monoid (Builder a) where
mempty = BU id
builder2List :: Builder a -> [a]
builder2List = ($ []) . unBU
-- ----------------------------------------
type Code = Builder AInstr
gi :: AInstr -> Code
gi i = BU (i:)
toACode :: Code -> ACode
toACode = builder2List
-- ----------------------------------------
gLoad :: Address -> Code
gLoad = gi . Load
gLoadInt :: Int -> Code
gLoadInt = gi . LoadI
gLoadInd :: Code
gLoadInd = gi LoadInd
gLoadLab :: Label -> Code
gLoadLab = gi . LoadLab
gStore :: Address -> Code
gStore = gi . Store
gStoreInd :: Code
gStoreInd = gi StoreInd
gPop :: Code
gPop = gi Pop
gDup :: Offset -> Code
gDup = gi . Dup
gComp :: Mnemonic -> Code
gComp = gi . Comp
gJump :: Label -> Code
gJump = gi . Jump
gLabel :: Label -> Code
gLabel = gi . Label
gSRJump :: Label -> Code
gSRJump = gi . SRJump
gSRJumpInd :: Code
gSRJumpInd = gi SRJumpInd
gBr :: Bool -> Label -> Code
gBr b = gi . Br b
{-
gBrFalse :: Label -> Code
gBrFalse = gi . Br False
gBrTrue :: Label -> Code
gBrTrue = gi . Br True
-- -}
gEnter :: Offset -> Code
gEnter = gi . Enter
gLeave :: Code
gLeave = gi Leave
gTerminate :: Code
gTerminate = gi Term
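-- Small example combining the smart constructors above (illustrative, not
-- part of the original module):
--
--   toACode (gLoadInt 1 <> gLoadInt 2 <> gPop) == [LoadI 1, LoadI 2, Pop]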
-- ----------------------------------------
|
UweSchmidt/ppl2
|
src/PPL2/CodeGen/Builder.hs
|
mit
| 1,542 | 0 | 9 | 313 | 477 | 266 | 211 | 51 | 1 |
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE ExistentialQuantification #-}
module MemoryManager.Types where
import SoOSiM
data MemorySource
= MemorySource
{ baseAddress :: Int
, scope :: Int
, sourceId :: Maybe ComponentId
}
data MemState =
MemState { addressLookup :: [MemorySource]
}
data MemCommand = Register MemorySource
| Read Int
| forall a . Typeable a => Write Int a
deriving Typeable
data MemoryManager = MemoryManager
|
christiaanb/SoOSiM
|
examples/MemoryManager/Types.hs
|
mit
| 515 | 0 | 9 | 146 | 102 | 61 | 41 | 16 | 0 |
map :: (a -> b) -> [a] -> [b]
map _ [] = []
map f (x:xs) = f x : map f xs
|
iharh/fp-by-example
|
tex/src/map_hof.hs
|
mit
| 77 | 0 | 7 | 27 | 70 | 36 | 34 | 3 | 1 |
--set difference on ordered lists
minus :: (Ord a) => [a] -> [a] -> [a]
minus (x:xs) (y:ys) = case (compare x y) of
LT -> x : minus xs (y:ys)
EQ -> minus xs ys
GT -> minus (x:xs) ys
minus xs _ = xs
primesToM :: Integer -> [Integer]
primesToM m = 2 : sieve [3,5..m]
where
sieve (p:xs)
| p*p > m = p : xs
| otherwise = p : sieve (xs `minus` [p*p, p*p+2*p..])
findLargestPrimeFactor :: Integer -> Integer
findLargestPrimeFactor n = last $ filter (\p -> (n `mod` p == 0)) (primesToM (floor $ sqrt $ fromIntegral n))
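-- Quick sanity checks (illustrative):
--
--   primesToM 20 == [2,3,5,7,11,13,17,19]
--   findLargestPrimeFactor 13195 == 29
--
-- Note that only primes up to sqrt n are inspected, so this relies on the
-- largest prime factor being at most sqrt n (true for 13195).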
|
DylanSp/Project-Euler-in-Haskell
|
prob3/solution.hs
|
mit
| 596 | 0 | 15 | 188 | 314 | 165 | 149 | 13 | 3 |
listToNumber :: [Int] -> Int
listToNumber [] = 0
listToNumber (x:xs) = x*(10^(length xs)) + (listToNumber xs)
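-- e.g. listToNumber [1,2,3] == 123: each digit is scaled by the power of ten
-- given by the number of digits that follow it.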
|
mbyankova/fp2015
|
week11/listToNumber.hs
|
mit
| 109 | 0 | 10 | 16 | 65 | 34 | 31 | 3 | 1 |
{-# LANGUAGE TypeFamilies, KindSignatures, DataKinds, TypeOperators #-}
{-# LANGUAGE ScopedTypeVariables, FlexibleContexts #-}
{-# LANGUAGE BangPatterns #-}
import Prelude hiding (id, (.))
import Control.Applicative
import Control.Category
import Control.Monad
import Data.Foldable
import Data.Monoid
import Data.Traversable
import Data.Char
import Data.IORef
import Data.Maybe
import Data.Proxy
import Data.Word
import Control.DeepSeq
import Data.Functor.Identity
import Data.Random
import Data.Random.Distribution.Categorical
import Data.Vector (Vector)
import qualified Data.Vector.Generic as V
import qualified Data.Vector.Unboxed as U
import qualified Numeric.LinearAlgebra.HMatrix as HM
import qualified Criterion
import AI.Funn.Common
import AI.Funn.Flat
import AI.Funn.LSTM
import AI.Funn.Mixing
import AI.Funn.Network
import AI.Funn.RNN
import AI.Funn.SGD
import AI.Funn.SomeNat
import GHC.TypeLits
type Layer = Network Identity
sampleIO :: RVar a -> IO a
sampleIO v = runRVar v StdRandom
blob :: [Double] -> Blob n
blob xs = Blob (V.fromList xs)
deepseqM :: (Monad m, NFData a) => a -> m ()
deepseqM x = deepseq x (return ())
addParameters :: Parameters -> Parameters -> Parameters
addParameters (Parameters x) (Parameters y) = Parameters (x + y)
scaleParameters :: Double -> Parameters -> Parameters
scaleParameters x (Parameters y) = Parameters (HM.scale x y)
norm :: Parameters -> Double
norm (Parameters xs) = sqrt $ V.sum $ V.map (^2) xs
checkGradient :: forall a. (KnownNat a) => Network Identity (Blob a) () -> IO ()
checkGradient network = do parameters <- sampleIO (initialise network)
input <- sampleIO (generateBlob $ uniform 0 1)
let (e, d_input, d_parameters) = runNetwork' network parameters input
d1 <- sampleIO (V.replicateM a (uniform (-ε) ε))
d2 <- sampleIO (V.replicateM (params network) (uniform (-ε) ε))
let parameters' = Parameters (V.zipWith (+) (getParameters parameters) d2)
input' = input ## Blob d1
let (e', _, _) = runNetwork' network parameters' input'
δ_expected = sum (V.toList $ V.zipWith (*) (getBlob d_input) d1)
+ sum (V.toList $ V.zipWith (*) (getParameters d_parameters) d2)
print (e' - e, δ_expected)
where
a = fromIntegral (natVal (Proxy :: Proxy a)) :: Int
ε = 0.000001
xorData :: Vector (Blob 2, Blob 1)
xorData = V.fromList [
(blob [0, 0], blob [0]),
(blob [0, 1], blob [1]),
(blob [1, 0], blob [1]),
(blob [1, 1], blob [0])]
main :: IO ()
main = return ()
|
nshepperd/funn
|
mhug-talk-15/demo.hs
|
mit
| 3,023 | 0 | 16 | 953 | 961 | 521 | 440 | 68 | 1 |
--
-- The following iterative sequence is defined for the set of positive integers:
-- n -> n/2 (n is even)
-- n -> 3n + 1 (n is odd)
-- Using the rule above and starting with 13, we generate the following sequence:
-- 13 -> 40 -> 20 -> 10 -> 5 -> 16 -> 8 -> 4 -> 2 -> 1
--
-- It can be seen that this sequence (starting at 13 and finishing at 1) contains 10 terms. Although it has not been proved yet (Collatz Problem), it is thought that all starting numbers finish at 1.
--
-- Which starting number, under one million, produces the longest chain?
--
-- NOTE: Once the chain starts the terms are allowed to go above one million.
--
searchTo = 1000000
nextNumber :: Integer -> Integer
nextNumber n
| r == 0 = q
| otherwise = 3*n + 1
where (q,r) = quotRem n 2
seqLen2 :: Int -> Integer -> Int
seqLen2 a 1 = a
seqLen2 a n = seqLen2 (a+1) (nextNumber n)
sequenceLength :: Integer -> Int
sequenceLength = seqLen2 1
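-- e.g. sequenceLength 13 == 10, matching the 10-term chain shown above.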
longestSequence = snd $ maximum [(sequenceLength a, a) | a <- [1..searchTo]]
main = putStrLn $ show $ longestSequence
|
stu-smith/project-euler-haskell
|
Euler-014b.hs
|
mit
| 1,056 | 0 | 10 | 233 | 200 | 109 | 91 | 13 | 1 |
-- | Provides an API for managing values that may be extracted from or injected
-- into a sass source file.
module Text.Sass.Values
(
SassMapEntry
, SassValue (..)
, Lib.SassSeparator (..)
) where
import qualified Bindings.Libsass as Lib
-- | Entry of 'SassMap'.
type SassMapEntry = (SassValue, SassValue)
-- | Represents value used by libsass.
data SassValue = SassBool Bool -- ^ Boolean value.
| SassNumber Double String -- ^ Number (value and unit).
| SassColor {
sassColorR :: Double,
sassColorG :: Double,
sassColorB :: Double,
sassColorA :: Double
} -- ^ RGBA color.
| SassString String -- ^ String
| SassList [SassValue] Lib.SassSeparator
-- ^ List of 'SassValue's.
| SassMap [SassMapEntry] -- ^ Map.
| SassNull -- ^ Null value.
| SassWarning String -- ^ Warning with message.
| SassError String -- ^ Error with message.
deriving (Eq, Show)
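-- For instance, a CSS value like @rgba(255, 0, 0, 0.5)@ would be represented
-- as @SassColor 255 0 0 0.5@ (assuming libsass's usual 0-255 channel range
-- and 0-1 alpha).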
|
jakubfijalkowski/hsass
|
Text/Sass/Values.hs
|
mit
| 1,113 | 0 | 8 | 410 | 155 | 103 | 52 | 21 | 0 |
-- Functions from http://learnyouahaskell.com/syntax-in-functions
lucky :: (Integral a) => a -> String
lucky x = if x ==7
then "LUCKY NUMBER SEVEN!"
else "Sorry, out of luck"
-- Using pattern matching
lucky' :: (Integral a) => a -> String
lucky' 7 = "LUCKY NUMBER SEVEN"
lucky' x = "Sorry, out of luck"
-- Defining the factorial recursively
factorial :: (Integral a) => a -> a
factorial 0 = 1
factorial n = n * factorial (n - 1)
-- Adding 2D vectors using pair tuples
addVectors :: (Num a) => (a,a) -> (a,a) -> (a,a)
addVectors (x1, y1) (x2, y2) = (x1 + x2, y1 + y2)
-- Creating the triple tuple versions of
-- fst and snd (those only work on pair tuples)
first :: (a, b, c) -> a
first (x, _, _) = x
second :: (a, b, c) -> b
second(_, y, _) = y
third :: (a, b, c) -> c
third (_, _, z) = z
-- Alternative implementation of the head function
head' :: [a] -> a
head' [] = error "Can't call head on an empty list, dummy!"
head' (x:_) = x
-- trivial function that tells us some of the first
-- elements of the list in (in)convenient English form.
tell :: (Show a) => [a] -> String
tell [] = "The list is empty"
tell (x:[]) = "The list has one element: " ++ show x
tell (x:y:[]) = "The list has two elements: " ++ show x ++
" and " ++ show y
tell (x:y:_) = "This list is long. The first two elements are: " ++ show x ++
" and " ++ show y
-- implementing length using pattern matching
-- and recursion
length' :: (Num b) => [a] -> b
length' [] = 0
length' (_:xs) = 1 + length' xs
-- implementing sum
sum' :: (Num a) => [a] -> a
sum' [] = 0
sum' (x:xs) = x + sum' xs
-- Using Guards
bmiTell :: (RealFloat a) => a -> a -> String
bmiTell weight height
| weight / height ^ 2 <= 18.5 = "You're underweight, you emo, you!"
| weight / height ^ 2 <= 25.0 = "You're supposedly normal. Pffft, I bet you're ugly!"
| weight / height ^ 2 <= 30.0 = "You're fat! Lose some weight, fatty!"
| otherwise = "You're a whale, congratulations!"
-- implementing max using Guards
max' :: (Ord a) => a -> a -> a
max' a b
| a > b = a
| otherwise = b
-- Not only can we call functions as infix with backticks,
-- we can also define them using backticks. Sometimes it's
-- easier to read that way.
myCompare :: (Ord a) => a -> a -> Ordering
a `myCompare` b
| a > b = GT
| a == b = EQ
| a < b = LT
-- Redefining bmiTell using a where binding
-- to not repeat ourselves
bmiTell' :: (RealFloat a) => a -> a -> String
bmiTell' weight height
| bmi <= skinny = "You're underweight, you emo, you!"
| bmi <= normal = "You're supposedly normal. Pffft, I bet you're ugly!"
| bmi <= fat = "You're fat! Lose some weight, fatty!"
| otherwise = "You're a whale, congratulations!"
where bmi = weight / height ^ 2
(skinny, normal, fat) = (18.5, 25.0, 30.0)
-- You can also define new functions in the where blocks
-- This function takes a list of (weight, height) pair tuples
-- and returns a list with the bmis for each pair
calcBmis :: (RealFloat a) => [(a, a)] -> [a]
calcBmis xs = [bmi w h | (w, h) <- xs]
where bmi weight height = weight / height ^ 2
cylinderArea :: (RealFloat a) => a -> a -> a
cylinderArea r h =
let sideArea = 2 * pi * r * h
topArea = pi * r^2
in sideArea + 2 * topArea
-- Redefining calcBmis using let bindings
-- inside list comprehensions
calcBmis' :: (RealFloat a) => [(a, a)] -> [a]
calcBmis' xs = [bmi | (w, h) <- xs, let bmi = w / h ^ 2]
-- Case expressions
head'' :: [a] -> a
head'' xs = case xs of [] -> error "No head for empty lists!"
(x:_) -> x
-- Case expressions can be used anywhere
describeList :: [a] -> String
describeList xs = "The list is " ++
case xs of [] -> "empty."
[x] -> "a singleton list."
xs -> "a longer list."
-- This could have also been
describeList' :: [a] -> String
describeList' xs = "The list is " ++ what xs
where what [] = "empty."
what [x] = "a singleton list."
what xs = "a longer list."
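-- e.g. describeList [] == "The list is empty."
--      describeList [1] == "The list is a singleton list."
--      describeList [1,2,3] == "The list is a longer list."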
|
jeiros/Haskell-Tutorial
|
functions_page3.hs
|
mit
| 4,055 | 0 | 11 | 1,050 | 1,331 | 717 | 614 | 80 | 3 |
module Rules (applyRules) where
import Lex
applyRules :: Cmplx -> Cmplx -> [Cmplx]
applyRules a b = forwardApplication a b ++
backwardApplication a b ++
forwardComposition a b ++
forwardCrossingComposition a b ++
backwardComposition a b ++
backwardCrossingComposition a b ++
forwardSubstitution a b ++
forwardCrossingSubstitution a b ++
backwardSubstitution a b ++
backwardCrossingSubstitution a b
where
forwardApplication :: Cmplx -> Cmplx -> [Cmplx]
forwardApplication (CmplxTree lhs Forw rhs) x = [lhs | rhs == x]
forwardApplication _ _ = []
backwardApplication :: Cmplx -> Cmplx -> [Cmplx]
backwardApplication x (CmplxTree lhs Back rhs) = [lhs | rhs == x]
backwardApplication _ _ = []
forwardComposition :: Cmplx -> Cmplx -> [Cmplx]
forwardComposition (CmplxTree lhs Forw rhs) (CmplxTree lhs' Forw rhs') = [CmplxTree lhs Forw rhs' | rhs == lhs']
forwardComposition _ _ = []
forwardCrossingComposition :: Cmplx -> Cmplx -> [Cmplx]
forwardCrossingComposition (CmplxTree lhs Forw rhs) (CmplxTree lhs' Back rhs') = [CmplxTree lhs Back rhs' | rhs == lhs']
forwardCrossingComposition _ _ = []
backwardComposition :: Cmplx -> Cmplx -> [Cmplx]
backwardComposition (CmplxTree lhs Back rhs) (CmplxTree lhs' Back rhs') = [CmplxTree lhs' Back rhs | lhs == rhs']
backwardComposition _ _ = []
backwardCrossingComposition :: Cmplx -> Cmplx -> [Cmplx]
backwardCrossingComposition (CmplxTree lhs Forw rhs) (CmplxTree lhs' Back rhs') = [CmplxTree lhs' Forw rhs | lhs == rhs']
backwardCrossingComposition _ _ = []
forwardSubstitution :: Cmplx -> Cmplx -> [Cmplx]
forwardSubstitution (CmplxTree (CmplxTree lhs'' Forw rhs'') Forw _) (CmplxTree lhs' Forw rhs') = [CmplxTree lhs'' Forw rhs' | rhs'' == lhs']
forwardSubstitution _ _ = []
forwardCrossingSubstitution :: Cmplx -> Cmplx -> [Cmplx]
forwardCrossingSubstitution (CmplxTree (CmplxTree lhs'' Forw rhs'') Back _) (CmplxTree lhs' Back rhs') = [CmplxTree lhs'' Back rhs' | rhs'' == lhs']
forwardCrossingSubstitution _ _ = []
backwardSubstitution :: Cmplx -> Cmplx -> [Cmplx]
backwardSubstitution (CmplxTree lhs Back rhs) (CmplxTree (CmplxTree lhs'' Back rhs'') Back _) = [CmplxTree lhs'' Back rhs | lhs == rhs'']
backwardSubstitution _ _ = []
backwardCrossingSubstitution :: Cmplx -> Cmplx -> [Cmplx]
backwardCrossingSubstitution (CmplxTree lhs Forw rhs) (CmplxTree (CmplxTree lhs'' Back rhs'') Forw _) = [CmplxTree lhs'' Forw rhs | lhs == rhs'']
backwardCrossingSubstitution _ _ = []
|
agrasley/HaskellCCG
|
Rules.hs
|
mit
| 2,887 | 0 | 14 | 823 | 892 | 455 | 437 | 43 | 11 |
module ZoomHub.Types.Content
( Content,
contentId,
contentType,
contentURL,
contentState,
contentInitializedAt,
contentActiveAt,
contentCompletedAt,
contentMIME,
contentSize,
contentProgress,
contentNumViews,
contentError,
contentDZI,
contentSubmitterEmail,
contentVerificationToken,
contentVerifiedAt,
version,
)
where
import Data.Int (Int32)
import ZoomHub.Types.Content.Internal
( Content,
contentActiveAt,
contentCompletedAt,
contentDZI,
contentError,
contentId,
contentInitializedAt,
contentMIME,
contentNumViews,
contentProgress,
contentSize,
contentState,
contentSubmitterEmail,
contentType,
contentURL,
contentVerificationToken,
contentVerifiedAt,
)
-- Content versions
--
-- Version 3: From 2009-09-10 until 2016-04-04
--
-- Version 4: From 2009-11-14 until 2014-09-03
--
-- Version 5: From 2021-09-16
-- - Introduces
-- - `submitter_email`
-- - `verification_token`
-- - `verified_at`
-- for tracking the author of a submission or upload.
version :: Int32
version = 5
|
zoomhub/zoomhub
|
src/ZoomHub/Types/Content.hs
|
mit
| 1,128 | 0 | 5 | 250 | 157 | 108 | 49 | 40 | 1 |
module Parser where
import Control.Applicative((<*))
import Text.Parsec
import Text.Parsec.String
import Text.Parsec.Expr
import Text.Parsec.Token
import Text.Parsec.Language
import CTLModelChecker
def :: LanguageDef st
def = emptyDef{ commentStart = "{*"
, commentEnd = "*}"
, identStart = letter
, identLetter = alphaNum
, opStart = oneOf "{}[]/\\-t?!UO"
, opLetter = oneOf "{}[]/\\-t?!UO"
, reservedOpNames = ["/\\", "\\/", "?{}", "!{}", "?[]", "![]", "?U", "!U", "?O", "!O","true", "-"]
}
TokenParser{ parens = m_parens
, identifier = m_identifier
, reservedOp = m_reservedOp
, reserved = m_reserved
, semiSep1 = m_semiSep1
, whiteSpace = m_whiteSpace } = makeTokenParser def
orF :: Formula -> Formula -> Formula
orF x y = Not (And (Not x) (Not y))
eDiam :: Formula -> Formula
eDiam x = EUntil T x
aDiam :: Formula -> Formula
aDiam x = AUntil T x
eBox :: Formula -> Formula
eBox x = Not (aDiam (Not x))
aBox :: Formula -> Formula
aBox x = Not (eDiam (Not x))
formparser :: Parser Formula
formparser = buildExpressionParser formparser_table formparser_term <?> "formula"
formparser_table = [
[Prefix (m_reservedOp "-" >> return Not)],
[Infix (m_reservedOp "/\\" >> return And) AssocLeft],
[Infix (m_reservedOp "\\/" >> return orF) AssocLeft],
[Infix (m_reservedOp "?U" >> return EUntil) AssocLeft],
[Infix (m_reservedOp "!U" >> return AUntil) AssocLeft],
[Prefix (m_reservedOp "?O" >> return ENext)],
[Prefix (m_reservedOp "!O" >> return ANext)],
[Prefix (m_reservedOp "?{}" >> return eDiam)],
[Prefix (m_reservedOp "!{}" >> return aDiam)],
[Prefix (m_reservedOp "?[]" >> return eBox)],
[Prefix (m_reservedOp "![]" >> return aBox)]
]
formparser_term =
try(m_parens formparser)
<|> do { x <- m_identifier ; return (A x) }
parseF :: String -> Formula
parseF inp = case parse (m_whiteSpace >> formparser) "" inp of
{ Left err -> A "Error"
; Right ans -> ans
}
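-- A couple of illustrative parses (sketch; the shapes follow the operator
-- table above):
--
--   parseF "p /\\ q" evaluates to And (A "p") (A "q")
--   parseF "- p"     evaluates to Not (A "p")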
|
goodlyrottenapple/CTLModelChecker
|
Parser.hs
|
mit
| 2,116 | 0 | 10 | 539 | 725 | 389 | 336 | 53 | 2 |
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE MultiParamTypeClasses #-}
module Yesod.Core.Dispatch
( -- * Quasi-quoted routing
parseRoutes
, parseRoutesNoCheck
, parseRoutesFile
, parseRoutesFileNoCheck
, mkYesod
, mkYesodWith
-- ** More fine-grained
, mkYesodData
, mkYesodSubData
, mkYesodDispatch
, mkYesodSubDispatch
-- *** Helpers
, defaultGen
, getGetMaxExpires
-- ** Path pieces
, PathPiece (..)
, PathMultiPiece (..)
, Texts
-- * Convert to WAI
, toWaiApp
, toWaiAppPlain
, toWaiAppYre
, warp
, warpDebug
, warpEnv
, mkDefaultMiddlewares
, defaultMiddlewaresNoLogging
-- * WAI subsites
, WaiSubsite (..)
, WaiSubsiteWithAuth (..)
) where
import Prelude hiding (exp)
import Yesod.Core.Internal.TH
import Language.Haskell.TH.Syntax (qLocation)
import Web.PathPieces
import qualified Network.Wai as W
import Data.ByteString.Lazy.Char8 ()
import Data.Bits ((.|.), finiteBitSize, shiftL)
import Data.Text (Text)
import qualified Data.ByteString as S
import qualified Data.ByteString.Lazy as BL
import qualified Data.ByteString.Char8 as S8
import Data.ByteString.Builder (byteString, toLazyByteString)
import Network.HTTP.Types (status301, status307)
import Yesod.Routes.Parse
import Yesod.Core.Types
import Yesod.Core.Class.Yesod
import Yesod.Core.Class.Dispatch
import Yesod.Core.Internal.Run
import Text.Read (readMaybe)
import System.Environment (getEnvironment)
import System.Entropy (getEntropy)
import Control.AutoUpdate (mkAutoUpdate, defaultUpdateSettings, updateAction, updateFreq)
import Yesod.Core.Internal.Util (getCurrentMaxExpiresRFC1123)
import Network.Wai.Middleware.Autohead
import Network.Wai.Middleware.AcceptOverride
import Network.Wai.Middleware.RequestLogger
import Network.Wai.Middleware.Gzip
import Network.Wai.Middleware.MethodOverride
import qualified Network.Wai.Handler.Warp
import System.Log.FastLogger
import Control.Monad.Logger
import Control.Monad (when)
import qualified Paths_yesod_core
import Data.Version (showVersion)
-- | Convert the given argument into a WAI application, executable with any WAI
-- handler. This function will provide no middlewares; if you want commonly
-- used middlewares, please use 'toWaiApp'.
toWaiAppPlain :: YesodDispatch site => site -> IO W.Application
toWaiAppPlain site = do
logger <- makeLogger site
sb <- makeSessionBackend site
getMaxExpires <- getGetMaxExpires
return $ toWaiAppYre YesodRunnerEnv
{ yreLogger = logger
, yreSite = site
, yreSessionBackend = sb
, yreGen = defaultGen
, yreGetMaxExpires = getMaxExpires
}
-- | Generate a random number uniformly distributed in the full range
-- of 'Int'.
--
-- Note: Before 1.6.20, this generated a pseudo-random number in an
-- unspecified range whose size may not have been a power of 2. Since
-- 1.6.20, this uses a secure entropy source and generates in the full
-- range of 'Int'.
--
-- @since 1.6.21.0
defaultGen :: IO Int
defaultGen = bsToInt <$> getEntropy bytes
where
bits = finiteBitSize (undefined :: Int)
bytes = div (bits + 7) 8
bsToInt = S.foldl' (\v i -> shiftL v 8 .|. fromIntegral i) 0
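-- As an added illustration of the fold above: the bytes are read
-- big-endian, so an entropy string consisting of the bytes 0x01 and 0x02
-- maps to 0x0102 = 258.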
-- | Pure low-level function to construct a WAI application. Useful
-- when you need a non-standard way to run your app, or want to embed it
-- inside another app.
--
-- @since 1.4.29
toWaiAppYre :: YesodDispatch site => YesodRunnerEnv site -> W.Application
toWaiAppYre yre req =
case cleanPath site $ W.pathInfo req of
Left pieces -> sendRedirect site pieces req
Right pieces -> yesodDispatch yre req
{ W.pathInfo = pieces
}
where
site = yreSite yre
sendRedirect :: Yesod master => master -> [Text] -> W.Application
sendRedirect y segments' env sendResponse =
sendResponse $ W.responseLBS status
[ ("Content-Type", "text/plain")
, ("Location", BL.toStrict $ toLazyByteString dest')
] "Redirecting"
where
-- Ensure that non-GET requests get redirected correctly. See:
-- https://github.com/yesodweb/yesod/issues/951
status
| W.requestMethod env == "GET" = status301
| otherwise = status307
dest = joinPath y (resolveApproot y env) segments' []
dest' =
if S.null (W.rawQueryString env)
then dest
else dest `mappend`
byteString (W.rawQueryString env)
-- | Same as 'toWaiAppPlain', but provides a default set of middlewares. This
-- set may change with future releases, but currently covers:
--
-- * Logging
--
-- * GZIP compression
--
-- * Automatic HEAD method handling
--
-- * Request method override with the _method query string parameter
--
-- * Accept header override with the _accept query string parameter
toWaiApp :: YesodDispatch site => site -> IO W.Application
toWaiApp site = do
logger <- makeLogger site
toWaiAppLogger logger site
toWaiAppLogger :: YesodDispatch site => Logger -> site -> IO W.Application
toWaiAppLogger logger site = do
sb <- makeSessionBackend site
getMaxExpires <- getGetMaxExpires
let yre = YesodRunnerEnv
{ yreLogger = logger
, yreSite = site
, yreSessionBackend = sb
, yreGen = defaultGen
, yreGetMaxExpires = getMaxExpires
}
messageLoggerSource
site
logger
$(qLocation >>= liftLoc)
"yesod-core"
LevelInfo
(toLogStr ("Application launched" :: S.ByteString))
middleware <- mkDefaultMiddlewares logger
return $ middleware $ toWaiAppYre yre
-- | A convenience method to run an application using the Warp webserver on the
-- specified port. Automatically calls 'toWaiApp'. Provides a default set of
-- middlewares. This set may change at any point without a breaking version
-- number. Currently, it includes:
--
-- * Logging
--
-- * GZIP compression
--
-- * Automatic HEAD method handling
--
-- * Request method override with the _method query string parameter
--
-- * Accept header override with the _accept query string parameter
--
-- If you need more fine-grained control of middlewares, please use 'toWaiApp'
-- directly.
--
-- Since 1.2.0
warp :: YesodDispatch site => Int -> site -> IO ()
warp port site = do
logger <- makeLogger site
toWaiAppLogger logger site >>= Network.Wai.Handler.Warp.runSettings (
Network.Wai.Handler.Warp.setPort port $
Network.Wai.Handler.Warp.setServerName serverValue $
Network.Wai.Handler.Warp.setOnException (\_ e ->
when (shouldLog' e) $
messageLoggerSource
site
logger
$(qLocation >>= liftLoc)
"yesod-core"
LevelError
(toLogStr $ "Exception from Warp: " ++ show e))
Network.Wai.Handler.Warp.defaultSettings)
where
shouldLog' = Network.Wai.Handler.Warp.defaultShouldDisplayException
serverValue :: S8.ByteString
serverValue = S8.pack $ concat
[ "Warp/"
, Network.Wai.Handler.Warp.warpVersion
, " + Yesod/"
, showVersion Paths_yesod_core.version
, " (core)"
]
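-- A hedged usage sketch for 'warp' (illustrative only; @MyApp@ stands for a
-- hypothetical site type with Yesod and YesodDispatch instances defined
-- elsewhere):
--
-- > main :: IO ()
-- > main = warp 3000 MyApp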
-- | A default set of middlewares.
--
-- Since 1.2.0
mkDefaultMiddlewares :: Logger -> IO W.Middleware
mkDefaultMiddlewares logger = do
logWare <- mkRequestLogger def
{ destination = Network.Wai.Middleware.RequestLogger.Logger $ loggerSet logger
, outputFormat = Apache FromSocket
}
return $ logWare . defaultMiddlewaresNoLogging
-- | All of the default middlewares, excluding logging.
--
-- Since 1.2.12
defaultMiddlewaresNoLogging :: W.Middleware
defaultMiddlewaresNoLogging = acceptOverride . autohead . gzip def . methodOverride
-- | Deprecated synonym for 'warp'.
warpDebug :: YesodDispatch site => Int -> site -> IO ()
warpDebug = warp
{-# DEPRECATED warpDebug "Please use warp instead" #-}
-- | Runs your application using default middlewares (i.e., via 'toWaiApp'). It
-- reads port information from the PORT environment variable, as used by tools
-- such as Keter and the FP Complete School of Haskell.
--
-- Note that the exact behavior of this function may be modified slightly over
-- time to work correctly with external tools, without a change to the type
-- signature.
warpEnv :: YesodDispatch site => site -> IO ()
warpEnv site = do
env <- getEnvironment
case lookup "PORT" env of
Nothing -> error "warpEnv: no PORT environment variable found"
Just portS ->
case readMaybe portS of
Nothing -> error $ "warpEnv: invalid PORT environment variable: " ++ show portS
Just port -> warp port site
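-- For example (hypothetical shell invocation, not part of this module):
--
-- > PORT=8080 ./my-app
--
-- makes 'warpEnv' bind the server to port 8080.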
-- | Default constructor for 'yreGetMaxExpires' field. Low level
-- function for simple manual construction of 'YesodRunnerEnv'.
--
-- @since 1.4.29
getGetMaxExpires :: IO (IO Text)
getGetMaxExpires = mkAutoUpdate defaultUpdateSettings
{ updateAction = getCurrentMaxExpiresRFC1123
, updateFreq = 24 * 60 * 60 * 1000000 -- Update once per day
}
|
yesodweb/yesod
|
yesod-core/src/Yesod/Core/Dispatch.hs
|
mit
| 9,333 | 0 | 18 | 2,188 | 1,652 | 946 | 706 | 175 | 3 |
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE DeriveGeneric #-}
module Bot.NetHack.Config
( BotConfig(..) )
where
import Control.Monad
import Data.Data
import qualified Data.Text as T
import Data.Word
import Data.Yaml
import GHC.Generics
data BotConfig = BotConfig
{ playername :: !T.Text
, latency :: !Int
, password :: !(Maybe T.Text)
, nethackCommand :: [T.Text]
, webDiagnosticsPort :: !Word16
, webDiagnosticsHost :: !T.Text }
deriving ( Eq, Ord, Show, Read, Typeable, Data, Generic )
instance FromJSON BotConfig where
parseJSON (Object ob) = do
pn <- ob .: "name"
pw <- ob .: "password"
lat <- ob .: "latency"
cmd <- ob .: "nethackCommand"
port <- ob .: "webDiagnosticsPort"
host <- ob .: "webDiagnosticsHost"
when (null cmd) $ fail "FromJSON.BotConfig: nethackCommand cannot be empty."
return BotConfig { playername = pn
, password = pw
, latency = lat
, nethackCommand = cmd
, webDiagnosticsPort = port
, webDiagnosticsHost = host }
parseJSON _ = fail "FromJSON.BotConfig: expected an object."
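-- An illustrative YAML document this instance accepts (the values are made
-- up; only the field names and types come from the parser above):
--
-- > name: "adeonbot"
-- > password: null
-- > latency: 100
-- > nethackCommand: ["nethack"]
-- > webDiagnosticsPort: 8080
-- > webDiagnosticsHost: "127.0.0.1"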
|
Noeda/adeonbot
|
bot/src/Bot/NetHack/Config.hs
|
mit
| 1,205 | 0 | 12 | 322 | 312 | 170 | 142 | 45 | 0 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE StrictData #-}
{-# LANGUAGE TupleSections #-}
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-elasticmapreduce-cluster.html
module Stratosphere.Resources.EMRCluster where
import Stratosphere.ResourceImports
import Stratosphere.ResourceProperties.EMRClusterApplication
import Stratosphere.ResourceProperties.EMRClusterBootstrapActionConfig
import Stratosphere.ResourceProperties.EMRClusterConfiguration
import Stratosphere.ResourceProperties.EMRClusterJobFlowInstancesConfig
import Stratosphere.ResourceProperties.EMRClusterKerberosAttributes
import Stratosphere.ResourceProperties.EMRClusterStepConfig
import Stratosphere.ResourceProperties.Tag
-- | Full data type definition for EMRCluster. See 'emrCluster' for a more
-- convenient constructor.
data EMRCluster =
EMRCluster
{ _eMRClusterAdditionalInfo :: Maybe Object
, _eMRClusterApplications :: Maybe [EMRClusterApplication]
, _eMRClusterAutoScalingRole :: Maybe (Val Text)
, _eMRClusterBootstrapActions :: Maybe [EMRClusterBootstrapActionConfig]
, _eMRClusterConfigurations :: Maybe [EMRClusterConfiguration]
, _eMRClusterCustomAmiId :: Maybe (Val Text)
, _eMRClusterEbsRootVolumeSize :: Maybe (Val Integer)
, _eMRClusterInstances :: EMRClusterJobFlowInstancesConfig
, _eMRClusterJobFlowRole :: Val Text
, _eMRClusterKerberosAttributes :: Maybe EMRClusterKerberosAttributes
, _eMRClusterLogUri :: Maybe (Val Text)
, _eMRClusterName :: Val Text
, _eMRClusterReleaseLabel :: Maybe (Val Text)
, _eMRClusterScaleDownBehavior :: Maybe (Val Text)
, _eMRClusterSecurityConfiguration :: Maybe (Val Text)
, _eMRClusterServiceRole :: Val Text
, _eMRClusterSteps :: Maybe [EMRClusterStepConfig]
, _eMRClusterTags :: Maybe [Tag]
, _eMRClusterVisibleToAllUsers :: Maybe (Val Bool)
} deriving (Show, Eq)
instance ToResourceProperties EMRCluster where
toResourceProperties EMRCluster{..} =
ResourceProperties
{ resourcePropertiesType = "AWS::EMR::Cluster"
, resourcePropertiesProperties =
hashMapFromList $ catMaybes
[ fmap (("AdditionalInfo",) . toJSON) _eMRClusterAdditionalInfo
, fmap (("Applications",) . toJSON) _eMRClusterApplications
, fmap (("AutoScalingRole",) . toJSON) _eMRClusterAutoScalingRole
, fmap (("BootstrapActions",) . toJSON) _eMRClusterBootstrapActions
, fmap (("Configurations",) . toJSON) _eMRClusterConfigurations
, fmap (("CustomAmiId",) . toJSON) _eMRClusterCustomAmiId
, fmap (("EbsRootVolumeSize",) . toJSON) _eMRClusterEbsRootVolumeSize
, (Just . ("Instances",) . toJSON) _eMRClusterInstances
, (Just . ("JobFlowRole",) . toJSON) _eMRClusterJobFlowRole
, fmap (("KerberosAttributes",) . toJSON) _eMRClusterKerberosAttributes
, fmap (("LogUri",) . toJSON) _eMRClusterLogUri
, (Just . ("Name",) . toJSON) _eMRClusterName
, fmap (("ReleaseLabel",) . toJSON) _eMRClusterReleaseLabel
, fmap (("ScaleDownBehavior",) . toJSON) _eMRClusterScaleDownBehavior
, fmap (("SecurityConfiguration",) . toJSON) _eMRClusterSecurityConfiguration
, (Just . ("ServiceRole",) . toJSON) _eMRClusterServiceRole
, fmap (("Steps",) . toJSON) _eMRClusterSteps
, fmap (("Tags",) . toJSON) _eMRClusterTags
, fmap (("VisibleToAllUsers",) . toJSON) _eMRClusterVisibleToAllUsers
]
}
-- | Constructor for 'EMRCluster' containing required fields as arguments.
emrCluster
:: EMRClusterJobFlowInstancesConfig -- ^ 'emrcInstances'
-> Val Text -- ^ 'emrcJobFlowRole'
-> Val Text -- ^ 'emrcName'
-> Val Text -- ^ 'emrcServiceRole'
-> EMRCluster
emrCluster instancesarg jobFlowRolearg namearg serviceRolearg =
EMRCluster
{ _eMRClusterAdditionalInfo = Nothing
, _eMRClusterApplications = Nothing
, _eMRClusterAutoScalingRole = Nothing
, _eMRClusterBootstrapActions = Nothing
, _eMRClusterConfigurations = Nothing
, _eMRClusterCustomAmiId = Nothing
, _eMRClusterEbsRootVolumeSize = Nothing
, _eMRClusterInstances = instancesarg
, _eMRClusterJobFlowRole = jobFlowRolearg
, _eMRClusterKerberosAttributes = Nothing
, _eMRClusterLogUri = Nothing
, _eMRClusterName = namearg
, _eMRClusterReleaseLabel = Nothing
, _eMRClusterScaleDownBehavior = Nothing
, _eMRClusterSecurityConfiguration = Nothing
, _eMRClusterServiceRole = serviceRolearg
, _eMRClusterSteps = Nothing
, _eMRClusterTags = Nothing
, _eMRClusterVisibleToAllUsers = Nothing
}
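-- A hedged usage sketch (illustrative only; @myInstances@ stands for an
-- 'EMRClusterJobFlowInstancesConfig' built elsewhere, and 'Literal' is
-- assumed to be the usual stratosphere wrapper for literal 'Val' values):
--
-- > emrCluster myInstances (Literal "EMR_EC2_DefaultRole")
-- >            (Literal "analytics-cluster") (Literal "EMR_DefaultRole")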
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-elasticmapreduce-cluster.html#cfn-elasticmapreduce-cluster-additionalinfo
emrcAdditionalInfo :: Lens' EMRCluster (Maybe Object)
emrcAdditionalInfo = lens _eMRClusterAdditionalInfo (\s a -> s { _eMRClusterAdditionalInfo = a })
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-elasticmapreduce-cluster.html#cfn-elasticmapreduce-cluster-applications
emrcApplications :: Lens' EMRCluster (Maybe [EMRClusterApplication])
emrcApplications = lens _eMRClusterApplications (\s a -> s { _eMRClusterApplications = a })
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-elasticmapreduce-cluster.html#cfn-elasticmapreduce-cluster-autoscalingrole
emrcAutoScalingRole :: Lens' EMRCluster (Maybe (Val Text))
emrcAutoScalingRole = lens _eMRClusterAutoScalingRole (\s a -> s { _eMRClusterAutoScalingRole = a })
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-elasticmapreduce-cluster.html#cfn-elasticmapreduce-cluster-bootstrapactions
emrcBootstrapActions :: Lens' EMRCluster (Maybe [EMRClusterBootstrapActionConfig])
emrcBootstrapActions = lens _eMRClusterBootstrapActions (\s a -> s { _eMRClusterBootstrapActions = a })
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-elasticmapreduce-cluster.html#cfn-elasticmapreduce-cluster-configurations
emrcConfigurations :: Lens' EMRCluster (Maybe [EMRClusterConfiguration])
emrcConfigurations = lens _eMRClusterConfigurations (\s a -> s { _eMRClusterConfigurations = a })
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-elasticmapreduce-cluster.html#cfn-elasticmapreduce-cluster-customamiid
emrcCustomAmiId :: Lens' EMRCluster (Maybe (Val Text))
emrcCustomAmiId = lens _eMRClusterCustomAmiId (\s a -> s { _eMRClusterCustomAmiId = a })
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-elasticmapreduce-cluster.html#cfn-elasticmapreduce-cluster-ebsrootvolumesize
emrcEbsRootVolumeSize :: Lens' EMRCluster (Maybe (Val Integer))
emrcEbsRootVolumeSize = lens _eMRClusterEbsRootVolumeSize (\s a -> s { _eMRClusterEbsRootVolumeSize = a })
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-elasticmapreduce-cluster.html#cfn-elasticmapreduce-cluster-instances
emrcInstances :: Lens' EMRCluster EMRClusterJobFlowInstancesConfig
emrcInstances = lens _eMRClusterInstances (\s a -> s { _eMRClusterInstances = a })
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-elasticmapreduce-cluster.html#cfn-elasticmapreduce-cluster-jobflowrole
emrcJobFlowRole :: Lens' EMRCluster (Val Text)
emrcJobFlowRole = lens _eMRClusterJobFlowRole (\s a -> s { _eMRClusterJobFlowRole = a })
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-elasticmapreduce-cluster.html#cfn-elasticmapreduce-cluster-kerberosattributes
emrcKerberosAttributes :: Lens' EMRCluster (Maybe EMRClusterKerberosAttributes)
emrcKerberosAttributes = lens _eMRClusterKerberosAttributes (\s a -> s { _eMRClusterKerberosAttributes = a })
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-elasticmapreduce-cluster.html#cfn-elasticmapreduce-cluster-loguri
emrcLogUri :: Lens' EMRCluster (Maybe (Val Text))
emrcLogUri = lens _eMRClusterLogUri (\s a -> s { _eMRClusterLogUri = a })
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-elasticmapreduce-cluster.html#cfn-elasticmapreduce-cluster-name
emrcName :: Lens' EMRCluster (Val Text)
emrcName = lens _eMRClusterName (\s a -> s { _eMRClusterName = a })
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-elasticmapreduce-cluster.html#cfn-elasticmapreduce-cluster-releaselabel
emrcReleaseLabel :: Lens' EMRCluster (Maybe (Val Text))
emrcReleaseLabel = lens _eMRClusterReleaseLabel (\s a -> s { _eMRClusterReleaseLabel = a })
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-elasticmapreduce-cluster.html#cfn-elasticmapreduce-cluster-scaledownbehavior
emrcScaleDownBehavior :: Lens' EMRCluster (Maybe (Val Text))
emrcScaleDownBehavior = lens _eMRClusterScaleDownBehavior (\s a -> s { _eMRClusterScaleDownBehavior = a })
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-elasticmapreduce-cluster.html#cfn-elasticmapreduce-cluster-securityconfiguration
emrcSecurityConfiguration :: Lens' EMRCluster (Maybe (Val Text))
emrcSecurityConfiguration = lens _eMRClusterSecurityConfiguration (\s a -> s { _eMRClusterSecurityConfiguration = a })
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-elasticmapreduce-cluster.html#cfn-elasticmapreduce-cluster-servicerole
emrcServiceRole :: Lens' EMRCluster (Val Text)
emrcServiceRole = lens _eMRClusterServiceRole (\s a -> s { _eMRClusterServiceRole = a })
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-elasticmapreduce-cluster.html#cfn-elasticmapreduce-cluster-steps
emrcSteps :: Lens' EMRCluster (Maybe [EMRClusterStepConfig])
emrcSteps = lens _eMRClusterSteps (\s a -> s { _eMRClusterSteps = a })
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-elasticmapreduce-cluster.html#cfn-elasticmapreduce-cluster-tags
emrcTags :: Lens' EMRCluster (Maybe [Tag])
emrcTags = lens _eMRClusterTags (\s a -> s { _eMRClusterTags = a })
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-elasticmapreduce-cluster.html#cfn-elasticmapreduce-cluster-visibletoallusers
emrcVisibleToAllUsers :: Lens' EMRCluster (Maybe (Val Bool))
emrcVisibleToAllUsers = lens _eMRClusterVisibleToAllUsers (\s a -> s { _eMRClusterVisibleToAllUsers = a })
|
frontrowed/stratosphere
|
library-gen/Stratosphere/Resources/EMRCluster.hs
|
mit
| 10,375 | 0 | 15 | 1,152 | 1,801 | 1,027 | 774 | 125 | 1 |
{-# LANGUAGE TemplateHaskell #-}
{-| The Ganeti WConfd core functions.
As TemplateHaskell requires that splices be defined in a separate
module, we combine all the TemplateHaskell functionality that HTools
needs in this module (except the one for unittests).
-}
{-
Copyright (C) 2013 Google Inc.
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.
-}
module Ganeti.WConfd.Core where
import Control.Arrow ((&&&))
import Control.Monad (liftM, unless, when)
import qualified Data.Map as M
import qualified Data.Set as S
import Language.Haskell.TH (Name)
import qualified System.Random as Rand
import Ganeti.BasicTypes
import qualified Ganeti.JSON as J
import qualified Ganeti.Locking.Allocation as L
import Ganeti.Locking.Locks ( GanetiLocks(ConfigLock), LockLevel(LevelConfig)
, lockLevel, LockLevel, ClientId )
import qualified Ganeti.Locking.Waiting as LW
import Ganeti.Objects (ConfigData, DRBDSecret, LogicalVolume, Ip4Address)
import qualified Ganeti.WConfd.ConfigVerify as V
import Ganeti.WConfd.Language
import Ganeti.WConfd.Monad
import qualified Ganeti.WConfd.TempRes as T
import qualified Ganeti.WConfd.ConfigWriter as CW
-- * Functions available to the RPC module
-- Just a test function
echo :: String -> WConfdMonad String
echo = return
-- ** Configuration related functions
checkConfigLock :: ClientId -> L.OwnerState -> WConfdMonad ()
checkConfigLock cid state = do
la <- readLockAllocation
unless (L.holdsLock cid ConfigLock state la)
. failError $ "Requested lock " ++ show state
++ " on the configuration missing"
-- | Read the configuration, checking that a shared lock is held.
-- If not, the call fails.
readConfig :: ClientId -> WConfdMonad ConfigData
readConfig ident = checkConfigLock ident L.OwnShared >> CW.readConfig
-- | Write the configuration, checking that an exclusive lock is held.
-- If not, the call fails.
writeConfig :: ClientId -> ConfigData -> WConfdMonad ()
writeConfig ident cdata = do
checkConfigLock ident L.OwnExclusive
-- V.verifyConfigErr cdata
CW.writeConfig cdata
-- | Explicitly run verification of the configuration.
-- The caller doesn't need to hold the configuration lock.
verifyConfig :: WConfdMonad ()
verifyConfig = CW.readConfig >>= V.verifyConfigErr
-- *** Locks on the configuration (only transitional, will be removed later)
-- | Tries to acquire 'ConfigLock' for the client.
-- If the second parameter is set to 'True', the lock is acquired in
-- shared mode.
--
-- If the lock was successfully acquired, returns the current configuration
-- state.
lockConfig
:: ClientId
-> Bool -- ^ set to 'True' if the lock should be shared
-> WConfdMonad (J.MaybeForJSON ConfigData)
lockConfig cid shared = do
let reqtype = if shared then ReqShared else ReqExclusive
-- warn if we already have the lock, this shouldn't happen
la <- readLockAllocation
when (L.holdsLock cid ConfigLock L.OwnShared la)
. failError $ "Client " ++ show cid ++
" already holds a config lock"
waiting <- tryUpdateLocks cid [(ConfigLock, reqtype)]
liftM J.MaybeForJSON $ case waiting of
[] -> liftM Just CW.readConfig
_ -> return Nothing
-- | Release the config lock, if the client currently holds it.
unlockConfig
:: ClientId -> WConfdMonad ()
unlockConfig cid = freeLocksLevel cid LevelConfig
-- | Force the distribution of configuration without actually modifying it.
-- It is not necessary to hold a lock for this operation.
flushConfig :: WConfdMonad ()
flushConfig = forceConfigStateDistribution
-- ** Temporary reservations related functions
dropAllReservations :: ClientId -> WConfdMonad ()
dropAllReservations cid =
modifyTempResState (const $ T.dropAllReservations cid)
-- *** DRBD
computeDRBDMap :: WConfdMonad T.DRBDMap
computeDRBDMap = uncurry T.computeDRBDMap =<< readTempResState
-- Allocate a drbd minor.
--
-- The free minor will be automatically computed from the existing devices.
-- A node can be given multiple times in order to allocate multiple minors.
-- The result is the list of minors, in the same order as the passed nodes.
allocateDRBDMinor
:: T.InstanceUUID -> [T.NodeUUID] -> WConfdMonad [T.DRBDMinor]
allocateDRBDMinor inst nodes =
modifyTempResStateErr (\cfg -> T.allocateDRBDMinor cfg inst nodes)
-- Release temporary drbd minors allocated for a given instance using
-- 'allocateDRBDMinor'.
--
-- This should be called on the error paths, on the success paths
-- it's automatically called by the ConfigWriter add and update
-- functions.
releaseDRBDMinors
:: T.InstanceUUID -> WConfdMonad ()
releaseDRBDMinors inst = modifyTempResState (const $ T.releaseDRBDMinors inst)
-- *** MACs
-- Randomly generate a MAC for an instance and reserve it for
-- a given client.
generateMAC
:: ClientId -> J.MaybeForJSON T.NetworkUUID -> WConfdMonad T.MAC
generateMAC cid (J.MaybeForJSON netId) = do
g <- liftIO Rand.newStdGen
modifyTempResStateErr $ T.generateMAC g cid netId
-- Reserves a MAC for an instance in the list of temporary reservations.
reserveMAC :: ClientId -> T.MAC -> WConfdMonad ()
reserveMAC = (modifyTempResStateErr .) . T.reserveMAC
-- *** DRBDSecrets
-- Randomly generate a DRBDSecret for an instance and reserve it for
-- a given client.
generateDRBDSecret :: ClientId -> WConfdMonad DRBDSecret
generateDRBDSecret cid = do
g <- liftIO Rand.newStdGen
modifyTempResStateErr $ T.generateDRBDSecret g cid
-- *** LVs
reserveLV :: ClientId -> LogicalVolume -> WConfdMonad ()
reserveLV jobId lv = modifyTempResStateErr $ T.reserveLV jobId lv
-- *** IPv4s
-- | Reserve a given IPv4 address for use by an instance.
reserveIp :: ClientId -> T.NetworkUUID -> Ip4Address -> Bool -> WConfdMonad ()
reserveIp = (((modifyTempResStateErr .) .) .) . T.reserveIp
-- | Give a specific IP address back to an IP pool.
-- The IP address is returned to the IP pool designated by network id
-- and marked as reserved.
releaseIp :: ClientId -> T.NetworkUUID -> Ip4Address -> WConfdMonad ()
releaseIp = (((modifyTempResStateErr .) const .) .) . T.releaseIp
-- Find a free IPv4 address for an instance and reserve it.
generateIp :: ClientId -> T.NetworkUUID -> WConfdMonad Ip4Address
generateIp = (modifyTempResStateErr .) . T.generateIp
-- | Commit all reserved/released IP address to an IP pool.
-- The IP addresses are taken from the network's IP pool and marked as
-- reserved/free for instances.
--
-- Note that the reservations are kept, they are supposed to be cleaned
-- when a job finishes.
commitTemporaryIps :: ClientId -> WConfdMonad ()
commitTemporaryIps = modifyConfigDataErr_ . T.commitReservedIps
-- | Immediately release an IP address, without using the reservations pool.
commitReleaseTemporaryIp
:: T.NetworkUUID -> Ip4Address -> WConfdMonad ()
commitReleaseTemporaryIp net_uuid addr =
modifyConfigDataErr_ (const $ T.commitReleaseIp net_uuid addr)
-- | List all IP reservations for the current client.
--
-- This function won't be needed once the corresponding calls are moved to
-- WConfd.
listReservedIps :: ClientId -> WConfdMonad [T.IPv4Reservation]
listReservedIps jobId =
liftM (S.toList . T.listReservedIps jobId . snd) readTempResState
-- ** Locking related functions
-- | List the locks of a given owner (i.e., a job-id lockfile pair).
listLocks :: ClientId -> WConfdMonad [(GanetiLocks, L.OwnerState)]
listLocks cid = liftM (M.toList . L.listLocks cid) readLockAllocation
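-- As an added illustration: for a client @cid@ that currently holds only
-- the configuration lock in shared mode (see 'lockConfig'),
--
-- > listLocks cid
--
-- returns @[(ConfigLock, L.OwnShared)]@.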
-- | List all active locks.
listAllLocks :: WConfdMonad [GanetiLocks]
listAllLocks = liftM L.listAllLocks readLockAllocation
-- | List all active locks with their owners.
listAllLocksOwners :: WConfdMonad [(GanetiLocks, [(ClientId, L.OwnerState)])]
listAllLocksOwners = liftM L.listAllLocksOwners readLockAllocation
-- | Get full information of the lock waiting status, i.e., provide
-- the information about all locks owners and all pending requests.
listLocksWaitingStatus :: WConfdMonad
( [(GanetiLocks, [(ClientId, L.OwnerState)])]
, [(Integer, ClientId, [L.LockRequest GanetiLocks])]
)
listLocksWaitingStatus = liftM ( (L.listAllLocksOwners . LW.getAllocation)
&&& (S.toList . LW.getPendingRequests) )
readLockWaiting
-- | Try to update the locks of a given owner (i.e., a job-id lockfile pair).
-- This function always returns immediately. If the lock update was possible,
-- the empty list is returned; otherwise, the lock status is left completely
-- unchanged, and the return value is the list of jobs which need to release
-- some locks before this request can succeed.
tryUpdateLocks :: ClientId -> GanetiLockRequest -> WConfdMonad [ClientId]
tryUpdateLocks cid req =
liftM S.toList
. (>>= toErrorStr)
$ modifyLockWaiting (LW.updateLocks cid (fromGanetiLockRequest req))
-- | Try to update the locks of a given owner and make that a pending
-- request if not immediately possible.
updateLocksWaiting :: ClientId -> Integer
-> GanetiLockRequest -> WConfdMonad [ClientId]
updateLocksWaiting cid prio req =
liftM S.toList
. (>>= toErrorStr)
. modifyLockWaiting
$ LW.updateLocksWaiting prio cid (fromGanetiLockRequest req)
-- | Tell whether a given owner has pending requests.
hasPendingRequest :: ClientId -> WConfdMonad Bool
hasPendingRequest cid = liftM (LW.hasPendingRequest cid) readLockWaiting
-- | Free all locks of a given owner (i.e., a job-id lockfile pair).
freeLocks :: ClientId -> WConfdMonad ()
freeLocks cid =
modifyLockWaiting_ $ LW.releaseResources cid
-- | Free all locks of a given owner (i.e., a job-id lockfile pair)
-- of a given level in the Ganeti sense (e.g., "cluster", "node").
freeLocksLevel :: ClientId -> LockLevel -> WConfdMonad ()
freeLocksLevel cid level =
modifyLockWaiting_ $ LW.freeLocksPredicate ((==) level . lockLevel) cid
-- | Downgrade all locks of the given level to shared.
downGradeLocksLevel :: ClientId -> LockLevel -> WConfdMonad ()
downGradeLocksLevel cid level =
modifyLockWaiting_ $ LW.downGradeLocksPredicate ((==) level . lockLevel) cid
-- | Intersect the possessed locks of an owner with a given set.
intersectLocks :: ClientId -> [GanetiLocks] -> WConfdMonad ()
intersectLocks cid locks = modifyLockWaiting_ $ LW.intersectLocks locks cid
-- | Opportunistically allocate locks for a given owner.
opportunisticLockUnion :: ClientId
-> [(GanetiLocks, L.OwnerState)]
-> WConfdMonad [GanetiLocks]
opportunisticLockUnion cid req =
modifyLockWaiting $ LW.opportunisticLockUnion cid req
-- | Opportunistically allocate locks for a given owner, requesting a
-- certain minimum of success.
guardedOpportunisticLockUnion :: Int
-> ClientId
-> [(GanetiLocks, L.OwnerState)]
-> WConfdMonad [GanetiLocks]
guardedOpportunisticLockUnion count cid req =
modifyLockWaiting $ LW.guardedOpportunisticLockUnion count cid req
-- * The list of all functions exported to RPC.
exportedFunctions :: [Name]
exportedFunctions = [ 'echo
-- config
, 'readConfig
, 'writeConfig
, 'verifyConfig
, 'lockConfig
, 'unlockConfig
, 'flushConfig
-- temporary reservations (common)
, 'dropAllReservations
-- DRBD
, 'computeDRBDMap
, 'allocateDRBDMinor
, 'releaseDRBDMinors
-- MACs
, 'reserveMAC
, 'generateMAC
-- DRBD secrets
, 'generateDRBDSecret
-- LVs
, 'reserveLV
-- IPv4s
, 'reserveIp
, 'releaseIp
, 'generateIp
, 'commitTemporaryIps
, 'commitReleaseTemporaryIp
, 'listReservedIps
-- locking
, 'listLocks
, 'listAllLocks
, 'listAllLocksOwners
, 'listLocksWaitingStatus
, 'tryUpdateLocks
, 'updateLocksWaiting
, 'freeLocks
, 'freeLocksLevel
, 'downGradeLocksLevel
, 'intersectLocks
, 'opportunisticLockUnion
, 'guardedOpportunisticLockUnion
, 'hasPendingRequest
]
|
ribag/ganeti-experiments
|
src/Ganeti/WConfd/Core.hs
|
gpl-2.0
| 13,344 | 0 | 14 | 3,096 | 2,093 | 1,181 | 912 | 178 | 3 |
module Data.DMARCAggregateReport.Parser (
parseReport,
ExtractFailure(..)
) where
import qualified Data.ByteString.Lazy as LazyByteString (fromStrict, ByteString)
import Data.String.Utils (strip)
import Text.XML.Light.Input (parseXMLDoc)
import Text.XML.Light.Types (QName(..))
import qualified Text.XML.Light.Proc as P
import qualified System.Time as Time
import qualified Data.DMARCAggregateReport.Types as T
data ExtractFailure = XMLParseFailure | InvalidDocument
deriving (Show, Eq)
-- | Converts a serialized XML report to a Haskell data type
parseReport :: LazyByteString.ByteString
-- ^ A DMARC aggregate report serialized as XML
-> Either ExtractFailure T.Report
parseReport xmlStr = case parseXMLDoc xmlStr of
Just dom -> processDOM dom
Nothing -> Left XMLParseFailure
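-- A hedged usage sketch (the file name is made up; 'readFile' below is
-- Data.ByteString.Lazy.readFile, imported at the call site):
--
-- > do xml <- Data.ByteString.Lazy.readFile "report.xml"
-- >    case parseReport xml of
-- >      Right report -> ... -- work with the T.Report value
-- >      Left failure -> print failure -- XMLParseFailure or InvalidDocument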
processDOM rootElm = case processDOM' rootElm of
Just report -> Right report
Nothing -> Left InvalidDocument
processDOM' rootElm = pure T.Report
<*> orgName
<*> reportId
<*> beginDate
<*> endDate
<*> records
where
orgName = metadata >>= findContent "org_name"
reportId = metadata >>= findContent "report_id"
beginDate = dateElm >>= findContent "begin" >>= parseTimestamp
endDate = dateElm >>= findContent "end" >>= parseTimestamp
records = mapM parseRecord $ P.findElements (qn "record") rootElm
dateElm = P.findElement (qn "date_range") rootElm
metadata = P.findElement (qn "report_metadata") rootElm
-- 2016-05-07 strip is needed because yahoo has an errant space in the end date
parseTimestamp str = (safeRead . strip) str >>= (\v -> return $ Time.TOD v 0)
parseRecord recordElm = pure T.Record
<*> sourceIp
<*> headerFrom
<*> messageCount
<*> policyDkim
<*> policySpf
where
sourceIp = row >>= findContent "source_ip" >>= safeRead
headerFrom = identifiers >>= findContent "header_from"
messageCount = row >>= findContent "count" >>= safeRead
policyDkim = policy >>= findContent "dkim" >>= safeRead
policySpf = policy >>= findContent "spf" >>= safeRead
identifiers = P.findElement (qn "identifiers") recordElm
row = P.findElement (qn "row") recordElm
policy = row >>= P.findElement (qn "policy_evaluated")
findContent tagName elm = P.findElement (qn tagName) elm >>= return . P.strContent
qn tag = QName tag Nothing Nothing
safeRead :: (Read a) => String -> Maybe a
safeRead str = case reads str of
[(val, "")] -> Just val
_ -> Nothing
|
splondike/dmarc-check
|
src/Data/DMARCAggregateReport/Parser.hs
|
gpl-2.0
| 2,847 | 0 | 12 | 849 | 684 | 361 | 323 | 54 | 2 |
{- |
Module : $Header$
Description : Interface to the Leo-II theorem prover.
Copyright : (c) A. Tsogias, DFKI Bremen 2011
License : GPLv2 or higher, see LICENSE.txt
Maintainer : [email protected]
Stability : provisional
Portability : non-portable (imports Prover)
LEO-II theorem prover for THF0
-}
module THF.ProveLeoII where
import Logic.Prover
import THF.Cons
import THF.Sign
import THF.ProverState
import Common.AS_Annotation as AS_Anno
import Common.Result
import Common.ProofTree
import Common.Utils (basename, getTempFile, timeoutCommand)
import Common.SZSOntology
import GUI.GenericATP
import Interfaces.GenericATPState
import Control.Monad (when)
import qualified Control.Concurrent as Concurrent
import Proofs.BatchProcessing
import System.Directory
import Data.List
import Data.Maybe
import Data.Time (timeToTimeOfDay)
import Data.Time.Clock (picosecondsToDiffTime, secondsToDiffTime)
leoIIName :: String
leoIIName = "Leo-II"
{- | The Prover implementation. First runs the batch prover (with
graphical feedback), then starts the GUI prover. -}
leoIIProver :: Prover SignTHF SentenceTHF MorphismTHF () ProofTree
leoIIProver = mkAutomaticProver leoIIName () leoIIGUI leoIICMDLautomaticBatch
leoIIHelpText :: String
leoIIHelpText =
"No help available yet.\n" ++
"email [email protected] " ++
"for more information.\n"
{- |
Record for prover specific functions. This is used by both GUI and command
line interface.
-}
atpFun :: String -- ^ theory name
-> ATPFunctions SignTHF SentenceTHF MorphismTHF ProofTree ProverStateTHF
atpFun _ = ATPFunctions
{ initialProverState = initialProverStateTHF
, atpTransSenName = id
, atpInsertSentence = insertSentenceTHF
, goalOutput = showProblemTHF
, proverHelpText = leoIIHelpText
, batchTimeEnv = "HETS_LEOII_BATCH_TIME_LIMIT"
, fileExtensions = FileExtensions
{ problemOutput = ".thf"
, proverOutput = ".leoII"
, theoryConfiguration = ".lpcf" }
, runProver = runLeoII
, createProverOptions = extraOpts }
-- ** GUI
{- |
Invokes the generic prover GUI. LeoII specific functions are omitted by
data type ATPFunctions.
-}
leoIIGUI :: String -- ^ theory name
-> Theory SignTHF SentenceTHF ProofTree
-> [FreeDefMorphism SentenceTHF MorphismTHF] -- ^ freeness constraints
-> IO [ProofStatus ProofTree] -- ^ proof status for each goal
leoIIGUI thName th freedefs =
genericATPgui (atpFun thName) True leoIIName thName th
freedefs emptyProofTree
-- ** command line function
{- |
Implementation of 'Logic.Prover.proveCMDLautomaticBatch' which provides an
automatic command line interface to the Leo-II prover.
Leo-II specific functions are omitted by data type ATPFunctions.
-}
leoIICMDLautomaticBatch
:: Bool -- ^ True means include proved theorems
-> Bool -- ^ True means save problem file
-> Concurrent.MVar (Result [ProofStatus ProofTree])
-- ^ used to store the result of the batch run
-> String -- ^ theory name
-> TacticScript -- ^ default tactic script
-> Theory SignTHF SentenceTHF ProofTree
-- ^ theory consisting of a signature and sentences
-> [FreeDefMorphism SentenceTHF MorphismTHF] -- ^ freeness constraints
-> IO (Concurrent.ThreadId, Concurrent.MVar ())
{- ^ fst: identifier of the batch thread for killing it
snd: MVar to wait for the end of the thread -}
leoIICMDLautomaticBatch inclProvedThs saveProblem_batch resultMVar
thName defTS th freedefs =
genericCMDLautomaticBatch (atpFun thName) inclProvedThs saveProblem_batch
resultMVar leoIIName thName
(parseTacticScript batchTimeLimit [] defTS) th freedefs emptyProofTree
{- |
Runs the Leo-II prover.
-}
runLeoII :: ProverStateTHF
-> GenericConfig ProofTree -- ^ configuration to use
-> Bool -- ^ True means save THF file
-> String -- ^ name of the theory in the DevGraph
-> Named SentenceTHF -- ^ goal to prove
-> IO (ATPRetval, GenericConfig ProofTree)
-- ^ (retval, configuration with proof status and complete output)
runLeoII pst cfg saveTHF thName nGoal = do
let options = extraOpts cfg
tout = maybe leoIITimeout (+ 1) (timeLimit cfg)
extraOptions = maybe "-po" (("-po -t " ++) . show) (timeLimit cfg)
tmpFileName = thName ++ '_' : AS_Anno.senAttr nGoal
prob <- showProblemTHF pst nGoal []
runRes <- runLeoIIProcess tout saveTHF extraOptions tmpFileName prob
case runRes of
Nothing ->
let ctime = timeToTimeOfDay $ secondsToDiffTime
$ toInteger leoIITimeout
in return (ATPTLimitExceeded, cfg
{ proofStatus =
(openProofStatus (AS_Anno.senAttr nGoal)
"LEO-II" emptyProofTree)
{ usedTime = ctime
, tacticScript = TacticScript
$ show ATPTacticScript
{ tsTimeLimit = configTimeLimit cfg
, tsExtraOpts = options} }
, timeUsed = ctime })
Just (exitCode, out, tUsed) ->
let ctime = timeToTimeOfDay $ picosecondsToDiffTime
$ toInteger $ tUsed * 1000000000
(err, retval) = case () of
_ | szsProved exitCode -> (ATPSuccess, provedStatus)
_ | szsDisproved exitCode -> (ATPSuccess, disProvedStatus)
_ | szsTimeout exitCode ->
(ATPTLimitExceeded, defaultProofStatus)
_ | szsStopped exitCode ->
(ATPBatchStopped, defaultProofStatus)
_ ->
(ATPError exitCode, defaultProofStatus)
defaultProofStatus =
(openProofStatus (AS_Anno.senAttr nGoal) "LEO-II" emptyProofTree)
{ usedTime = ctime
, tacticScript = TacticScript $ show ATPTacticScript
{ tsTimeLimit = configTimeLimit cfg
, tsExtraOpts = options} }
disProvedStatus = defaultProofStatus {goalStatus = Disproved}
provedStatus = defaultProofStatus { goalStatus = Proved True
, usedAxioms = getAxioms pst }
in return (err, cfg { proofStatus = retval
, resultOutput = out
, timeUsed = ctime })
-- Run the Leo-II process. timeoutCommand is used to terminate leo if it does
-- not terminate itself after the timeout time is over.
runLeoIIProcess
:: Int -- ^ timeout time in seconds
-> Bool -- ^ save problem
-> String -- ^ options
-> String -- ^ filename without extension
-> String -- ^ problem
-> IO (Maybe (String, [String], Int))
runLeoIIProcess tout saveTHF options tmpFileName prob = do
let tmpFile = basename tmpFileName ++ ".thf"
when saveTHF (writeFile tmpFile prob)
timeTmpFile <- getTempFile prob tmpFile
mres <- timeoutCommand tout "leo" (words options ++ [timeTmpFile])
maybe (return Nothing) (\ (_, pout, _) -> do
let l = lines pout
(res, _, tUsed) = parseOutput l
removeFile timeTmpFile
return $ Just (res, l, tUsed)) mres
-- parse the output and return the szsStatus and the used time.
parseOutput :: [String] -> (String, Bool, Int)
-- ^ (exit code, status found, used time in ms)
parseOutput = foldl checkLine ("", False, -1) where
checkLine (exCode, stateFound, to) line = case getSZSStatusWord line of
Just szsState | not stateFound -> (szsState, True, to)
_ -> case words (fromMaybe "" $ stripPrefix "# Total time" line) of
_ : (tim : _) -> -- ":" : (tim : ("s" : []))
let time = round $ (read tim :: Float) * 1000
in (exCode, stateFound, time)
_ -> (exCode, stateFound, to)
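-- As an added illustration (the two lines follow the formats matched above):
--
-- > parseOutput ["% SZS status Theorem", "# Total time : 0.5 s"]
--
-- evaluates to @("Theorem", True, 500)@.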
-- try to read the szs status from a given String
getSZSStatusWord :: String -> Maybe String
getSZSStatusWord line =
case words (fromMaybe "" $ stripPrefix "% SZS status" line) of
[] -> Nothing
w : _ -> Just w
-- the standard Leo-II timeout time
leoIITimeout :: Int
leoIITimeout = 601
|
nevrenato/Hets_Fork
|
THF/ProveLeoII.hs
|
gpl-2.0
| 8,402 | 0 | 22 | 2,384 | 1,621 | 877 | 744 | 149 | 6 |
-- |
-- TODO(kerckhove) big example here!
--
{-# LANGUAGE OverloadedStrings #-}
module Text.LaTeX.LambdaTeX
( module Text.LaTeX.LambdaTeX
-- ** Selections
, module Text.LaTeX.LambdaTeX.Selection
-- ** References
, module Text.LaTeX.LambdaTeX.Reference
-- ** Packages dependencies
, module Text.LaTeX.LambdaTeX.Package
-- ** IO dependencies
, module Text.LaTeX.LambdaTeX.Action
-- ** Re-exports
, module Text.LaTeX.LambdaTeX.Types
) where
import Control.Monad (forM_, void)
import Control.Monad.IO.Class (MonadIO(..))
import Control.Concurrent.Async (async, wait)
import System.FilePath
import qualified Data.Set as S
import qualified Data.Text.IO as T
import Text.LaTeX.Base (LaTeX, renderFile)
import Text.LaTeX.LambdaTeX.Action
import Text.LaTeX.LambdaTeX.Package
import Text.LaTeX.LambdaTeX.Package.Internal
import Text.LaTeX.LambdaTeX.Reference
import Text.LaTeX.LambdaTeX.Reference.Internal
import Text.LaTeX.LambdaTeX.Reference.Types
import Text.LaTeX.LambdaTeX.Selection
import Text.LaTeX.LambdaTeX.Selection.Types
import Text.LaTeX.LambdaTeX.Types
-- | Build all the files for a LaTeX project given by a ΛTeXT generator
-- This either returns Left with the list of errors or Right () to signify success.
--
-- This function takes care of some of the LaTeX tediousness:
--
-- * LaTeX file generation
-- * Automatic bibtex file generation
-- * All safety provided by 'execLambdaTeXT' (in the form of textual errors)
-- * Automatic asynchronous resolution of IO dependencies for graphviz or tikz figures
buildLaTeXProject ::
MonadIO m => ΛTeXT m a -> ProjectConfig -> m (Either [ΛError] ())
buildLaTeXProject func conf = do
(errs, latex, refs, actions) <-
execLambdaTeXT func $ projectGenerationConfig conf
-- Render tex file
let renderTex = do
let mainTexFile = projectTexFileName conf ++ ".tex"
renderFile (projectBuildDir conf </> mainTexFile) latex
-- Render bib file
let renderMain = do
let mainBibFile = projectBibFileName conf ++ ".bib"
T.writeFile (projectBuildDir conf </> mainBibFile) $
renderReferences refs
let performAction (name, action) = do
void $ action $ projectBuildDir conf
putStrLn $ "Job " ++ name ++ " done."
-- Perform all the IO actions asynchronously
as <-
liftIO $ mapM async $ renderTex : renderMain : map performAction actions
liftIO $ forM_ as wait
return $
if null errs
then Right ()
else Left errs
-- | Execute a ΛTeXT generation
-- This returns the errors together with the resulting LaTeX value, the list of external references that need to be put into a bibtex file, and the named IO actions to run.
--
-- This function takes care of a lot of safety issues:
--
-- * Subset selection. This allows you to build large documents in parts.
-- * External dependency selection. No more '??' for external references in the output pdf.
-- * Internal dependency safety. No more '??' for internal references in the output pdf.
-- * Package dependency resolution, TODO(kerckhove) with packages in the right order
-- * Selection of figure dependencies on graphviz or tikz figures
execLambdaTeXT ::
Monad m
=> ΛTeXT m a
-> GenerationConfig
-> m ([ΛError], LaTeX, [Reference], [(String, FilePath -> IO ())])
execLambdaTeXT func conf = do
((_, latex), _, output) <-
runΛTeX func (ΛConfig $ generationSelection conf) initState
let mresult =
injectPackageDependencies
(S.toList $ outputPackageDependencies output)
latex
let (extraErrs, result) =
case mresult of
Nothing -> ([IncompatibleDependencies], latex)
Just res -> ([], res)
let refs = S.toList $ outputExternalReferences output
let actions = outputActions output
-- Check reference errors
let made = outputLabelsMade output
needed = outputLabelsNeeded output
diff = S.difference needed made
let referss = map ReferenceMissing $ S.toList diff
return (extraErrs ++ referss, result, refs, actions)
where
initState :: ΛState
initState = ΛState {stateCurrentPart = emptyPart}
-- * Configuration
-- | Configuration of a ΛTeX project
data ProjectConfig = ProjectConfig
{ projectGenerationConfig :: GenerationConfig
, projectBibFileName :: String
, projectTexFileName :: String
, projectBuildDir :: FilePath
} deriving (Show, Eq)
-- | Default project configuration.
--
-- Modify this instead of building your own 'ProjectConfig'
defaultProjectConfig :: ProjectConfig
defaultProjectConfig =
ProjectConfig
{ projectGenerationConfig = defaultGenerationConfig
, projectBibFileName = "main"
, projectTexFileName = "main"
, projectBuildDir = "."
}
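-- A hedged usage sketch (@myDoc@ stands for a ΛTeXT document generator
-- defined elsewhere; the file and directory names are made up):
--
-- > result <- buildLaTeXProject myDoc defaultProjectConfig
-- >             { projectTexFileName = "thesis"
-- >             , projectBibFileName = "thesis"
-- >             , projectBuildDir = "out"
-- >             }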
-- | Configuration of ΛTeX generation
newtype GenerationConfig = GenerationConfig
{ generationSelection :: Selection
} deriving (Show, Eq)
-- | Default generation config.
--
-- Modify this instead of building your own 'GenerationConfig'
defaultGenerationConfig :: GenerationConfig
defaultGenerationConfig = GenerationConfig {generationSelection = [All]}
|
NorfairKing/LambdaTeX
|
src/Text/LaTeX/LambdaTeX.hs
|
gpl-2.0
| 5,295 | 0 | 16 | 1,183 | 975 | 559 | 416 | 89 | 2 |
module H17 where
split :: (Integral i) => [a] -> i -> ([a], [a])
split xs 0 = ([], xs)
split xs n = (as ++ [head bs], tail bs)
where
(as, bs) = split xs (n - 1)
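-- For example (added illustration):
--
-- > split "abcdefghik" 3 == ("abc", "defghik")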
|
hsinhuang/codebase
|
h99/H17.hs
|
gpl-2.0
| 175 | 0 | 9 | 53 | 114 | 64 | 50 | 5 | 1 |
module Hob.Command.NewTabSpec (main, spec) where
import Test.Hspec
import Graphics.UI.Gtk
import qualified Hob.Context as HC
import qualified Hob.Context.UiContext as HC
import HobTest.Context.Stubbed
import HobTest.Editor
main :: IO ()
main = hspec spec
spec :: Spec
spec =
describe "new tab command" $ do
it "creates a new unnamed file" $ do
ctx <- launchNewFile
pagesAfterActivatingDirectory <- getNumberOfEditorPages ctx
pagesAfterActivatingDirectory `shouldBe` 1
it "focuses the tab with the open file if requested to open an already loaded file" $ do
ctx <- loadStubbedContext
let notebook = HC.mainNotebook . HC.uiContext $ ctx
launchEditorTab ctx "/xxx/testName.hs"
currentPageOfFirstLoadedFile <- notebookGetCurrentPage notebook
launchEditorTab ctx "/xxx/c"
pagesBeforeOpeningExistingFile <- notebookGetNPages notebook
launchEditorTab ctx "/xxx/testName.hs"
currentPageAfterLoadingTheFirstLoadedFile <- notebookGetCurrentPage notebook
pagesAfterOpeningExistingFile <- notebookGetNPages notebook
pagesAfterOpeningExistingFile `shouldBe` pagesBeforeOpeningExistingFile
currentPageAfterLoadingTheFirstLoadedFile `shouldBe` currentPageOfFirstLoadedFile
|
svalaskevicius/hob
|
test/Hob/Command/NewTabSpec.hs
|
gpl-3.0
| 1,269 | 0 | 16 | 231 | 245 | 123 | 122 | 28 | 1 |
module Time where
import Data.List
data Time = Time { hour :: Integer
, minute :: Integer
, second :: Integer
} deriving (Eq)
instance Show Time where
show = showTime . process
instance Num Time where
Time h m s + Time h1 m1 s1 = Time (h+h1) (m+m1) (s+s1)
Time h m s - Time h1 m1 s1 = Time (h-h1) (m-m1) (s-s1)
Time h m s * Time h1 m1 s1 = Time (h*h1) (m*m1) (s*s1)
abs = tmap abs
signum = tmap signum
fromInteger s = Time 0 0 s
instance Ord Time where
Time h m s `compare` Time a b c = case h `compare` a of
LT -> LT
GT -> GT
EQ -> case m `compare` b of
LT -> LT
GT -> GT
EQ -> s `compare` c
(+:) t s = t + time 0 0 s
(+::) t m = t + time 0 m 0
(+:::) t h = t + time h 0 0
tmap :: (Integer -> Integer) -> Time -> Time
tmap f t = Time (f $ hour t) (f $ minute t) (f $ second t)
toTuple t = (hour t, minute t, second t)
fromTuple (h,m,s) = Time h m s
process :: Time -> Time
process = fromTuple . fixHours . fixMinutes . toTuple
where fixMinutes (h,m,s) = (h, m + s `div` 60, s `mod` 60)
fixHours (h,m,s) = ((h + m `div` 60) `mod` 24, m `mod` 60, s)
showTime :: Time -> String
showTime t = intercalate ":" . map (addZero . ($ t)) $ [hour, minute, second]
addZero :: (Num a, Ord a, Show a) => a -> String
addZero x | x < 10 = '0' : show x
| otherwise = show x
time = Time
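-- A small added illustration: '+:' adds seconds and 'show' normalizes the
-- result modulo 24 hours via 'process', so
--
-- > show (time 23 59 30 +: 45)
--
-- yields @"00:00:15"@.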
|
Jiggins/Utils
|
Time.hs
|
gpl-3.0
| 1,420 | 0 | 12 | 451 | 785 | 416 | 369 | 40 | 1 |
{-# LANGUAGE OverloadedStrings #-}
module Database.Hedsql.Tests.Select
( tests
) where
--------------------------------------------------------------------------------
-- Imports
--------------------------------------------------------------------------------
import Data.Monoid
import Database.Hedsql.Statements.Select
import Data.Text.Lazy ()
import Test.Framework (Test, testGroup)
import Test.Framework.Providers.HUnit (testCase)
import Test.HUnit hiding (Test)
import qualified Database.Hedsql.PostgreSQL as P
import qualified Database.Hedsql.MariaDB as M
import qualified Database.Hedsql.SqLite as S
--------------------------------------------------------------------------------
-- PRIVATE
--------------------------------------------------------------------------------
----------------------------------------
-- SELECT
----------------------------------------
testSelectAllSqLite :: Test
testSelectAllSqLite = testCase "Select all" assertSelect
where
assertSelect :: Assertion
assertSelect = assertEqual
"Select all query is incorrect"
"SELECT * FROM \"People\""
(S.codeGen selectAll)
testSelectDistinctSqLite :: Test
testSelectDistinctSqLite = testCase "Select distinct" assertSelect
where
assertSelect :: Assertion
assertSelect = assertEqual
"Select distinct query is incorrect"
"SELECT DISTINCT \"firstName\" FROM \"People\""
(S.codeGen distinctSelect)
testSelectTuple :: Test
testSelectTuple = testCase "Select 2-tuple" assertSelect
where
assertSelect :: Assertion
assertSelect = assertEqual
"Select 2-tuple query is incorrect"
"SELECT \"firstName\", \"age\" FROM \"People\""
(S.codeGen selectTuple)
testSelect3Tuple :: Test
testSelect3Tuple = testCase "Select 3-tuple" assertSelect
where
assertSelect :: Assertion
assertSelect = assertEqual
"Select 3-tuple query is incorrect"
"SELECT \"firstName\", \"lastName\", \"age\" FROM \"People\""
(S.codeGen select3Tuple)
testSelect4Tuple :: Test
testSelect4Tuple = testCase "Select 4-tuple" assertSelect
where
assertSelect :: Assertion
assertSelect = assertEqual
"Select 4-tuple query is incorrect"
( "SELECT \"firstName\", \"lastName\", \"age\", \"title\" "
<> "FROM \"People\""
)
(S.codeGen select4Tuple)
----------------------------------------
-- Functions
----------------------------------------
testAdditionSqLite :: Test
testAdditionSqLite = testCase "Addition" assertSelect
where
assertSelect :: Assertion
assertSelect = assertEqual
"Addition in query is incorrect"
"SELECT \"age\" + 1 FROM \"People\""
(S.codeGen addition)
testMultiplicationSqLite :: Test
testMultiplicationSqLite = testCase "Multiplication" assertSelect
where
assertSelect :: Assertion
assertSelect = assertEqual
"Multiplication in query is incorrect"
"SELECT 3 * 4"
(S.codeGen multiplication)
testCurrentDateSqLite :: Test
testCurrentDateSqLite = testCase "Current date" assertSelect
where
assertSelect :: Assertion
assertSelect = assertEqual
"Current date function in query is incorrect"
"SELECT Date('now')"
(S.codeGen selectCurrentDate)
testRandomSqLite :: Test
testRandomSqLite = testCase "Random" assertSelect
where
assertSelect :: Assertion
assertSelect = assertEqual
"Random function in query is incorrect"
"SELECT random()"
(S.codeGen selectRandom)
testTrim :: Test
testTrim = testCase "Trim" assertSelect
where
assertSelect :: Assertion
assertSelect = assertEqual
"TRIM function in query is incorrect"
"SELECT TRIM(\"name\")"
(S.codeGen selectTrim)
testLastInsertIdPostgreSQL :: Test
testLastInsertIdPostgreSQL =
testCase "Last Insert ID for PostgresQL" assertSelect
where
assertSelect :: Assertion
assertSelect = assertEqual
"Last Insert ID for PostgreSQL is incorrect"
"SELECT lastval()"
(P.codeGen selectLastInsertId)
testLastInsertIdMariaDB :: Test
testLastInsertIdMariaDB = testCase "Last Insert ID for MariaDB" assertSelect
where
assertSelect :: Assertion
assertSelect = assertEqual
"Last Insert ID for MariaDB is incorrect"
"SELECT LAST_INSERT_ID()"
(M.codeGen selectLastInsertId)
testLastInsertIdSqLite :: Test
testLastInsertIdSqLite = testCase "Last Insert ID for SqLite" assertSelect
where
assertSelect :: Assertion
assertSelect = assertEqual
"Last Insert ID for SqLite is incorrect"
"SELECT last_insert_rowid()"
(S.codeGen selectLastInsertId)
----------------------------------------
-- FROM
----------------------------------------
testCrossJoinSqLite :: Test
testCrossJoinSqLite = testCase "Cross join" assertFrom
where
assertFrom :: Assertion
assertFrom = assertEqual
"Cross join is incorrect"
"SELECT * FROM \"People\" CROSS JOIN \"Countries\""
(S.codeGen fromCrossJoin)
testInnerJoinOnSqLite :: Test
testInnerJoinOnSqLite = testCase "Inner join SqLite" assertFrom
where
assertFrom :: Assertion
assertFrom = assertEqual
"SqLite inner join is incorrect"
("SELECT * "
<> "FROM \"People\" "
<> "INNER JOIN \"Countries\" "
<> "ON \"People\".\"countryId\" = \"Countries\".\"countryId\"")
(S.codeGen fromInnerJoinOn)
testInnerJoinUsingSqLite :: Test
testInnerJoinUsingSqLite = testCase "Inner join USING SqLite" assertFrom
where
assertFrom :: Assertion
assertFrom = assertEqual
"SqLite inner join using is incorrect"
("SELECT * "
<> "FROM \"People\" INNER JOIN \"Countries\" USING (\"countryId\")")
(S.codeGen fromInnerJoinUsing)
testNaturalInnerJoin :: Test
testNaturalInnerJoin = testCase "Natural inner join" assertFrom
where
assertFrom :: Assertion
assertFrom = assertEqual
"Natural inner join is incorrect"
"SELECT * FROM \"People\" NATURAL INNER JOIN \"Countries\""
(S.codeGen fromNaturalInnerJoin)
testLeftJoinOn :: Test
testLeftJoinOn = testCase "Left join on" assertFrom
where
assertFrom :: Assertion
assertFrom = assertEqual
"Left join on is incorrect"
( "SELECT * FROM \"People\" LEFT JOIN \"Countries\" "
<> "ON \"People\".\"countryId\" = \"Countries\".\"countryId\""
)
(S.codeGen fromLeftJoinOn)
testLeftJoinUsing :: Test
testLeftJoinUsing = testCase "Left join using" assertFrom
where
assertFrom :: Assertion
assertFrom = assertEqual
"Left join using is incorrect"
( "SELECT * FROM \"People\" LEFT JOIN \"Countries\" "
<> "USING (\"countryId\")"
)
(S.codeGen fromLeftJoinUsing)
testRightJoinOn :: Test
testRightJoinOn = testCase "Right join on" assertFrom
where
assertFrom :: Assertion
assertFrom = assertEqual
"Right join on is incorrect"
( "SELECT * FROM \"People\" RIGHT JOIN \"Countries\" "
<> "ON \"People\".\"countryId\" = \"Countries\".\"countryId\""
)
(S.codeGen fromRightJoinOn)
testFullJoinOn :: Test
testFullJoinOn = testCase "Full join on" assertFrom
where
assertFrom :: Assertion
assertFrom = assertEqual
"Full join on is incorrect"
( "SELECT * FROM \"People\" FULL JOIN \"Countries\" "
<> "ON \"People\".\"countryId\" = \"Countries\".\"countryId\""
)
(S.codeGen fromFullJoinOn)
testLeftJoinOnAnd :: Test
testLeftJoinOnAnd = testCase "Left join on and" assertFrom
where
assertFrom :: Assertion
assertFrom = assertEqual
"Left join on and is incorrect"
( "SELECT * FROM \"People\" LEFT JOIN \"Countries\" "
<> "ON (\"People\".\"countryId\" = \"Countries\".\"countryId\" "
<> "AND \"Countries\".\"name\" = 'Italy')"
)
(S.codeGen fromLeftJoinOnAnd)
testSelfJoin :: Test
testSelfJoin = testCase "Self join" assertFrom
where
assertFrom :: Assertion
assertFrom = assertEqual
"Self join is incorrect"
( "SELECT * FROM \"People\" AS \"Father\" "
<> "INNER JOIN \"People\" AS \"Child\" "
<> "ON \"Father\".\"personId\" = \"Child\".\"father\""
)
(S.codeGen selfJoin)
testCrossJoinAlias :: Test
testCrossJoinAlias = testCase "Cross join with aliases" assertFrom
where
assertFrom :: Assertion
assertFrom = assertEqual
"Cross join with aliases is incorrect"
( "SELECT * FROM \"People\" AS \"P\" "
<> "CROSS JOIN \"Countries\" AS \"C\""
)
(S.codeGen crossJoinAlias)
testCrossRefAlias :: Test
testCrossRefAlias = testCase "Cross join alias reference" assertFrom
where
assertFrom :: Assertion
assertFrom = assertEqual
"Cross join alias reference is incorrect"
( "SELECT * FROM (\"People\" AS \"P\" "
<> "CROSS JOIN \"Countries\") AS \"PC\""
)
(S.codeGen crossRefAlias)
testSubQuery :: Test
testSubQuery = testCase "Sub-query in FROM clause" assertFrom
where
assertFrom :: Assertion
assertFrom = assertEqual
"Sub-query in FROM clause is incorrect"
"SELECT * FROM (SELECT * FROM \"People\") AS \"P\""
(S.codeGen selectSubQuery)
testNestedJoins :: Test
testNestedJoins = testCase "Multiple joins in FROM clause" assertFrom
where
assertFrom :: Assertion
assertFrom = assertEqual
"Multiple joins in FROM clause is incorrect"
("SELECT * "
<> "FROM \"People\" "
<> "INNER JOIN \"Countries\" "
<> "ON \"People\".\"countryId\" = \"Countries\".\"countryId\" "
<> "INNER JOIN \"Addresses\" "
<> "ON \"People\".\"personId\" = \"Addresses\".\"personId\"")
(S.codeGen nestedJoins)
----------------------------------------
-- WHERE
----------------------------------------
testWhereAlias :: Test
testWhereAlias = testCase "WHERE clause with aliases" assertFrom
where
assertFrom :: Assertion
assertFrom = assertEqual
"WHERE clause with aliases is incorrect"
"SELECT * FROM \"People\" AS \"P\" WHERE \"P\".\"age\" > 5"
(S.codeGen whereAlias)
testWhereAnd :: Test
testWhereAnd = testCase "WHERE clause with AND" assertFrom
where
assertFrom :: Assertion
assertFrom = assertEqual
"WHERE clause with AND is incorrect"
( "SELECT * FROM \"People\", \"Countries\" "
<> "WHERE \"People\".\"countryId\" = \"Countries\".\"countryId\" "
<> "AND \"People\".\"age\" > 18"
)
(S.codeGen whereAnd)
testWhereInValues :: Test
testWhereInValues = testCase "WHERE clause with IN values" assertFrom
where
assertFrom :: Assertion
assertFrom = assertEqual
"WHERE clause with IN values is incorrect"
( "SELECT * FROM \"Countries\" "
<> "WHERE \"name\" IN ('Italy', 'Switzerland')"
)
(S.codeGen whereInValues)
testWhereInSelect :: Test
testWhereInSelect = testCase "WHERE clause with IN sub-query" assertFrom
where
assertFrom :: Assertion
assertFrom = assertEqual
"WHERE clause with IN sub-query is incorrect"
( "SELECT * FROM \"People\" "
<> "WHERE \"countryId\" IN (SELECT \"countryId\" "
<> "FROM \"Countries\" WHERE \"inhabitants\" >= \"size\" * 100)"
)
(S.codeGen whereInSelect)
testWhereBetween :: Test
testWhereBetween = testCase "WHERE clause with BETWEEN clause" assertFrom
where
assertFrom :: Assertion
assertFrom = assertEqual
"WHERE clause with BETWEEN clause is incorrect"
( "SELECT * FROM \"Countries\" "
<> "WHERE \"inhabitants\" BETWEEN 10000 AND 1000000"
)
(S.codeGen whereBetween)
testWhereExists :: Test
testWhereExists = testCase "WHERE clause with EXISTS sub-query" assertFrom
where
assertFrom :: Assertion
assertFrom = assertEqual
"WHERE clause with EXISTS sub-query is incorrect"
( "SELECT * FROM \"People\" "
<> "WHERE EXISTS (SELECT * "
<> "FROM \"Countries\" "
<> "WHERE \"People\".\"countryId\" = \"Countries\".\"countryId\")"
)
(S.codeGen whereExists)
----------------------------------------
-- ORDER BY
----------------------------------------
testOrderBy :: Test
testOrderBy = testCase "ORDER BY clause" assertOrderBy
where
assertOrderBy :: Assertion
assertOrderBy = assertEqual
"ORDER BY clause is incorrect"
( "SELECT \"firstName\" FROM \"People\" "
<> "ORDER BY \"firstName\""
)
(S.codeGen orderByQuery)
testOrderByAlias :: Test
testOrderByAlias = testCase "ORDER BY alias clause" assertOrderBy
where
assertOrderBy :: Assertion
assertOrderBy = assertEqual
"ORDER BY alias clause is incorrect"
( "SELECT \"size\" + \"inhabitants\" AS \"sum\", \"name\" "
<> "FROM \"Countries\" ORDER BY \"sum\""
)
(S.codeGen orderBySum)
testOrderByAscDesc :: Test
testOrderByAscDesc = testCase "ORDER BY clause with ASC and DESC" assertOrderBy
where
assertOrderBy :: Assertion
assertOrderBy = assertEqual
"ORDER BY clause with ASC and DESC is incorrect"
( "SELECT \"firstName\", \"lastName\" FROM \"People\" "
<> "ORDER BY \"firstName\" ASC, \"lastName\" DESC"
)
(S.codeGen orderByAscDesc)
testOrderByNull :: Test
testOrderByNull =
testCase "ORDER BY clause with NULLS first and last" assertOrderBy
where
assertOrderBy :: Assertion
assertOrderBy = assertEqual
"ORDER BY clause with NullS first and last is incorrect"
( "SELECT \"age\", \"passeportNumber\" FROM \"People\" "
<> "ORDER BY \"age\" NULLS FIRST, \"passeportNumber\" NULLS LAST"
)
(S.codeGen orderByNull)
testOrderByLimit :: Test
testOrderByLimit = testCase "ORDER BY with LIMIT clause" assertOrderBy
where
assertOrderBy :: Assertion
assertOrderBy = assertEqual
"ORDER BY with LIMIT clause is incorrect"
( "SELECT * FROM \"People\" "
<> "ORDER BY \"firstName\" LIMIT 2"
)
(S.codeGen orderByLimit)
----------------------------------------
-- GROUP BY
----------------------------------------
testGroupBy :: Test
testGroupBy = testCase "GROUP BY clause" assertGroupBy
where
assertGroupBy :: Assertion
assertGroupBy = assertEqual
"GROUP BY clause is incorrect"
"SELECT \"age\" FROM \"People\" GROUP BY \"age\""
(S.codeGen selectGroupBy)
testGroupBySum :: Test
testGroupBySum = testCase "GROUP BY with SUM" assertGroupBy
where
assertGroupBy :: Assertion
assertGroupBy = assertEqual
"GROUP BY with SUM is incorrect"
( "SELECT \"lastName\", SUM(\"age\") FROM \"People\" "
<> "GROUP BY \"lastName\""
)
(S.codeGen groupBySum)
testGroupByAlias :: Test
testGroupByAlias = testCase "GROUP BY with an alias" assertGroupBy
where
assertGroupBy :: Assertion
assertGroupBy = assertEqual
"GROUP BY with an alias is incorrect"
( "SELECT \"lastName\" AS \"name\" FROM \"People\" "
<> "GROUP BY \"name\""
)
(S.codeGen groupByAlias)
testGroupByComplex :: Test
testGroupByComplex = testCase "Complex GROUP BY" assertGroupBy
where
assertGroupBy :: Assertion
assertGroupBy = assertEqual
"Complex GROUP BY is invalid"
( "SELECT \"personId\", \"P\".\"lastName\" AS \"name\", "
<> "SUM(\"C\".\"size\") * \"P\".\"age\" AS \"weirdFigure\" "
<> "FROM \"People\" AS \"P\" LEFT JOIN \"Countries\" AS \"C\" "
<> "USING (\"personId\") GROUP BY \"personId\", \"name\""
)
(S.codeGen groupByComplex)
testGroupBySumHaving :: Test
testGroupBySumHaving = testCase "GROUP BY with SUM and HAVING" assertGroupBy
where
assertGroupBy :: Assertion
assertGroupBy = assertEqual
"GROUP BY with SUM and HAVING is incorrect"
( "SELECT \"lastName\", SUM(\"age\") "
<> "FROM \"People\" GROUP BY \"lastName\" HAVING SUM(\"age\") > 18"
)
(S.codeGen groupBySumHaving)
testHavingComplex :: Test
testHavingComplex = testCase "Complex HAVING" assertGroupBy
where
assertGroupBy :: Assertion
assertGroupBy = assertEqual
"Complex HAVING is invalid"
( "SELECT \"personId\", \"P\".\"name\", "
<> "SUM(\"C\".\"size\" * (\"P\".\"age\" - 2)) AS \"weird\" "
<> "FROM \"People\" AS \"P\" LEFT JOIN \"Countries\" AS \"C\" "
<> "USING (\"personId\") WHERE \"personId\" > 2 "
<> "GROUP BY \"personId\", \"P\".\"name\", \"P\".\"age\" "
<> "HAVING SUM(\"P\".\"age\" * \"C\".\"size\") > 5000000"
)
(S.codeGen havingComplex)
----------------------------------------
-- Combined queries
----------------------------------------
testUnion :: Test
testUnion = testCase "SELECT UNION" assertUnion
where
assertUnion :: Assertion
assertUnion = assertEqual
"SELECT UNION is incorrect"
( "SELECT * FROM \"People\" WHERE \"personId\" = 1 "
<> "UNION SELECT * FROM \"People\" WHERE \"personId\" = 2"
)
(S.codeGen unionQuery)
testUnionCombined :: Test
testUnionCombined = testCase "Combined SELECT UNIONs" assertUnion
where
assertUnion :: Assertion
assertUnion = assertEqual
"Combined SELECT UNIONs are incorrect"
( "(SELECT * FROM \"People\" WHERE \"personId\" = 1 "
<> "UNION SELECT * FROM \"People\" WHERE \"personId\" = 2) "
<> "INTERSECT SELECT * FROM \"People\" WHERE \"personId\" = 1"
)
(S.codeGen unionCombined)
testUnionAll :: Test
testUnionAll = testCase "SELECT UNION ALL" assertUnion
where
assertUnion :: Assertion
assertUnion = assertEqual
"SELECT UNION ALL is incorrect"
( "SELECT * FROM \"People\" WHERE \"personId\" = 1 "
<> "UNION ALL SELECT * FROM \"People\" WHERE \"personId\" = 2"
)
(S.codeGen unionAllQuery)
testIntersectAll :: Test
testIntersectAll = testCase "SELECT INTERSECT ALL" assertUnion
where
assertUnion :: Assertion
assertUnion = assertEqual
"SELECT INTERSECT ALL is incorrect"
( "SELECT * FROM \"People\" WHERE \"personId\" = 1 "
<> "INTERSECT ALL SELECT * FROM \"People\" WHERE \"personId\" = 2"
)
(S.codeGen intersectAllQuery)
testExcept :: Test
testExcept = testCase "SELECT EXCEPT" assertUnion
where
assertUnion :: Assertion
assertUnion = assertEqual
"SELECT EXCEPT is incorrect"
( "SELECT * FROM \"People\" "
<> "EXCEPT SELECT * FROM \"People\" WHERE \"personId\" = 1"
)
(S.codeGen exceptQuery)
testExceptAll :: Test
testExceptAll = testCase "SELECT EXCEPT ALL" assertUnion
where
assertUnion :: Assertion
assertUnion = assertEqual
"SELECT EXCEPT ALL is incorrect"
( "SELECT * FROM \"People\" "
<> "EXCEPT ALL SELECT * FROM \"People\" WHERE \"personId\" = 1"
)
(S.codeGen exceptAllQuery)
----------------------------------------
-- Comparison operators
----------------------------------------
testGreaterThan :: Test
testGreaterThan = testCase "SELECT greater than" assertSelect
where
assertSelect :: Assertion
assertSelect = assertEqual
"SELECT greater than is incorrect"
"SELECT * FROM \"People\" WHERE \"age\" > 18"
(S.codeGen selectGreaterThan)
testGreaterThanOrEqualTo :: Test
testGreaterThanOrEqualTo = testCase "SELECT greater or equal to" assertSelect
where
assertSelect :: Assertion
assertSelect = assertEqual
"SELECT greater or equal to is incorrect"
"SELECT * FROM \"People\" WHERE \"age\" >= 18"
(S.codeGen selectGreaterThanOrEqualTo)
testSmallerThan :: Test
testSmallerThan = testCase "SELECT smaller than" assertSelect
where
assertSelect :: Assertion
assertSelect = assertEqual
"SELECT smaller than is incorrect"
"SELECT * FROM \"People\" WHERE \"age\" < 18"
(S.codeGen selectSmallerThan)
testSmallerThanOrEqualTo :: Test
testSmallerThanOrEqualTo = testCase "SELECT smaller or equal to" assertSelect
where
assertSelect :: Assertion
assertSelect = assertEqual
"SELECT smaller or equal to is incorrect"
"SELECT * FROM \"People\" WHERE \"age\" <= 18"
(S.codeGen selectSmallerThanOrEqualTo)
testEqualTo :: Test
testEqualTo = testCase "SELECT equal to" assertSelect
where
assertSelect :: Assertion
assertSelect = assertEqual
"SELECT equal to is incorrect"
"SELECT * FROM \"People\" WHERE \"age\" = 18"
(S.codeGen selectEqualTo)
testNotEqualTo :: Test
testNotEqualTo = testCase "SELECT not equal to" assertSelect
where
assertSelect :: Assertion
assertSelect = assertEqual
"SELECT not equal to is incorrect"
"SELECT * FROM \"People\" WHERE \"age\" <> 18"
(S.codeGen selectNotEqualTo)
testNotBetween :: Test
testNotBetween = testCase "SELECT not between" assertSelect
where
assertSelect :: Assertion
assertSelect = assertEqual
"SELECT not between is incorrect"
"SELECT * FROM \"People\" WHERE \"age\" NOT BETWEEN 5 AND 18"
(S.codeGen selectNotBetween)
----------------------------------------
-- Boolean operators
----------------------------------------
testIsNull :: Test
testIsNull = testCase "SELECT is null" assertSelect
where
assertSelect :: Assertion
assertSelect = assertEqual
"SELECT is null is incorrect"
"SELECT * FROM \"People\" WHERE \"passeportNumber\" IS NULL"
(S.codeGen isNullQuery)
testIsNotNull :: Test
testIsNotNull = testCase "SELECT is not null" assertSelect
where
assertSelect :: Assertion
assertSelect = assertEqual
"SELECT is not null is incorrect"
"SELECT * FROM \"People\" WHERE \"passeportNumber\" IS NOT NULL"
(S.codeGen isNotNullQuery)
testIsDistinctFrom :: Test
testIsDistinctFrom = testCase "SELECT is distinct from" assertSelect
where
assertSelect :: Assertion
assertSelect = assertEqual
"SELECT is distinct from is incorrect"
( "SELECT * "
<> "FROM \"People\" "
<> "WHERE \"nickNameAsKind\" IS DISTINCT FROM \"nickNameAsAdult\""
)
(S.codeGen isDistinctFromQuery)
testIsNotDistinctFrom :: Test
testIsNotDistinctFrom = testCase "SELECT is not distinct from" assertSelect
where
assertSelect :: Assertion
assertSelect = assertEqual
"SELECT is not distinct from is incorrect"
( "SELECT * "
<> "FROM \"People\" "
<> "WHERE \"nickNameAsKind\" "
<> "IS NOT DISTINCT FROM \"nickNameAsAdult\""
)
(S.codeGen isNotDistinctFromQuery)
testIsTrue :: Test
testIsTrue = testCase "SELECT is true" assertSelect
where
assertSelect :: Assertion
assertSelect = assertEqual
"SELECT is true is incorrect"
"SELECT * FROM \"People\" WHERE \"married\" IS TRUE"
(S.codeGen isTrueQuery)
testIsNotTrue :: Test
testIsNotTrue = testCase "SELECT is not true" assertSelect
where
assertSelect :: Assertion
assertSelect = assertEqual
"SELECT is not true is incorrect"
"SELECT * FROM \"People\" WHERE \"married\" IS NOT TRUE"
(S.codeGen isNotTrueQuery)
testIsFalse :: Test
testIsFalse = testCase "SELECT is false" assertSelect
where
assertSelect :: Assertion
assertSelect = assertEqual
"SELECT is false is incorrect"
"SELECT * FROM \"People\" WHERE \"married\" IS FALSE"
(S.codeGen isFalseQuery)
testIsNotFalse :: Test
testIsNotFalse = testCase "SELECT is not false" assertSelect
where
assertSelect :: Assertion
assertSelect = assertEqual
"SELECT is not false is incorrect"
"SELECT * FROM \"People\" WHERE \"married\" IS NOT FALSE"
(S.codeGen isNotFalseQuery)
testIsUnknown :: Test
testIsUnknown = testCase "SELECT is Unknown" assertSelect
where
assertSelect :: Assertion
assertSelect = assertEqual
"SELECT is unknown is incorrect"
( "SELECT * "
<> "FROM \"People\" "
<> "WHERE \"nickNameAsKind\" = \"nickNameAsAdult\" IS UNKNOWN"
)
(S.codeGen isUnknownQuery)
testIsNotUnknown :: Test
testIsNotUnknown = testCase "SELECT is not Unknown" assertSelect
where
assertSelect :: Assertion
assertSelect = assertEqual
"SELECT is not unknown is incorrect"
( "SELECT * "
<> "FROM \"People\" "
<> "WHERE \"nickNameAsKind\" = \"nickNameAsAdult\" IS NOT UNKNOWN"
)
(S.codeGen isNotUnknownQuery)
----------------------------------------
-- PostgreSQL
----------------------------------------
testSelectDistinctOnPostgreSQL :: Test
testSelectDistinctOnPostgreSQL = testCase "Select distinct on" assertSelect
where
assertSelect :: Assertion
assertSelect = assertEqual
"Select distinct on query is incorrect"
("SELECT DISTINCT ON (\"firstName\") * "
<> "FROM \"People\" ORDER BY \"age\"")
(P.codeGen distinctOnSelect)
testFromLateralPostgreSQL :: Test
testFromLateralPostgreSQL = testCase "Lateral join" assertSelect
where
assertSelect :: Assertion
assertSelect = assertEqual
"Lateral join is incorrect"
( "SELECT * "
<> "FROM \"Countries\", LATERAL ("
<> "SELECT * FROM \"People\" "
<> "WHERE \"People\".\"countryId\" = \"Countries\".\"countryId\") "
<> "AS \"C\""
)
(P.codeGen fromLateral)
--------------------------------------------------------------------------------
-- PUBLIC
--------------------------------------------------------------------------------
-- | Gather all tests.
tests :: Test
tests = testGroup "Select"
[ testGroup "All vendors"
[ testSelectAllSqLite
, testSelectDistinctSqLite
, testSelectTuple
, testSelect3Tuple
, testSelect4Tuple
, testAdditionSqLite
, testCurrentDateSqLite
, testMultiplicationSqLite
, testCrossJoinSqLite
, testInnerJoinOnSqLite
, testInnerJoinUsingSqLite
, testNaturalInnerJoin
, testLeftJoinOn
, testLeftJoinUsing
, testRightJoinOn
, testFullJoinOn
, testLeftJoinOnAnd
, testSelfJoin
, testCrossJoinAlias
, testCrossRefAlias
, testSubQuery
, testNestedJoins
, testWhereAlias
, testWhereAnd
, testWhereInValues
, testWhereInSelect
, testWhereBetween
, testWhereExists
, testOrderBy
, testOrderByAlias
, testOrderByAscDesc
, testOrderByNull
, testOrderByLimit
, testGroupBy
, testGroupBySum
, testGroupByAlias
, testGroupByComplex
, testGroupBySumHaving
, testHavingComplex
, testUnion
, testUnionCombined
, testUnionAll
, testIntersectAll
, testExcept
, testExceptAll
, testRandomSqLite
, testTrim
, testGreaterThan
, testGreaterThanOrEqualTo
, testSmallerThan
, testSmallerThanOrEqualTo
, testEqualTo
, testNotEqualTo
, testNotBetween
, testIsNull
, testIsNotNull
, testIsDistinctFrom
, testIsNotDistinctFrom
, testIsTrue
, testIsNotTrue
, testIsFalse
, testIsNotFalse
, testIsUnknown
, testIsNotUnknown
]
, testGroup "PostgreSQL"
[ testLastInsertIdPostgreSQL
, testSelectDistinctOnPostgreSQL
, testFromLateralPostgreSQL
]
, testGroup "MariaDb"
[ testLastInsertIdMariaDB
]
, testGroup "SqLite"
[ testLastInsertIdSqLite
]
]
|
momomimachli/Hedsql
|
tests/Database/Hedsql/Tests/Select.hs
|
gpl-3.0
| 29,846 | 0 | 13 | 8,757 | 3,671 | 1,998 | 1,673 | 638 | 1 |
-- This program is free software: you can redistribute it and/or modify it under
-- the terms of the GNU General Public License as published by the Free Software
-- Foundation, either version 3 of the License, or (at your option) any later
-- version.
--
-- This program is distributed in the hope that it will be useful, but WITHOUT
-- ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
-- FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
-- details.
--
-- You should have received a copy of the GNU General Public License along with
-- this program. If not, see <http://www.gnu.org/licenses/>.
-- |
-- Module : ExamplesSpec
-- Description : Runs the examples.
-- Copyright : Copyright 2015 Peter Harpending
-- License : GPL-3
-- Maintainer : [email protected]
-- Stability : experimental
-- Portability : portable
module ExamplesSpec where
import Control.Monad (forM_)
import Data.Algorithm.Diff
import Data.Algorithm.DiffOutput
import Data.List (sort, isSuffixOf)
import System.Directory
import Test.Hspec
import Text.Comarkdown
spec :: Spec
spec =
parallel $
describe "Examples with an input & output file" $ do
dirPath <- runIO $ makeAbsolute "tests/io-examples/"
dirContents <- runIO $ getDirectoryContents dirPath
let dirContents' = drop 2 (sort dirContents)
inputPaths = filter (isSuffixOf "in.md") dirContents'
outputPaths = filter (isSuffixOf "out.md") dirContents'
forM_ (zip inputPaths outputPaths) $
\(ip, op) -> specify (mconcat ["Compiling ", ip, " matches ", op]) $ do
inputFile <- makeAbsolute (mappend dirPath ip)
outputFile <- makeAbsolute (mappend dirPath op)
compiledInput <- comdToMd inputFile
output <- readFile outputFile
compiledInput `shouldBe` output
diffResults :: FilePath -> FilePath -> IO ()
diffResults inputFile outputFile = do
compiledInput <- comdToMd inputFile
output <- readFile outputFile
let diff = getGroupedDiff (lines compiledInput) (lines output)
putStrLn (ppDiff diff)
|
pharpend/comarkdown
|
tests/ExamplesSpec.hs
|
gpl-3.0
| 2,104 | 0 | 16 | 428 | 380 | 199 | 181 | 30 | 1 |
-- | module Category
-- Category takes two object types a and b; a concrete Category is really a morphism a -> b.
--
-- Q: Why not wrap all objects in a single type x, e.g.:
-- class Category cat where
--   type Object cat
--   type Morphism cat :: Object cat -> Object cat -> *
-- A: Two reasons found so far:
-- First: GHC does not accept the definition above; it reports a "defined and used in the same recursive group" error.
-- Second: it would lose GHC's type inference, since morphisms between different objects would all share the same type.
class Category (cat :: k -> k -> *) where
id :: cat a a
(.) :: cat b c -> cat a b -> cat a c
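-- A hedged illustration (not part of the original module): the function arrow
-- is the usual first instance, with Haskell types as objects and plain
-- functions as morphisms. Lambdas are used to avoid clashing with Prelude's
-- id and (.).
instance Category (->) where
  id = \a -> a
  (.) g f = \a -> g (f a)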
|
KenetJervet/mapensee
|
books/category-theory-for-programmers/src/Category.hs
|
gpl-3.0
| 635 | 0 | 9 | 99 | 74 | 42 | 32 | -1 | -1 |
module NoLit
where
import Text.Regex.Posix
import Data.Maybe
import qualified Data.Map as M
import Control.Exception as CE
import Debug.Trace
type Chunk = [String]
type FileContent = String
data TangledFile = TangledFile {
file :: FilePath,
contents :: String
} deriving (Eq,Show)
generateSourceFiles :: String -> M.Map String [String] -> FileContent
generateSourceFiles root chunks = unlines $ concatMap expandLines (chunks M.! root)
where
expandLines :: String -> [String]
expandLines line =
        let mlabel = getLabel $ matchLabel line
        in case mlabel of
            Just label -> concatMap expandLines (chunks M.! label)
Nothing -> [line]
-- Match a chunk header line of the form <name>= and, below, a chunk label line of the form <name>
matchHeader header = (header =~ "^<([a-zA-Z\\/\\.]+|\\*)>=\\s*$") :: (String,String,String,[String])
matchLabel label = (label =~ "^(\\s*)<([a-zA-Z\\/\\.]+)>\\s*$") :: (String,String,String,[String])
getHeader :: (String,String,String,[String]) -> Maybe String
getHeader (_,_,_,[]) = Nothing
getHeader (_,_,_,m:ms) = Just m
getLabel :: (String,String,String,[String]) -> Maybe String
getLabel (_,_,_,s:l:ls) = Just l
getLabel _ = Nothing
createChunksMap :: [[String]] -> M.Map String [String]
createChunksMap chunks = M.fromList $ map (\c -> (extractName $ head c, tail c)) chunks
where
extractName :: String -> String
extractName header = chunkName $ matchHeader header
chunkName (_,_,_,[]) = error "header should be detected"
chunkName (_,_,_,m:ms) = m
slice :: [String] -> [Chunk]
slice [] = []
slice (l:ls)
| l =~ pattern :: Bool = let (chunk, rest) = span (\x -> not (x =~ pattern :: Bool)) ls
in chunk : slice (tail rest)
| otherwise = slice ls
where
pattern = "^-{4,}$"
-- The top-level <*>= chunk lists the files to be generated
tangle :: String -> [TangledFile]
tangle content = map (\x -> TangledFile x (generateSourceFiles x theMap)) files
where
root = case M.lookup "*" theMap of
Just x -> x
Nothing -> error "Malformed file, missing the root \"*\" element"
files = mapMaybe (getLabel . matchLabel) root
theMap = createChunksMap.slice $ lines content
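-- A hedged usage sketch (not part of the original module): a minimal literate
-- document in the format handled above. Chunks sit between lines of four or
-- more dashes, and the "*" chunk lists the files to generate. The names below
-- are illustrative only; tangle exampleDoc should yield a single
-- TangledFile "Main.hs" "main :: IO ()\nmain = putStrLn \"hello\"\n".
exampleDoc :: String
exampleDoc = unlines
    [ "A tiny literate program."
    , "----"
    , "<*>="
    , "<Main.hs>"
    , "----"
    , "The program itself:"
    , "----"
    , "<Main.hs>="
    , "main :: IO ()"
    , "main = putStrLn \"hello\""
    , "----"
    ]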
|
tonicebrian/nolit
|
src/NoLit.hs
|
gpl-3.0
| 2,313 | 1 | 15 | 556 | 806 | 441 | 365 | 48 | 2 |
{-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE RankNTypes #-}
--------------------------------------------------------------------------------
-- |
-- Module : Dhek.Engine.Type
--
-- Engine type declarations
--------------------------------------------------------------------------------
module Dhek.Engine.Type where
--------------------------------------------------------------------------------
import Control.Applicative
--------------------------------------------------------------------------------
import Control.Lens
import Control.Monad.State
import qualified Data.IntMap as I
import Data.Text
import Graphics.UI.Gtk (Modifier)
--------------------------------------------------------------------------------
import Dhek.Cartesian
import Dhek.Engine.Instr
import Dhek.Engine.Misc.LastHistory
import Dhek.Types
--------------------------------------------------------------------------------
-- | Mode Monad
--------------------------------------------------------------------------------
class (Monad m, Applicative m) => ModeMonad m where
mMove :: DrawEnv -> m ()
mPress :: DrawEnv -> m ()
mRelease :: DrawEnv -> m ()
mDrawing :: PageItem -> Ratio -> m ()
mKeyPress :: KbEnv -> m ()
mKeyRelease :: KbEnv -> m ()
mEnter :: m ()
mLeave :: m ()
--------------------------------------------------------------------------------
-- | Declarations
--------------------------------------------------------------------------------
type Pos = (Double, Double)
type Ratio = Double
type Width = Double
type Zoom = Double
--------------------------------------------------------------------------------
data DhekMode
= DhekNormal
| DhekDuplication
| DhekSelection
--------------------------------------------------------------------------------
newtype M a = M (forall m. ModeMonad m => m a)
--------------------------------------------------------------------------------
newtype Mode = Mode (forall a. M a -> EngineState -> IO EngineState)
--------------------------------------------------------------------------------
-- | A cleanup handler is expected to be able to manipulate the @EngineState@
-- and to perform IO actions
type EngineCtx m = (MonadIO m, MonadState EngineState m)
--------------------------------------------------------------------------------
-- | Holds an Engine mode and a cleanup handler. @ModeManager@ manages anything
-- related to a @Mode@'s lifecycle
data ModeManager
= ModeManager
{ mgrMode :: Mode
, mgrCleanup :: forall m. EngineCtx m => m ()
}
--------------------------------------------------------------------------------
data DrawEnv
= DrawEnv
{ drawPointer :: Point2D -- ^ (x, y) pointer position
    , drawRects    :: [Rect]      -- ^ Page rectangles
, drawRatio :: Double -- ^ Page ratio
, drawModifier :: [Modifier]
}
--------------------------------------------------------------------------------
data KbEnv
= KbEnv
{ kbKeyName :: Text
, kbModifier :: [Modifier]
}
--------------------------------------------------------------------------------
data DrawState
= DrawState
{ _drawSelected :: !(LastHistory Int) -- hold Rect id
, _drawOverRect :: !(Maybe Rect)
, _drawFreshId :: !Int
, _drawNewGuide :: !(Maybe Guide)
, _drawOverGuide :: !(Maybe Guide)
}
--------------------------------------------------------------------------------
data EngineState = EngineState
{ _engineCurPage :: {-# UNPACK #-} !Int
, _engineCurZoom :: {-# UNPACK #-} !Int
, _engineRectId :: {-# UNPACK #-} !Int
, _engineOverlap :: !Bool
, _engineMagnetic :: !Bool
, _engineDraw :: !Bool
, _enginePropLabel :: !String
, _enginePropType :: !(Maybe String)
, _enginePrevPos :: !(Double, Double)
, _engineBoards :: !Boards
, _engineDrawState :: !DrawState
, _engineBaseWidth :: !Int
, _engineThick :: !Double
, _engineEventStack :: ![Event]
}
--------------------------------------------------------------------------------
data EngineEnv
= EngineEnv
{ _engineFilename :: !String }
--------------------------------------------------------------------------------
-- | Constructors
--------------------------------------------------------------------------------
drawStateNew :: DrawState
drawStateNew = DrawState{ _drawSelected = lhNew
, _drawOverRect = Nothing
, _drawFreshId = 0
, _drawNewGuide = Nothing
, _drawOverGuide = Nothing
}
--------------------------------------------------------------------------------
-- | Lenses
--------------------------------------------------------------------------------
makeLenses ''EngineState
makeLenses ''DrawState
--------------------------------------------------------------------------------
-- | Mode instances
--------------------------------------------------------------------------------
instance Functor M where
fmap f (M m) = M $ fmap f m
--------------------------------------------------------------------------------
instance Applicative M where
pure a = M $ pure a
(M f) <*> (M a) = M (f <*> a)
--------------------------------------------------------------------------------
instance Monad M where
return a = M $ return a
M m >>= f = M (m >>= \a -> runM (f a))
--------------------------------------------------------------------------------
instance ModeMonad M where
mMove = move
mPress = press
mRelease = release
mDrawing = drawing
mKeyPress = keyPress
mKeyRelease = keyRelease
mEnter = enter
mLeave = leave
--------------------------------------------------------------------------------
-- | Mode Run
--------------------------------------------------------------------------------
runM :: ModeMonad m => M a -> m a
runM (M m) = m
--------------------------------------------------------------------------------
runMode :: Mode -> EngineState -> M a -> IO EngineState
runMode (Mode k) s m = k m s
--------------------------------------------------------------------------------
-- | Mode callback handlers
--------------------------------------------------------------------------------
move :: DrawEnv -> M ()
move e = M $ mMove e
--------------------------------------------------------------------------------
press :: DrawEnv -> M ()
press e = M $ mPress e
--------------------------------------------------------------------------------
release :: DrawEnv -> M ()
release e = M $ mRelease e
--------------------------------------------------------------------------------
drawing :: PageItem -> Ratio -> M ()
drawing p r = M $ mDrawing p r
--------------------------------------------------------------------------------
keyPress :: KbEnv -> M ()
keyPress e = M $ mKeyPress e
--------------------------------------------------------------------------------
keyRelease :: KbEnv -> M ()
keyRelease e = M $ mKeyRelease e
--------------------------------------------------------------------------------
enter :: M ()
enter = M mEnter
--------------------------------------------------------------------------------
leave :: M ()
leave = M mLeave
--------------------------------------------------------------------------------
-- | Helpers
--------------------------------------------------------------------------------
engineStateGetRects :: MonadState EngineState m => m [Rect]
engineStateGetRects = do
pid <- use engineCurPage
use $ engineBoards.boardsMap.at pid.traverse.boardRects.to I.elems
--------------------------------------------------------------------------------
engineStateSetRects :: MonadState EngineState m => [Rect] -> m ()
engineStateSetRects rs = do
pid <- use engineCurPage
forM_ rs $ \r -> do
let rid = r ^. rectId
engineBoards.boardsMap.at pid.traverse.boardRects.at rid ?= r
--------------------------------------------------------------------------------
engineStateSetRect :: MonadState EngineState m => Rect -> m ()
engineStateSetRect r
= do pid <- use engineCurPage
let rid = r ^. rectId
engineBoards.boardsMap.at pid.traverse.boardRects.at rid ?= r
--------------------------------------------------------------------------------
engineStateGetRect :: MonadState EngineState m => Int -> m (Maybe Rect)
engineStateGetRect rid
= do pid <- use engineCurPage
m <- use $ engineBoards.boardsMap.at pid.traverse.boardRects
return $ I.lookup rid m
--------------------------------------------------------------------------------
engineStateGetGuides :: MonadState EngineState m => m [Guide]
engineStateGetGuides
= do pid <- use engineCurPage
use $ engineBoards.boardsMap.at pid.traverse.boardGuides
--------------------------------------------------------------------------------
engineStateSetGuides :: MonadState EngineState m => [Guide] -> m ()
engineStateSetGuides gs
= do pid <- use engineCurPage
engineBoards.boardsMap.at pid.traverse.boardGuides .= gs
|
cchantep/dhek
|
Dhek/Engine/Type.hs
|
gpl-3.0
| 9,319 | 0 | 15 | 1,620 | 1,684 | 909 | 775 | -1 | -1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.Monitoring.Projects.NotificationChannels.GetVerificationCode
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Requests a verification code for an already verified channel that can
-- then be used in a call to VerifyNotificationChannel() on a different
-- channel with an equivalent identity in the same or in a different
-- project. This makes it possible to copy a channel between projects
-- without requiring manual reverification of the channel. If the channel
-- is not in the verified state, this method will fail (in other words,
-- this may only be used if the SendNotificationChannelVerificationCode and
-- VerifyNotificationChannel paths have already been used to put the given
-- channel into the verified state).There is no guarantee that the
-- verification codes returned by this method will be of a similar
-- structure or form as the ones that are delivered to the channel via
-- SendNotificationChannelVerificationCode; while
-- VerifyNotificationChannel() will recognize both the codes delivered via
-- SendNotificationChannelVerificationCode() and returned from
-- GetNotificationChannelVerificationCode(), it is typically the case that
-- the verification codes delivered via
-- SendNotificationChannelVerificationCode() will be shorter and also have
-- a shorter expiration (e.g. codes such as \"G-123456\") whereas
-- GetVerificationCode() will typically return a much longer, websafe base
-- 64 encoded string that has a longer expiration time.
--
-- /See:/ <https://cloud.google.com/monitoring/api/ Cloud Monitoring API Reference> for @monitoring.projects.notificationChannels.getVerificationCode@.
module Network.Google.Resource.Monitoring.Projects.NotificationChannels.GetVerificationCode
(
-- * REST Resource
ProjectsNotificationChannelsGetVerificationCodeResource
-- * Creating a Request
, projectsNotificationChannelsGetVerificationCode
, ProjectsNotificationChannelsGetVerificationCode
-- * Request Lenses
, pncgvcXgafv
, pncgvcUploadProtocol
, pncgvcAccessToken
, pncgvcUploadType
, pncgvcPayload
, pncgvcName
, pncgvcCallback
) where
import Network.Google.Monitoring.Types
import Network.Google.Prelude
-- | A resource alias for @monitoring.projects.notificationChannels.getVerificationCode@ method which the
-- 'ProjectsNotificationChannelsGetVerificationCode' request conforms to.
type ProjectsNotificationChannelsGetVerificationCodeResource
=
"v3" :>
CaptureMode "name" "getVerificationCode" Text :>
QueryParam "$.xgafv" Xgafv :>
QueryParam "upload_protocol" Text :>
QueryParam "access_token" Text :>
QueryParam "uploadType" Text :>
QueryParam "callback" Text :>
QueryParam "alt" AltJSON :>
ReqBody '[JSON]
GetNotificationChannelVerificationCodeRequest
:>
Post '[JSON]
GetNotificationChannelVerificationCodeResponse
-- | Requests a verification code for an already verified channel that can
-- then be used in a call to VerifyNotificationChannel() on a different
-- channel with an equivalent identity in the same or in a different
-- project. This makes it possible to copy a channel between projects
-- without requiring manual reverification of the channel. If the channel
-- is not in the verified state, this method will fail (in other words,
-- this may only be used if the SendNotificationChannelVerificationCode and
-- VerifyNotificationChannel paths have already been used to put the given
-- channel into the verified state).There is no guarantee that the
-- verification codes returned by this method will be of a similar
-- structure or form as the ones that are delivered to the channel via
-- SendNotificationChannelVerificationCode; while
-- VerifyNotificationChannel() will recognize both the codes delivered via
-- SendNotificationChannelVerificationCode() and returned from
-- GetNotificationChannelVerificationCode(), it is typically the case that
-- the verification codes delivered via
-- SendNotificationChannelVerificationCode() will be shorter and also have
-- a shorter expiration (e.g. codes such as \"G-123456\") whereas
-- GetVerificationCode() will typically return a much longer, websafe base
-- 64 encoded string that has a longer expiration time.
--
-- /See:/ 'projectsNotificationChannelsGetVerificationCode' smart constructor.
data ProjectsNotificationChannelsGetVerificationCode =
ProjectsNotificationChannelsGetVerificationCode'
{ _pncgvcXgafv :: !(Maybe Xgafv)
, _pncgvcUploadProtocol :: !(Maybe Text)
, _pncgvcAccessToken :: !(Maybe Text)
, _pncgvcUploadType :: !(Maybe Text)
, _pncgvcPayload :: !GetNotificationChannelVerificationCodeRequest
, _pncgvcName :: !Text
, _pncgvcCallback :: !(Maybe Text)
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'ProjectsNotificationChannelsGetVerificationCode' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'pncgvcXgafv'
--
-- * 'pncgvcUploadProtocol'
--
-- * 'pncgvcAccessToken'
--
-- * 'pncgvcUploadType'
--
-- * 'pncgvcPayload'
--
-- * 'pncgvcName'
--
-- * 'pncgvcCallback'
projectsNotificationChannelsGetVerificationCode
:: GetNotificationChannelVerificationCodeRequest -- ^ 'pncgvcPayload'
-> Text -- ^ 'pncgvcName'
-> ProjectsNotificationChannelsGetVerificationCode
projectsNotificationChannelsGetVerificationCode pPncgvcPayload_ pPncgvcName_ =
ProjectsNotificationChannelsGetVerificationCode'
{ _pncgvcXgafv = Nothing
, _pncgvcUploadProtocol = Nothing
, _pncgvcAccessToken = Nothing
, _pncgvcUploadType = Nothing
, _pncgvcPayload = pPncgvcPayload_
, _pncgvcName = pPncgvcName_
, _pncgvcCallback = Nothing
}
-- | V1 error format.
pncgvcXgafv :: Lens' ProjectsNotificationChannelsGetVerificationCode (Maybe Xgafv)
pncgvcXgafv
= lens _pncgvcXgafv (\ s a -> s{_pncgvcXgafv = a})
-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
pncgvcUploadProtocol :: Lens' ProjectsNotificationChannelsGetVerificationCode (Maybe Text)
pncgvcUploadProtocol
= lens _pncgvcUploadProtocol
(\ s a -> s{_pncgvcUploadProtocol = a})
-- | OAuth access token.
pncgvcAccessToken :: Lens' ProjectsNotificationChannelsGetVerificationCode (Maybe Text)
pncgvcAccessToken
= lens _pncgvcAccessToken
(\ s a -> s{_pncgvcAccessToken = a})
-- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
pncgvcUploadType :: Lens' ProjectsNotificationChannelsGetVerificationCode (Maybe Text)
pncgvcUploadType
= lens _pncgvcUploadType
(\ s a -> s{_pncgvcUploadType = a})
-- | Multipart request metadata.
pncgvcPayload :: Lens' ProjectsNotificationChannelsGetVerificationCode GetNotificationChannelVerificationCodeRequest
pncgvcPayload
= lens _pncgvcPayload
(\ s a -> s{_pncgvcPayload = a})
-- | Required. The notification channel for which a verification code is to
-- be generated and retrieved. This must name a channel that is already
-- verified; if the specified channel is not verified, the request will
-- fail.
pncgvcName :: Lens' ProjectsNotificationChannelsGetVerificationCode Text
pncgvcName
= lens _pncgvcName (\ s a -> s{_pncgvcName = a})
-- | JSONP
pncgvcCallback :: Lens' ProjectsNotificationChannelsGetVerificationCode (Maybe Text)
pncgvcCallback
= lens _pncgvcCallback
(\ s a -> s{_pncgvcCallback = a})
instance GoogleRequest
ProjectsNotificationChannelsGetVerificationCode
where
type Rs
ProjectsNotificationChannelsGetVerificationCode
= GetNotificationChannelVerificationCodeResponse
type Scopes
ProjectsNotificationChannelsGetVerificationCode
=
'["https://www.googleapis.com/auth/cloud-platform",
"https://www.googleapis.com/auth/monitoring"]
requestClient
ProjectsNotificationChannelsGetVerificationCode'{..}
= go _pncgvcName _pncgvcXgafv _pncgvcUploadProtocol
_pncgvcAccessToken
_pncgvcUploadType
_pncgvcCallback
(Just AltJSON)
_pncgvcPayload
monitoringService
where go
= buildClient
(Proxy ::
Proxy
ProjectsNotificationChannelsGetVerificationCodeResource)
mempty
|
brendanhay/gogol
|
gogol-monitoring/gen/Network/Google/Resource/Monitoring/Projects/NotificationChannels/GetVerificationCode.hs
|
mpl-2.0
| 9,265 | 0 | 16 | 1,814 | 821 | 496 | 325 | 127 | 1 |
module Sound.Analysis.Vamp.OutputDescriptor (
OutputDescriptor(..)
, Bins(..)
, Extents(..)
, Quantization(..)
, SampleType(..)
, peekOutputDescriptor
) where
import Bindings.Sound.Analysis.Vamp
import qualified Bindings.Sound.Analysis.Vamp.Version2 as Version2
import Foreign
import Foreign.C
import Sound.Analysis.Vamp.Types
data OutputDescriptor = OutputDescriptor {
identifier :: String
, name :: String
, description :: String
, unit :: String
, bins :: Maybe Bins
, extents :: Maybe Extents
, quantization :: Maybe Quantization
, sampleType :: SampleType
, hasDuration :: Bool
} deriving (Eq, Read, Show)
peekOutputDescriptor ::
C'VampOutputDescriptor
-> Maybe Version2.C'VampOutputDescriptor
-> IO OutputDescriptor
peekOutputDescriptor x v2 = do
x1 <- peekCString (c'VampOutputDescriptor'identifier x)
x2 <- peekCString (c'VampOutputDescriptor'name x)
x3 <- peekCString (c'VampOutputDescriptor'description x)
x4 <- peekCString (c'VampOutputDescriptor'unit x)
x5 <- if toBool (c'VampOutputDescriptor'hasFixedBinCount x)
then do
let n = fromIntegral (c'VampOutputDescriptor'binCount x)
if n > 0 && c'VampOutputDescriptor'binNames x /= nullPtr
then peekArray n (c'VampOutputDescriptor'binNames x)
>>= mapM peekCString
>>= return . Just . Bins n
else return Nothing
else return Nothing
let x6 = if toBool (c'VampOutputDescriptor'hasKnownExtents x)
then Just (Extents (realToFrac (c'VampOutputDescriptor'minValue x))
(realToFrac (c'VampOutputDescriptor'maxValue x)))
else Nothing
x7 = if toBool (c'VampOutputDescriptor'isQuantized x)
then Just (Quantization (realToFrac (c'VampOutputDescriptor'quantizeStep x)) [])
else Nothing
st = c'VampOutputDescriptor'sampleType x
x8 = if st == c'vampOneSamplePerStep
then OneSamplePerStep
else if st == c'vampFixedSampleRate
then FixedSampleRate (realToFrac (c'VampOutputDescriptor'sampleRate x))
else if st == c'vampVariableSampleRate
then VariableSampleRate (realToFrac (c'VampOutputDescriptor'sampleRate x))
else error ("Invalid sample type " ++ show st)
x9 = maybe False ((/=) 0 . Version2.c'VampOutputDescriptor'hasDuration) v2
return $ OutputDescriptor x1 x2 x3 x4 x5 x6 x7 x8 x9
|
kaoskorobase/hvamp
|
Sound/Analysis/Vamp/OutputDescriptor.hs
|
lgpl-3.0
| 2,643 | 0 | 18 | 766 | 622 | 331 | 291 | 58 | 8 |
module Propellor.Ssh where
import Propellor.Base
import Utility.UserInfo
import Utility.FileSystemEncoding
import System.PosixCompat
import Data.Time.Clock.POSIX
import qualified Data.Hash.MD5 as MD5
-- Parameters can be passed to both ssh and scp, to enable a ssh connection
-- caching socket.
--
-- If the socket already exists, check if its mtime is older than 10
-- minutes, and if so stop that ssh process, in order to not try to
-- use an old stale connection. (atime would be nicer, but there's
-- a good chance a laptop uses noatime)
sshCachingParams :: HostName -> IO [CommandParam]
sshCachingParams hn = do
home <- myHomeDir
let socketfile = socketFile home hn
createDirectoryIfMissing False (takeDirectory socketfile)
let ps =
[ Param "-o"
, Param ("ControlPath=" ++ socketfile)
, Param "-o", Param "ControlMaster=auto"
, Param "-o", Param "ControlPersist=yes"
]
maybe noop (expireold ps socketfile)
=<< catchMaybeIO (getFileStatus socketfile)
return ps
where
expireold ps f s = do
now <- truncate <$> getPOSIXTime :: IO Integer
if modificationTime s > fromIntegral now - tenminutes
then touchFile f
else do
void $ boolSystem "ssh" $
[ Param "-O", Param "stop" ] ++ ps ++
[ Param "localhost" ]
nukeFile f
tenminutes = 600
-- Generate a socket filename inside the home directory.
--
-- There's a limit in the size of unix domain sockets, of approximately
-- 100 bytes. Try to never construct a filename longer than that.
--
-- When space allows, include the full hostname in the socket filename.
-- Otherwise, include at least a partial md5sum of it,
-- to avoid using the same socket file for multiple hosts.
socketFile :: FilePath -> HostName -> FilePath
socketFile home hn = selectSocketFile
[ sshdir </> hn ++ ".sock"
, sshdir </> hn
, sshdir </> take 10 hn ++ "-" ++ md5
, sshdir </> md5
, home </> ".propellor-" ++ md5
]
(".propellor-" ++ md5)
where
sshdir = home </> ".ssh" </> "propellor"
md5 = take 9 $ MD5.md5s $ MD5.Str hn
selectSocketFile :: [FilePath] -> FilePath -> FilePath
selectSocketFile [] d = d
selectSocketFile [f] _ = f
selectSocketFile (f:fs) d
| valid_unix_socket_path f = f
| otherwise = selectSocketFile fs d
valid_unix_socket_path :: FilePath -> Bool
valid_unix_socket_path f = length (decodeW8 f) < 100 - reservedbyssh
where
-- ssh tacks on 17 or so characters when making a socket
reservedbyssh = 18
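-- A hedged usage sketch (not part of the original module): run a command on a
-- remote host over the cached connection. The function name, the root@ login
-- and the remote command are illustrative assumptions; boolSystem and Param
-- are already in scope via Propellor.Base.
sshRunExample :: HostName -> String -> IO Bool
sshRunExample hn remotecommand = do
    ps <- sshCachingParams hn
    boolSystem "ssh" (ps ++ [Param ("root@" ++ hn), Param remotecommand])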
|
ArchiveTeam/glowing-computing-machine
|
src/Propellor/Ssh.hs
|
bsd-2-clause
| 2,419 | 21 | 16 | 467 | 585 | 303 | 282 | 49 | 2 |
{-# OPTIONS -fglasgow-exts #-}
-----------------------------------------------------------------------------
{-| Module : Opengl.hs
Copyright : (c) David Harley 2010
Project : qtHaskell
Version : 1.1.4
Modified : 2010-09-02 17:02:32
Warning : this file is machine generated - do not modify.
--}
-----------------------------------------------------------------------------
module Qtc.Classes.Opengl (
QbindTexture(..)
, QdeleteTexture(..)
, QdoneCurrent(..)
, QdoubleBuffer(..)
, QisSharing(..)
, QmakeCurrent(..)
, QswapBuffers(..)
) where
import Qtc.ClassTypes.Core
import Qth.ClassTypes.Core
import Qtc.ClassTypes.Gui
import Qtc.ClassTypes.Opengl
class QbindTexture a b where
bindTexture :: a -> b -> IO (Int)
class QdeleteTexture a b where
deleteTexture :: a -> b -> IO ()
class QdoneCurrent a b c | a -> c where
doneCurrent :: a -> b -> c
class QdoubleBuffer a b where
doubleBuffer :: a -> b -> IO (Bool)
class QisSharing a b where
isSharing :: a -> b -> IO (Bool)
class QmakeCurrent a b c | a -> c where
makeCurrent :: a -> b -> c
class QswapBuffers a b where
swapBuffers :: a -> b -> IO ()
|
uduki/hsQt
|
Qtc/Classes/Opengl.hs
|
bsd-2-clause
| 1,165 | 0 | 10 | 231 | 298 | 168 | 130 | -1 | -1 |
module Main where
import System.ZMQ
import Control.Concurrent (forkIO, threadDelay)
import Data.ByteString.Char8 (pack, unpack)
import Control.Monad (forM_, forever, when)
nbrClients :: Int
nbrClients = 10
nbrWorkers :: Int
nbrWorkers = 3
workerThread :: Show a => String -> Context -> a -> IO ()
workerThread url ctx i = withSocket ctx Req $ \socket -> do
let identity = "Worker-" ++ show i
setOption socket (Identity identity)
connect socket url
send socket (pack "READY") []
forever $ do
address <- receive socket []
empty <- receive socket []
request <- fmap unpack $ receive socket []
putStrLn $ identity ++ ": " ++ request
send socket address [SndMore]
send socket (pack "") [SndMore]
send socket (pack "OK") []
clientThread :: Show a => String -> Context -> a -> IO ()
clientThread url ctx i = withSocket ctx Req $ \socket -> do
let identity = "Client-" ++ show i
setOption socket (Identity identity)
connect socket url
send socket (pack "HELLO") []
reply <- fmap unpack $ receive socket []
putStrLn $ identity ++ ": " ++ reply
-- Eventually we could put all of this in a single data type
backendFunc :: PollEvent -> Int -> [String] -> Int -> Socket a -> Socket b -> IO (Int, [String], Int)
backendFunc None a b c _ _ = return (a, b, c)
backendFunc In avail_workers workers_list client_nbr backend frontend = do
worker_addr <- receive backend []
when (avail_workers >= nbrWorkers) $ error ""
empty <- fmap unpack $ receive backend []
when (empty /= "") $ error ""
let avail' = avail_workers + 1
let work_list = workers_list ++ [show avail']
client_addr <- fmap unpack $ receive backend []
if client_addr == "READY"
then return (avail', work_list, client_nbr)
else do
empty' <- fmap unpack $ receive backend []
when (empty' /= "") $ error ""
reply <- receive backend []
send frontend (pack client_addr) [SndMore]
send frontend (pack "") [SndMore]
send frontend reply []
return (avail', work_list, client_nbr - 1)
frontendFunc :: PollEvent -> Int -> [String] -> Int -> Socket a -> Socket b -> IO (Int, [String])
frontendFunc None a b _ _ _ = return (a, b)
frontendFunc In 0 b _ _ _ = return (0, b)
frontendFunc In avail_workers workers_list client_nbr frontend backend = do
client_addr <- receive frontend []
empty <- fmap unpack $ receive frontend []
when (empty /= "") $ error ""
request <- receive frontend []
let worker_id = head workers_list
send backend (pack $ show worker_id) [SndMore]
send backend (pack "") [SndMore]
send backend client_addr [SndMore]
send backend (pack "") [SndMore]
send backend request []
return (avail_workers - 1, tail workers_list)
lruQueueFunc :: Int -> [String] -> Int -> Socket a -> Socket a1 -> IO ()
lruQueueFunc avail_workers workers_list client_nbr backend frontend = do
[S backend' res1, S frontend' res2] <- poll [S backend In, S frontend In] (-1)
(avail_workers', workers_list', client_nbr') <- backendFunc res1 avail_workers workers_list client_nbr backend frontend
when (client_nbr' > 0) $ do
(avail_workers'', workers_list'') <- frontendFunc res2 avail_workers' workers_list' client_nbr' frontend backend
lruQueueFunc avail_workers'' workers_list'' client_nbr' backend frontend
main :: IO ()
main = do
let url_worker = "inproc://workers"
url_client = "inproc://clients"
client_nbr = nbrClients
withContext 1 $ \context -> do
withSocket context XRep $ \frontend -> do
bind frontend url_client
withSocket context XRep $ \backend -> do
bind backend url_worker
forM_ [1..nbrWorkers] $ \i -> forkIO (workerThread url_worker context i)
forM_ [1..nbrClients] $ \i -> forkIO (clientThread url_client context i)
lruQueueFunc 0 [] client_nbr backend frontend
threadDelay $ 1 * 1000 * 1000
|
krattai/noo-ebs
|
docs/zeroMQ-guide2/examples/Haskell/lruqueue.hs
|
bsd-2-clause
| 4,201 | 0 | 23 | 1,162 | 1,526 | 733 | 793 | 87 | 2 |
{-# LANGUAGE OverloadedStrings, ScopedTypeVariables #-}
module ShopData.OrderAddress where
import Database.PostgreSQL.Simple
import qualified Data.Text as DT
import qualified Snap.Snaplet.PostgresqlSimple as PS
import Database.PostgreSQL.Simple.ToField
import Database.PostgreSQL.Simple.FromField
( FromField (fromField), typeOid, returnError)
import Database.PostgreSQL.Simple.TypeInfo.Static (typoid, varchar)
import qualified Data.ByteString.Char8 as C8
data AddressType = Billing | Shipping
deriving (Read, Show)
instance FromField AddressType where
fromField f mdata =
if typeOid f /= typoid varchar
then returnError Incompatible f ""
else case C8.unpack `fmap` mdata of
Nothing -> returnError UnexpectedNull f ""
Just dat ->
case [ x | (x,t) <- reads dat, ("","") <- lex t ] of
[x] -> return x
_ -> returnError ConversionFailed f dat
instance ToField AddressType where
toField x = toField $ show $ x
data OrderAddress = OrderAddress
{ orderAddressId :: Int,
orderId :: Int,
addressType :: AddressType,
fullName :: DT.Text,
line1 :: DT.Text,
line2 :: DT.Text,
city :: DT.Text,
state :: DT.Text,
postcode :: DT.Text,
country :: DT.Text
} deriving (Show)
instance FromRow OrderAddress where
fromRow = OrderAddress <$> PS.field <*> PS.field <*> PS.field <*> PS.field <*> PS.field
<*> PS.field <*> PS.field <*> PS.field <*> PS.field <*> PS.field
orderAddress :: PS.HasPostgres m => Int -> AddressType -> m OrderAddress
orderAddress oid at = do
results :: [OrderAddress] <- PS.query " SELECT \
\ order_address_id,\
\ order_id,\
\ address_type,\
\ full_name,\
\ line_1,\
\ line_2,\
\ city,\
\ state,\
\ postcode,\
\ country\
\ FROM\
\ order_addresses\
\ WHERE\
\ order_id = ? AND\
\ address_type = ?"
(oid, at)
return $ head results
saveOrderAddress :: PS.HasPostgres m => OrderAddress -> m Int
saveOrderAddress oa = do
ids :: [[Int]] <- PS.query
" INSERT INTO order_addresses (\
\ order_id,\
\ address_type,\
\ full_name,\
\ line_1,\
\ line_2,\
\ city,\
\ state,\
\ postcode,\
\ country)\
\ VALUES\
\ (?, ?, ?, ?, ?, ?, ?, ?, ?)\
\ RETURNING order_address_id"
(orderId oa, (DT.pack $ show $ addressType oa), fullName oa,
line1 oa, line2 oa, city oa, state oa,
postcode oa, country oa)
return $ head $ head $ ids
|
rjohnsondev/haskellshop
|
src/ShopData/OrderAddress.hs
|
bsd-2-clause
| 3,557 | 0 | 16 | 1,677 | 642 | 351 | 291 | 52 | 1 |
module Dixi.Hamlet where
import Text.Hamlet
import Language.Haskell.TH.Quote
hml :: QuasiQuoter
hml = hamletWithSettings htmlRules (defaultHamletSettings { hamletNewlines = NoNewlines})
|
liamoc/dixi
|
Dixi/Hamlet.hs
|
bsd-3-clause
| 188 | 0 | 8 | 21 | 44 | 27 | 17 | 5 | 1 |
module Control.Monad.Trans.Coroutine
( Coroutine, runCoroutine
, Yield (..), yield
, Await (..), await
, Request (..), request
)where
import Control.Monad.Trans.Class
import Control.Monad.IO.Class
-- | coroutine monad. Here, @m@ is the underlying monad and @f@ is the suspension functor.
-- A suspension functor is a type constructor that, applied to the type (Coroutine f m a),
-- gives the type of a suspension. See the entries for the specific suspension functors below
newtype Coroutine f m a = Coroutine ( m (Either (f (Coroutine f m a) ) a ) )
-- | run a coroutine, returning either a suspension wrapped in the suspension functor
-- or the final result
runCoroutine :: Coroutine f m a -> m (Either (f (Coroutine f m a)) a)
runCoroutine (Coroutine m) = m
-- not exported.
coroutine :: m (Either (f (Coroutine f m a)) a) -> Coroutine f m a
coroutine = Coroutine
instance (Functor f, Functor m) => Functor (Coroutine f m) where
fmap f = coroutine . fmap (either (Left . fmap (fmap f) ) (Right . f) ). runCoroutine
instance (Functor f, Monad m) => Monad (Coroutine f m) where
return = coroutine . return . Right
m >>= k = coroutine $ runCoroutine m >>=
either (return . Left . fmap ( >>= k) ) (runCoroutine . k)
instance Functor f => MonadTrans (Coroutine f) where
lift m = coroutine $ m >>= return . Right
instance (Functor f, MonadIO m) => MonadIO (Coroutine f m) where
liftIO = lift . liftIO
--------------------------------------------------------------------------------------------
-- | this suspension functor is a combination of a returned result @a@ and a suspension @b@
data Yield a b = Yield a b
instance Functor (Yield a) where
fmap f (Yield a b) = Yield a (f b)
-- | provide a partial result from a coroutine that @Yield@s values of type @a@
yield :: Monad m => a -> Coroutine (Yield a) m ()
yield a = coroutine $ return $ Left $ Yield a $ return ()
--------------------------------------------------------------
-- | this suspension functor represents a suspension @Await@ing more input from the caller.
-- It does NOT provide a way to signal the end of the computation when no more input can be provided, though...
data Await a b = Await (a -> b)
instance Functor (Await a) where
fmap f (Await t) = Await $ f . t
-- | request more input from within the coroutine
await :: Monad m => Coroutine (Await a) m a
await = coroutine $ return $ Left $ Await return
---------------------------------------------------------------
-- | this suspension functor represents a suspension expecting a response of type @b@
-- and passing a request of type @a@
data Request a b c = Request a (b -> c)
instance Functor (Request a b) where
fmap f (Request a r) = Request a (f . r)
-- | suspend the computation, passing a request to the caller
request :: Monad m => a -> Coroutine (Request a b) m b
request a = coroutine $ return $ Left $ Request a return
--
-- Some agreements to be used in derived libraries.
--
-- If you wish to allow a premature end of a suspended computation,
-- pass the caller a computation for it. I.e. instead of an @Await a@ suspension, use
-- @Request CFE a@, where CFE is a computation in the underlying monad that yields the needed result
-- and performs all necessary cleanup. Of course, one may also end by passing a signal value from the caller,
-- but that is error-prone.
--
-- If you need error handling, keep the transformer for it immediately under the Coroutine transformer.
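--
-- A hedged usage sketch (not part of the original module): a producer that
-- yields the numbers 1..n, and a driver in IO that prints every yielded value.
-- The names countTo and printAll are illustrative only.
countTo :: Monad m => Int -> Coroutine (Yield Int) m ()
countTo n = mapM_ yield [1 .. n]
printAll :: Coroutine (Yield Int) IO () -> IO ()
printAll c = do
    step <- runCoroutine c
    case step of
        Left (Yield x rest) -> print x >> printAll rest
        Right () -> return ()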
|
permeakra/yamtl
|
Control/Monad/Trans/Coroutine.hs
|
bsd-3-clause
| 3,329 | 4 | 15 | 616 | 838 | 451 | 387 | 37 | 1 |
{-|
Module : Git.Details
Description : Functions and data types for getting the details of git projects
Copyright : (c) Michael Klein, 2016
License : BSD3
Maintainer : lambdamichael(at)gmail.com
-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE ImpredicativeTypes #-}
{-# LANGUAGE OverloadedStrings #-}
module Git.Details where
import Control.Applicative (many)
import Control.Monad (foldM)
import Data.Attoparsec.Text ( Parser
, endOfInput
, parseOnly
)
import qualified Data.Map.Strict as Map
import qualified Data.Text as T
import Data.Text.IO (readFile)
import Git.Types (Commit(..), SHA1(..))
import Git.Types.Parse (parseLogLine)
import Prelude hiding (readFile)
import System.Directory (doesDirectoryExist, doesFileExist)
-- import System.Process.Utils (simpleRun)
import Data.Git.Details (Details(..))
import Data.Git.Details.Parse (detailsParser)
import Control.Lens.Operators ((.~), (&))
import Data.Conduit (Conduit, Producer, Source, (=$=), yield)
import Data.Conduit.Process.Utils
import Data.ByteString (ByteString)
import Conduit (encodeUtf8C, lift)
import Control.Exception.Base (SomeException)
import Data.Conduit.Attoparsec (ParseError, conduitParserEither)
-- 1. make sure valid git dir (has .git dir)
-- 2. get project details: 'git remote show origin'
-- 3. get times/hashes for all commits: 'git log --pretty="%H<>%cd"' (hash<>date)
-- 4. ensure given file in dir
-- 5. attempt to parse current file (fail otherwise)
-- 6. for each commit, get and parse -> Data.Map: git show [hash]:[path] > new_path
-- 7. compile results into single tree
-- 8. output results
-- -- | `Attribute`s can be unfolded into a `Tree` and otherwise are named values.
-- data Attribute = Attr { parent :: Maybe Attribute -- ^ The parent attribute. E.g. @test@ might be the parent of @dateTest@.
-- , name :: T.Text -- ^ The name of the attribute
-- , value :: Maybe (forall a. TextShow a => a) -- ^ The value of the input, which must be a member of the class `TextShow`
-- }
-- | Check for @.git@ directory in current directory
isGitRootDir :: IO Bool
isGitRootDir = doesDirectoryExist ".git"
-- | Check for @.git@ directory in the current directory, as a conduit 'Producer'
isGitRootDir2 :: Producer IO Bool
isGitRootDir2 = lift (doesDirectoryExist ".git") >>= yield
-- | Get the details of the @git@ project in the current directory
-- projectDetails :: IO (Either String Details)
-- projectDetails = do
-- maybeResults <- simpleRun "git" ["remote", "show", "origin"] ""
-- case maybeResults of
-- Left err -> return $ Left err
-- Right results -> return . parseOnly detailsParser . T.pack $ results
-- singleCmdParse :: String -> Parser a -> Source IO (Either SomeException a)
-- singleCmdParse c p = yield c =$= shellC =$= processSourceC & processHandlerS ph
-- where
-- ph = defaultParserPH p
-- -- | A `ProcessHandler` built from `defaultCmd` and the provider `Text` parser
-- defaultParserPH :: Parser a -> ProcessHandler (Either ParseError a)
-- defaultParserPH p = defaultCmd & procC .~ (encodeUtf8C =$= conduitParserEither p)
-- projectDetailsS :: Source IO (Either SomeException Details)
-- projectDetailsS = singleCmdParse "git remote show origin" detailsParser
{-
-- | Given a filepath (relative to the root directory of the branch) and a
-- `Commit`, return its contents or a `Left` error message on failure
getFileInCommit :: FilePath -> Commit -> IO (Either String T.Text)
getFileInCommit path (Commit {hash=(SHA1 hashText)}) = do
let hashStr = T.unpack hashText
result <- simpleRun "git" ["show", hashStr ++ ":" ++ path] ""
case result of
Left e -> return $ Left e
Right s -> return . Right . T.pack $ s
-- | Like `getFileInCommit`, but also parses
parseFileInCommit :: FilePath -> Parser a -> Commit -> IO (Either String a)
parseFileInCommit path parser commit = do
text <- getFileInCommit path commit
case text of
Left e -> return $ Left e
Right t -> return $ parser `parseOnly` t
-- | Only insert the result into the `Map` if parsing succeeded (`Right`)
insertResult :: Commit -> Either String a -> Map.Map Commit a -> Map.Map Commit a
insertResult c (Right r) m = Map.insert c r m
insertResult _ _ m = m
-- | Given a `Parser`, a `FilePath`, and a `Commit` foldable (list), returns
-- a map from each `Commit` to the result of the given file being parsed by
-- the `Parser`.
parseForCommits :: Foldable t => Parser a -> FilePath -> t Commit -> IO (Map.Map Commit a)
parseForCommits parser path = foldM (\m c -> (parseCommit c >>= \r -> return $ insertResult c r m)) m0
where
parseCommit = parseFileInCommit path parser
m0 = Map.empty
-- | This function gets all the `Commit`s for the current branch
-- An example of what this should parse:
-- @
-- ~/../prim-spoon$ git log --date=iso --pretty="%H|%cd"
-- c6b082bcf72fed2db488dfd4506f9923f742e743|2016-05-03 19:25:24 -0400
-- 3f9778e19ee89000d8cd0a1c164afb3589650c3b|2016-05-02 18:42:11 -0400
-- abae2c404d4d2c26e7fc9005cfb59a5699977c39|2016-05-02 14:11:43 -0400
-- 07c0fe75d1a621abb10dfb4fb27549b390e35b3b|2016-05-02 13:48:43 -0400
-- c8bba65dc33f4c4cab53498002beddf673b674cc|2016-05-02 13:42:59 -0400
-- 3f207b6536fc30b2eadc09edd260e413c3e4ad79|2016-05-02 13:35:35 -0400
-- 04f4163c52231f1043123e6b9b66c76b95e3f05f|2016-05-02 13:32:37 -0400
-- f80e7e8c9204dd245a3545b5216e42bc0be7af2e|2016-05-02 13:28:25 -0400
-- e37b44908f08e912373c16a899516dc07fec363d|2016-05-02 13:04:31 -0400
-- 4604bec7ae5042aac493522cdf33166c26aa285f|2016-05-02 12:55:28 -0400
-- @
--
getCommits :: IO (Either String [Commit])
getCommits = do
maybeResults <- simpleRun "git" ["log", "--date=short", "--pretty=\"%H|%cd\""] ""
case maybeResults of
Left err -> return $ Left err
Right results -> return . parseOnly (many $ parseLogLine <* endOfInput) . T.pack $ results
-- | Check that the current version of the file exists and can be parsed
checkCurrentFile :: FilePath -> Parser a -> IO Bool
checkCurrentFile path parser = do
exists <- doesFileExist path
if exists
then do
contents <- readFile path
case parser `parseOnly` contents of
Right _ -> return True
_ -> return False
else do
return False
-}
|
michaeljklein/git-details
|
src/Git/Details.hs
|
bsd-3-clause
| 6,265 | 0 | 6 | 1,202 | 306 | 212 | 94 | 28 | 1 |
module Main (main) where
import Control.Monad
import Data.List
import Data.List.Split
import System.Directory
import System.FilePath
main :: IO ()
main = forM_ modules $ \m -> do
let segs = splitOn "." m :: [String]
let dir = ("src" </>) . intercalate "/" . reverse . drop 1 . reverse $ segs
let modName = head . reverse $ segs
let fp = dir </> modName <.> "hs"
createDirectoryIfMissing True dir
writeFile fp (contents m)
return ()
contents :: String -> String
contents m = "\
\ {-# LANGUAGE PackageImports #-}\n \
\ module " ++ m ++ " (module M) where\n \
\ import \"base\" " ++ m ++ " as M\n"
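-- Illustrative only (our example, not generated by the original file): for
-- @m = "Data.Bool"@ the string above is expected to render roughly as
--
-- >  {-# LANGUAGE PackageImports #-}
-- >  module Data.Bool (module M) where
-- >  import "base" Data.Bool as M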
modules :: [String]
modules =
[ "Control.Applicative"
, "Control.Arrow"
, "Control.Category"
, "Control.Concurrent"
, "Control.Concurrent.Chan"
, "Control.Concurrent.MVar"
, "Control.Concurrent.QSem"
, "Control.Concurrent.QSemN"
, "Control.Exception"
, "Control.Exception.Base"
, "Control.Monad"
, "Control.Monad.Fix"
, "Control.Monad.Instances"
, "Control.Monad.ST"
, "Control.Monad.ST.Lazy"
, "Control.Monad.ST.Lazy.Safe"
, "Control.Monad.ST.Lazy.Unsafe"
, "Control.Monad.ST.Safe"
, "Control.Monad.ST.Strict"
, "Control.Monad.ST.Unsafe"
, "Control.Monad.Zip"
, "Data.Bits"
, "Data.Bool"
, "Data.Char"
, "Data.Coerce"
, "Data.Complex"
, "Data.Data"
, "Data.Dynamic"
, "Data.Either"
, "Data.Eq"
, "Data.Fixed"
, "Data.Foldable"
, "Data.Function"
, "Data.Functor"
, "Data.IORef"
, "Data.Int"
, "Data.Ix"
, "Data.List"
, "Data.Maybe"
, "Data.Monoid"
, "Data.OldTypeable"
, "Data.OldTypeable.Internal"
, "Data.Ord"
, "Data.Proxy"
, "Data.Ratio"
, "Data.STRef"
, "Data.STRef.Lazy"
, "Data.STRef.Strict"
, "Data.String"
, "Data.Traversable"
, "Data.Tuple"
, "Data.Type.Bool"
, "Data.Type.Coercion"
, "Data.Type.Equality"
, "Data.Typeable"
, "Data.Typeable.Internal"
, "Data.Unique"
, "Data.Version"
, "Data.Word"
, "Debug.Trace"
, "Foreign"
, "Foreign.C"
, "Foreign.C.Error"
, "Foreign.C.String"
, "Foreign.C.Types"
, "Foreign.Concurrent"
, "Foreign.ForeignPtr"
, "Foreign.ForeignPtr.Safe"
, "Foreign.ForeignPtr.Unsafe"
, "Foreign.Marshal"
, "Foreign.Marshal.Alloc"
, "Foreign.Marshal.Array"
, "Foreign.Marshal.Error"
, "Foreign.Marshal.Pool"
, "Foreign.Marshal.Safe"
, "Foreign.Marshal.Unsafe"
, "Foreign.Marshal.Utils"
, "Foreign.Ptr"
, "Foreign.Safe"
, "Foreign.StablePtr"
, "Foreign.Storable"
, "GHC.Arr"
, "GHC.Base"
, "GHC.Char"
, "GHC.Conc"
, "GHC.Conc.IO"
, "GHC.Conc.Signal"
, "GHC.Conc.Sync"
, "GHC.ConsoleHandler"
, "GHC.Constants"
, "GHC.Desugar"
, "GHC.Enum"
, "GHC.Environment"
, "GHC.Err"
, "GHC.Exception"
, "GHC.Exts"
, "GHC.Fingerprint"
, "GHC.Fingerprint.Type"
, "GHC.Float"
, "GHC.Float.ConversionUtils"
, "GHC.Float.RealFracMethods"
, "GHC.Foreign"
, "GHC.ForeignPtr"
, "GHC.GHCi"
, "GHC.Generics"
, "GHC.IO"
, "GHC.IO.Buffer"
, "GHC.IO.BufferedIO"
, "GHC.IO.Device"
, "GHC.IO.Encoding"
, "GHC.IO.Encoding.CodePage"
, "GHC.IO.Encoding.Failure"
, "GHC.IO.Encoding.Iconv"
, "GHC.IO.Encoding.Latin1"
, "GHC.IO.Encoding.Types"
, "GHC.IO.Encoding.UTF16"
, "GHC.IO.Encoding.UTF32"
, "GHC.IO.Encoding.UTF8"
, "GHC.IO.Exception"
, "GHC.IO.FD"
, "GHC.IO.Handle"
, "GHC.IO.Handle.FD"
, "GHC.IO.Handle.Internals"
, "GHC.IO.Handle.Text"
, "GHC.IO.Handle.Types"
, "GHC.IO.IOMode"
, "GHC.IOArray"
, "GHC.IORef"
, "GHC.IP"
, "GHC.Int"
, "GHC.List"
, "GHC.MVar"
, "GHC.Num"
, "GHC.PArr"
, "GHC.Pack"
, "GHC.Profiling"
, "GHC.Ptr"
, "GHC.Read"
, "GHC.Real"
, "GHC.ST"
, "GHC.STRef"
, "GHC.Show"
, "GHC.Stable"
, "GHC.Stack"
, "GHC.Stats"
, "GHC.Storable"
, "GHC.TopHandler"
, "GHC.TypeLits"
, "GHC.Unicode"
, "GHC.Weak"
, "GHC.Word"
, "Numeric"
, "System.CPUTime"
, "System.Console.GetOpt"
, "System.Environment"
, "System.Exit"
, "System.IO"
, "System.IO.Error"
, "System.IO.Unsafe"
, "System.Info"
, "System.Mem"
, "System.Mem.StableName"
, "System.Mem.Weak"
, "System.Posix.Internals"
, "System.Posix.Types"
, "System.Timeout"
, "Text.ParserCombinators.ReadP"
, "Text.ParserCombinators.ReadPrec"
, "Text.Printf"
, "Text.Read"
, "Text.Read.Lex"
, "Text.Show"
, "Text.Show.Functions"
, "Unsafe.Coerce"
, "GHC.IO.Encoding.CodePage.API"
, "GHC.IO.Encoding.CodePage.Table"
, "GHC.Conc.Windows"
, "GHC.Windows"
, "GHC.Event"
]
|
silkapp/base-noprelude
|
Generate.hs
|
bsd-3-clause
| 4,502 | 0 | 17 | 860 | 768 | 476 | 292 | 200 | 1 |
{-# LANGUAGE DeriveFunctor #-}
{-# LANGUAGE OverloadedStrings #-}
module WithTiming.Program
( Command(..)
, InterpretedCommand(..)
, Key
, interpretPure
, Program
, readPrevious
, predict
, beginTimer
, execute
, secondsSince
, inform
, writeResult
) where
import Control.Monad.Free (Free (..), liftF)
import qualified Data.Text as T
import System.Exit (ExitCode (..))
type Key = String
-- | A type representing steps in a typical program execution. Used in the
-- Program monad via functions with names that correspond to the type
-- constructors here.
--
-- The 'time' parameter refers to the type representing timing information.
--
-- The 'next' parameter is recursive and should be of no immediate concern
-- except for the Functor implementation.
data Command time next =
ReadPrevious Key (Maybe Integer -> next)
-- ^ Read a (possibly missing) Integer value representing the duration the last time 'key' ran.
| Predict (Maybe Integer) next
-- ^ Output a prediction for the current run, which may not have a precedent.
| BeginTimer (time -> next)
-- ^ Return an object referring to the start time of the command.
| Execute T.Text (ExitCode -> next)
-- ^ Perform the shell action, returning a result in the required type.
| SecondsSince time (Integer -> next)
-- ^ Determine the number of seconds that have passed since the 'time' object was created.
| Inform String next
-- ^ Display the given string to the user.
| WriteResult Key Integer next
-- ^ Record the results of the current run.
deriving (Functor)
-- | A (Free) monad for constructing a sequence of Commands.
-- The type is parameterized here by 'time', but instances will also be
-- parameterized over the Pure return type from Free.
type Program time = Free (Command time)
-- | Read a (possibly missing) Integer value representing the duration the last time 'key' ran.
readPrevious :: Key -> Program time (Maybe Integer)
readPrevious key = liftF (ReadPrevious key id)
-- | Output a prediction for the current run, which may not have a precedent.
predict :: Maybe Integer -> Program time ()
predict mdur = liftF (Predict mdur ())
-- | Return an object referring to the start time of the command.
beginTimer :: Program time time
beginTimer = liftF (BeginTimer id)
-- | Perform the shell action, returning a result in the required type.
execute :: T.Text -> Program time ExitCode
execute shell = liftF (Execute shell id)
-- | Determine the number of seconds that have passed since the 'time' object was created.
secondsSince :: time -> Program time Integer
secondsSince time = liftF (SecondsSince time id)
-- | Display the given string to the user.
inform :: String -> Program time ()
inform msg = liftF (Inform msg ())
-- | Record the results of the current run.
writeResult :: Key -> Integer -> Program time ()
writeResult key duration = liftF (WriteResult key duration ())
-- | A minimal target for interpreting a Program, useful for tests and
-- debugging. Represents concrete actions that are the result of
-- (hypothetically) running the Program.
data InterpretedCommand =
ReadingPrevious Key
| Predicting (Maybe Integer)
| BeginningTimer
| Executing T.Text
| CountingSeconds Integer
| Informing String
| WritingResult Key Integer
| Returning ExitCode
deriving (Show, Eq)
-- | An example interpreter that reduces the commands to ['InterpretedCommand'].
-- Intended to be used in testing and debugging. Uses mocked values instead of
-- side-effects.
interpretPure :: (Maybe Integer) -- ^ The result of looking up a key
-> Integer -- ^ The number of seconds returned by 'secondsSince'
-> ExitCode -- ^ The result of the shell command
-> Program () ExitCode -- ^ A program that uses () for its time type and returns an 'ExitCode'
-> [InterpretedCommand] -- ^ A list of concrete actions resulting from the program.
interpretPure keyLookup seconds exitCode prog = case prog of
Free (ReadPrevious key g) -> ReadingPrevious key : recur (g keyLookup)
Free (Predict mdur next) -> Predicting mdur : recur next
Free (BeginTimer g) -> BeginningTimer : recur (g ())
Free (Execute shell g) -> Executing shell : recur (g exitCode)
Free (SecondsSince time g) -> CountingSeconds seconds : recur (g seconds)
Free (Inform msg next) -> Informing msg : recur next
Free (WriteResult key dur next) -> WritingResult key dur : recur next
Pure r -> [Returning r]
where
recur = interpretPure keyLookup seconds exitCode
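-- A small, hypothetical example (the name 'exampleProg' is ours) of how a
-- 'Program' composes and how 'interpretPure' reduces it:
--
-- > exampleProg :: Program () ExitCode
-- > exampleProg = do
-- >   prev <- readPrevious "build"
-- >   predict prev
-- >   t    <- beginTimer
-- >   code <- execute "make"
-- >   secs <- secondsSince t
-- >   writeResult "build" secs
-- >   return code
-- >
-- > interpretPure (Just 42) 40 ExitSuccess exampleProg ==
-- >   [ ReadingPrevious "build", Predicting (Just 42), BeginningTimer
-- >   , Executing "make", CountingSeconds 40, WritingResult "build" 40
-- >   , Returning ExitSuccess ]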
|
holguinj/with-timing
|
src/WithTiming/Program.hs
|
bsd-3-clause
| 4,610 | 0 | 12 | 968 | 867 | 468 | 399 | 68 | 8 |
{-# OPTIONS_GHC -Wall #-}
{-# OPTIONS_GHC -fno-warn-name-shadowing #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE NoMonomorphismRestriction #-}
{-# LANGUAGE TemplateHaskell #-}
-----------------------------------------------------------------------------
-- |
-- Module : Diagrams.TwoD.Grid
-- Copyright : (c) 2014 Dominic Steinitz
-- License : BSD-style (see LICENSE)
-- Maintainer : [email protected]
--
-- <<diagrams/src_Diagrams_TwoD_Grid_example1.svg#diagram=example1&height=300&width=200>>
--
-- The example above is created by the code below which generates a
-- grid, puts points on the interior and the boundary, draws dashed
-- lines around the points to which we wish to draw attention and
-- annotates the points of interest with some text.
--
-- > {-# LANGUAGE FlexibleContexts #-}
-- > {-# LANGUAGE MultiParamTypeClasses #-}
-- >
-- > import Diagrams.TwoD.Grid
-- > import Diagrams.TwoD.Text
-- >
-- > example :: (Renderable (Text Double) b, Renderable (Path V2 Double) b) =>
-- > Int -> Int -> QDiagram b V2 Double Any
-- > example n m =
-- >
-- > (gridWithHalves n m) #
-- >
-- > -- Put points on the boundary
-- > bndPts [ (1 :: Int, m + 1 ) | m <- [0,2..2 * m] ] #
-- > bndPts [ (n + 1, 1 :: Int ) | n <- [0,2..2 * n] ] #
-- > bndPts [ (2 * n + 1, m + 1 ) | m <- [0,2..2 * m] ] #
-- > bndPts [ (n + 1, 2 * m + 1 ) | n <- [0,2..2 * n] ] #
-- >
-- > intPts [ (n + 1, m + 1) | n <- [2,4..2 * n - 1] :: [Int]
-- > , m <- [2,4..2 * m - 1] :: [Int] ] #
-- >
-- > selectedLines (2 * n - 3) (3 :: Int) #
-- >
-- > ann (2 * n - 1) (1 :: Int) red #
-- > ann (2 * n + 1) (3 :: Int) red #
-- > ann (2 * n - 1) (3 :: Int) blue #
-- > ann (2 * n - 3) (3 :: Int) blue #
-- > ann (2 * n - 1) (5 :: Int) blue
-- >
-- > where
-- >
-- > ann n m c = annotate ("u_" ++ show (n `div` 2) ++ show (m `div` 2)) txtPt c n m
-- >
-- > selectedLines n m = gridLines $ selectedPairs n m
-- >
-- > selectedPairs n m = let pts = selectedList n m
-- > in zip pts (tail pts)
-- >
-- > selectedList n m = [ (n - 1, m - 1)
-- > , (n - 1, m + 1)
-- > , (n + 1, m + 1)
-- > , (n + 1, m + 3)
-- > , (n + 3, m + 3)
-- > , (n + 3, m + 1)
-- > , (n + 5, m + 1)
-- > , (n + 5, m - 1)
-- > , (n + 3, m - 1)
-- > , (n + 3, m - 3)
-- > , (n + 1, m - 3)
-- > , (n + 1, m - 1)
-- > , (n - 1, m - 1)
-- > ]
-- >
-- > txtPt t = circle cSize # opacity 0.0 # lw none
-- > ===
-- > text t # fontSize (local 0.06)
-- >
-- > intPts = placeDiagramOnGrid (circle (cSize / 2) # fc blue # opacity 0.5 # lw none)
-- > bndPts = placeDiagramOnGrid (circle (cSize / 2) # fc red # opacity 0.5 # lw none)
-- >
-- > cSize :: Double
-- > cSize = 0.03
-- >
-- > example1 = example 5 5
--
-----------------------------------------------------------------------------
module Diagrams.TwoD.Grid (
gridWithHalves
, gridWithHalves'
, annotate
, gridLine
, gridLines
, placeDiagramOnGrid
) where
import Diagrams.Prelude
import Data.List
import Data.List.Split
import Data.Typeable
data GridOpts n
= GridOpts
{ _gridLineWidth :: Measure n
, _gridYColour :: Colour Double
, _gridXColour :: Colour Double
, _gridLL :: V2 n
, _gridLR :: V2 n
, _gridUL :: V2 n
}
instance (Floating n, Ord n) => Default (GridOpts n) where
def = GridOpts
{ _gridLineWidth = thin
, _gridXColour = red
, _gridYColour = blue
, _gridLL = r2 (1.0, 1.0)
, _gridLR = r2 (2.0, 1.0)
, _gridUL = r2 (1.0, 2.0)
}
data HighlightLineOpts n
= HighlightLineOpts
{ _highLightLineColour :: Colour Double
, _highLightLineWidth :: Measure n
, _highLightLineDashingOnOff :: [Measure n]
, _highLightLineDashingOffset :: Measure n
}
instance (Floating n, Ord n) => Default (HighlightLineOpts n) where
def = HighlightLineOpts
{ _highLightLineColour = black
, _highLightLineWidth = medium
, _highLightLineDashingOnOff = [normalized 0.03, normalized 0.03]
, _highLightLineDashingOffset = output 0
}
makeLenses ''GridOpts
makeLenses ''HighlightLineOpts
-- | Name a point by grid co-ordinates.
tick :: (Floating n, Ord n)
=> (Int, Int) -> QDiagram b V2 n Any
tick (n, m) = pointDiagram origin # named (n, m)
-- | @gridWithHalves'@ with default opts.
gridWithHalves :: (Renderable (Path V2 n) b, TypeableFloat n)
=> Int -> Int -> QDiagram b V2 n Any
gridWithHalves = gridWithHalves' def
-- | Create an n by m grid. Diagrams can be placed on either the grid
-- points themselves or on points half way between grid points. The
-- latter includes points a half grid length outside of the grid
-- itself.
gridWithHalves' :: (Renderable (Path V2 n) b, TypeableFloat n)
=> GridOpts n -> Int -> Int -> QDiagram b V2 n Any
gridWithHalves' opts n m =
(mconcat lineXs # translate (r2 (llx, lly))) <>
(mconcat lineYs # translate (r2 (llx, lly))) <>
(intersections # translate (r2 (llx - delta2X, luy + delta2Y)))
where
llx :& lly = coords (opts^.gridLL)
lrx :& _ = coords (opts^.gridLR)
_ :& luy = coords (opts^.gridUL)
deltaX = (lrx - llx) / fromIntegral n
deltaY = (luy - lly) / fromIntegral m
delta2X = (lrx - llx) / fromIntegral (2 * n)
delta2Y = (luy - lly) / fromIntegral (2 * m)
ns = [0..n]
ms = [0..m]
n2s = [0..2 * n + 2]
m2s = [0..2 * m + 2]
xs = map ((* deltaX) . fromIntegral) ns
ys = map ((* deltaY) . fromIntegral) ms
lineXs = Prelude.map lineX ys
lineYs = Prelude.map lineY xs
lineX y = fromOffsets [(opts^.gridLR) ^-^ (opts^.gridLL)] #
translate (r2 (0.0, y)) #
lc (opts^.gridXColour) #
lw (opts^.gridLineWidth)
lineY x = fromOffsets [(opts^.gridUL) ^-^ (opts^.gridLL)] #
translate (r2 (x, 0.0)) #
lc (opts^.gridYColour) #
lw (opts^.gridLineWidth)
intersections = hcat $
intersperse (strutX delta2X) $
map vcat $
map (intersperse (strutY delta2Y)) $
chunksOf (2 * m + 1 + 2) [ tick (n, m) | n <- n2s, m <- m2s ]
-- | Place a diagram on a grid (which is itself a diagram) at all the
-- co-ordinates specified.
placeDiagramOnGrid :: (IsName nm, Floating n, Ord n) =>
QDiagram b V2 n Any -> [nm] -> QDiagram b V2 n Any -> QDiagram b V2 n Any
placeDiagramOnGrid d = flip $ foldr (\n -> withName n (atop . place d . location))
annotate :: (Floating n, Ord n, Typeable n) =>
String ->
(String -> QDiagram b V2 n Any) ->
Colour Double ->
Int ->
Int ->
QDiagram b V2 n Any ->
QDiagram b V2 n Any
annotate s txtPt h n m =
withName (n, m) (atop . place (addText s h) . location)
where
addText s h = txtPt s # fc h
-- | Draw a line between two named points on the grid.
gridLine :: (IsName a, IsName b,
Renderable (Path V2 n) c, TypeableFloat n) =>
a -> b -> QDiagram c V2 n Any -> QDiagram c V2 n Any
gridLine = gridLine' def
-- | Draw a line between two named points on the grid.
gridLine' :: (IsName a, IsName b,
Renderable (Path V2 n) c, TypeableFloat n) =>
HighlightLineOpts n -> a -> b -> QDiagram c V2 n Any -> QDiagram c V2 n Any
gridLine' opts u v =
withName u $ \x ->
withName v $ \y ->
atop ((location x ~~ location y) #
lc (opts^.highLightLineColour) #
lw (opts^.highLightLineWidth) #
dashing (opts^.highLightLineDashingOnOff) (opts^.highLightLineDashingOffset))
-- | Draw lines between a list of pairs of named points on the grid.
gridLines :: (Renderable (Path V2 n) c, TypeableFloat n,
IsName a, IsName b) =>
[(a, b)] -> QDiagram c V2 n Any -> QDiagram c V2 n Any
gridLines xs = foldr (.) id [ gridLine x y | (x, y) <- xs ]
|
kuribas/diagrams-contrib
|
src/Diagrams/TwoD/Grid.hs
|
bsd-3-clause
| 8,557 | 0 | 16 | 2,836 | 1,903 | 1,045 | 858 | 119 | 1 |
{-# LANGUAGE TypeFamilies #-}
-- | See "Control.Ether.Abbr".
module Control.Ether.Implicit.Abbr (R, W, S, E) where
import Control.Ether.Abbr (ReifyAbbr)
import Control.Monad.Ether.Implicit
-- | Denotes 'MonadReader'.
data R r
type instance ReifyAbbr (R r) m = MonadReader r m
-- | Denotes 'MonadWriter'.
data W w
type instance ReifyAbbr (W w) m = MonadWriter w m
-- | Denotes 'MonadState'.
data S s
type instance ReifyAbbr (S s) m = MonadState s m
-- | Denotes 'MonadExcept'.
data E e
type instance ReifyAbbr (E e) m = MonadExcept e m
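-- Illustrative reductions (ours), read directly off the instances above:
--
-- > ReifyAbbr (R Int)      m ~ MonadReader Int m
-- > ReifyAbbr (W [String]) m ~ MonadWriter [String] m
-- > ReifyAbbr (S Bool)     m ~ MonadState Bool m
-- > ReifyAbbr (E IOError)  m ~ MonadExcept IOError m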
|
bitemyapp/ether
|
src/Control/Ether/Implicit/Abbr.hs
|
bsd-3-clause
| 543 | 0 | 6 | 96 | 154 | 93 | 61 | -1 | -1 |
module Lib
(serveRandomVimGif) where
import VimGif (serveRandomVimGif)
|
clarkenciel/slackers
|
src/Lib.hs
|
bsd-3-clause
| 75 | 0 | 5 | 11 | 17 | 11 | 6 | 3 | 0 |
{-# LANGUAGE GeneralizedNewtypeDeriving, MultiParamTypeClasses,
FlexibleInstances #-}
module Main where
import Control.Monad.State
import Control.Monad.Trans
import HarkerIRC.Client
import HarkerIRC.Types
newtype EchoMonadT m a = EchoMonad (StateT Bool (HarkerClientT m) a)
deriving (Monad, MonadIO, Functor)
instance (Monad m) => MonadState Bool (EchoMonadT m) where
get = EchoMonad $ get
put = EchoMonad . put
state = EchoMonad . state
instance MonadTrans EchoMonadT where
lift = EchoMonad . lift . lift
instance HarkerClientMonad (EchoMonadT IO) where
clientLift = EchoMonad . lift
type EchoMonad a = EchoMonadT IO a
runEchoMonad :: EchoMonad () -> IO ()
runEchoMonad (EchoMonad s) = runHarkerClient (evalStateT s False)
main = runPlugin "echo" "0.1.0.0" echo runEchoMonad
echo :: EchoMonad ()
echo = do
msg <- getMsg
echo <- get
liftIO $ putStrLn ("got msg: " ++ msg)
if msg == "!echo" then ifauth (toggle echo)
else if msg == "!help" then sendReply "!echo: enable/disable echoing"
else if echo && head msg /= '!' then sendReply msg
else return ()
toggle :: Bool -> EchoMonad ()
toggle e = modify not >> sendReply ("echo " ++ (if e then "disabled"
else "enabled"))
|
mikeyhc/harkerbot
|
src/Plugins/EchoPlugin.hs
|
bsd-3-clause
| 1,336 | 0 | 12 | 348 | 396 | 208 | 188 | 33 | 4 |
{-# LANGUAGE TemplateHaskell #-}
--------------------------------------------------------------------------------
module Application where
--------------------------------------------------------------------------------
import Control.Lens
import Snap.Snaplet
import Snap.Snaplet.Heist
--------------------------------------------------------------------------------
data App = App
{ _heist :: Snaplet (Heist App)
}
makeLenses ''App
instance HasHeist App where
heistLens = subSnaplet heist
type AppHandler = Handler App App
|
stesta/GameOfLife
|
src/Application.hs
|
bsd-3-clause
| 545 | 0 | 11 | 64 | 83 | 47 | 36 | 11 | 0 |
{-# LANGUAGE OverloadedStrings #-}
-- |
-- Module      : HaskDeep.ComputationMode
-- Copyright : Mauro Taraborelli 2012
-- License : BSD3
--
-- Maintainer : [email protected]
-- Stability : experimental
-- Portability : unknown
--
-- Computes hashes traversing recursively through a directory structure.
-- Uses a list of known hashes to audit a set of files.
--
-- Internal module.
module HaskDeep.ComputationMode
(
-- * Computation modes
ComputationMode (..)
,md5hash
,sha1hash
,sha256hash
,skein512hash
)
where
import Crypto.Hash.CryptoAPI (MD5, SHA1, SHA256, Skein512_512)
import Data.ByteString (ByteString)
import qualified Data.ByteString.Base16 as BB16
import qualified Data.Serialize as S
import qualified Data.Text as T
-- | Algorithm to compute hash.
data ComputationMode a = ComputationMode
{ symbol :: T.Text -- ^ Computation mode symbol
, runComputation :: a -> ByteString -- ^ Computation function
}
-- | MD5 computation.
md5hash :: ComputationMode MD5
md5hash = ComputationMode "md5" (BB16.encode . S.encode)
-- | SHA1 computation.
sha1hash :: ComputationMode SHA1
sha1hash = ComputationMode "sha1" (BB16.encode . S.encode)
-- | SHA256 computation.
sha256hash :: ComputationMode SHA256
sha256hash = ComputationMode "sha256" (BB16.encode . S.encode)
-- | Skein512 computation.
skein512hash :: ComputationMode Skein512_512
skein512hash = ComputationMode "skein512" (BB16.encode . S.encode)
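-- Illustrative (ours): the symbol carried by each computation mode.
--
-- > symbol md5hash      == "md5"
-- > symbol sha256hash   == "sha256"
-- > symbol skein512hash == "skein512"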
|
maurotrb/haskdeep
|
src/HaskDeep/ComputationMode.hs
|
bsd-3-clause
| 1,518 | 0 | 9 | 304 | 248 | 155 | 93 | 24 | 1 |
-----------------------------------------------------------------------------
-- |
-- Module : Data.Curve.Const
-- Copyright : (c) 2011 Michael Sloan
-- License : BSD-style (see the LICENSE file)
--
-- Maintainer : Michael Sloan <[email protected]>
-- Stability : experimental
-- Portability : GHC only
--
-- 1D constant curve representation.  The instances are trivial: the curve
-- invariably returns the same value.  Useful as a curve to convert
-- from.
{-# LANGUAGE TypeFamilies, FlexibleContexts, UndecidableInstances,
MultiParamTypeClasses #-}
module Data.Curve.Const where
import Data.Curve.Classes
import qualified Data.Curve.Interval as I
import Data.Curve.Util
import Numeric.Rounding
import Data.VectorSpace
data Const a = Const {constValue :: a} deriving (Show)
-- lift1
cmap :: (a -> a) -> Const a -> Const a
cmap f (Const x) = Const (f x)
-- lift2
czip :: (a -> a -> a) -> Const a -> Const a -> Const a
czip f (Const a) (Const b) = Const $ f a b
const2 f = flip $ const $ f
instance (Num a) => Curve (Const a) where
type Domain (Const a) = a
type Codomain (Const a) = a
at = const2 constValue
instance (IsFinite a) => IsFinite (Const a) where
isFinite = isFinite . constValue
instance (IsZero a) => IsZero (Const a) where
isZero = isZero . constValue
instance (Num a, Precision a) =>
FunctionBounds (Const a) where
type DomainBounds (Const a) = I.Interval a
type CodomainBounds (Const a) = I.Interval a
domain _ = I.unit
bounds _ = I.singleton . constValue
instance (Precision a) => Portionable (Const a) where
portion = const id
instance (Num a) => Offsetable (Const a) where
offset x = cmap (+x)
instance (Num a) => AdditiveGroup (Const a) where
zeroV = Const 0
(^+^) = czip (+)
negateV = cmap (negate)
instance (Num a) => VectorSpace (Const a) where
type Scalar (Const a) = a
(*^) s = cmap (s*)
instance (Num a) => Composable (Const a) (Const a) where
type CompositionType (Const a) (Const a) = Const a
compose = const2 $ id
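-- Illustrative usage (ours, not part of the original module): a 'Const' curve
-- ignores where it is sampled and always returns the stored value.
--
-- > at (Const 3) 0.5   == 3
-- > at (Const 3) 100.0 == 3
-- > constValue (offset 1 (Const 3)) == 4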
|
mgsloan/curve
|
Data/Curve/Const.hs
|
bsd-3-clause
| 2,060 | 0 | 8 | 445 | 676 | 366 | 310 | -1 | -1 |
module QueryArrow.Gen where
configFilePath :: String
configFilePath = "../QueryArrow-gen/tdb-plugin-gen-config.yaml"
|
xu-hao/QueryArrow
|
QueryArrow-gen/src/QueryArrow/Gen.hs
|
bsd-3-clause
| 118 | 0 | 4 | 10 | 16 | 10 | 6 | 3 | 1 |
import qualified Data.ByteString as B
import qualified Data.ByteString.Lazy as L
import Data.List
import Data.Text.Lazy (unpack)
import Data.Text.Lazy.Encoding (decodeUtf8With)
import Data.Text.Encoding.Error (lenientDecode)
import Distribution.Package
import Distribution.PackageDescription
import Distribution.PackageDescription.Parse
import Distribution.Text
import Codec.Compression.GZip (decompress, compress)
import Codec.Archive.Tar as Tar hiding (unpack)
import Codec.Archive.Tar.Entry as Tar
import System.IO
import System.Environment
import System.Directory
----------------------------------------------------------------
-- Main
----------------------------------------------------------------
upload :: FilePath -> FilePath -> IO ()
upload tarball dir = do
-- Read tarball
bs <- case tarball of
"-" -> L.getContents
_ -> L.readFile tarball
-- Retrieve cabal file
let cabal = getCabal $ Tar.read $ decompress $ forceBS bs
descr = getDescription cabal
  -- Build paths
let pkg = package $ packageDescription descr
PackageName name = pkgName pkg
version = pkgVersion pkg
      -- Paths
dirPath = name ++ "/" ++ display version ++ "/"
indexPath = dirPath ++ name ++ ".cabal"
tarDir = dir ++ "/package/"
tarPath = tarDir ++ name ++ "-" ++ display version ++ ".tar.gz"
-- Append file to index
appendToTar (dir ++ "/" ++ "00-index.tar.gz") indexPath cabal
-- Write file to directory
createDirectoryIfMissing True tarDir
L.writeFile tarPath bs
main :: IO ()
main = do
args <- getArgs
case args of
["upload", sdist, dir] -> upload sdist dir
    _ -> error "Unknown command"
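-- Example invocation (illustrative; the script name and paths are ours):
--
--   runghc lackage.hs upload dist/mypackage-0.1.tar.gz /path/to/local-repo
--
-- Passing "-" as the tarball argument reads the sdist from stdin instead.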
----------------------------------------------------------------
-- Helpers
----------------------------------------------------------------
-- | Write compressed tarball
writeTar :: FilePath -> [Entry] -> IO ()
writeTar path = L.writeFile path . compress . Tar.write
-- | Read compressed tarball
readTar :: FilePath -> IO [Entry]
readTar path = do
bs <- L.readFile path
return $ unpackTar $ forceBS bs
-- withFile path ReadMode hreadTar
-- | Read compressed tarball from file handle
hreadTar :: Handle -> IO [Entry]
hreadTar h = (unpackTar . forceBS) `fmap` L.hGetContents h
unpackTar :: L.ByteString -> [Entry]
unpackTar
= foldEntries (:) [] (error . show)
. Tar.read
. decompress
-- | Append file to the gzip compressed tarball
appendToTar :: FilePath -> FilePath -> L.ByteString -> IO ()
appendToTar tar nm content = do
-- Create entry for tarball
let entry = case toTarPath False nm of
Left e -> error e
Right path -> fileEntry path content
f <- doesFileExist tar
case f of
-- No file.
False -> writeTar tar [entry]
-- Append to existing file
True -> do
es <- readTar tar
-- Check for duplication
case find ((==nm) . entryPath) es of
Just _ -> error "Duplicate file in the tarball"
Nothing -> return ()
-- Write everything
writeTar tar (entry : es)
-- | Retrieve cabal file from tarball as bytestring
getCabal :: Show a => Entries a -> L.ByteString
getCabal Done = error "No cabal file"
getCabal (Next entry rest)
  | ".cabal" `isSuffixOf` entryPath entry =
case entryContent entry of
NormalFile bs _ -> bs
_ -> error "Cabal file must be normal file"
| otherwise = getCabal rest
getCabal (Fail s) = error $ "Invalid tarball: " ++ show s
-- | Extract description from package
getDescription :: L.ByteString -> GenericPackageDescription
getDescription cabal =
case parsePackageDescription $ unpack $ decodeUtf8With lenientDecode cabal of
ParseOk _ x -> x
err -> error $ "Invalid cabal file: " ++ show err
forceBS :: L.ByteString -> L.ByteString
forceBS lazy = L.fromChunks [bs `seq` bs]
where
bs = B.concat $ L.toChunks lazy
|
Shimuuar/lackage
|
lackage.hs
|
bsd-3-clause
| 4,029 | 0 | 16 | 959 | 1,074 | 555 | 519 | 83 | 4 |
{-# OPTIONS -fno-warn-type-defaults #-}
--------------------------------------------------------------------------------
-- |
-- Module : Read
-- Copyright : (c) 2009 Sean Leather
-- License : BSD3
--
-- Maintainer : [email protected]
--
-- Tests for Text.XFormat.Read.
--------------------------------------------------------------------------------
module Read (test) where
--------------------------------------------------------------------------------
import Text.XFormat.Read
--------------------------------------------------------------------------------
test :: Bool
test = and
[ testBasic
, testClasses
, testRecursive
, testTuples
]
testBasic :: Bool
testBasic = and
[ readsf Int "5abc" == [(5, "abc")]
, readsf 'a' "5" == []
, readsf 'a' "a" == [('a', "")]
, readf "Hello" "Hello" == Just "Hello"
, readf "Hello" "Goodbye" == Nothing
, readf (Int % "." % Int ) "123.456" == Just (123 % "." % 456)
, readf (Integer % "." % Integer ) "123.456" == Just (123 % "." % 456)
]
testClasses :: Bool
testClasses = and
[ readf Read "\"34\"" == Just "34"
, readf Num "3.4" == Just 3.4
]
testRecursive :: Bool
testRecursive = and
[ readf (Char % Integer % Space % Float % Space % Double % String) "~99 9.9 0.3" ==
Just ('~' :%: (99 :%: (" " :%: (9.9 :%: (" " :%: (0.3 :%: ""))))))
, readf (Wrap '(' Int ')') "(1)" == Just ('(' % 1 % ')')
, readf (Maybe Int) "1" == Just (Just 1)
, readf (Maybe Int) "a" == Just Nothing
, readf (Choice ['(',')']) ")" == Just ')'
, readf (Either Char Int) "1" == Just (Left '1')
, readf (EitherL Int Char) "1" == Just (Left 1)
, readf (Either Int Char) "a1" == Just (Right 'a')
]
testTuples :: Bool
testTuples = and
[ readf (Char, Char) "ab" == Just ('a', 'b')
, readf (Char, Char, Char) "abc" == Just ('a', 'b', 'c')
, readf (Char, Char, Char, Char) "abcd" == Just ('a', 'b', 'c', 'd')
, readf (Char, Char, Char, Char, Char) "abcde" == Just ('a', 'b', 'c', 'd', 'e')
, readf (Char, Char, Char, Char, Char, Char) "abcdef" == Just ('a', 'b', 'c', 'd', 'e', 'f')
, readf (Char, Char, Char, Char, Char, Char, Char) "abcdefg" == Just ('a', 'b', 'c', 'd', 'e', 'f', 'g')
, readf (Char, Char, Char, Char, Char, Char, Char, Char) "abcdefgh" == Just ('a', 'b', 'c', 'd', 'e', 'f', 'g', 'h')
, readf (Char, Char, Char, Char, Char, Char, Char, Char, Char) "abcdefghi" == Just ('a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i')
, readf (Char, Char, Char, Char, Char, Char, Char, Char, Char, Char) "abcdefghij" == Just ('a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j')
, readf (Char, Char, Char, Char, Char, Char, Char, Char, Char, Char, Char) "abcdefghijk" == Just ('a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j', 'k')
, readf (Char, Char, Char, Char, Char, Char, Char, Char, Char, Char, Char, Char) "abcdefghijkl" == Just ('a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j', 'k', 'l')
, readf (Char, Char, Char, Char, Char, Char, Char, Char, Char, Char, Char, Char, Char) "abcdefghijklm" == Just ('a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j', 'k', 'l', 'm')
, readf (Char, Char, Char, Char, Char, Char, Char, Char, Char, Char, Char, Char, Char, Char) "abcdefghijklmn" == Just ('a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j', 'k', 'l', 'm', 'n')
, readf (Char, Char, Char, Char, Char, Char, Char, Char, Char, Char, Char, Char, Char, Char, Char) "abcdefghijklmno" == Just ('a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j', 'k', 'l', 'm', 'n', 'o')
{-
-- More than 15 is not yet supported.
, readf (Char, Char, Char, Char, Char, Char, Char, Char, Char, Char, Char, Char, Char, Char, Char, Char) "abcdefghijklmnop" == Just ('a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j', 'k', 'l', 'm', 'n', 'o', 'p')
-}
]
|
spl/xformat
|
test/Read.hs
|
bsd-3-clause
| 3,744 | 0 | 20 | 739 | 1,499 | 879 | 620 | 49 | 1 |
module Data.Digest.WebMoney.Algebra where
import Data.Bits (Bits, bitSize, shiftL, shiftR, testBit, (.&.),
(.|.))
import Data.Int (Int32, Int64)
import Data.Word (Word32, Word64)
import Control.Lens (ix, (&), (.~))
import Data.Vector (Vector, singleton, (!))
import qualified Data.Vector as V (init, last, length, null, replicate, take,
(++))
longMask :: Int64
longMask = 0xFFFFFFFF
intSize :: Int
intSize = 32
logicalShiftR :: Integral a => a -> Int -> a
logicalShiftR x i = fromIntegral ((fromIntegral x :: Word64) `shiftR` i)
logicalShiftRight :: Int32 -> Int -> Int32
logicalShiftRight x i = fromIntegral ((fromIntegral x :: Word32) `shiftR` i)
getBitsNumber :: Bits a => a -> Int
getBitsNumber x = intSize - numberOfLeadingZeros x
numberOfLeadingZeros :: Bits a => a -> Int
numberOfLeadingZeros x = length $ takeWhile (not . testBit x) [size - 1, size - 2 .. 0]
where size = bitSize x
getBitsCount :: (Bits a, Num a) => Vector a -> Int
getBitsCount xs = ( vLength - 1 ) * intSize + getBitsNumber ( xs ! (vLength - 1) )
  where vLength = significance xs
compareLists :: Vector Int32 -> Vector Int32 -> Ordering
compareLists lhs rhs
  | lhsLength > rhsLength = GT
  | lhsLength < rhsLength = LT
  | otherwise = comp (V.take lhsLength lhs) (V.take lhsLength rhs)
  where
    lhsLength = significance lhs
    rhsLength = significance rhs
comp :: Vector Int32 -> Vector Int32 -> Ordering
comp ls rs
| V.null ls || V.null rs = EQ
| lb > rb = GT
| lb < rb = LT
| otherwise = comp (V.init ls) (V.init rs)
where
lb = fromIntegral (V.last ls) .&. longMask
rb = fromIntegral (V.last rs) .&. longMask
significance :: (Eq a, Bits a, Num a) => Vector a -> Int
significance xs
| V.null xs = 0
| V.last xs == 0 = significance ( V.init xs )
| otherwise = V.length xs
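-- Illustrative examples (ours), using only names already imported above:
--
-- > significance (singleton (0 :: Int32)) == 0   -- trailing zero words don't count
-- > significance (singleton (5 :: Int32)) == 1
-- > getBitsNumber (1   :: Int32) == 1
-- > getBitsNumber (255 :: Int32) == 8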
shift :: Vector Int32 -> Int -> Vector Int32
shift lhs rhs
| outWordsCount <= 0 = singleton 0
| shiftBits == 0 && rhs > 0 = V.take shiftWords r0 V.++ V.take (outWordsCount - shiftWords) lhs
| rhs > 0 =
let (res, carry) = foldl shRight (r0, 0) [0 .. inWordsCount - 1]
in if inWordsCount - 1 + shiftWords < outWordsCount
then res & ix ( inWordsCount + shiftWords ) .~ (res ! (inWordsCount + shiftWords) .|. carry)
else res
| shiftBits == 0 = error "3"
| otherwise =
let carry = if outWordsCount + shiftWords < inWordsCount
then (lhs ! (outWordsCount + shiftWords)) `shiftL` ( intSize - shiftBits)
else 0
in fst $ foldl shLeft (r0, carry) [inWordsCount - 1, inWordsCount - 2 .. 0]
where
shiftBits, shiftWords, inBitsCount, inWordsCount, outBitsCount, outWordsCount :: Int
shiftBits = abs rhs `mod` intSize
shiftWords = abs rhs `div` intSize
inBitsCount = getBitsCount lhs
inWordsCount = inBitsCount `div` intSize + (if inBitsCount `mod` intSize > 0 then 1 else 0)
outBitsCount = inBitsCount + rhs
outWordsCount = outBitsCount `div` intSize + (if outBitsCount `mod` intSize > 0 then 1 else 0)
r0 = V.replicate (max inWordsCount outWordsCount) 0
shRight, shLeft :: (Vector Int32, Int32) -> Int -> (Vector Int32, Int32)
shRight (res, carry) pos = ( res & ix ( pos + shiftWords ) .~ val, nextCarry )
where
temp = lhs ! pos
val = ( temp `shiftL` shiftBits ) .|. carry
nextCarry = temp `logicalShiftRight` ( intSize - shiftBits )
shLeft (res, carry) pos = ( res & ix ( pos + shiftWords ) .~ val, nextCarry )
where
temp = lhs ! (pos + shiftWords)
val = (temp `logicalShiftRight` shiftBits) .|. carry
nextCarry = temp `shiftL` ( intSize - shiftBits )
shiftRight :: Vector Int32 -> Vector Int32
shiftRight value = fst $ foldl right (value, 0) [len-1, len-2..0]
where
len = significance value
right :: (Vector Int32, Int64) -> Int -> (Vector Int32, Int64)
right (v, carry) pos = ( v & ix pos .~ fromIntegral val, nextCarry )
where
temp, nextCarry, val :: Int64
temp = fromIntegral ( v ! pos) .&. longMask
nextCarry = (temp .&. 1) `shiftL` ( intSize - 1) .&. longMask
val = ((temp `logicalShiftR` 1) .|. carry ) .&. longMask
sub :: Vector Int32 -> Vector Int32 -> Vector Int32
sub lhs rhs
| lhsLength < rhsLength = error "Difference should not be negative."
| otherwise = modulo $ rest subscribed
where
lhsLength = significance lhs
rhsLength = significance rhs
modulo :: (Vector Int32, Int32) -> Vector Int32
modulo (_, 1) = error "Difference should not be negative."
modulo (l, _) = l
subscribed :: (Vector Int32, Int32)
subscribed = foldl substr (lhs, 0) [0..rhsLength - 1]
where
substr :: (Vector Int32, Int32) -> Int -> (Vector Int32, Int32)
substr (l, borrow) pos = ( l & ix pos .~ fromIntegral temp, nBorrow )
where
temp = (fromIntegral ( l ! pos ) .&. longMask )
- (fromIntegral ( rhs ! pos ) .&. longMask )
- fromIntegral borrow
nBorrow = if temp .&. ( 1 `shiftL` intSize ) /= 0 then 1 else 0
rest :: (Vector Int32, Int32) -> (Vector Int32, Int32)
rest (ls, b) = foldl substr (ls, b) [rhsLength..lhsLength - 1]
where
substr :: (Vector Int32, Int32) -> Int -> (Vector Int32, Int32)
substr (l, borrow) pos = ( l & ix pos .~ fromIntegral temp, nBorrow )
where
temp = (fromIntegral ( l ! pos ) .&. longMask ) - fromIntegral borrow
nBorrow = if temp .&. ( 1 `shiftL` intSize ) /= 0 then 1 else 0
remainder :: Vector Int32 -> Vector Int32 -> Vector Int32
remainder lhs rhs = divide lhs rhs
where
    rhsBitsCount = getBitsCount rhs -- TODO: check for an attempted division by zero
divide :: Vector Int32 -> Vector Int32 -> Vector Int32
divide l r
| LT == compareLists l r = l
| lhsBitsCount == 0 = l
| otherwise =
let temp' = if compareLists l temp == LT then shiftRight temp else temp
in divide ( subs l temp' ) r
where
lhsBitsCount = getBitsCount l
temp = shift r (lhsBitsCount - rhsBitsCount)
subs :: Vector Int32 -> Vector Int32 -> Vector Int32
subs l t =
if compareLists l t /= LT
then subs (sub l t) t
else l
resize :: Vector Int32 -> Int -> Vector Int32
resize v l
| l < 0 = error "Invalid value for length"
| vLength < l = v V.++ V.replicate (l - vLength) 0
| otherwise = V.take l v
where vLength = V.length v
normalize :: Vector Int32 -> Vector Int32
normalize x = resize x ( significance x )
|
superduper/wmsigner
|
src/Data/Digest/WebMoney/Algebra.hs
|
mit
| 6,933 | 0 | 17 | 2,115 | 2,577 | 1,363 | 1,214 | 134 | 5 |
-- so the amount of memory deallocated should be easy enough to do
-- INPUT
import Control.Monad
import Data.List
import qualified Data.ByteString.Char8 as B
b2ints :: B.ByteString -> [Int]
b2ints = unfoldr (B.readInt . B.dropWhile (== ' '))
main :: IO ()
main = readLn >>= flip replicateM_ secondIO
secondIO :: IO ()
secondIO = do
B.getLine
str <- B.getLine
let list = b2ints str
print . fst $ ans list
-- Computation
ans list = foldr (\x y -> if x > (snd y) then ((fst y) + x - (snd y), x) else (fst y, x)) (0, 0) list
|
paramsingh/codechef-solutions
|
src/long/jun15/cbarg.hs
|
mit
| 534 | 0 | 13 | 153 | 217 | 114 | 103 | 13 | 2 |
module Lamdu.Data.Export.JSON.Migration.ToVersion8 (migrate) where
import qualified Control.Lens as Lens
import Control.Lens.Extended ((~~>))
import qualified Data.Aeson as Aeson
import Data.Aeson.Lens (_Object)
import qualified Data.UUID.Utils as UUIDUtils
import Lamdu.Data.Export.JSON.Migration.Common (migrateToVer)
import Lamdu.Prelude
migrateTerm :: Aeson.Value -> Either Text Aeson.Value
migrateTerm (Aeson.Object x) =
case (x ^. Lens.at "fromNomId", x ^. Lens.at "fromNomVal", x ^. Lens.at "id" <&> Aeson.fromJSON) of
(Just nomId, Just nomVal, Just (Aeson.Success i)) ->
migrateTerm nomVal <&>
\fixedVal ->
"id" ~~> Aeson.toJSON i
<> "applyArg" ~~> fixedVal
<> "applyFunc" ~~>
Aeson.Object
( "id" ~~> Aeson.toJSON (UUIDUtils.augment "to-version-8" i)
<> "fromNomId" ~~> nomId
)
(Nothing, Nothing, _) -> traverse migrateTerm x
_ -> Left "Malformed from-nom term"
<&> Aeson.Object
migrateTerm x = Right x
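-- Illustrative shape of the rewrite performed by 'migrateTerm' (ours):
--
-- > {"fromNomId": N, "fromNomVal": V, "id": I}
-- >   ==>
-- > {"id": I, "applyArg": <migrated V>,
-- >  "applyFunc": {"id": UUIDUtils.augment "to-version-8" I, "fromNomId": N}}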
migrateObj :: Aeson.Object -> Either Text Aeson.Object
migrateObj x =
x
& Lens.ix "val" migrateTerm
>>= (Lens.ix "repl" . _Object . Lens.ix "val") migrateTerm
migrate :: Aeson.Value -> Either Text Aeson.Value
migrate = migrateToVer 8 ((traverse . _Object) migrateObj)
|
lamdu/lamdu
|
src/Lamdu/Data/Export/JSON/Migration/ToVersion8.hs
|
gpl-3.0
| 1,344 | 0 | 20 | 318 | 419 | 225 | 194 | 31 | 3 |
-----------------------------------------------------------------------------
-- |
-- Module : Control.Monad.MC.GSL
-- Copyright : Copyright (c) 2010, Patrick Perry <[email protected]>
-- License : BSD3
-- Maintainer : Patrick Perry <[email protected]>
-- Stability : experimental
--
-- A monad and monad transformer for monte carlo computations built on top
-- of the functions in the GNU Scientific Library.
module Control.Monad.MC.GSL (
module Control.Monad.MC.GSLBase,
module Control.Monad.MC.Sample,
module Control.Monad.MC.Repeat,
) where
import Control.Monad.MC.GSLBase
import Control.Monad.MC.Sample
import Control.Monad.MC.Repeat
|
beni55/hs-monte-carlo
|
lib/Control/Monad/MC/GSL.hs
|
bsd-3-clause
| 668 | 0 | 5 | 97 | 66 | 51 | 15 | 7 | 0 |
{-# LANGUAGE BangPatterns #-}
{-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE InstanceSigs #-}
{-# LANGUAGE PatternGuards #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
-- |
-- Module : Data.Array.Accelerate.CUDA.Array.Table
-- Copyright : [2008..2014] Manuel M T Chakravarty, Gabriele Keller
-- [2009..2014] Trevor L. McDonell
-- License : BSD3
--
-- Maintainer : Trevor L. McDonell <[email protected]>
-- Stability : experimental
-- Portability : non-portable (GHC extensions)
--
module Data.Array.Accelerate.CUDA.Array.Table (
-- Tables for host/device memory associations
MemoryTable, new, lookup, malloc, free, insertUnmanaged, reclaim,
CRM, ContextId, contextId
) where
import Data.Functor
import Data.Proxy
import Data.IntMap.Strict ( IntMap )
import Control.Concurrent.MVar ( MVar, newMVar, withMVar, modifyMVar )
import Control.Exception ( catch, throwIO, bracket_ )
import Control.Monad.IO.Class ( MonadIO, liftIO )
import Control.Monad.Trans.Reader
import Foreign.Ptr ( ptrToIntPtr )
import Foreign.Storable ( Storable, sizeOf )
import Foreign.CUDA.Ptr ( DevicePtr )
import Prelude hiding ( lookup )
import Foreign.CUDA.Driver.Error
import qualified Foreign.CUDA.Driver as CUDA
import qualified Data.IntMap.Strict as IM
import Data.Array.Accelerate.Array.Data ( ArrayData, ptrsOfArrayData )
import Data.Array.Accelerate.Lifetime ( unsafeGetValue )
import Data.Array.Accelerate.Array.Memory ( RemoteMemory, PrimElt )
import Data.Array.Accelerate.CUDA.Context ( Context(..), push, pop )
import Data.Array.Accelerate.CUDA.Execute.Stream ( Stream )
import qualified Data.Array.Accelerate.CUDA.Debug as D
import qualified Data.Array.Accelerate.Array.Memory as M
import qualified Data.Array.Accelerate.Array.Memory.Table as MT
-- We leverage the memory table from the accelerate base package. However, we
-- actually need multiple tables. This is because every pointer has an
-- associated CUDA context. We could have paired every DevicePtr with its
-- context and used just a single table, but the MemoryTable API in the base
-- package assumes that remote pointers can be re-used, something that would
-- not be true for pointers allocated under different contexts.
type MemoryTable = MVar (IntMap (MT.MemoryTable DevicePtr))
-- Contexts
--
type ContextId = Int
-- Referencing arrays
-- ------------------
type CRM = ReaderT (Maybe Stream) IO
instance RemoteMemory CRM where
type RemotePointer CRM = DevicePtr
malloc n = ReaderT . const $ fmap Just (CUDA.mallocArray n) `catch` \(e :: CUDAException) ->
case e of
ExitCode OutOfMemory -> return Nothing
_ -> trace ("malloc failed with unknown error for: " ++ show n)
$ throwIO e
free = ReaderT . const . trace "free/explicit free" . CUDA.free
poke n dst ad = ReaderT $ \ms -> transfer "poke" (n * sizeOfPtr dst) $
CUDA.pokeArrayAsync n (CUDA.HostPtr $ ptrsOfArrayData ad) dst ms
peek n src ad = ReaderT $ \ms -> transfer "peek" (n * sizeOfPtr src) $
CUDA.peekArrayAsync n src (CUDA.HostPtr $ ptrsOfArrayData ad) ms
castPtr _ = CUDA.castDevPtr
totalMem = ReaderT . const $ snd <$> CUDA.getMemInfo
availableMem = ReaderT . const $ fst <$> CUDA.getMemInfo
chunkSize = return 1024
-- Create a MemoryTable.
new :: IO MemoryTable
new = trace "initialise CUDA memory table" $ newMVar IM.empty
-- Look for the device pointer corresponding to a given host-side array.
--
lookup :: PrimElt a b => Context -> MemoryTable -> ArrayData a -> IO (Maybe (DevicePtr b))
lookup !ctx !ref !arr = withMVar ref $ \ct ->
case IM.lookup (contextId ctx) ct of
Nothing -> trace "lookup/context not found" $ return Nothing
Just mt -> MT.lookup mt arr
-- Allocate a new device array to be associated with the given host-side array.
-- Has the same properties as `Data.Array.Accelerate.Array.Memory.Table.malloc`
malloc :: forall a b. PrimElt a b => Context -> MemoryTable -> ArrayData a -> Int -> IO (DevicePtr b)
malloc !ctx !ref !ad !n = do
mt <- modifyMVar ref $ \ct -> blocking $ do
case IM.lookup (contextId ctx) ct of
Nothing -> trace "malloc/context not found" $ insertContext ctx ct
Just mt -> return (ct, mt)
mp <- blocking $ MT.malloc mt ad n :: IO (Maybe (DevicePtr b))
case mp of
Nothing -> throwIO (ExitCode OutOfMemory)
Just p -> return p
-- Explicitly free an array in the MemoryTable. Has the same properties as
-- `Data.Array.Accelerate.Array.Memory.Table.free`
free :: PrimElt a b => Context -> MemoryTable -> ArrayData a -> IO ()
free !ctx !ref !arr = withMVar ref $ \ct ->
case IM.lookup (contextId ctx) ct of
Nothing -> message "free/context not found"
Just mt -> MT.free (Proxy :: Proxy CRM) mt arr
-- Record an association between a host-side array and a device memory area that was
-- not allocated by accelerate. The device memory will NOT be freed when the host
-- array is garbage collected.
--
insertUnmanaged :: PrimElt a b => Context -> MemoryTable -> ArrayData a -> DevicePtr b -> IO ()
insertUnmanaged !ctx !ref !arr !ptr = do
mt <- modifyMVar ref $ \ct -> blocking $ do
case IM.lookup (contextId ctx) ct of
Nothing -> trace "insertUnmanaged/context not found" $ insertContext ctx ct
Just mt -> return (ct, mt)
blocking $ MT.insertUnmanaged mt arr ptr
insertContext :: Context -> IntMap (MT.MemoryTable DevicePtr) -> CRM (IntMap (MT.MemoryTable DevicePtr), MT.MemoryTable DevicePtr)
insertContext ctx ct = do
mt <- MT.new (\p -> bracket_ (push ctx) pop (CUDA.free p))
return (IM.insert (contextId ctx) mt ct, mt)
-- Removing entries
-- ----------------
-- Initiate garbage collection and finalise any arrays that have been marked as
-- unreachable.
--
reclaim :: MemoryTable -> IO ()
reclaim ref = withMVar ref (blocking . mapM_ MT.reclaim . IM.elems)
-- Miscellaneous
-- -------------
{-# INLINE contextId #-}
contextId :: Context -> ContextId
contextId !ctx =
let CUDA.Context !p = unsafeGetValue (deviceContext ctx)
in fromIntegral (ptrToIntPtr p)
{-# INLINE sizeOfPtr #-}
sizeOfPtr :: forall a. Storable a => DevicePtr a -> Int
sizeOfPtr _ = sizeOf (undefined :: a)
{-# INLINE blocking #-}
blocking :: CRM a -> IO a
blocking = flip runReaderT Nothing
-- Debug
-- -----
{-# INLINE showBytes #-}
showBytes :: Int -> String
showBytes x = D.showFFloatSIBase (Just 0) 1024 (fromIntegral x :: Double) "B"
{-# INLINE trace #-}
trace :: MonadIO m => String -> m a -> m a
trace msg next = message msg >> next
{-# INLINE message #-}
message :: MonadIO m => String -> m ()
message msg = liftIO $ D.traceIO D.dump_gc ("gc: " ++ msg)
{-# INLINE transfer #-}
transfer :: String -> Int -> IO () -> IO ()
transfer name bytes action
= let showRate x t = D.showFFloatSIBase (Just 3) 1024 (fromIntegral x / t) "B/s"
msg gpuTime cpuTime = "gc: " ++ name ++ ": "
++ showBytes bytes ++ " @ " ++ showRate bytes gpuTime ++ ", "
++ D.elapsed gpuTime cpuTime
in
D.timed D.dump_gc msg Nothing action
|
flowbox-public/accelerate-cuda
|
Data/Array/Accelerate/CUDA/Array/Table.hs
|
bsd-3-clause
| 7,864 | 0 | 17 | 2,047 | 1,923 | 1,016 | 907 | 117 | 3 |
-- | A light-weight wrapper around @Network.Wai@ to provide easy conduit support.
module Network.Wai.Conduit
( -- * Request body
sourceRequestBody
-- * Response body
, responseSource
, responseRawSource
-- * Re-export
, module Network.Wai
) where
import Network.Wai
import Data.Conduit
import Control.Monad.IO.Class (MonadIO, liftIO)
import Data.ByteString (ByteString)
import qualified Data.ByteString as S
import Control.Monad (unless)
import Network.HTTP.Types
import Data.ByteString.Builder (Builder)
import qualified Data.Conduit.List as CL
-- | Stream the request body.
--
-- Since 3.0.0
sourceRequestBody :: MonadIO m => Request -> ConduitT () ByteString m ()
sourceRequestBody req =
loop
where
go = liftIO (getRequestBodyChunk req)
loop = do
bs <- go
unless (S.null bs) $ do
yield bs
loop
-- | Create an HTTP response out of a @Source@.
--
-- Since 3.0.0
responseSource :: Status -> ResponseHeaders -> ConduitT () (Flush Builder) IO () -> Response
responseSource s hs src = responseStream s hs $ \send flush ->
runConduit $ src .| CL.mapM_ (\mbuilder ->
case mbuilder of
Chunk b -> send b
Flush -> flush)
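-- Illustrative usage (ours; assumes OverloadedStrings and 'status200' from
-- "Network.HTTP.Types"):
--
-- > app :: Application
-- > app _req respond = respond $ responseSource status200
-- >     [("Content-Type", "text/plain")]
-- >     (yield (Chunk "Hello, ") >> yield Flush >> yield (Chunk "conduit!"))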
-- | Create a raw response using @Source@ and @Sink@ conduits.
--
-- This is an adapter to Wai's @responseRaw@ for conduits.
--
-- @Source@ and @Sink@ data are provided. The @Source@ is a byte conduit from
-- the client's socket. The @Sink@ is a byte conduit to the client's socket.
--
-- The @Response@ argument is a backup response. It is sent to the client if
-- the handler does not support @responseRaw@.
--
-- Since 3.0.0
responseRawSource :: (MonadIO m, MonadIO n)
=> (ConduitT () ByteString m () -> ConduitT ByteString Void n () -> IO ())
-> Response
-> Response
responseRawSource app =
responseRaw app'
where
app' recv send =
app src sink
where
src = do
bs <- liftIO recv
unless (S.null bs) $ do
yield bs
src
sink = CL.mapM_ $ liftIO . send
|
sordina/wai
|
wai-conduit/Network/Wai/Conduit.hs
|
bsd-2-clause
| 2,146 | 0 | 15 | 591 | 478 | 259 | 219 | 44 | 2 |
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TemplateHaskell #-}
module Stack.Init
( findCabalFiles
, initProject
, InitOpts (..)
, SnapPref (..)
, Method (..)
, makeConcreteResolver
, tryDeprecatedPath
, getImplicitGlobalProjectDir
) where
import Control.Exception (assert)
import Control.Exception.Enclosed (catchAny, handleIO)
import Control.Monad (liftM, when)
import Control.Monad.Catch (MonadMask, MonadThrow, throwM)
import Control.Monad.IO.Class
import Control.Monad.Logger
import Control.Monad.Reader (MonadReader, asks)
import Control.Monad.Trans.Control (MonadBaseControl)
import qualified Data.ByteString.Builder as B
import qualified Data.ByteString.Lazy as L
import qualified Data.HashMap.Strict as HM
import qualified Data.IntMap as IntMap
import qualified Data.Foldable as F
import Data.List (isSuffixOf,sort)
import Data.List.Extra (nubOrd)
import Data.Map (Map)
import qualified Data.Map as Map
import Data.Maybe (mapMaybe)
import Data.Monoid
import Data.Set (Set)
import qualified Data.Set as Set
import qualified Data.Text as T
import qualified Data.Yaml as Yaml
import qualified Distribution.PackageDescription as C
import Network.HTTP.Client.Conduit (HasHttpManager)
import Path
import Path.Find
import Path.IO
import Stack.BuildPlan
import Stack.Constants
import Stack.Package
import Stack.Solver
import Stack.Types
import System.Directory (getDirectoryContents)
findCabalFiles :: MonadIO m => Bool -> Path Abs Dir -> m [Path Abs File]
findCabalFiles recurse dir =
liftIO $ findFiles dir isCabal (\subdir -> recurse && not (isIgnored subdir))
where
isCabal path = ".cabal" `isSuffixOf` toFilePath path
isIgnored path = toFilePath (dirname path) `Set.member` ignoredDirs
-- | Special directories that we don't want to traverse for .cabal files
ignoredDirs :: Set FilePath
ignoredDirs = Set.fromList
[ ".git"
, "dist"
, ".stack-work"
]
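-- Illustrative (our example): running @findCabalFiles True dir@ over a tree
--
-- > dir/foo.cabal
-- > dir/sub/bar.cabal
-- > dir/.stack-work/dist/baz.cabal
--
-- is intended to return only the first two files, because ".stack-work" is a
-- member of 'ignoredDirs' and is therefore not traversed.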
-- | Generate stack.yaml
initProject :: (MonadIO m, MonadMask m, MonadReader env m, HasConfig env, HasHttpManager env, HasGHCVariant env, MonadLogger m, MonadBaseControl IO m)
=> Path Abs Dir
-> InitOpts
-> m ()
initProject currDir initOpts = do
let dest = currDir </> stackDotYaml
dest' = toFilePath dest
exists <- fileExists dest
when (not (forceOverwrite initOpts) && exists) $
error ("Refusing to overwrite existing stack.yaml, " <>
"please delete before running stack init " <>
"or if you are sure use \"--force\"")
cabalfps <- findCabalFiles (includeSubDirs initOpts) currDir
$logInfo $ "Writing default config file to: " <> T.pack dest'
$logInfo $ "Basing on cabal files:"
mapM_ (\path -> $logInfo $ "- " <> T.pack (toFilePath path)) cabalfps
$logInfo ""
when (null cabalfps) $ error "In order to init, you should have an existing .cabal file. Please try \"stack new\" instead"
(warnings,gpds) <- fmap unzip (mapM readPackageUnresolved cabalfps)
sequence_ (zipWith (mapM_ . printCabalFileWarning) cabalfps warnings)
(r, flags, extraDeps) <- getDefaultResolver cabalfps gpds initOpts
let p = Project
{ projectPackages = pkgs
, projectExtraDeps = extraDeps
, projectFlags = flags
, projectResolver = r
, projectExtraPackageDBs = []
}
pkgs = map toPkg cabalfps
toPkg fp = PackageEntry
{ peValidWanted = Nothing
, peExtraDepMaybe = Nothing
, peLocation = PLFilePath $
case stripDir currDir $ parent fp of
Nothing
| currDir == parent fp -> "."
| otherwise -> assert False $ toFilePath $ parent fp
Just rel -> toFilePath rel
, peSubdirs = []
}
$logInfo $ "Selected resolver: " <> resolverName r
liftIO $ L.writeFile dest' $ B.toLazyByteString $ renderStackYaml p
$logInfo $ "Wrote project config to: " <> T.pack dest'
-- | Render a stack.yaml file with comments, see:
-- https://github.com/commercialhaskell/stack/issues/226
renderStackYaml :: Project -> B.Builder
renderStackYaml p =
case Yaml.toJSON p of
Yaml.Object o -> renderObject o
_ -> assert False $ B.byteString $ Yaml.encode p
where
renderObject o =
B.byteString "# For more information, see: https://github.com/commercialhaskell/stack/blob/release/doc/yaml_configuration.md\n\n" <>
F.foldMap (goComment o) comments <>
goOthers (o `HM.difference` HM.fromList comments) <>
B.byteString
"# Control whether we use the GHC we find on the path\n\
\# system-ghc: true\n\n\
\# Require a specific version of stack, using version ranges\n\
\# require-stack-version: -any # Default\n\
\# require-stack-version: >= 0.1.4.0\n\n\
\# Override the architecture used by stack, especially useful on Windows\n\
\# arch: i386\n\
\# arch: x86_64\n\n\
\# Extra directories used by stack for building\n\
\# extra-include-dirs: [/path/to/dir]\n\
\# extra-lib-dirs: [/path/to/dir]\n"
comments =
[ ("resolver", "Specifies the GHC version and set of packages available (e.g., lts-3.5, nightly-2015-09-21, ghc-7.10.2)")
, ("packages", "Local packages, usually specified by relative directory name")
, ("extra-deps", "Packages to be pulled from upstream that are not in the resolver (e.g., acme-missiles-0.3)")
, ("flags", "Override default flag values for local packages and extra-deps")
, ("extra-package-dbs", "Extra package databases containing global packages")
]
goComment o (name, comment) =
case HM.lookup name o of
Nothing -> assert False mempty
Just v ->
B.byteString "# " <>
B.byteString comment <>
B.byteString "\n" <>
B.byteString (Yaml.encode $ Yaml.object [(name, v)]) <>
B.byteString "\n"
goOthers o
| HM.null o = mempty
| otherwise = assert False $ B.byteString $ Yaml.encode o
getSnapshots' :: (MonadIO m, MonadMask m, MonadReader env m, HasConfig env, HasHttpManager env, MonadLogger m, MonadBaseControl IO m)
=> m (Maybe Snapshots)
getSnapshots' =
liftM Just getSnapshots `catchAny` \e -> do
$logError $
"Unable to download snapshot list, and therefore could " <>
"not generate a stack.yaml file automatically"
$logError $
"This sometimes happens due to missing Certificate Authorities " <>
"on your system. For more information, see:"
$logError ""
$logError " https://github.com/commercialhaskell/stack/issues/234"
$logError ""
$logError "You can try again, or create your stack.yaml file by hand. See:"
$logError ""
$logError " https://github.com/commercialhaskell/stack/blob/release/doc/yaml_configuration.md"
$logError ""
$logError $ "Exception was: " <> T.pack (show e)
return Nothing
-- | Get the default resolver value
getDefaultResolver :: (MonadIO m, MonadMask m, MonadReader env m, HasConfig env, HasHttpManager env, HasGHCVariant env, MonadLogger m, MonadBaseControl IO m)
=> [Path Abs File] -- ^ cabal files
-> [C.GenericPackageDescription] -- ^ cabal descriptions
-> InitOpts
-> m (Resolver, Map PackageName (Map FlagName Bool), Map PackageName Version)
getDefaultResolver cabalfps gpds initOpts =
case ioMethod initOpts of
MethodSnapshot snapPref -> do
msnapshots <- getSnapshots'
names <-
case msnapshots of
Nothing -> return []
Just snapshots -> getRecommendedSnapshots snapshots snapPref
mpair <- findBuildPlan gpds names
case mpair of
Just (snap, flags) ->
return (ResolverSnapshot snap, flags, Map.empty)
Nothing -> throwM $ NoMatchingSnapshot names
MethodResolver aresolver -> do
resolver <- makeConcreteResolver aresolver
mpair <-
case resolver of
ResolverSnapshot name -> findBuildPlan gpds [name]
ResolverCompiler _ -> return Nothing
ResolverCustom _ _ -> return Nothing
case mpair of
Just (snap, flags) ->
return (ResolverSnapshot snap, flags, Map.empty)
Nothing -> return (resolver, Map.empty, Map.empty)
MethodSolver -> do
(compilerVersion, extraDeps) <- cabalSolver Ghc (map parent cabalfps) Map.empty Map.empty []
return
( ResolverCompiler compilerVersion
, Map.filter (not . Map.null) $ fmap snd extraDeps
, fmap fst extraDeps
)
getRecommendedSnapshots :: (MonadIO m, MonadMask m, MonadReader env m, HasConfig env, HasHttpManager env, HasGHCVariant env, MonadLogger m, MonadBaseControl IO m)
=> Snapshots
-> SnapPref
-> m [SnapName]
getRecommendedSnapshots snapshots pref = do
-- Get the most recent LTS and Nightly in the snapshots directory and
-- prefer them over anything else, since odds are high that something
-- already exists for them.
existing <-
liftM (reverse . sort . mapMaybe (parseSnapName . T.pack)) $
snapshotsDir >>=
liftIO . handleIO (const $ return [])
. getDirectoryContents . toFilePath
let isLTS LTS{} = True
isLTS Nightly{} = False
isNightly Nightly{} = True
isNightly LTS{} = False
names = nubOrd $ concat
[ take 2 $ filter isLTS existing
, take 2 $ filter isNightly existing
, map (uncurry LTS)
(take 2 $ reverse $ IntMap.toList $ snapshotsLts snapshots)
, [Nightly $ snapshotsNightly snapshots]
]
namesLTS = filter isLTS names
namesNightly = filter isNightly names
case pref of
PrefNone -> return names
PrefLTS -> return $ namesLTS ++ namesNightly
PrefNightly -> return $ namesNightly ++ namesLTS
data InitOpts = InitOpts
{ ioMethod :: !Method
-- ^ Preferred snapshots
, forceOverwrite :: Bool
-- ^ Overwrite existing files
, includeSubDirs :: Bool
-- ^ If True, include all .cabal files found in any sub directories
}
data SnapPref = PrefNone | PrefLTS | PrefNightly
-- | Method of initializing
data Method = MethodSnapshot SnapPref | MethodResolver AbstractResolver | MethodSolver
-- | Turn an 'AbstractResolver' into a 'Resolver'.
makeConcreteResolver :: (MonadIO m, MonadReader env m, HasConfig env, MonadThrow m, HasHttpManager env, MonadLogger m)
=> AbstractResolver
-> m Resolver
makeConcreteResolver (ARResolver r) = return r
makeConcreteResolver ar = do
snapshots <- getSnapshots
r <-
case ar of
ARResolver r -> assert False $ return r
ARGlobal -> do
config <- asks getConfig
implicitGlobalDir <- getImplicitGlobalProjectDir config
let fp = implicitGlobalDir </> stackDotYaml
(ProjectAndConfigMonoid project _, _warnings) <-
liftIO (Yaml.decodeFileEither $ toFilePath fp)
>>= either throwM return
return $ projectResolver project
ARLatestNightly -> return $ ResolverSnapshot $ Nightly $ snapshotsNightly snapshots
ARLatestLTSMajor x ->
case IntMap.lookup x $ snapshotsLts snapshots of
Nothing -> error $ "No LTS release found with major version " ++ show x
Just y -> return $ ResolverSnapshot $ LTS x y
ARLatestLTS
| IntMap.null $ snapshotsLts snapshots -> error $ "No LTS releases found"
| otherwise ->
let (x, y) = IntMap.findMax $ snapshotsLts snapshots
in return $ ResolverSnapshot $ LTS x y
$logInfo $ "Selected resolver: " <> resolverName r
return r
-- | Get the location of the implicit global project directory.
-- If the directory already exists at the deprecated location, its location is returned.
-- Otherwise, the new location is returned.
getImplicitGlobalProjectDir
:: (MonadIO m, MonadLogger m)
=> Config -> m (Path Abs Dir)
getImplicitGlobalProjectDir config =
--TEST no warning printed
liftM fst $ tryDeprecatedPath
Nothing
dirExists
(implicitGlobalProjectDir stackRoot)
(implicitGlobalProjectDirDeprecated stackRoot)
where
stackRoot = configStackRoot config
-- | If deprecated path exists, use it and print a warning.
-- Otherwise, return the new path.
tryDeprecatedPath
:: (MonadIO m, MonadLogger m)
=> Maybe T.Text -- ^ Description of file for warning (if Nothing, no deprecation warning is displayed)
-> (Path Abs a -> m Bool) -- ^ Test for existence
-> Path Abs a -- ^ New path
-> Path Abs a -- ^ Deprecated path
-> m (Path Abs a, Bool) -- ^ (Path to use, whether it already exists)
tryDeprecatedPath mWarningDesc exists new old = do
newExists <- exists new
if newExists
then return (new, True)
else do
oldExists <- exists old
if oldExists
then do
case mWarningDesc of
Nothing -> return ()
Just desc ->
$logWarn $ T.concat
[ "Warning: Location of ", desc, " at '"
, T.pack (toFilePath old)
, "' is deprecated; rename it to '"
, T.pack (toFilePath new)
, "' instead" ]
return (old, True)
else return (new, False)
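-- A usage sketch (added for illustration; not part of the original module).
-- 'getImplicitGlobalProjectDir' above passes 'Nothing', so no warning is ever
-- printed; a caller that does want the deprecation warning would look roughly
-- like this (the description text, existence test and path names are hypothetical):
--
-- > (configPath, alreadyThere) <- tryDeprecatedPath
-- >     (Just "stack configuration file")  -- description used in the warning
-- >     fileExists                         -- assumed test of type Path Abs a -> m Bool
-- >     newConfigPath
-- >     oldConfigPath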
|
meiersi-11ce/stack
|
src/Stack/Init.hs
|
bsd-3-clause
| 14,947 | 0 | 21 | 4,935 | 3,200 | 1,628 | 1,572 | 280 | 8 |
-- Example for building a fabric and generating VHDL/Dot output of a circuit.
-- You are highly encouraged to run both main and main2 and view the VHDL output.
import Language.KansasLava
-- for dut2
import Data.Sized.Unsigned
-- define a circuit
halfAdder a b = (sum,carry)
where sum = xor2 a b
carry = and2 a b
fullAdder a b cin = (sum,cout)
where (s1,c1) = halfAdder a b
(sum,c2) = halfAdder cin s1
cout = xor2 c1 c2
-- turn it into a fabric
-- inStdLogic is like the VHDL std_logic, which is a wire that can be either high or low
dut = do
a <- inStdLogic "a"
b <- inStdLogic "b"
cin <- inStdLogic "cin"
let (sum,cout) = fullAdder a b cin
outStdLogic "sum" sum
outStdLogic "cout" cout
main = do
-- reify the circuit into a kansas lava entity graph (kleg)
kleg <- reifyFabric dut
-- write out a vhdl file, given entity name, file name, and the kleg
writeVhdlCircuit "fullAdder" "fullAdder.vhdl" kleg
-- write out a dot file of the circuit, on linux, view with:
-- dot -Tpng fullAdder.dot > fullAdder.png
-- assuming you have dot tools installed
writeDotCircuit "fullAdder.dot" kleg
-- just print out the kleg, output shown below
print kleg
--------------------------------------------------------------------------------
-- inStdLogicVector is a group of wires representing some type 'a'
-- You must provide enough type ascriptions that the types of inputs and
-- outputs can be deduced by the type checker. Here, since 'register' has
-- the type signature:
--
-- register :: (Rep a, Clock clk) => a -> CSeq clk a -> CSeq clk a
--
-- Ascribing the type 'U4' to the first argument is sufficient to deduce
-- the types of 'inp' and 'out' (which must be "CSeq clk U4")
dut2 = do
inp <- inStdLogicVector "in"
let out = register (4 :: U4) inp
outStdLogicVector "out" out
main2 = do
kleg <- reifyFabric dut2
writeVhdlCircuit "u4reg" "u4reg.vhdl" kleg
{- ------------------------- Output --------------------------------------------
------------------------------------------------------------------------------
-- Inputs --
------------------------------------------------------------------------------
a$0 : B
b$0 : B
cin$0 : B
------------------------------------------------------------------------------
-- Outputs --
------------------------------------------------------------------------------
sum$0 <- (2).o0 : B
cout$0 <- (4).o0 : B
------------------------------------------------------------------------------
-- Entities --
------------------------------------------------------------------------------
(4) xor2
out o0:B
in i0 <- (5).o0 : B
in i1 <- (6).o0 : B
(6) and2
out o0:B
in i0 <- cin$0 : B
in i1 <- (3).o0 : B
(5) and2
out o0:B
in i0 <- a$0 : B
in i1 <- b$0 : B
(2) xor2
out o0:B
in i0 <- cin$0 : B
in i1 <- (3).o0 : B
(3) xor2
out o0:B
in i0 <- a$0 : B
in i1 <- b$0 : B
------------------------------------------------------------------------------
----------------------------------------------------------------------------- -}
|
andygill/kansas-lava
|
examples/VHDL.hs
|
bsd-3-clause
| 3,425 | 0 | 11 | 922 | 318 | 163 | 155 | 28 | 1 |
{-# LANGUAGE DeriveDataTypeable, FlexibleInstances #-}
module Graphics.QML.Test.SimpleTest where
import Graphics.QML.Objects
import Graphics.QML.Test.Framework
import Graphics.QML.Test.MayGen
import Graphics.QML.Test.TestObject
import Graphics.QML.Test.ScriptDSL (Expr, Prog)
import qualified Graphics.QML.Test.ScriptDSL as S
import Test.QuickCheck.Arbitrary
import Control.Applicative
import Data.Typeable
import Data.Int
import Data.Text (Text)
import qualified Data.Text as T
makeCall :: Int -> String -> [Expr] -> Prog
makeCall n name es = S.eval $ S.var n `S.dot` name `S.call` es
saveCall :: Int -> Int -> String -> [Expr] -> Prog
saveCall v n name es = S.saveVar v $ S.var n `S.dot` name `S.call` es
testCall :: Int -> String -> [Expr] -> Expr -> Prog
testCall n name es r = S.assert $ S.eq (S.var n `S.dot` name `S.call` es) r
setProp :: Int -> String -> Expr -> Prog
setProp n name ex = S.set (S.var n `S.dot` name) ex
saveProp :: Int -> Int -> String -> Prog
saveProp v n name = S.saveVar v $ S.var n `S.dot` name
testProp :: Int -> String -> Expr -> Prog
testProp n name r = S.assert $ S.eq (S.var n `S.dot` name) r
checkArg :: (Show a, Eq a) => a -> a -> IO (Either TestFault ())
checkArg v w = return $
if v == w then Right () else Left TBadActionData
retVoid :: IO ()
retVoid = return ()
data SimpleMethods
= SMTrivial
| SMTernary Int32 Int32 Int32 Int32
| SMGetInt Int32
| SMSetInt Int32
| SMGetDouble Double
| SMSetDouble Double
| SMGetText Text
| SMSetText Text
| SMGetObject Int
| SMSetObject Int
deriving (Eq, Show, Typeable)
instance TestAction SimpleMethods where
legalActionIn (SMSetObject n) env = testEnvIsaJ n testObjectType env
legalActionIn _ _ = True
nextActionsFor env = mayOneof [
pure SMTrivial,
SMTernary <$>
fromGen arbitrary <*> fromGen arbitrary <*>
fromGen arbitrary <*> fromGen arbitrary,
SMGetInt <$> fromGen arbitrary,
SMSetInt <$> fromGen arbitrary,
SMGetDouble <$> fromGen arbitrary,
SMSetDouble <$> fromGen arbitrary,
SMGetText . T.pack <$> fromGen arbitrary,
SMSetText . T.pack <$> fromGen arbitrary,
pure . SMGetObject $ testEnvNextJ env,
SMSetObject <$> mayElements (testEnvListJ testObjectType env)]
updateEnvRaw (SMGetObject n) = testEnvStep . testEnvSerial (\s ->
testEnvSetJ n testObjectType s)
updateEnvRaw _ = testEnvStep
actionRemote SMTrivial n = makeCall n "trivial" []
actionRemote (SMTernary v1 v2 v3 v4) n = testCall n "ternary" [
S.literal v1, S.literal v2, S.literal v3] $ S.literal v4
actionRemote (SMGetInt v) n = testCall n "getInt" [] $ S.literal v
actionRemote (SMSetInt v) n = makeCall n "setInt" [S.literal v]
actionRemote (SMGetDouble v) n = testCall n "getDouble" [] $ S.literal v
actionRemote (SMSetDouble v) n = makeCall n "setDouble" [S.literal v]
actionRemote (SMGetText v) n = testCall n "getText" [] $ S.literal v
actionRemote (SMSetText v) n = makeCall n "setText" [S.literal v]
actionRemote (SMGetObject v) n = saveCall v n "getObject" []
actionRemote (SMSetObject v) n = makeCall n "setObject" [S.var v]
mockObjDef = [
defMethod "trivial" $ \m -> checkAction m SMTrivial retVoid,
defMethod "ternary" $ \m v1 v2 v3 -> expectAction m $ \a -> case a of
SMTernary w1 w2 w3 w4 ->
(fmap . fmap) (const w4) $ checkArg (v1,v2,v3) (w1,w2,w3)
_ -> return $ Left TBadActionCtor,
defMethod "getInt" $ \m -> expectAction m $ \a -> case a of
SMGetInt v -> return $ Right v
_ -> return $ Left TBadActionCtor,
defMethod "setInt" $ \m v -> checkAction m (SMSetInt v) retVoid,
defMethod "getDouble" $ \m -> expectAction m $ \a -> case a of
SMGetDouble v -> return $ Right v
_ -> return $ Left TBadActionCtor,
defMethod "setDouble" $ \m v -> checkAction m (SMSetDouble v) retVoid,
defMethod "getText" $ \m -> expectAction m $ \a -> case a of
SMGetText v -> return $ Right v
_ -> return $ Left TBadActionCtor,
defMethod "setText" $ \m v -> checkAction m (SMSetText v) retVoid,
defMethod "getObject" $ \m -> expectAction m $ \a -> case a of
SMGetObject _ -> getTestObject m
_ -> return $ Left TBadActionCtor,
defMethod "setObject" $ \m v -> expectAction m $ \a -> case a of
SMSetObject w -> setTestObject m v w
_ -> return $ Left TBadActionCtor]
data SimpleProperties
= SPGetIntConst Int32
| SPGetIntRO Int32
| SPGetInt Int32
| SPSetInt Int32
| SPGetDouble Double
| SPSetDouble Double
| SPGetText Text
| SPSetText Text
| SPGetObject Int
| SPSetObject Int
deriving (Eq, Show, Typeable)
instance TestAction SimpleProperties where
legalActionIn (SPSetObject n) env = testEnvIsaJ n testObjectType env
legalActionIn _ _ = True
nextActionsFor env = mayOneof [
SPGetIntConst <$> fromGen arbitrary,
SPGetIntRO <$> fromGen arbitrary,
SPGetInt <$> fromGen arbitrary,
SPSetInt <$> fromGen arbitrary,
SPGetDouble <$> fromGen arbitrary,
SPSetDouble <$> fromGen arbitrary,
SPGetText . T.pack <$> fromGen arbitrary,
SPSetText . T.pack <$> fromGen arbitrary,
pure . SPGetObject $ testEnvNextJ env,
SPSetObject <$> mayElements (testEnvListJ testObjectType env)]
updateEnvRaw (SPGetObject n) = testEnvStep . testEnvSerial (\s ->
testEnvSetJ n testObjectType s)
updateEnvRaw _ = testEnvStep
actionRemote (SPGetIntConst v) n = testProp n "propIntConst" $ S.literal v
actionRemote (SPGetIntRO v) n = testProp n "propIntRO" $ S.literal v
actionRemote (SPGetInt v) n = testProp n "propIntR" $ S.literal v
actionRemote (SPSetInt v) n = setProp n "propIntW" $ S.literal v
actionRemote (SPGetDouble v) n = testProp n "propDoubleR" $ S.literal v
actionRemote (SPSetDouble v) n = setProp n "propDoubleW" $ S.literal v
actionRemote (SPGetText v) n = testProp n "propTextR" $ S.literal v
actionRemote (SPSetText v) n = setProp n "propTextW" $ S.literal v
actionRemote (SPGetObject v) n = saveProp v n "propObjectR"
actionRemote (SPSetObject v) n = setProp n "propObjectW" $ S.var v
mockObjDef = [
        -- There are separate properties for testing accessors and mutators
-- because QML produces spurious reads when writing.
defPropertyRO "propIntConst"
(\m -> expectAction m $ \a -> case a of
SPGetIntConst v -> return $ Right v
_ -> return $ Left TBadActionCtor),
defPropertyRO "propIntRO"
(\m -> expectAction m $ \a -> case a of
SPGetIntRO v -> return $ Right v
_ -> return $ Left TBadActionCtor),
defPropertyRW "propIntR"
(\m -> expectAction m $ \a -> case a of
SPGetInt v -> return $ Right v
_ -> return $ Left TBadActionCtor)
(\m _ -> badAction m),
defPropertyRW "propIntW"
(\_ -> makeDef) (\m v -> checkAction m (SPSetInt v) retVoid),
defPropertyRW "propDoubleR"
(\m -> expectAction m $ \a -> case a of
SPGetDouble v -> return $ Right v
_ -> return $ Left TBadActionCtor)
(\m _ -> badAction m),
defPropertyRW "propDoubleW"
(\_ -> makeDef) (\m v -> checkAction m (SPSetDouble v) retVoid),
defPropertyRW "propTextR"
(\m -> expectAction m $ \a -> case a of
SPGetText v -> return $ Right v
_ -> return $ Left TBadActionCtor)
(\m _ -> badAction m),
defPropertyRW "propTextW"
(\_ -> makeDef) (\m v -> checkAction m (SPSetText v) retVoid),
defPropertyRW "propObjectR"
(\m -> expectAction m $ \a -> case a of
SPGetObject _ -> getTestObject m
_ -> return $ Left TBadActionCtor)
(\m _ -> badAction m),
defPropertyRW "propObjectW"
(\_ -> makeDef)
(\m v -> expectAction m $ \a -> case a of
SPSetObject w -> setTestObject m (fromObjRef v) w
_ -> return $ Left TBadActionCtor)]
|
johntyree/HsQML
|
test/Graphics/QML/Test/SimpleTest.hs
|
bsd-3-clause
| 8,519 | 0 | 17 | 2,496 | 2,938 | 1,492 | 1,446 | 176 | 2 |
{-# LANGUAGE RebindableSyntax #-}
-- | Bindings to make the popular containers library compatible with subhask
module SubHask.Compatibility.Containers
where
import qualified Data.Foldable as F
import qualified Data.Map as M
import qualified Data.IntMap as IM
import qualified Data.Map.Strict as MS
import qualified Data.IntMap.Strict as IMS
import qualified Data.Set as Set
import qualified Data.Sequence as Seq
import qualified Prelude as P
import SubHask.Algebra
import SubHask.Algebra.Container
import SubHask.Algebra.Ord
import SubHask.Algebra.Parallel
import SubHask.Category
import SubHask.Category.Trans.Constrained
import SubHask.Category.Trans.Monotonic
import SubHask.Compatibility.Base
import SubHask.Internal.Prelude
import SubHask.Monad
import SubHask.TemplateHaskell.Deriving
-------------------------------------------------------------------------------
-- | This is a thin wrapper around Data.Sequence
newtype Seq a = Seq (Seq.Seq a)
deriving (Read,Show,NFData)
mkMutable [t| forall a. Seq a |]
type instance Scalar (Seq a) = Int
type instance Logic (Seq a) = Bool
type instance Elem (Seq a) = a
type instance SetElem (Seq a) b = Seq b
instance (Eq a, Arbitrary a) => Arbitrary (Seq a) where
arbitrary = P.fmap fromList arbitrary
instance Normed (Seq a) where
{-# INLINE size #-}
size (Seq s) = Seq.length s
instance Eq a => Eq_ (Seq a) where
{-# INLINE (==) #-}
(Seq a1)==(Seq a2) = F.toList a1==F.toList a2
instance POrd a => POrd_ (Seq a) where
{-# INLINE inf #-}
inf a1 a2 = fromList $ inf (toList a1) (toList a2)
instance POrd a => MinBound_ (Seq a) where
{-# INLINE minBound #-}
minBound = empty
instance Semigroup (Seq a) where
{-# INLINE (+) #-}
(Seq a1)+(Seq a2) = Seq $ a1 Seq.>< a2
instance Monoid (Seq a) where
{-# INLINE zero #-}
zero = Seq $ Seq.empty
instance Eq a => Container (Seq a) where
{-# INLINE elem #-}
elem e (Seq a) = elem e $ F.toList a
{-# INLINE notElem #-}
notElem = not elem
instance Constructible (Seq a) where
{-# INLINE cons #-}
{-# INLINE snoc #-}
{-# INLINE singleton #-}
{-# INLINE fromList1 #-}
cons e (Seq a) = Seq $ e Seq.<| a
snoc (Seq a) e = Seq $ a Seq.|> e
singleton e = Seq $ Seq.singleton e
fromList1 x xs = Seq $ Seq.fromList (x:xs)
instance ValidEq a => Foldable (Seq a) where
{-# INLINE toList #-}
toList (Seq a) = F.toList a
{-# INLINE uncons #-}
uncons (Seq a) = if Seq.null a
then Nothing
else Just (Seq.index a 0, Seq $ Seq.drop 1 a)
{-# INLINE unsnoc #-}
unsnoc (Seq e) = if Seq.null e
then Nothing
else Just (Seq $ Seq.take (Seq.length e-1) e, Seq.index e 0)
-- foldMap f (Seq a) = F.foldMap f a
{-# INLINE foldr #-}
{-# INLINE foldr' #-}
{-# INLINE foldr1 #-}
foldr f e (Seq a) = F.foldr f e a
foldr' f e (Seq a) = F.foldr' f e a
foldr1 f (Seq a) = F.foldr1 f a
-- foldr1' f (Seq a) = F.foldr1' f a
{-# INLINE foldl #-}
{-# INLINE foldl' #-}
{-# INLINE foldl1 #-}
foldl f e (Seq a) = F.foldl f e a
foldl' f e (Seq a) = F.foldl' f e a
foldl1 f (Seq a) = F.foldl1 f a
-- foldl1' f (Seq a) = F.foldl1' f a
instance (ValidEq a) => Partitionable (Seq a) where
{-# INLINABLE partition #-}
partition n (Seq xs) = go xs
where
go :: Seq.Seq a -> [Seq a]
go xs = if Seq.null xs
then []
else Seq a:go b
where
(a,b) = Seq.splitAt len xs
size = Seq.length xs
len = size `div` n
+ if size `rem` n == 0 then 0 else 1
{-# INLINABLE partitionInterleaved #-}
partitionInterleaved n xs = foldl' go (P.replicate n empty) xs
where
go (r:rs) x = rs+[r`snoc`x]
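-- Added illustration (not in the original source): with the chunk length
-- computed above, @partition 3@ of a 10-element 'Seq' uses
-- len = 10 `div` 3 + 1 = 4, producing pieces of sizes 4, 4 and 2, whereas
-- @partitionInterleaved 3@ deals the elements out round-robin, producing
-- pieces of sizes 4, 3 and 3.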
-------------------------------------------------------------------------------
-- | This is a thin wrapper around Data.Map
newtype Map i e = Map (M.Map (WithPreludeOrd i) (WithPreludeOrd e))
deriving (Show,NFData)
mkMutable [t| forall i e. Map i e |]
type instance Scalar (Map i e) = Int
type instance Logic (Map i e) = Bool
type instance Index (Map i e) = i
type instance SetIndex (Map i e) i' = Map i' e
type instance Elem (Map i e) = e
type instance SetElem (Map i e) e' = Map i e'
-- misc classes
instance (Eq e, Ord i, Semigroup e, Arbitrary i, Arbitrary e) => Arbitrary (Map i e) where
arbitrary = P.fmap fromIxList arbitrary
-- comparisons
instance (Eq i, Eq e) => Eq_ (Map i e) where
{-# INLINE (==) #-}
(Map m1)==(Map m2) = m1 P.== m2
instance (Ord i, Eq e) => POrd_ (Map i e) where
{-# INLINE inf #-}
inf (Map m1) (Map m2) = Map $ M.differenceWith go (M.intersection m1 m2) m2
where
go v1 v2 = if v1==v2 then Just v1 else Nothing
instance (Ord i, POrd e) => MinBound_ (Map i e) where
{-# INLINE minBound #-}
minBound = zero
-- algebra
instance Ord i => Semigroup (Map i e) where
{-# INLINE (+) #-}
(Map m1)+(Map m2) = Map $ M.union m1 m2
instance Ord i => Monoid (Map i e) where
{-# INLINE zero #-}
zero = Map $ M.empty
instance Normed (Map i e) where
{-# INLINE size #-}
size (Map m) = M.size m
-- indexed containers
instance (Ord i, Eq e) => IxContainer (Map i e) where
{-# INLINE lookup #-}
{-# INLINE hasIndex #-}
lookup i (Map m) = P.fmap unWithPreludeOrd $ M.lookup (WithPreludeOrd i) m
hasIndex (Map m) i = M.member (WithPreludeOrd i) m
{-# INLINE toIxList #-}
{-# INLINE indices #-}
{-# INLINE values #-}
{-# INLINE imap #-}
toIxList (Map m) = map (\(WithPreludeOrd i,WithPreludeOrd e)->(i,e)) $ M.assocs m
indices (Map m) = map unWithPreludeOrd $ M.keys m
values (Map m) = map unWithPreludeOrd $ M.elems m
imap f (Map m) = Map $ M.mapWithKey (\(WithPreludeOrd i) (WithPreludeOrd e) -> WithPreludeOrd $ f i e) m
instance (Ord i, Eq e) => IxConstructible (Map i e) where
{-# INLINE singletonAt #-}
singletonAt i e = Map $ M.singleton (WithPreludeOrd i) (WithPreludeOrd e)
{-# INLINE consAt #-}
consAt i e (Map m) = Map $ M.insert (WithPreludeOrd i) (WithPreludeOrd e) m
----------------------------------------
-- | This is a thin wrapper around Data.Map.Strict
newtype Map' i e = Map' (MS.Map (WithPreludeOrd i) (WithPreludeOrd e))
deriving (Show,NFData)
mkMutable [t| forall i e. Map' i e |]
type instance Scalar (Map' i e) = Int
type instance Logic (Map' i e) = Bool
type instance Index (Map' i e) = i
type instance SetIndex (Map' i e) i' = Map' i' e
type instance Elem (Map' i e) = e
type instance SetElem (Map' i e) e' = Map' i e'
-- misc classes
instance (Eq e, Ord i, Semigroup e, Arbitrary i, Arbitrary e) => Arbitrary (Map' i e) where
arbitrary = P.fmap fromIxList arbitrary
-- comparisons
instance (Eq i, Eq e) => Eq_ (Map' i e) where
{-# INLINE (==) #-}
(Map' m1)==(Map' m2) = m1 P.== m2
instance (Ord i, Eq e) => POrd_ (Map' i e) where
{-# INLINE inf #-}
inf (Map' m1) (Map' m2) = Map' $ MS.differenceWith go (MS.intersection m1 m2) m2
where
go v1 v2 = if v1==v2 then Just v1 else Nothing
instance (Ord i, POrd e) => MinBound_ (Map' i e) where
{-# INLINE minBound #-}
minBound = zero
-- algebra
instance Ord i => Semigroup (Map' i e) where
{-# INLINE (+) #-}
(Map' m1)+(Map' m2) = Map' $ MS.union m1 m2
instance Ord i => Monoid (Map' i e) where
{-# INLINE zero #-}
zero = Map' $ MS.empty
instance Normed (Map' i e) where
{-# INLINE size #-}
size (Map' m) = MS.size m
-- indexed containers
instance (Ord i, Eq e) => IxContainer (Map' i e) where
{-# INLINE lookup #-}
{-# INLINE hasIndex #-}
lookup i (Map' m) = P.fmap unWithPreludeOrd $ MS.lookup (WithPreludeOrd i) m
hasIndex (Map' m) i = MS.member (WithPreludeOrd i) m
{-# INLINE toIxList #-}
{-# INLINE indices #-}
{-# INLINE values #-}
{-# INLINE imap #-}
toIxList (Map' m) = map (\(WithPreludeOrd i,WithPreludeOrd e)->(i,e)) $ MS.assocs m
indices (Map' m) = map unWithPreludeOrd $ MS.keys m
values (Map' m) = map unWithPreludeOrd $ MS.elems m
imap f (Map' m) = Map' $ MS.mapWithKey (\(WithPreludeOrd i) (WithPreludeOrd e) -> WithPreludeOrd $ f i e) m
instance (Ord i, Eq e) => IxConstructible (Map' i e) where
{-# INLINE singletonAt #-}
singletonAt i e = Map' $ MS.singleton (WithPreludeOrd i) (WithPreludeOrd e)
{-# INLINE consAt #-}
consAt i e (Map' m) = Map' $ MS.insert (WithPreludeOrd i) (WithPreludeOrd e) m
-------------------------------------------------------------------------------
-- | This is a thin wrapper around Data.IntMap
newtype IntMap e = IntMap (IM.IntMap (WithPreludeOrd e))
deriving (Read,Show,NFData)
mkMutable [t| forall a. IntMap a |]
type instance Scalar (IntMap e) = Int
type instance Logic (IntMap e) = Bool
type instance Index (IntMap e) = IM.Key
type instance Elem (IntMap e) = e
type instance SetElem (IntMap e) e' = IntMap e'
-- misc classes
instance (Eq e, Semigroup e, Arbitrary e) => Arbitrary (IntMap e) where
{-# INLINABLE arbitrary #-}
arbitrary = P.fmap fromIxList arbitrary
-- comparisons
instance (Eq e) => Eq_ (IntMap e) where
{-# INLINE (==) #-}
(IntMap m1)==(IntMap m2) = m1 P.== m2
instance (Eq e) => POrd_ (IntMap e) where
{-# INLINE inf #-}
inf (IntMap m1) (IntMap m2) = IntMap $ IM.differenceWith go (IM.intersection m1 m2) m2
where
go v1 v2 = if v1==v2 then Just v1 else Nothing
instance (POrd e) => MinBound_ (IntMap e) where
{-# INLINE minBound #-}
minBound = zero
-- algebra
instance Semigroup (IntMap e) where
{-# INLINE (+) #-}
(IntMap m1)+(IntMap m2) = IntMap $ IM.union m1 m2
instance Monoid (IntMap e) where
{-# INLINE zero #-}
zero = IntMap $ IM.empty
instance Normed (IntMap e) where
{-# INLINE size #-}
size (IntMap m) = IM.size m
-- indexed container
instance (Eq e) => IxConstructible (IntMap e) where
{-# INLINE singletonAt #-}
{-# INLINE consAt #-}
singletonAt i e = IntMap $ IM.singleton i (WithPreludeOrd e)
consAt i e (IntMap m) = IntMap $ IM.insert i (WithPreludeOrd e) m
instance (Eq e) => IxContainer (IntMap e) where
{-# INLINE lookup #-}
{-# INLINE hasIndex #-}
lookup i (IntMap m) = P.fmap unWithPreludeOrd $ IM.lookup i m
hasIndex (IntMap m) i = IM.member i m
{-# INLINE toIxList #-}
{-# INLINE indices #-}
{-# INLINE values #-}
{-# INLINE imap #-}
toIxList (IntMap m) = map (\(i,WithPreludeOrd e)->(i,e)) $ IM.assocs m
indices (IntMap m) = IM.keys m
values (IntMap m) = map unWithPreludeOrd $ IM.elems m
imap f (IntMap m) = IntMap $ IM.mapWithKey (\i (WithPreludeOrd e) -> WithPreludeOrd $ f i e) m
----------------------------------------
-- | This is a thin wrapper around Data.IntMap.Strict
newtype IntMap' e = IntMap' (IMS.IntMap (WithPreludeOrd e))
deriving (Read,Show,NFData)
mkMutable [t| forall a. IntMap' a |]
type instance Scalar (IntMap' e) = Int
type instance Logic (IntMap' e) = Bool
type instance Index (IntMap' e) = IMS.Key
type instance Elem (IntMap' e) = e
type instance SetElem (IntMap' e) e' = IntMap' e'
-- misc classes
instance (Eq e, Semigroup e, Arbitrary e) => Arbitrary (IntMap' e) where
{-# INLINABLE arbitrary #-}
arbitrary = P.fmap fromIxList arbitrary
-- comparisons
instance (Eq e) => Eq_ (IntMap' e) where
{-# INLINE (==) #-}
(IntMap' m1)==(IntMap' m2) = m1 P.== m2
instance (Eq e) => POrd_ (IntMap' e) where
{-# INLINE inf #-}
inf (IntMap' m1) (IntMap' m2) = IntMap' $ IMS.differenceWith go (IMS.intersection m1 m2) m2
where
go v1 v2 = if v1==v2 then Just v1 else Nothing
instance (POrd e) => MinBound_ (IntMap' e) where
{-# INLINE minBound #-}
minBound = zero
-- algebra
instance Semigroup (IntMap' e) where
{-# INLINE (+) #-}
(IntMap' m1)+(IntMap' m2) = IntMap' $ IMS.union m1 m2
instance Monoid (IntMap' e) where
{-# INLINE zero #-}
zero = IntMap' $ IMS.empty
instance Normed (IntMap' e) where
{-# INLINE size #-}
size (IntMap' m) = IMS.size m
-- container
instance (Eq e) => IxConstructible (IntMap' e) where
{-# INLINABLE singletonAt #-}
{-# INLINABLE consAt #-}
singletonAt i e = IntMap' $ IMS.singleton i (WithPreludeOrd e)
consAt i e (IntMap' m) = IntMap' $ IMS.insert i (WithPreludeOrd e) m
instance (Eq e) => IxContainer (IntMap' e) where
{-# INLINE lookup #-}
{-# INLINE hasIndex #-}
lookup i (IntMap' m) = P.fmap unWithPreludeOrd $ IMS.lookup i m
hasIndex (IntMap' m) i = IMS.member i m
{-# INLINE toIxList #-}
{-# INLINE indices #-}
{-# INLINE values #-}
{-# INLINE imap #-}
toIxList (IntMap' m) = map (\(i,WithPreludeOrd e)->(i,e)) $ IMS.assocs m
indices (IntMap' m) = IMS.keys m
values (IntMap' m) = map unWithPreludeOrd $ IMS.elems m
imap f (IntMap' m) = IntMap' $ IMS.mapWithKey (\i (WithPreludeOrd e) -> WithPreludeOrd $ f i e) m
-------------------------------------------------------------------------------
-- | This is a thin wrapper around Data.Set
newtype Set a = Set (Set.Set (WithPreludeOrd a))
deriving (Show,NFData)
mkMutable [t| forall a. Set a |]
instance (Ord a, Arbitrary a) => Arbitrary (Set a) where
{-# INLINABLE arbitrary #-}
arbitrary = P.fmap fromList arbitrary
type instance Scalar (Set a) = Int
type instance Logic (Set a) = Logic a
type instance Elem (Set a) = a
type instance SetElem (Set a) b = Set b
instance Normed (Set a) where
{-# INLINE size #-}
size (Set s) = Set.size s
instance Eq a => Eq_ (Set a) where
{-# INLINE (==) #-}
(Set s1)==(Set s2) = s1'==s2'
where
s1' = removeWithPreludeOrd $ Set.toList s1
s2' = removeWithPreludeOrd $ Set.toList s2
removeWithPreludeOrd [] = []
removeWithPreludeOrd (WithPreludeOrd x:xs) = x:removeWithPreludeOrd xs
instance Ord a => POrd_ (Set a) where
{-# INLINE inf #-}
inf (Set s1) (Set s2) = Set $ Set.intersection s1 s2
instance Ord a => MinBound_ (Set a) where
{-# INLINE minBound #-}
minBound = Set $ Set.empty
instance Ord a => Lattice_ (Set a) where
{-# INLINE sup #-}
sup (Set s1) (Set s2) = Set $ Set.union s1 s2
instance Ord a => Semigroup (Set a) where
{-# INLINE (+) #-}
(Set s1)+(Set s2) = Set $ Set.union s1 s2
instance Ord a => Monoid (Set a) where
{-# INLINE zero #-}
zero = Set $ Set.empty
instance Ord a => Abelian (Set a)
instance Ord a => Container (Set a) where
{-# INLINE elem #-}
{-# INLINE notElem #-}
elem a (Set s) = Set.member (WithPreludeOrd a) s
notElem a (Set s) = not $ Set.member (WithPreludeOrd a) s
instance Ord a => Constructible (Set a) where
{-# INLINE singleton #-}
singleton a = Set $ Set.singleton (WithPreludeOrd a)
{-# INLINE fromList1 #-}
fromList1 a as = Set $ Set.fromList $ map WithPreludeOrd (a:as)
instance Ord a => Foldable (Set a) where
{-# INLINE foldl #-}
{-# INLINE foldl' #-}
{-# INLINE foldr #-}
{-# INLINE foldr' #-}
foldl f a (Set s) = Set.foldl (\a (WithPreludeOrd e) -> f a e) a s
foldl' f a (Set s) = Set.foldl' (\a (WithPreludeOrd e) -> f a e) a s
foldr f a (Set s) = Set.foldr (\(WithPreludeOrd e) a -> f e a) a s
foldr' f a (Set s) = Set.foldr' (\(WithPreludeOrd e) a -> f e a) a s
-------------------
-- |
--
-- FIXME: implement this in terms of @Lexical@ and @Set@
--
-- FIXME: add the @Constrained@ Monad
data LexSet a where
LexSet :: Ord a => Set a -> LexSet a
mkMutable [t| forall a. LexSet a |]
type instance Scalar (LexSet a) = Int
type instance Logic (LexSet a) = Bool
type instance Elem (LexSet a) = a
type instance SetElem (LexSet a) b = LexSet b
instance Show a => Show (LexSet a) where
show (LexSet s) = "LexSet "++show (toList s)
instance Eq_ (LexSet a) where
(LexSet a1)==(LexSet a2) = Lexical a1==Lexical a2
instance POrd_ (LexSet a) where
inf (LexSet a1) (LexSet a2) = LexSet $ unLexical $ inf (Lexical a1) (Lexical a2)
(LexSet a1) < (LexSet a2) = Lexical a1 < Lexical a2
(LexSet a1) <= (LexSet a2) = Lexical a1 <= Lexical a2
instance Lattice_ (LexSet a) where
sup (LexSet a1) (LexSet a2) = LexSet $ unLexical $ sup (Lexical a1) (Lexical a2)
(LexSet a1) > (LexSet a2) = Lexical a1 > Lexical a2
(LexSet a1) >= (LexSet a2) = Lexical a1 >= Lexical a2
instance Ord_ (LexSet a)
instance Semigroup (LexSet a) where
(LexSet a1)+(LexSet a2) = LexSet $ a1+a2
instance Ord a => Monoid (LexSet a) where
zero = LexSet zero
instance (Ord a ) => Container (LexSet a) where
elem x (LexSet s) = elem x s
instance (Ord a ) => Constructible (LexSet a) where
fromList1 a as = LexSet $ fromList1 a as
instance (Ord a ) => Normed (LexSet a) where
size (LexSet s) = size s
instance (Ord a ) => MinBound_ (LexSet a) where
minBound = zero
instance (Ord a ) => Foldable (LexSet a) where
foldl f a (LexSet s) = foldl f a s
foldl' f a (LexSet s) = foldl' f a s
foldl1 f (LexSet s) = foldl1 f s
foldl1' f (LexSet s) = foldl1' f s
foldr f a (LexSet s) = foldr f a s
foldr' f a (LexSet s) = foldr' f a s
foldr1 f (LexSet s) = foldr1 f s
foldr1' f (LexSet s) = foldr1' f s
liftPreludeOrd :: (a -> b) -> WithPreludeOrd a -> WithPreludeOrd b
liftPreludeOrd f (WithPreludeOrd a) = WithPreludeOrd $ f a
instance Functor OrdHask LexSet where
fmap (ConstrainedT f) = proveConstrained go
where
go (LexSet (Set s)) = LexSet $ Set $ Set.map (liftPreludeOrd f) s
instance Monad OrdHask LexSet where
return_ = proveConstrained singleton
join = proveConstrained $ \(LexSet s) -> foldl1' (+) s
instance Functor Mon LexSet where
fmap (MonT f) = unsafeProveMon go
where
go (LexSet (Set s)) = LexSet $ Set $ Set.mapMonotonic (liftPreludeOrd f) s
-- | FIXME: is there a more efficient implementation?
instance Monad Mon LexSet where
return_ = unsafeProveMon singleton
join = unsafeProveMon $ \(LexSet s) -> foldl1' (+) s
instance Then LexSet where
(LexSet a)>>(LexSet b) = LexSet b
|
abailly/subhask
|
src/SubHask/Compatibility/Containers.hs
|
bsd-3-clause
| 18,261 | 0 | 14 | 4,526 | 6,910 | 3,569 | 3,341 | -1 | -1 |
{-# LANGUAGE Trustworthy #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE NoImplicitPrelude #-}
-----------------------------------------------------------------------------
-- |
-- Module : Control.Applicative
-- Copyright : Conor McBride and Ross Paterson 2005
-- License : BSD-style (see the LICENSE file in the distribution)
--
-- Maintainer : [email protected]
-- Stability : experimental
-- Portability : portable
--
-- This module describes a structure intermediate between a functor and
-- a monad (technically, a strong lax monoidal functor). Compared with
-- monads, this interface lacks the full power of the binding operation
-- '>>=', but
--
-- * it has more instances.
--
-- * it is sufficient for many uses, e.g. context-free parsing, or the
-- 'Data.Traversable.Traversable' class.
--
-- * instances can perform analysis of computations before they are
-- executed, and thus produce shared optimizations.
--
-- This interface was introduced for parsers by Niklas Röjemo, because
-- it admits more sharing than the monadic interface. The names here are
-- mostly based on parsing work by Doaitse Swierstra.
--
-- For more details, see
-- <http://www.soi.city.ac.uk/~ross/papers/Applicative.html Applicative Programming with Effects>,
-- by Conor McBride and Ross Paterson.
module Control.Applicative (
-- * Applicative functors
Applicative(..),
-- * Alternatives
Alternative(..),
-- * Instances
Const(..), WrappedMonad(..), WrappedArrow(..), ZipList(..),
-- * Utility functions
(<$>), (<$), (<**>),
liftA, liftA2, liftA3,
optional,
) where
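-- A small illustration of the interface described above (added example; not
-- part of the original module). Applicative style applies a pure function to
-- effectful arguments:
--
-- > (+) <$> Just 1 <*> Just 2   -- evaluates to Just 3
-- > (+) <$> Just 1 <*> Nothing  -- evaluates to Nothing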
import Control.Category hiding ((.), id)
import Control.Arrow
import Data.Maybe
import Data.Tuple
import Data.Eq
import Data.Ord
import Data.Foldable (Foldable(..))
import Data.Functor ((<$>))
import Data.Functor.Const (Const(..))
import GHC.Base
import GHC.Generics
import GHC.List (repeat, zipWith)
import GHC.Read (Read)
import GHC.Show (Show)
newtype WrappedMonad m a = WrapMonad { unwrapMonad :: m a }
deriving (Generic, Generic1, Monad)
instance Monad m => Functor (WrappedMonad m) where
fmap f (WrapMonad v) = WrapMonad (liftM f v)
instance Monad m => Applicative (WrappedMonad m) where
pure = WrapMonad . pure
WrapMonad f <*> WrapMonad v = WrapMonad (f `ap` v)
instance MonadPlus m => Alternative (WrappedMonad m) where
empty = WrapMonad mzero
WrapMonad u <|> WrapMonad v = WrapMonad (u `mplus` v)
newtype WrappedArrow a b c = WrapArrow { unwrapArrow :: a b c }
deriving (Generic, Generic1)
instance Arrow a => Functor (WrappedArrow a b) where
fmap f (WrapArrow a) = WrapArrow (a >>> arr f)
instance Arrow a => Applicative (WrappedArrow a b) where
pure x = WrapArrow (arr (const x))
WrapArrow f <*> WrapArrow v = WrapArrow (f &&& v >>> arr (uncurry id))
instance (ArrowZero a, ArrowPlus a) => Alternative (WrappedArrow a b) where
empty = WrapArrow zeroArrow
WrapArrow u <|> WrapArrow v = WrapArrow (u <+> v)
-- | Lists, but with an 'Applicative' functor based on zipping, so that
--
-- @f '<$>' 'ZipList' xs1 '<*>' ... '<*>' 'ZipList' xsn = 'ZipList' (zipWithn f xs1 ... xsn)@
--
newtype ZipList a = ZipList { getZipList :: [a] }
deriving ( Show, Eq, Ord, Read, Functor
, Foldable, Generic, Generic1)
-- See Data.Traversable for the Traversable instance due to import loops
instance Applicative ZipList where
pure x = ZipList (repeat x)
ZipList fs <*> ZipList xs = ZipList (zipWith id fs xs)
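-- Added illustration: because '<*>' zips instead of taking all combinations,
--
-- > getZipList ((+) <$> ZipList [1,2,3] <*> ZipList [10,20,30])
--
-- evaluates to [11,22,33].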
-- extra functions
-- | One or none.
optional :: Alternative f => f a -> f (Maybe a)
optional v = Just <$> v <|> pure Nothing
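-- Added illustration, using the 'Maybe' instance:
--
-- > optional (Just 3)  -- evaluates to Just (Just 3)
-- > optional Nothing   -- evaluates to Just Nothing
--
-- i.e. a failing alternative is recovered as a successful 'Nothing' result.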
|
tolysz/prepare-ghcjs
|
spec-lts8/base/Control/Applicative.hs
|
bsd-3-clause
| 3,760 | 0 | 11 | 772 | 836 | 474 | 362 | 53 | 1 |
{-# LANGUAGE GeneralizedNewtypeDeriving, TemplateHaskell, PolymorphicComponents, ConstraintKinds, RecordWildCards #-}
module Lamdu.Sugar.Convert.Monad
( Context(..), TagParamInfo(..), RecordParamsInfo(..)
, scHoleInferContext, scStructureInferContext, scWithVarsInferContext
, scCodeAnchors, scSpecialFunctions, scTagParamInfos, scRecordParamsInfos
, scInferContexts
, ConvertM(..), run
, readContext, liftCTransaction, liftTransaction, local
, codeAnchor
, getP
, convertSubexpression
) where
import Control.Applicative (Applicative(..), (<$>))
import Control.Lens ((^.))
import Control.Monad.Trans.Class (MonadTrans(..))
import Control.Monad.Trans.Reader (ReaderT, runReaderT)
import Control.MonadA (MonadA)
import Data.Map (Map)
import Data.Monoid (Monoid)
import Data.Store.Guid (Guid)
import Data.Store.IRef (Tag)
import Lamdu.Sugar.Internal
import Lamdu.Sugar.Types.Internal
import qualified Control.Lens as Lens
import qualified Control.Monad.Trans.Reader as Reader
import qualified Data.Store.Transaction as Transaction
import qualified Lamdu.Data.Anchors as Anchors
import qualified Lamdu.Sugar.Types as Sugar
data TagParamInfo = TagParamInfo
{ tpiFromParameters :: Guid
, tpiJumpTo :: Guid
}
data RecordParamsInfo m = RecordParamsInfo
{ rpiFromDefinition :: Guid
, rpiJumpTo :: T m Guid
}
data Context m = Context
{ _scHoleInferContext :: InferContext m
, _scStructureInferContext :: InferContext m
, _scWithVarsInferContext :: InferContext m
, _scCodeAnchors :: Anchors.CodeProps m
, _scSpecialFunctions :: Anchors.SpecialFunctions (Tag m)
, _scTagParamInfos :: Map Guid TagParamInfo -- tag guids
, _scRecordParamsInfos :: Map Guid (RecordParamsInfo m) -- param guids
, scConvertSubexpression :: forall a. Monoid a => Sugar.InputExpr m a -> ConvertM m (ExpressionU m a)
}
scInferContexts :: Lens.Traversal' (Context m) (InferContext m)
scInferContexts f ctx@Context{..} =
newCtx
<$> f _scHoleInferContext
<*> f _scStructureInferContext
<*> f _scWithVarsInferContext
where
newCtx holeInferContext structureInferContext withVarsInferContext =
ctx
{ _scHoleInferContext = holeInferContext
, _scStructureInferContext = structureInferContext
, _scWithVarsInferContext = withVarsInferContext
}
newtype ConvertM m a = ConvertM (ReaderT (Context m) (CT m) a)
deriving (Functor, Applicative, Monad)
Lens.makeLenses ''Context
run :: MonadA m => Context m -> ConvertM m a -> CT m a
run ctx (ConvertM action) = runReaderT action ctx
readContext :: MonadA m => ConvertM m (Context m)
readContext = ConvertM Reader.ask
local :: Monad m => (Context m -> Context m) -> ConvertM m a -> ConvertM m a
local f (ConvertM act) = ConvertM $ Reader.local f act
liftCTransaction :: MonadA m => CT m a -> ConvertM m a
liftCTransaction = ConvertM . lift
liftTransaction :: MonadA m => T m a -> ConvertM m a
liftTransaction = liftCTransaction . lift
codeAnchor :: MonadA m => (Anchors.CodeProps m -> a) -> ConvertM m a
codeAnchor f = f . (^. scCodeAnchors) <$> readContext
getP :: MonadA m => Transaction.MkProperty m a -> ConvertM m a
getP = liftTransaction . Transaction.getP
convertSubexpression :: (MonadA m, Monoid a) => Sugar.InputExpr m a -> ConvertM m (ExpressionU m a)
convertSubexpression exprI = do
convertSub <- scConvertSubexpression <$> readContext
convertSub exprI
|
schell/lamdu
|
Lamdu/Sugar/Convert/Monad.hs
|
gpl-3.0
| 3,378 | 0 | 14 | 534 | 987 | 549 | 438 | 74 | 1 |
{-# LANGUAGE MagicHash, UnboxedTuples #-}
--module Foo where
import GHC.Exts
--you can use this if you want to test running it...
main = print (I# (
f1pat 1# +# f1prepat 1#
+# f2pat 1# +# f2prepat 1#
+# f3pat 1# +# f3prepat 1#
))
--unboxed tuples are of sizes 1,2,3...
--(normal tuples are 0,2,3...)
--make sure it's really the _unboxed_ tuples
--being used by putting unboxed values in,
--which are forbidden in boxed tuples
f1 :: Int# -> (# Int# #)
f1 i = (# i #)
-- a space is needed in (# #) so that it's not
-- lexed/parsed as an operator named "##"
--(even though the error message about mismatched
--kinds for "instance Functor (# #)" names the type
--as "(##)")
-- Kind mis-match
-- Expected kind `* -> *', but `(##)' has kind `? -> (#)'
-- In the instance declaration for `Functor (##)'
f1prefix :: Int# -> (# #)
f1prefix i = (# #)
--test that prefix and non-prefix versions
--are the same type by switching the case-argument
f1pat a = case f1prefix a of (# #) -> 1#
f1prepat a = case f1 a of (# i #) -> i +# 1#
f2 :: Int# -> (# Int#, Int# #)
f2 i = (# i, i #)
f2prefix :: Int# -> (#,#) Int# Int#
f2prefix i = (#,#) i i
f2pat a = case f2prefix a of (# i, j #) -> i +# j
f2prepat a = case f2 a of (#,#) i j -> i +# j
f3 :: Int# -> (# Int#, Int#, Int# #)
f3 i = (# i, i, i #)
f3prefix :: Int# -> (#,,#) Int# Int# Int#
f3prefix i = (#,,#) i i i
f3pat a = case f3prefix a of (# i, j, k #) -> i +# j +# k
f3prepat a = case f3 a of (#,,#) i j k -> i +# j +# k
|
urbanslug/ghc
|
testsuite/tests/parser/should_compile/read063.hs
|
bsd-3-clause
| 1,488 | 14 | 14 | 348 | 430 | 231 | 199 | 24 | 1 |
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE Strict #-}
-- | Internalising bindings.
module Futhark.Internalise.Bindings
( internaliseAttrs,
internaliseAttr,
bindingFParams,
bindingLoopParams,
bindingLambdaParams,
stmPat,
)
where
import Control.Monad.Reader hiding (mapM)
import Data.Bifunctor
import qualified Data.Map.Strict as M
import Data.Maybe
import qualified Futhark.IR.SOACS as I
import Futhark.Internalise.Monad
import Futhark.Internalise.TypesValues
import Futhark.Util
import Language.Futhark as E hiding (matchDims)
internaliseAttr :: E.AttrInfo VName -> InternaliseM I.Attr
internaliseAttr (E.AttrAtom (E.AtomName v) _) =
pure $ I.AttrName v
internaliseAttr (E.AttrAtom (E.AtomInt x) _) =
pure $ I.AttrInt x
internaliseAttr (E.AttrComp f attrs _) =
I.AttrComp f <$> mapM internaliseAttr attrs
internaliseAttrs :: [E.AttrInfo VName] -> InternaliseM I.Attrs
internaliseAttrs = fmap (mconcat . map I.oneAttr) . mapM internaliseAttr
bindingFParams ::
[E.TypeParam] ->
[E.Pat] ->
([I.FParam I.SOACS] -> [[I.FParam I.SOACS]] -> InternaliseM a) ->
InternaliseM a
bindingFParams tparams params m = do
flattened_params <- mapM flattenPat params
let params_idents = concat flattened_params
params_ts <-
internaliseParamTypes $
map (flip E.setAliases () . E.unInfo . E.identType . fst) params_idents
let num_param_idents = map length flattened_params
num_param_ts = map (sum . map length) $ chunks num_param_idents params_ts
let shape_params = [I.Param mempty v $ I.Prim I.int64 | E.TypeParamDim v _ <- tparams]
shape_subst = M.fromList [(I.paramName p, [I.Var $ I.paramName p]) | p <- shape_params]
bindingFlatPat params_idents (concat params_ts) $ \valueparams -> do
let (certparams, valueparams') = unzip $ map fixAccParam (concat valueparams)
I.localScope (I.scopeOfFParams $ catMaybes certparams ++ shape_params ++ valueparams') $
substitutingVars shape_subst $
m (catMaybes certparams ++ shape_params) $ chunks num_param_ts valueparams'
where
fixAccParam (I.Param attrs pv (I.Acc acc ispace ts u)) =
( Just (I.Param attrs acc $ I.Prim I.Unit),
I.Param attrs pv (I.Acc acc ispace ts u)
)
fixAccParam p = (Nothing, p)
bindingLoopParams ::
[E.TypeParam] ->
E.Pat ->
[I.Type] ->
([I.FParam I.SOACS] -> [I.FParam I.SOACS] -> InternaliseM a) ->
InternaliseM a
bindingLoopParams tparams pat ts m = do
pat_idents <- flattenPat pat
pat_ts <- internaliseLoopParamType (E.patternStructType pat) ts
let shape_params = [I.Param mempty v $ I.Prim I.int64 | E.TypeParamDim v _ <- tparams]
shape_subst = M.fromList [(I.paramName p, [I.Var $ I.paramName p]) | p <- shape_params]
bindingFlatPat pat_idents pat_ts $ \valueparams ->
I.localScope (I.scopeOfFParams $ shape_params ++ concat valueparams) $
substitutingVars shape_subst $ m shape_params $ concat valueparams
bindingLambdaParams ::
[E.Pat] ->
[I.Type] ->
([I.LParam I.SOACS] -> InternaliseM a) ->
InternaliseM a
bindingLambdaParams params ts m = do
params_idents <- concat <$> mapM flattenPat params
bindingFlatPat params_idents ts $ \params' ->
I.localScope (I.scopeOfLParams $ concat params') $ m $ concat params'
processFlatPat ::
Show t =>
[(E.Ident, [E.AttrInfo VName])] ->
[t] ->
InternaliseM ([[I.Param t]], VarSubsts)
processFlatPat x y = processFlatPat' [] x y
where
processFlatPat' pat [] _ = do
let (vs, substs) = unzip pat
return (reverse vs, M.fromList substs)
processFlatPat' pat ((p, attrs) : rest) ts = do
attrs' <- internaliseAttrs attrs
(ps, rest_ts) <- handleMapping attrs' ts <$> internaliseBindee p
processFlatPat' ((ps, (E.identName p, map (I.Var . I.paramName) ps)) : pat) rest rest_ts
handleMapping _ ts [] =
([], ts)
handleMapping attrs (t : ts) (r : rs) =
let (ps, ts') = handleMapping attrs ts rs
in (I.Param attrs r t : ps, ts')
handleMapping _ [] _ =
error $ "handleMapping: insufficient identifiers in pattern." ++ show (x, y)
internaliseBindee :: E.Ident -> InternaliseM [VName]
internaliseBindee bindee = do
let name = E.identName bindee
n <- internalisedTypeSize $ E.unInfo $ E.identType bindee
case n of
1 -> return [name]
_ -> replicateM n $ newVName $ baseString name
bindingFlatPat ::
Show t =>
[(E.Ident, [E.AttrInfo VName])] ->
[t] ->
([[I.Param t]] -> InternaliseM a) ->
InternaliseM a
bindingFlatPat idents ts m = do
(ps, substs) <- processFlatPat idents ts
local (\env -> env {envSubsts = substs `M.union` envSubsts env}) $
m ps
-- | Flatten a pattern. Returns a list of the identifiers bound by the
-- pattern, each paired with its attributes.
flattenPat :: MonadFreshNames m => E.Pat -> m [(E.Ident, [E.AttrInfo VName])]
flattenPat = flattenPat'
where
flattenPat' (E.PatParens p _) =
flattenPat' p
flattenPat' (E.PatAttr attr p _) =
map (second (attr :)) <$> flattenPat' p
flattenPat' (E.Wildcard t loc) = do
name <- newVName "nameless"
flattenPat' $ E.Id name t loc
flattenPat' (E.Id v (Info t) loc) =
return [(E.Ident v (Info t) loc, mempty)]
-- XXX: treat empty tuples and records as unit.
flattenPat' (E.TuplePat [] loc) =
flattenPat' (E.Wildcard (Info $ E.Scalar $ E.Record mempty) loc)
flattenPat' (E.RecordPat [] loc) =
flattenPat' (E.Wildcard (Info $ E.Scalar $ E.Record mempty) loc)
flattenPat' (E.TuplePat pats _) =
concat <$> mapM flattenPat' pats
flattenPat' (E.RecordPat fs loc) =
flattenPat' $ E.TuplePat (map snd $ sortFields $ M.fromList fs) loc
flattenPat' (E.PatAscription p _ _) =
flattenPat' p
flattenPat' (E.PatLit _ t loc) =
flattenPat' $ E.Wildcard t loc
flattenPat' (E.PatConstr _ _ ps _) =
concat <$> mapM flattenPat' ps
stmPat ::
E.Pat ->
[I.Type] ->
([VName] -> InternaliseM a) ->
InternaliseM a
stmPat pat ts m = do
pat' <- flattenPat pat
bindingFlatPat pat' ts $ m . map I.paramName . concat
|
diku-dk/futhark
|
src/Futhark/Internalise/Bindings.hs
|
isc
| 6,028 | 0 | 19 | 1,284 | 2,322 | 1,165 | 1,157 | 144 | 11 |
{-|
Module : Parser
Copyright : (c) Jakub Šoustar, 2016
License : MIT
Maintainer : Jakub Šoustar <[email protected]>, xsoust02 <[email protected]>
Part of the rv-2-rka (re-2-efa) project.
This module provides lexical and syntactic parsing capabilities.
    The scanner converts an input string into a list of tokens.
The parser converts a list of tokens into a parse tree.
-}
module Parser
( Operator (..)
, Symbol (..)
, Token (..)
, ParseTree (..)
, parseTree
, tokenize
)
where
import Data.Char
{-|
Represents a recognized operator.
-}
data Operator
= Concatenation -- ^ Concatenation of two regular expressions.
| Alternation -- ^ Alternation of two regular expressions.
| KleeneStar -- ^ Kleene star of regular expression.
deriving Eq
{-|
Represents a recognized symbol.
-}
data Symbol
= Character Char -- ^ Single a-z (lowercase) character.
| Epsilon -- ^ Empty string.
| Empty -- ^ Empty input. Used to handle special case when no input is provided.
deriving (Eq, Ord)
{-|
Represents a lexical token. Token may either be an operator or a symbol.
-}
data Token
= Operator Operator
| Symbol Symbol
deriving Eq
{-|
Represents a parse tree of tokens.
-}
data ParseTree
= Branch Token [ParseTree] -- ^ Branch node with 0-n children.
| Leaf Token -- ^ Leaf node.
{-|
Represents a forest of parse trees. Used for regular expression validation.
-}
type ParseForest = [ParseTree]
instance Show Operator where
show Concatenation = "."
show Alternation = "+"
show KleeneStar = "*"
instance Show Symbol where
show (Character char) = [char]
show Epsilon = ""
show Empty = ""
instance Show Token where
show (Operator operator) = show operator
show (Symbol symbol) = show symbol
{-|
Textual representation of parse tree in postfix notation.
-}
instance Show ParseTree where
show tree = case tree of
(Branch token branches) -> concatMap show branches ++ show token
(Leaf token) -> show token
{-|
Parse a list of tokens into parse tree forest.
-}
parseForest :: [Token] -> ParseForest
parseForest = foldl foldToken []
where
-- Concatenation. Pop two nodes from the stack, combine them into new one, and push it onto the stack.
foldToken (top : top' : stack) (Operator Concatenation) = Branch (Operator Concatenation) [top', top] : stack
-- Alternation. Pop two nodes from the stack, combine them into new one, and push it onto the stack.
foldToken (top : top' : stack) (Operator Alternation) = Branch (Operator Alternation) [top', top] : stack
-- Kleene star. Pop a node from the stack, wrap it with new one, and push it onto the stack.
foldToken (top : stack) (Operator KleeneStar) = Branch (Operator KleeneStar) [top] : stack
-- Character. Create new leaf node and push it onto the stack.
foldToken stack (Symbol (Character token)) = Leaf (Symbol (Character token)) : stack
-- Special case. No input was provided.
foldToken _ (Symbol Empty) = [Leaf (Symbol Empty)]
foldToken _ _ = error "Parser: ParseForest: Invalid Regular Expression."
{-|
Parse a list of tokens into parse tree.
-}
parseTree :: [Token] -> ParseTree
parseTree tokens
-- RE is valid only if tokens can be reduced into a single parse tree.
| length forest == 1 = head forest
-- Trees, trees everywhere!
| otherwise = error "Parser: ParseTree: Incomplete Regular Expression."
where
forest = parseForest tokens
{-|
Convert a string into a list of tokens.
Recognized tokens are:
- Empty string
- Lowercase characters from English alphabet ([a-z])
- Plus sign (+)
- Asterisk (*)
- Dot (.)
-}
tokenize :: String -> [Token]
tokenize string
| not $ null string = map convert string
| otherwise = [Symbol Empty]
where
convert char
| isAsciiLower char = Symbol (Character char)
| char == '.' = Operator Concatenation
| char == '+' = Operator Alternation
| char == '*' = Operator KleeneStar
| otherwise = error $ "Parser: Tokenize: Invalid Symbol " ++ show char ++ "."
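-- Worked example (added; not part of the original module). The input is read
-- in postfix form, since 'parseForest' evaluates tokens against a stack, so
-- "ab.c+*" denotes ((a . b) + c)*:
--
-- > show (parseTree (tokenize "ab.c+*"))  -- "ab.c+*", printed back in postfix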
|
J4kubS/FLP
|
rv-2-rka/Parser.hs
|
mit
| 4,078 | 16 | 12 | 892 | 805 | 423 | 382 | 64 | 6 |
-----------------------------------------------------------------------------
--
-- Module : Topology
-- Copyright :
-- License : AllRightsReserved
--
-- Maintainer :
-- Stability :
-- Portability :
--
-- |
--
-----------------------------------------------------------------------------
module Topology (
Topology(..)
, adjMatrix
, adjArray
) where
import Data.Array (Array)
import Data.Array.ST (freeze)
import Data.Matrix (Matrix)
import Control.Monad.ST (runST)
import Control.Monad (forM_)
import Utils (newSTArray, fromArray)
import Cluster (Cluster, append, connect)
import Connections (Connections)
import qualified Connections
import qualified Cluster
data Topology = Topology Cluster Connections
adjArray :: Topology -> Array (Int, Int) Int
adjArray (Topology cluster conns) = runST $ do
let clusterNum = Connections.clusterNum conns
let nodesInCluster = Cluster.nodesNum cluster
let nodesNum = clusterNum * nodesInCluster
matrix <- newSTArray ((1, 1), (nodesNum, nodesNum)) 0
mapM_ (append cluster matrix) [0 .. clusterNum - 1]
forM_ (Connections.conns conns) (connect cluster matrix)
freeze matrix
adjMatrix :: Topology -> Matrix Int
adjMatrix = fromArray . adjArray
|
uvNikita/TopologyCalc
|
src/Topology.hs
|
mit
| 1,326 | 0 | 12 | 292 | 324 | 184 | 140 | 26 | 1 |
{-# LANGUAGE QuasiQuotes #-}
module ElasticSearch.Document where
import Control.Lens ((^.))
import Data.Aeson (ToJSON, FromJSON)
import Data.Default
import Data.Text (unpack)
import ElasticSearch.Internal
import ElasticSearch.Types
import ElasticSearch.Document.Types
import URI.TH
import URI.Types
putDocument :: ToJSON a => Index -> Type -> a -> ElasticSearch IndexPutOptions IndexPutResponse
putDocument i t x f = case fmap unpack $ opts ^. id' of
Just mid -> put [uri| /{i}/{t}{/mid}{?opts} |] x
Nothing -> post [uri| /{i}/{t}{?opts} |] x
where
opts :: IndexPutOptions
opts = f def
getDocument :: FromJSON a => Index -> Id -> ElasticSearch IndexGetOptions (Maybe (IndexGetResponse a))
getDocument i ident f = get [uri| /{i}/{mt}/{ident}{?opts} |]
where
mt = maybe "_all" unpack $ opts ^. type'
opts :: IndexGetOptions
opts = f def
deleteDocument :: Index -> Type -> Id -> ElasticSearch IndexDeleteOptions IndexDeleteResponse
deleteDocument i t id f = delete [uri| /{i}/{t}/{id}{?opts} |]
where
opts :: IndexDeleteOptions
opts = f def
updateDocument :: Index -> Type -> Id -> IndexUpdate -> ElasticSearch IndexUpdateOptions IndexUpdateResponse
updateDocument i t id u f = post [uri| /{i}/{t}/{id}/_update{?opts} |] u
where
opts :: IndexUpdateOptions
opts = f def
-- performBulkDocumentActions :: [BulkDocumentOperation] -> ElasticSearchM BulkOperationResponse
-- deleteDocumentsByQuery
|
SaneApp/elastic-search-api
|
src/ElasticSearch/Document.hs
|
mit
| 1,443 | 0 | 12 | 245 | 394 | 215 | 179 | 30 | 2 |
{-# LANGUAGE CPP, ExistentialQuantification, FlexibleInstances, OverloadedStrings, RecordWildCards, TemplateHaskell, TypeSynonymInstances #-}
module Model where
import Data.Aeson
import qualified Data.Aeson as A
import qualified Data.ByteString as B
import Data.List
import Data.Monoid
import qualified Data.Map as M
import Data.Proxy
import qualified Data.Text as T
import qualified Data.Text.Encoding as TE
import qualified Data.Vector as V
import Web.Users.Types
import Model.Internal
mkProxy :: a -> Proxy a
mkProxy _ = Proxy
object' :: [(T.Text, Value)] -> Value
object' = object . filter (not . isDflt . snd)
where
isDflt Null = True
isDflt (Bool b) = not b
isDflt (Array a) = V.null a
isDflt (String a) = T.null a
isDflt _ = False
type Keys = [(T.Text, T.Text)]
data Error = forall e. ToJSON e => UserStorageBackendError e
| InvalidUserError
| NoSuchCommandError
| ParseError String
| NoSharedKeyError
| PubKeyFormatError
| NoPubKeyError
| MissingOptionsError
| SignVerifyError
| AuthError
| AuthKeyError
| AuthPassError
| AuthNeededError
| CouldNotSendMailError
instance ToJSON CreateUserError where
toJSON UsernameOrEmailAlreadyTaken = A.String "create_user_user_or_email_taken"
toJSON InvalidPassword = A.String "create_user_invalid_password"
instance ToJSON UpdateUserError where
toJSON UsernameOrEmailAlreadyExists = A.String "update_user_user_or_email_exists"
toJSON UserDoesntExit = A.String "update_user_user_doesnt_exist"
instance ToJSON TokenError where
toJSON _ = A.String "token"
instance ToJSON Error where
toJSON (UserStorageBackendError e) = object [ "code" .= toJSON e ]
toJSON NoSuchCommandError = object [ "code" .= A.String "no_such_command" ]
toJSON MissingOptionsError = object [ "code" .= A.String "missing_options" ]
toJSON CouldNotSendMailError = object [ "code" .= A.String "could_not_send_mail" ]
#ifdef DEBUG
toJSON InvalidUserError = object [ "code" .= A.String "invalid_user" ]
toJSON (ParseError e) = object [ "code" .= A.String "parse_error", "reason" .= A.String (T.pack e) ]
toJSON NoSharedKeyError = object [ "code" .= A.String "no_shared_key" ]
toJSON PubKeyFormatError = object [ "code" .= A.String "pubkey_format_error" ]
toJSON NoPubKeyError = object [ "code" .= A.String "no_pubkey" ]
toJSON SignVerifyError = object [ "code" .= A.String "verify" ]
toJSON AuthError = object [ "code" .= A.String "auth" ]
toJSON AuthKeyError = object [ "code" .= A.String "auth_key" ]
toJSON AuthPassError = object [ "code" .= A.String "auth_pass" ]
toJSON AuthNeededError = object [ "code" .= A.String "auth_needed" ]
#else
toJSON InvalidUserError = object [ "code" .= A.String "auth" ]
toJSON (ParseError _) = object [ "code" .= A.String "auth" ]
toJSON NoSharedKeyError = object [ "code" .= A.String "auth" ]
toJSON PubKeyFormatError = object [ "code" .= A.String "auth" ]
toJSON NoPubKeyError = object [ "code" .= A.String "auth" ]
toJSON SignVerifyError = object [ "code" .= A.String "auth" ]
toJSON AuthError = object [ "code" .= A.String "auth" ]
toJSON AuthKeyError = object [ "code" .= A.String "auth" ]
toJSON AuthPassError = object [ "code" .= A.String "auth" ]
toJSON AuthNeededError = object [ "code" .= A.String "auth" ]
#endif
data UserData = UserData
{ usrNumber :: Maybe T.Text
, usrSshKeys :: M.Map T.Text T.Text
}
deriveJSON' "usr" ''UserData
data DhRequest = DhRequest { dhReqHash :: T.Text }
data DhCmdRequest = DhCmdRequest { dhClCommand :: T.Text
, dhClOptions :: Keys
, dhClSig :: Maybe (T.Text, T.Text)
} deriving Show
hashCmdRequest :: DhCmdRequest -> B.ByteString
hashCmdRequest DhCmdRequest {..} =
TE.encodeUtf8 $ dhClCommand
<> (T.concat $ sort $ map (uncurry T.append) dhClOptions)
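-- Added illustration (the command and option values are hypothetical): option
-- pairs are concatenated and sorted, so the resulting bytes are independent of
-- the order in which the options were supplied, e.g.
--
-- > hashCmdRequest (DhCmdRequest "grant" [("user","bob"),("key","abc")] Nothing)
--
-- encodes the text "grant" <> "keyabc" <> "userbob".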
deriveJSON' "dhReq" ''DhRequest
deriveJSON' "dhCl" ''DhCmdRequest
type Response = Either Error Value
response :: Value -> Response
response = Right
responseOk :: Response
responseOk = Right ""
responseFail :: Error -> Response
responseFail = Left
instance ToJSON Response where
toJSON (Right v) = object' [ "status" .= ("ok" :: T.Text)
, "response" .= v ]
toJSON (Left e) = object' [ "status" .= ("error" :: T.Text)
, "error" .= toJSON e ]
|
pkamenarsky/usercfg
|
server/src/Model.hs
|
mit
| 4,973 | 0 | 12 | 1,477 | 1,085 | 573 | 512 | 88 | 5 |
module Rebase.Data.Vector.Fusion.Stream.Monadic
(
module Data.Vector.Fusion.Stream.Monadic
)
where
import Data.Vector.Fusion.Stream.Monadic
|
nikita-volkov/rebase
|
library/Rebase/Data/Vector/Fusion/Stream/Monadic.hs
|
mit
| 143 | 0 | 5 | 12 | 29 | 22 | 7 | 4 | 0 |
{-# LANGUAGE OverloadedStrings #-}
module Main where
import Control.Concurrent
import Control.Invariant
import Control.Lens
import Control.Monad
import Data.Functor.Compose
import Data.List
import Data.Monoid ((<>))
import Document.Document hiding (system)
import Document.Tests.SmallMachine as SM
import qualified UnitB.Test as UB
import qualified Latex.Test as LT
import Logic.UnitTest
import qualified Z3.Test as ZT
import qualified Document.Test as DOC
import qualified Utilities.Test as UT
import qualified Code.Test as Code
import qualified Documentation.Test as Sum
import Shelly hiding (time,get)
import Options.Applicative
import System.Directory
import System.Exit
import System.Process
import Test.UnitTest hiding (QuickCheckProps)
import Test.QuickCheck.Lens
import Utilities.TimeIt
import Test.QuickCheck.ZoomEq
selected_test_case :: TestCase
selected_test_case = test_cases
"Selected Literate Unit-B Test Case"
[ SM.test_case ]
test_case :: TestCase
test_case = test_cases
"Literate Unit-B Test Suite"
[ DOC.test_case
, UB.test_case
, LT.test_case
, ZT.test_case
-- , FMT.test_case
, UT.test_case
, Code.test_case
, Sum.test_case
]
data TestSelection = QuickCheckOnly Int | All Int | POCasesOnly
selectTestCase :: Parser TestCase
selectTestCase = flag Main.test_case selected_test_case
( long "selected-case"
<> help "Execute only the hard coded test case selection" )
executionMode :: Parser TestSelection
executionMode =
flag' POCasesOnly
( long "po-only"
<> help "among all the test cases, only run the verification test cases and check the PO" )
<|> ( flag' QuickCheckOnly
( long "quickcheck-only"
<> help "run only the QuickCheck properites." )
<|> pure All)
<*> (option auto
( short 'c'
<> metavar "TEST-COUNT"
<> help "TEST-COUNT specifies the number of examples to test. Default to 100 " )
<|> pure 100)
runSelection :: TestSelection -> TestCase -> IO Bool
runSelection (All n) t = run_test_cases_with t $ argMaxSuccess .= n
runSelection (QuickCheckOnly n) t = run_quickCheck_suite_with t $ argMaxSuccess .= n
runSelection POCasesOnly t = run_poTestSuite t
testScript :: IO Bool
testScript = do
x <- SM.case0
let x' = x & mapped.mapped %~ getCompose
m' = getCompose SM.m0_machine
m = SM.m0_machine
print $ x == Right [SM.m0_machine]
print $ x' == Right [m']
print $ invariantMessage $ x .== Right [m]
print $ invariantMessage $ x' .== Right [m']
return True
parseSelection :: Parser (IO Bool)
parseSelection =
flag' testScript
( long "select-script"
<> help "run hard coded test script" )
<|> runSelection <$> executionMode <*> selectTestCase
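-- A usage sketch of the command line this parser accepts (flag names are taken
-- from the definitions above; the executable name is an assumption):
--
--   literate-unitb-test                            run the full suite, 100 examples
--   literate-unitb-test --quickcheck-only -c 500   QuickCheck properties only
--   literate-unitb-test --po-only --selected-case  PO checks on the selected case
--   literate-unitb-test --select-script            the hard coded test script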
trashTestFiles :: IO ()
trashTestFiles = do
xs <- getDirectoryContents "."
setNumCapabilities 8
void $ system "rm actual* expected* po-* log*.z3"
main :: IO ()
main = timeIt $ do
let opts = info (helper <*> parseSelection)
( fullDesc
<> progDesc "Test Literate Unit-B"
<> header "test - the Literate Unit-B test suite" )
writeFile "syntax.txt" $ unlines syntaxSummary
trashTestFiles
-- b <- run_quickCheck_suite_with Main.test_case $ argMaxSuccess .= 1000
-- b <- run_poTestSuite Main.test_case
b <- join $ execParser opts
if b
then do
putStrLn "\n***************"
putStrLn "*** SUCCESS ***"
putStrLn "***************"
exitSuccess
else do
putStrLn "\n***************"
putStrLn "*** FAILURE ***"
putStrLn "***************"
exitFailure
|
literate-unitb/literate-unitb
|
suite/test.hs
|
mit
| 3,849 | 0 | 15 | 1,009 | 870 | 455 | 415 | -1 | -1 |
module Y2017.M10.D13.Solution where
{--
Today, for something COMPLETELY different, ... yet UTTERLY the same ...
We're going to build and then deploy an application.
Take the ETL process you've built over the ... is it a month now? IT'S A MONTH!
... last month and create an application. Run it on the 'command line' or the
shell or as a web service. Use the below-referenced filepaths as compressed
archives and load these data sets into your PostgreSQL data store.
LOOK AT YOU! PRODUCTION READY!
But your application needs a name! What shall you call your app?
Well, obviously: Y2017.M10.D13.Solution, of course!
Let's do this.
--}
import Control.Arrow (second, (&&&))
import Control.Monad (zipWithM_)
import Control.Monad.State
import qualified Data.ByteString.Lazy.Char8 as BL
import qualified Data.Map as Map
import Data.Maybe (catMaybes)
import Data.Time
import Data.Time.Clock
import Database.PostgreSQL.Simple
import System.Environment
-- below imports available via 1HaskellADay git repository
import Data.MemoizingTable (MemoizingTable)
import qualified Data.MemoizingTable as Mem
import Store.SQL.Connection
import Store.SQL.Util.Indexed
import Store.SQL.Util.Inserts (inserter)
import Store.SQL.Util.Pivots (joinValue)
import Y2017.M09.D25.Solution (Article, title, metadata)
import Y2017.M09.D28.Solution (insertArts)
import Y2017.M09.D29.Solution (raw2persons, insertPers, insertArtPersJoinStmt)
import Y2017.M10.D02.Solution
import Y2017.M10.D03.Solution -- subject memoization
main' :: [String] -> IO ()
main' files@(_:_) = withConnection (flip mapM_ files . runETL)
main' [] = putStrLn (unlines ["", "etl <compressed archives>", "",
"\tscans compressed archive and stores in database pointed to in "
++ "environment"])
-- main takes a filepath argument, reads in that compressed archive and stores
-- the reified result into the PostgreSQL database.
runETL :: Connection -> String -> IO ()
runETL conn archive =
getCurrentTime >>= \start ->
putStrLn "ETL process: start." >>
extractBlocks <$> BL.readFile archive >>=
transformLoad conn >>
getCurrentTime >>=
putStrLn . ("ETL process complete: " ++)
. show . flip diffUTCTime start
-- say we need to modify the blocks, e.g.: to replace special characters with
-- ASCII-equivalents:
transformLoad :: Connection -> [Block] -> IO ()
transformLoad conn blocks =
insertBlocks conn blocks >>= \ixblks ->
let articles = join (zipWith block2Article ixblks blocks) in
insertArts conn articles >>= \ixarts ->
let rns = catMaybes (zipWith art2RawNames ixarts articles)
pers = concatMap raw2persons rns in
insertPers conn pers >>= \ixpers ->
inserter conn insertArtPersJoinStmt (zipWith joinValue pers ixpers) >>
fetchSubjects conn >>= \isubs ->
let memtable = Mem.start (map ixsubj2pair isubs)
stat = execState (zipWithM_ getSubjectsMT ixarts articles)
(memtable, Map.empty) in
uploadMT conn (fst stat) >>= \ixsubs ->
let tab = Mem.update (map ixsubj2pair ixsubs) (fst stat)
pivs = evalState buildSubjectPivots (tab, snd stat)
subjs = mt2IxSubjects tab in
insertSubjPivot conn pivs
mt2IxSubjects :: MemoizingTable Integer Subject -> [IxValue String]
mt2IxSubjects = map (uncurry IxV . second subj) . Map.toList . Mem.fromTable
dir :: FilePath
dir = "Y2017/M10/D13/"
sources :: FilePath -> [String] -> [FilePath]
sources dir = map (((dir ++ nyt) ++) . (++ ".txt.gz"))
where nyt = "NYTOnline_08-29-17_09-05-17_pt"
{--
>>> connectInfo
ConnectInfo {connectHost = "...", ...}
>>> conn <- connect it
I separated out runETL so I can sequence ETL operations with one connection:
>>> mapM_ (runETL conn) (sources dir $ words "1 2")
ETL process: start.
ETL process complete: 2.379852s
ETL process: start.
ETL process complete: 2.699108s
>>> close conn
AUTOMATION!
--}
|
geophf/1HaskellADay
|
exercises/HAD/Y2017/M10/D13/Solution.hs
|
mit
| 3,996 | 0 | 27 | 802 | 810 | 449 | 361 | 61 | 1 |
-----------------------------------------------------------------------
--
-- Haskell: The Craft of Functional Programming
-- Simon Thompson
-- (c) Addison-Wesley, 1996-2011.
--
-- CalcParseLib.hs
--
-- Library functions for parsing
-- Note that this is not a monadic approach to parsing.
--
-----------------------------------------------------------------------
module CalcParseLib where
import Data.Char
infixr 5 >*>
--
-- The type of parsers.
--
type Parse a b = [a] -> [(b,[a])]
--
-- Some basic parsers
--
--
-- Fail on any input.
--
none :: Parse a b
none inp = []
--
-- Succeed, returning the value supplied.
--
succeed :: b -> Parse a b
succeed val inp = [(val,inp)]
--
-- token t recognises t as the first value in the input.
--
token :: Eq a => a -> Parse a a
token t (x:xs)
| t==x = [(t,xs)]
| otherwise = []
token t [] = []
--
-- spot whether an element with a particular property is the
-- first element of input.
--
spot :: (a -> Bool) -> Parse a a
spot p (x:xs)
| p x = [(x,xs)]
| otherwise = []
spot p [] = []
--
-- Examples.
--
bracket = token '('
dig = spot isDigit
-- Succeeds with value given when the input is empty.
endOfInput :: b -> Parse a b
endOfInput x [] = [(x,[])]
endOfInput x _ = []
--
-- Combining parsers
--
--
-- alt p1 p2 recognises anything recognised by p1 or by p2.
--
alt :: Parse a b -> Parse a b -> Parse a b
alt p1 p2 inp = p1 inp ++ p2 inp
exam1 = (bracket `alt` dig) "234"
--
-- Apply one parser then the second to the result(s) of the first.
--
(>*>) :: Parse a b -> Parse a c -> Parse a (b,c)
--
(>*>) p1 p2 inp
= [((y,z),rem2) | (y,rem1) <- p1 inp , (z,rem2) <- p2 rem1 ]
--
-- Transform the results of the parses according to the function.
--
build :: Parse a b -> (b -> c) -> Parse a c
build p f inp = [ (f x,rem) | (x,rem) <- p inp ]
--
-- Recognise a list of objects.
--
--
list :: Parse a b -> Parse a [b]
list p = (succeed [])
`alt`
((p >*> list p) `build` convert)
where
convert = uncurry (:)
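-- For example, following the style of exam1 above, list dig applied to
-- "123ab" should yield every digit prefix, shortest first (the empty parse
-- comes from the `succeed []` alternative):
exam2 = list dig "123ab"
-- [("","123ab"),("1","23ab"),("12","3ab"),("123","ab")]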
--
-- Some variants...
-- A non-empty list of objects.
--
neList :: Parse a b -> Parse a [b]
neList p = (p `build` (:[]))
`alt`
((p >*> list p) `build` (uncurry (:)))
-- Zero or one object.
optional :: Parse a b -> Parse a [b]
optional p = (succeed [])
`alt`
(p `build` (:[]))
-- A given number of objects.
nTimes :: Int -> Parse a b -> Parse a [b]
nTimes 0 p = succeed []
nTimes n p = (p >*> nTimes (n-1) p) `build` (uncurry (:))
--
-- Monadic parsing on top of this library
newtype SParse a b = SParse { sparse :: (Parse a b) }
instance Monad (SParse a) where
return x = SParse (succeed x)
fail s = SParse none
(SParse pr) >>= f
= SParse (\st -> concat [ sparse (f x) rest | (x,rest) <- pr st ])
|
fboyer/craft3e
|
Calculator/CalcParseLib.hs
|
mit
| 2,983 | 0 | 13 | 881 | 1,042 | 585 | 457 | 53 | 1 |
{-# LANGUAGE NoMonomorphismRestriction
#-}
module Plugins.Gallery.Gallery.Manual44 where
import Diagrams.Prelude
circles = (c ||| c) === (c ||| c) where c = circle 1 # fc fuchsia
example = circles # centerXY # view (p2 (-1,-1)) (r2 (1.3, 0.7))
|
andrey013/pluginstest
|
Plugins/Gallery/Gallery/Manual44.hs
|
mit
| 256 | 0 | 10 | 50 | 100 | 56 | 44 | 5 | 1 |
import qualified Data.Map as M
import Helpers (stringToIntList)
digitFactorials :: Integer -> Integer
digitFactorials y = sum [ x | x <- [3..y], isFactorialSum x ]
factorialMap :: Integer -> Maybe Integer
factorialMap x = M.lookup (fromInteger x) m
where m = M.fromList $ (0, 1) : [ (x, product [1..x]) | x <- [1..9]]
isFactorialSum :: Integer -> Bool
isFactorialSum x = let l = map factorialMap $ stringToIntList $ show x
in case (Just (x==) <*> (sum <$> sequence l)) of
                     Just y -> y
                     _      -> False
-- digitFactorials 50000 == 40730
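-- For example, isFactorialSum 145 == True, since 1! + 4! + 5! = 1 + 24 + 120 = 145.
-- 145 and 40585 are the only numbers with this property (1 and 2 are excluded
-- by the lower bound of 3 in digitFactorials), hence the sum 40730 above.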
|
samidarko/euler
|
problem034.hs
|
mit
| 606 | 0 | 12 | 170 | 235 | 124 | 111 | 12 | 2 |
{-# htermination compare :: (Ratio Int) -> (Ratio Int) -> Ordering #-}
|
ComputationWithBoundedResources/ara-inference
|
doc/tpdb_trs/Haskell/full_haskell/Prelude_compare_7.hs
|
mit
| 71 | 0 | 2 | 12 | 3 | 2 | 1 | 1 | 0 |
module Brainfuck.Interpreter (eval_naive,InterpreterResult(..)) where
import Control.Monad.Trans.State
import Brainfuck.Types
import Data.Word
import Data.Int
data InterpreterResult = InterpreterResult {
ir_stdout :: [Word8]
,ir_tape :: ([Word8], Word8, [Word8])
} deriving (Show)
eval_naive :: Program -> [Word8] -> InterpreterResult
eval_naive prog input = snd $ execState (mapM_ go prog) (input, InterpreterResult [] (repeat 0, 0, repeat 0))
go :: BF -> State ([Word8], InterpreterResult) ()
go (ValInc inc) = modify (\(inp, InterpreterResult stdout (back, curr, next)) ->
(inp, InterpreterResult stdout (back, curr + inc, next)))
go (PtrInc off) = modify (\(inp, InterpreterResult stdout tape) ->
(inp, InterpreterResult stdout (move (fromIntegral off) tape)))
go IOOutput = modify (\(inp, InterpreterResult stdout (back, curr, next)) ->
(inp, InterpreterResult (curr : stdout) (back, curr, next)))
go IORead = modify (\(inp, InterpreterResult stdout (back, curr, next)) ->
case inp of
[] -> ([], InterpreterResult stdout (back, 0, next))
(x : xs) -> (xs, InterpreterResult stdout (back, x, next)))
go (While prog) = do (_, InterpreterResult _ (_, curr, _)) <- get
if curr == 0
then return ()
else mapM_ go prog >> go (While prog)
move :: Int -> ([Word8], Word8, [Word8]) -> ([Word8], Word8, [Word8])
move off (back, curr, next)
| off > 0 = let (new_curr:new_next) = drop off (curr : next)
new_back = take off (curr : next)
in (reverse new_back ++ back, new_curr, new_next)
| off == 0 = (back , curr, next)
| off < 0 = let (a,b,c) = move (-off) (next, curr, back)
in (c,b,a)
test_bf_prog =
[ValInc 10
,While [
ValInc (-1)
,PtrInc 1
,ValInc 1
,PtrInc 1
,ValInc 5
,PtrInc (-2)
]
]
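-- A quick sanity check of the interpreter (a sketch, using the names defined
-- above): eval_naive test_bf_prog [] should end with the pointer back on the
-- starting cell (now 0), 10 in the next cell and 50 in the one after that,
-- and an empty ir_stdout since the program never uses IOOutput. Note that
-- ir_stdout accumulates output in reverse order of emission, and ir_tape
-- holds infinite zero-filled lists, so avoid showing it directly.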
|
EXio4/brixy
|
src/Brainfuck/Interpreter.hs
|
mit
| 2,200 | 0 | 13 | 781 | 846 | 469 | 377 | 43 | 3 |
module CR.Util.Aeson
( aesonOpts
)
where
import Data.Aeson.Types
import Data.Char
aesonOpts :: Int -> Options
aesonOpts dropCount =
defaultOptions
{ fieldLabelModifier = drop dropCount
, constructorTagModifier = camelTo2 '_'
, allNullaryToStringTag = True
, omitNothingFields = False
, sumEncoding = ObjectWithSingleField
}
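-- A minimal usage sketch (assumes Data.Aeson.TH and a hypothetical record
-- whose field names carry a three-character prefix):
--
--   data UserInfo = UserInfo { ui_name :: String, ui_age :: Int }
--   $(deriveJSON (aesonOpts 3) ''UserInfo)
--
-- which encodes the fields as "name" and "age" and renders constructor tags
-- of sum types in snake_case.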
|
agrafix/compile-registry
|
src/CR/Util/Aeson.hs
|
mit
| 362 | 0 | 7 | 80 | 78 | 47 | 31 | 12 | 1 |
{-# LANGUAGE PatternSynonyms #-}
-- For HasCallStack compatibility
{-# LANGUAGE ImplicitParams, ConstraintKinds, KindSignatures #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
module JSDOM.Generated.MimeTypeArray
(item, item_, itemUnsafe, itemUnchecked, namedItem, namedItem_,
namedItemUnsafe, namedItemUnchecked, getLength, MimeTypeArray(..),
gTypeMimeTypeArray)
where
import Prelude ((.), (==), (>>=), return, IO, Int, Float, Double, Bool(..), Maybe, maybe, fromIntegral, round, realToFrac, fmap, Show, Read, Eq, Ord, Maybe(..))
import qualified Prelude (error)
import Data.Typeable (Typeable)
import Data.Traversable (mapM)
import Language.Javascript.JSaddle (JSM(..), JSVal(..), JSString, strictEqual, toJSVal, valToStr, valToNumber, valToBool, js, jss, jsf, jsg, function, asyncFunction, new, array, jsUndefined, (!), (!!))
import Data.Int (Int64)
import Data.Word (Word, Word64)
import JSDOM.Types
import Control.Applicative ((<$>))
import Control.Monad (void)
import Control.Lens.Operators ((^.))
import JSDOM.EventTargetClosures (EventName, unsafeEventName, unsafeEventNameAsync)
import JSDOM.Enums
-- | <https://developer.mozilla.org/en-US/docs/Web/API/MimeTypeArray.item Mozilla MimeTypeArray.item documentation>
item :: (MonadDOM m) => MimeTypeArray -> Word -> m (Maybe MimeType)
item self index
= liftDOM ((self ^. jsf "item" [toJSVal index]) >>= fromJSVal)
-- | <https://developer.mozilla.org/en-US/docs/Web/API/MimeTypeArray.item Mozilla MimeTypeArray.item documentation>
item_ :: (MonadDOM m) => MimeTypeArray -> Word -> m ()
item_ self index
= liftDOM (void (self ^. jsf "item" [toJSVal index]))
-- | <https://developer.mozilla.org/en-US/docs/Web/API/MimeTypeArray.item Mozilla MimeTypeArray.item documentation>
itemUnsafe ::
(MonadDOM m, HasCallStack) => MimeTypeArray -> Word -> m MimeType
itemUnsafe self index
= liftDOM
(((self ^. jsf "item" [toJSVal index]) >>= fromJSVal) >>=
maybe (Prelude.error "Nothing to return") return)
-- | <https://developer.mozilla.org/en-US/docs/Web/API/MimeTypeArray.item Mozilla MimeTypeArray.item documentation>
itemUnchecked ::
(MonadDOM m) => MimeTypeArray -> Word -> m MimeType
itemUnchecked self index
= liftDOM
((self ^. jsf "item" [toJSVal index]) >>= fromJSValUnchecked)
-- | <https://developer.mozilla.org/en-US/docs/Web/API/MimeTypeArray.namedItem Mozilla MimeTypeArray.namedItem documentation>
namedItem ::
(MonadDOM m, ToJSString name) =>
MimeTypeArray -> name -> m (Maybe MimeType)
namedItem self name = liftDOM ((self ! name) >>= fromJSVal)
-- | <https://developer.mozilla.org/en-US/docs/Web/API/MimeTypeArray.namedItem Mozilla MimeTypeArray.namedItem documentation>
namedItem_ ::
(MonadDOM m, ToJSString name) => MimeTypeArray -> name -> m ()
namedItem_ self name = liftDOM (void (self ! name))
-- | <https://developer.mozilla.org/en-US/docs/Web/API/MimeTypeArray.namedItem Mozilla MimeTypeArray.namedItem documentation>
namedItemUnsafe ::
(MonadDOM m, ToJSString name, HasCallStack) =>
MimeTypeArray -> name -> m MimeType
namedItemUnsafe self name
= liftDOM
(((self ! name) >>= fromJSVal) >>=
maybe (Prelude.error "Nothing to return") return)
-- | <https://developer.mozilla.org/en-US/docs/Web/API/MimeTypeArray.namedItem Mozilla MimeTypeArray.namedItem documentation>
namedItemUnchecked ::
(MonadDOM m, ToJSString name) =>
MimeTypeArray -> name -> m MimeType
namedItemUnchecked self name
= liftDOM ((self ! name) >>= fromJSValUnchecked)
-- | <https://developer.mozilla.org/en-US/docs/Web/API/MimeTypeArray.length Mozilla MimeTypeArray.length documentation>
getLength :: (MonadDOM m) => MimeTypeArray -> m Word
getLength self
= liftDOM (round <$> ((self ^. js "length") >>= valToNumber))
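-- A usage sketch (assumes a MimeTypeArray value obtained elsewhere, e.g. from
-- the Navigator object):
--
--   listMimeTypes :: MonadDOM m => MimeTypeArray -> m [Maybe MimeType]
--   listMimeTypes arr = do
--     n <- getLength arr
--     if n == 0 then return [] else mapM (item arr) [0 .. n - 1]
--
-- The explicit n == 0 guard avoids the Word underflow in [0 .. n - 1].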
|
ghcjs/jsaddle-dom
|
src/JSDOM/Generated/MimeTypeArray.hs
|
mit
| 3,886 | 0 | 14 | 619 | 930 | 530 | 400 | -1 | -1 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE ScopedTypeVariables #-}
module Codec.Xlsx.Types.Common
( CellRef(..)
, singleCellRef
, fromSingleCellRef
, fromSingleCellRefNoting
, Range
, mkRange
, fromRange
, SqRef(..)
, XlsxText(..)
, xlsxTextToCellValue
, Formula(..)
, CellValue(..)
, ErrorType(..)
, DateBase(..)
, dateFromNumber
, dateToNumber
, int2col
, col2int
) where
import GHC.Generics (Generic)
import Control.Arrow
import Control.DeepSeq (NFData)
import Control.Monad (forM, guard)
import qualified Data.ByteString as BS
import Data.Char
import Data.Ix (inRange)
import qualified Data.Map as Map
import Data.Text (Text)
import qualified Data.Text as T
import qualified Data.Text.Encoding as T
import Data.Time.Calendar (Day, addDays, diffDays, fromGregorian)
import Data.Time.Clock (UTCTime(UTCTime), picosecondsToDiffTime)
import Safe
import Text.XML
import Text.XML.Cursor
import Codec.Xlsx.Parser.Internal
import Codec.Xlsx.Types.RichText
import Codec.Xlsx.Writer.Internal
-- | convert column number (starting from 1) to its textual form (e.g. 3 -> \"C\")
int2col :: Int -> Text
int2col = T.pack . reverse . map int2let . base26
where
int2let 0 = 'Z'
int2let x = chr $ (x - 1) + ord 'A'
base26 0 = []
base26 i = let i' = (i `mod` 26)
i'' = if i' == 0 then 26 else i'
in seq i' (i' : base26 ((i - i'') `div` 26))
-- | reverse to 'int2col'
col2int :: Text -> Int
col2int = T.foldl' (\i c -> i * 26 + let2int c) 0
where
let2int c = 1 + ord c - ord 'A'
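-- For instance: int2col 1 == "A", int2col 26 == "Z", int2col 27 == "AA",
-- and col2int (int2col n) == n for any n >= 1.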
-- | Excel cell or cell range reference (e.g. @E3@)
-- See 18.18.62 @ST_Ref@ (p. 2482)
newtype CellRef = CellRef
{ unCellRef :: Text
} deriving (Eq, Ord, Show, Generic)
instance NFData CellRef
-- | Render position in @(row, col)@ format to an Excel reference.
--
-- > singleCellRef (2, 4) == CellRef "D2"
singleCellRef :: (Int, Int) -> CellRef
singleCellRef = CellRef . singleCellRefRaw
singleCellRefRaw :: (Int, Int) -> Text
singleCellRefRaw (row, col) = T.concat [int2col col, T.pack (show row)]
-- | reverse to 'singleCellRef'
fromSingleCellRef :: CellRef -> Maybe (Int, Int)
fromSingleCellRef = fromSingleCellRefRaw . unCellRef
fromSingleCellRefRaw :: Text -> Maybe (Int, Int)
fromSingleCellRefRaw t = do
let (colT, rowT) = T.span (inRange ('A', 'Z')) t
guard $ not (T.null colT) && not (T.null rowT) && T.all isDigit rowT
row <- decimal rowT
return (row, col2int colT)
-- | reverse to 'singleCellRef', expecting a valid reference and failing with
-- a standard error message like /"Bad cell reference 'XXX'"/
fromSingleCellRefNoting :: CellRef -> (Int, Int)
fromSingleCellRefNoting ref = fromJustNote errMsg $ fromSingleCellRefRaw txt
where
txt = unCellRef ref
errMsg = "Bad cell reference '" ++ T.unpack txt ++ "'"
-- | Excel range (e.g. @D13:H14@), actually stored as a 'CellRef' in
-- xlsx
type Range = CellRef
-- | Render range
--
-- > mkRange (2, 4) (6, 8) == "D2:H6"
mkRange :: (Int, Int) -> (Int, Int) -> Range
mkRange fr to = CellRef $ T.concat [singleCellRefRaw fr, T.pack ":", singleCellRefRaw to]
-- | reverse to 'mkRange'
fromRange :: Range -> Maybe ((Int, Int), (Int, Int))
fromRange r =
case T.split (== ':') (unCellRef r) of
[from, to] -> (,) <$> fromSingleCellRefRaw from <*> fromSingleCellRefRaw to
_ -> Nothing
-- | A sequence of cell references
--
-- See 18.18.76 "ST_Sqref (Reference Sequence)" (p.2488)
newtype SqRef = SqRef [CellRef]
deriving (Eq, Ord, Show, Generic)
instance NFData SqRef
-- | Common type containing either simple string or rich formatted text.
-- Used in @si@, @comment@ and @is@ elements
--
-- E.g. @si@ spec says: "If the string is just a simple string with formatting applied
-- at the cell level, then the String Item (si) should contain a single text
-- element used to express the string. However, if the string in the cell is
-- more complex - i.e., has formatting applied at the character level - then the
-- string item shall consist of multiple rich text runs which collectively are
-- used to express the string.". So we have either a single "Text" field, or
-- else a list of "RichTextRun"s, each of which is some "Text" with layout
-- properties.
--
-- TODO: Currently we do not support @phoneticPr@ (Phonetic Properties, 18.4.3,
-- p. 1723) or @rPh@ (Phonetic Run, 18.4.6, p. 1725).
--
-- Section 18.4.8, "si (String Item)" (p. 1725)
--
-- See @CT_Rst@, p. 3903
data XlsxText = XlsxText Text
| XlsxRichText [RichTextRun]
deriving (Eq, Ord, Show, Generic)
instance NFData XlsxText
xlsxTextToCellValue :: XlsxText -> CellValue
xlsxTextToCellValue (XlsxText txt) = CellText txt
xlsxTextToCellValue (XlsxRichText rich) = CellRich rich
-- | A formula
--
-- See 18.18.35 "ST_Formula (Formula)" (p. 2457)
newtype Formula = Formula {unFormula :: Text}
deriving (Eq, Ord, Show, Generic)
instance NFData Formula
-- | Cell values include text, numbers and booleans,
-- standard includes date format also but actually dates
-- are represented by numbers with a date format assigned
-- to a cell containing it
data CellValue
= CellText Text
| CellDouble Double
| CellBool Bool
| CellRich [RichTextRun]
| CellError ErrorType
deriving (Eq, Ord, Show, Generic)
instance NFData CellValue
-- | The evaluation of an expression can result in an error having one
-- of a number of error values.
--
-- See Annex L, L.2.16.8 "Error values" (p. 4764)
data ErrorType
= ErrorDiv0
-- ^ @#DIV/0!@ - Intended to indicate when any number, including
-- zero, is divided by zero.
| ErrorNA
-- ^ @#N/A@ - Intended to indicate when a designated value is not
-- available. For example, some functions, such as @SUMX2MY2@,
-- perform a series of operations on corresponding elements in two
-- arrays. If those arrays do not have the same number of elements,
-- then for some elements in the longer array, there are no
-- corresponding elements in the shorter one; that is, one or more
-- values in the shorter array are not available. This error value
-- can be produced by calling the function @NA@.
| ErrorName
-- ^ @#NAME?@ - Intended to indicate when what looks like a name is
-- used, but no such name has been defined. For example, @XYZ/3@,
-- where @XYZ@ is not a defined name. @Total is & A10@, where
-- neither @Total@ nor @is@ is a defined name. Presumably, @"Total
-- is " & A10@ was intended. @SUM(A1C10)@, where the range @A1:C10@
-- was intended.
| ErrorNull
-- ^ @#NULL!@ - Intended to indicate when two areas are required to
-- intersect, but do not. For example, In the case of @SUM(B1 C1)@,
-- the space between @B1@ and @C1@ is treated as the binary
-- intersection operator, when a comma was intended.
| ErrorNum
-- ^ @#NUM!@ - Intended to indicate when an argument to a function
-- has a compatible type, but has a value that is outside the domain
-- over which that function is defined. (This is known as a domain
-- error.) For example, Certain calls to @ASIN@, @ATANH@, @FACT@,
-- and @SQRT@ might result in domain errors. Intended to indicate
-- that the result of a function cannot be represented in a value of
-- the specified type, typically due to extreme magnitude. (This is
-- known as a range error.) For example, @FACT(1000)@ might result
-- in a range error.
| ErrorRef
-- ^ @#REF!@ - Intended to indicate when a cell reference is
-- invalid. For example, If a formula contains a reference to a
-- cell, and then the row or column containing that cell is deleted,
-- a @#REF!@ error results. If a worksheet does not support 20,001
-- columns, @OFFSET(A1,0,20000)@ results in a @#REF!@ error.
| ErrorValue
-- ^ @#VALUE!@ - Intended to indicate when an incompatible type
-- argument is passed to a function, or an incompatible type operand
-- is used with an operator. For example, In the case of a function
-- argument, a number was expected, but text was provided. In the
-- case of @1+"ABC"@, the binary addition operator is not defined for
-- text.
deriving (Eq, Ord, Show, Generic)
instance NFData ErrorType
-- | Specifies date base used for conversion of serial values to and
-- from datetime values
--
-- See Annex L, L.2.16.9.1 "Date Conversion for Serial Values" (p. 4765)
data DateBase
= DateBase1900
-- ^ 1900 date base system, the lower limit is January 1, -9999
-- 00:00:00, which has serial value -4346018. The upper-limit is
-- December 31, 9999, 23:59:59, which has serial value
-- 2,958,465.9999884. The base date for this date base system is
-- December 30, 1899, which has a serial value of 0.
| DateBase1904
-- ^ 1904 backward compatibility date-base system, the lower limit
-- is January 1, 1904, 00:00:00, which has serial value 0. The upper
-- limit is December 31, 9999, 23:59:59, which has serial value
-- 2,957,003.9999884. The base date for this date base system is
-- January 1, 1904, which has a serial value of 0.
deriving (Eq, Show, Generic)
instance NFData DateBase
baseDate :: DateBase -> Day
baseDate DateBase1900 = fromGregorian 1899 12 30
baseDate DateBase1904 = fromGregorian 1904 1 1
-- | Converts serial value into datetime according to the specified
-- date base. Because Excel treats 1900 as a leap year even though it isn't,
-- this function converts any numbers that represent some time in /1900-02-29/
-- in Excel to `UTCTime` /1900-03-01 00:00/.
-- See https://docs.microsoft.com/en-gb/office/troubleshoot/excel/wrongly-assumes-1900-is-leap-year for details.
--
-- > show (dateFromNumber DateBase1900 42929.75) == "2017-07-13 18:00:00 UTC"
-- > show (dateFromNumber DateBase1900 60) == "1900-03-01 00:00:00 UTC"
-- > show (dateFromNumber DateBase1900 61) == "1900-03-01 00:00:00 UTC"
dateFromNumber :: forall t. RealFrac t => DateBase -> t -> UTCTime
dateFromNumber b d
  -- 60 is Excel's 1900-02-29 00:00 and 61 is Excel's 1900-03-01
| b == DateBase1900 && d < 60 = getUTCTime (d + 1)
| b == DateBase1900 && d >= 60 && d < 61 = getUTCTime (61 :: t)
| otherwise = getUTCTime d
where
getUTCTime n =
let
(numberOfDays, fractionOfOneDay) = properFraction n
day = addDays numberOfDays $ baseDate b
diffTime = picosecondsToDiffTime (round (fractionOfOneDay * 24*60*60*1E12))
in
UTCTime day diffTime
-- | Converts datetime into serial value.
-- Because Excel treats 1900 as a leap year even though it isn't,
-- the numbers that represent times in /1900-02-29/ in Excel, in the range /[60, 61[/,
-- are never generated by this function for `DateBase1900`. This means that
-- under those conditions this is not an inverse of `dateFromNumber`.
-- See https://docs.microsoft.com/en-gb/office/troubleshoot/excel/wrongly-assumes-1900-is-leap-year for details.
dateToNumber :: Fractional a => DateBase -> UTCTime -> a
dateToNumber b (UTCTime day diffTime) = numberOfDays + fractionOfOneDay
where
numberOfDays = fromIntegral (diffDays excel1900CorrectedDay $ baseDate b)
fractionOfOneDay = realToFrac diffTime / (24 * 60 * 60)
marchFirst1900 = fromGregorian 1900 3 1
excel1900CorrectedDay = if day < marchFirst1900
then addDays (-1) day
else day
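-- For instance (mirroring the dateFromNumber examples above), the two
-- conversions should round-trip away from the phantom leap day:
--
-- > dateToNumber DateBase1900 (dateFromNumber DateBase1900 42929.75) == 42929.75
--
-- whereas serial values in the range [60, 61[ cannot round-trip, because
-- dateFromNumber maps them all to 1900-03-01.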
{-------------------------------------------------------------------------------
Parsing
-------------------------------------------------------------------------------}
-- | See @CT_Rst@, p. 3903
instance FromCursor XlsxText where
fromCursor cur = do
let
ts = cur $/ element (n_ "t") >=> contentOrEmpty
rs = cur $/ element (n_ "r") >=> fromCursor
case (ts,rs) of
([t], []) ->
return $ XlsxText t
([], _:_) ->
return $ XlsxRichText rs
_ ->
fail "invalid item"
instance FromXenoNode XlsxText where
fromXenoNode root = do
(mCh, rs) <-
collectChildren root $ (,) <$> maybeChild "t" <*> fromChildList "r"
mT <- mapM contentX mCh
case mT of
Just t -> return $ XlsxText t
Nothing ->
case rs of
[] -> Left $ "missing rich text subelements"
_ -> return $ XlsxRichText rs
instance FromAttrVal CellRef where
fromAttrVal = fmap (first CellRef) . fromAttrVal
instance FromAttrBs CellRef where
-- we presume that cell references contain only latin letters,
-- numbers and colon
fromAttrBs = pure . CellRef . T.decodeLatin1
instance FromAttrVal SqRef where
fromAttrVal t = do
rs <- mapM (fmap fst . fromAttrVal) $ T.split (== ' ') t
readSuccess $ SqRef rs
instance FromAttrBs SqRef where
fromAttrBs bs = do
-- split on space
rs <- forM (BS.split 32 bs) fromAttrBs
return $ SqRef rs
-- | See @ST_Formula@, p. 3873
instance FromCursor Formula where
fromCursor cur = [Formula . T.concat $ cur $/ content]
instance FromXenoNode Formula where
fromXenoNode = fmap Formula . contentX
instance FromAttrVal Formula where
fromAttrVal t = readSuccess $ Formula t
instance FromAttrBs Formula where
fromAttrBs = fmap Formula . fromAttrBs
instance FromAttrVal ErrorType where
fromAttrVal "#DIV/0!" = readSuccess ErrorDiv0
fromAttrVal "#N/A" = readSuccess ErrorNA
fromAttrVal "#NAME?" = readSuccess ErrorName
fromAttrVal "#NULL!" = readSuccess ErrorNull
fromAttrVal "#NUM!" = readSuccess ErrorNum
fromAttrVal "#REF!" = readSuccess ErrorRef
fromAttrVal "#VALUE!" = readSuccess ErrorValue
fromAttrVal t = invalidText "ErrorType" t
instance FromAttrBs ErrorType where
fromAttrBs "#DIV/0!" = return ErrorDiv0
fromAttrBs "#N/A" = return ErrorNA
fromAttrBs "#NAME?" = return ErrorName
fromAttrBs "#NULL!" = return ErrorNull
fromAttrBs "#NUM!" = return ErrorNum
fromAttrBs "#REF!" = return ErrorRef
fromAttrBs "#VALUE!" = return ErrorValue
fromAttrBs x = unexpectedAttrBs "ErrorType" x
{-------------------------------------------------------------------------------
Rendering
-------------------------------------------------------------------------------}
-- | See @CT_Rst@, p. 3903
instance ToElement XlsxText where
toElement nm si = Element {
elementName = nm
, elementAttributes = Map.empty
, elementNodes = map NodeElement $
case si of
XlsxText text -> [elementContent "t" text]
XlsxRichText rich -> map (toElement "r") rich
}
instance ToAttrVal CellRef where
toAttrVal = toAttrVal . unCellRef
-- See 18.18.76, "ST_Sqref (Reference Sequence)", p. 2488.
instance ToAttrVal SqRef where
toAttrVal (SqRef refs) = T.intercalate " " $ map toAttrVal refs
-- | See @ST_Formula@, p. 3873
instance ToElement Formula where
toElement nm (Formula txt) = elementContent nm txt
instance ToAttrVal ErrorType where
toAttrVal ErrorDiv0 = "#DIV/0!"
toAttrVal ErrorNA = "#N/A"
toAttrVal ErrorName = "#NAME?"
toAttrVal ErrorNull = "#NULL!"
toAttrVal ErrorNum = "#NUM!"
toAttrVal ErrorRef = "#REF!"
toAttrVal ErrorValue = "#VALUE!"
|
qrilka/xlsx
|
src/Codec/Xlsx/Types/Common.hs
|
mit
| 14,997 | 0 | 18 | 3,057 | 2,718 | 1,485 | 1,233 | 221 | 4 |