code | repo_name | path | license | size | n_ast_errors | ast_max_depth | n_whitespaces | n_ast_nodes | n_ast_terminals | n_ast_nonterminals | loc | cycloplexity
---|---|---|---|---|---|---|---|---|---|---|---|---
stringlengths 5 to 1.03M | stringlengths 5 to 90 | stringlengths 4 to 158 | stringclasses 15 values | int64 5 to 1.03M | int64 0 to 53.9k | int64 2 to 4.17k | int64 0 to 365k | int64 3 to 317k | int64 1 to 171k | int64 1 to 146k | int64 -1 to 37.3k | int64 -1 to 1.31k
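Each sample below is one row of this table: the contents of the `code` column (a Haskell source file) followed by its metadata columns. A minimal sketch of the row schema as a Haskell record is shown here for orientation; the field names are assumptions that simply mirror the column names above and are not part of the dataset itself.

```haskell
-- Illustrative only: one dataset row, with fields mirroring the columns above.
data CodeSample = CodeSample
  { code             :: String    -- the source file contents
  , repoName         :: String    -- e.g. "jstolarek/lattice-structure-hs"
  , path             :: FilePath  -- path of the file within the repository
  , license          :: String    -- one of the 15 license classes
  , size             :: Int       -- file size (presumably in bytes)
  , nAstErrors       :: Int
  , astMaxDepth      :: Int
  , nWhitespaces     :: Int
  , nAstNodes        :: Int
  , nAstTerminals    :: Int
  , nAstNonterminals :: Int
  , loc              :: Int       -- presumably -1 when unavailable
  , cycloplexity     :: Int       -- presumably -1 when unavailable
  } deriving (Show)
```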
module Main (
main
) where
import Criterion.Config
import Criterion.Main
import System.Random
import qualified Signal.Wavelet.C1Bench as C1
import qualified Signal.Wavelet.Eval.CommonBench as EC
import qualified Signal.Wavelet.Eval1Bench as E1
import qualified Signal.Wavelet.Eval2Bench as E2
import qualified Signal.Wavelet.List.CommonBench as LC
import qualified Signal.Wavelet.List1Bench as L1
import qualified Signal.Wavelet.List2Bench as L2
import qualified Signal.Wavelet.Repa1Bench as R1
import qualified Signal.Wavelet.Repa2Bench as R2
import qualified Signal.Wavelet.Repa3Bench as R3
import qualified Signal.Wavelet.Vector1Bench as V1
import qualified Signal.Wavelet.Repa.LibraryBench as RL
main :: IO ()
main = return (mkStdGen 1232134332) >>=
defaultMainWith benchConfig (return ()) . benchmarks
benchmarks :: RandomGen g => g -> [Benchmark]
benchmarks gen =
let lsSize = 6
sigSize = 2 * 8192
lDataDwt = L1.dataDwt gen lsSize sigSize
cDataDwt = C1.dataDwt lDataDwt
rDataDwt = R1.dataDwt lDataDwt
vDataDwt = V1.dataDwt lDataDwt
cDataLattice = C1.dataLattice lDataDwt
vDataLattice = V1.dataLattice lDataDwt
rDataLattice = R1.dataLattice lDataDwt
rDataToPairs = R1.dataToPairs lDataDwt
rDataFromPairs = R1.dataFromPairs lDataDwt
rDataCslCsr = R1.dataCslCsr lDataDwt
rDataCslNCsrN = R1.dataCslNCsrN lDataDwt
rDataExtend = R2.dataExtend lDataDwt
r3DataLattice = R3.dataLattice lDataDwt
lDataLattice = LC.dataLattice lDataDwt
lDataExtend = LC.dataExtend lDataDwt
rDataCompute = RL.dataCompute lDataDwt
rDataCopy = RL.dataCopy lDataDwt
rDataExtract = RL.dataExtract lDataDwt
rDataAppend = RL.dataAppend lDataDwt
rDataBckperm = RL.dataBckperm lDataDwt
rDataMap = RL.dataMap lDataDwt
rDataTraverse = RL.dataTraverse lDataDwt
in [ -- See: Note [C/FFI criterion bug]
bgroup "DWT" . (:[]) $ bcompare
[
bench "C1 Seq" $ whnf C1.benchDwt cDataDwt
, bench "Vector1 Seq" $ whnf V1.benchDwt vDataDwt
, bench "Repa1 Seq" $ whnf R1.benchDwtS rDataDwt
, bench "Repa1 Par" $ whnf R1.benchDwtP rDataDwt
, bench "Repa2 Seq" $ whnf R2.benchDwtS rDataDwt
, bench "Repa2 Par" $ whnf R2.benchDwtP rDataDwt
, bench "Repa3 Seq" $ whnf R3.benchDwtS rDataDwt
, bench "Repa3 Par" $ whnf R3.benchDwtP rDataDwt
, bench "List1 Seq" $ nf L1.benchDwt lDataDwt
, bench "List2 Seq" $ nf L2.benchDwt lDataDwt
, bench "Eval1 Par" $ nf E1.benchDwt lDataDwt
, bench "Eval2 Par" $ nf E2.benchDwt lDataDwt
]
, bgroup "IDWT" . (:[]) $ bcompare
[
bench "C1 Seq" $ whnf C1.benchIdwt cDataDwt
, bench "Vector1 Seq" $ whnf V1.benchIdwt vDataDwt
, bench "Repa1 Seq" $ whnf R1.benchIdwtS rDataDwt
, bench "Repa1 Par" $ whnf R1.benchIdwtP rDataDwt
, bench "Repa2 Seq" $ whnf R2.benchIdwtS rDataDwt
, bench "Repa2 Par" $ whnf R2.benchIdwtP rDataDwt
, bench "Repa3 Seq" $ whnf R3.benchIdwtS rDataDwt
, bench "Repa3 Par" $ whnf R3.benchIdwtP rDataDwt
, bench "List1 Seq" $ nf L1.benchIdwt lDataDwt
, bench "List2 Seq" $ nf L2.benchIdwt lDataDwt
, bench "Eval1 Par" $ nf E1.benchIdwt lDataDwt
, bench "Eval2 Par" $ nf E2.benchIdwt lDataDwt
]
, bgroup "C1"
[
bench "Lattice Seq" $ whnf C1.benchLattice cDataLattice
]
, bgroup "Vector1"
[
bench "Lattice Seq" $ whnf V1.benchLattice vDataLattice
]
, bgroup "Repa1"
[
bench "Lattice Seq" $ whnf R1.benchLatticeS rDataLattice
, bench "Lattice Par" $ whnf R1.benchLatticeP rDataLattice
, bench "ToPairs Seq" $ whnf R1.benchToPairsS rDataToPairs
, bench "ToPairs Par" $ whnf R1.benchToPairsP rDataToPairs
, bench "FromPairs Seq" $ whnf R1.benchFromPairsS rDataFromPairs
, bench "FromPairs Par" $ whnf R1.benchFromPairsP rDataFromPairs
, bench "Csl Seq" $ whnf R1.benchCslS rDataCslCsr
, bench "Csl Par" $ whnf R1.benchCslP rDataCslCsr
, bench "CslP Seq" $ whnf R1.benchCslSP rDataCslCsr
, bench "CslP Par" $ whnf R1.benchCslPP rDataCslCsr
, bench "Csr Seq" $ whnf R1.benchCsrS rDataCslCsr
, bench "Csr Par" $ whnf R1.benchCsrP rDataCslCsr
, bench "CsrP Seq" $ whnf R1.benchCsrSP rDataCslCsr
, bench "CsrP Par" $ whnf R1.benchCsrPP rDataCslCsr
, bench "CslN Seq" $ whnf R1.benchCslNS rDataCslNCsrN
, bench "CslN Par" $ whnf R1.benchCslNP rDataCslNCsrN
, bench "CsrN Seq" $ whnf R1.benchCsrNS rDataCslNCsrN
, bench "CsrN Par" $ whnf R1.benchCsrNP rDataCslNCsrN
, bench "Lat+Frc+Csl Seq" $ whnf R1.benchLatticeForceCslS rDataLattice
, bench "Lat+Frc+Csl Par" $ whnf R1.benchLatticeForceCslP rDataLattice
, bench "Lattice+Csl Seq" $ whnf R1.benchLatticeCslS rDataLattice
, bench "Lattice+Csl Par" $ whnf R1.benchLatticeCslP rDataLattice
]
, bgroup "Repa2"
[
bench "Lattice Seq" $ whnf R2.benchLatticeS rDataLattice
, bench "Lattice Par" $ whnf R2.benchLatticeP rDataLattice
, bench "Trim+lattice Seq"$ whnf R2.benchTrimLatticeS rDataLattice
, bench "Trim+lattice Par"$ whnf R2.benchTrimLatticeP rDataLattice
, bench "ExtendFront Seq" $ whnf R2.benchExtendFrontS rDataExtend
, bench "ExtendFront Par" $ whnf R2.benchExtendFrontP rDataExtend
, bench "ExtendEnd Seq" $ whnf R2.benchExtendEndS rDataExtend
, bench "ExtendEnd Par" $ whnf R2.benchExtendEndP rDataExtend
]
, bgroup "Repa3"
[
bench "Lattice Seq" $ whnf R3.benchLatticeS r3DataLattice
, bench "Lattice Par" $ whnf R3.benchLatticeP r3DataLattice
]
, bgroup "List.Common"
[
bench "Lattice Seq" $ nf LC.benchLattice lDataLattice
, bench "ExtendFront Seq" $ nf LC.benchExtendFront lDataExtend
, bench "ExtendEnd Seq" $ nf LC.benchExtendEnd lDataExtend
]
, bgroup "Eval.Common"
[
bench "Lattice Par" $ nf EC.benchLattice lDataLattice
]
, bgroup "Repa built-in functions"
[
bench "computeS" $ whnf RL.benchComputeS rDataCompute
, bench "computeP" $ whnfIO (RL.benchComputeP rDataCompute)
, bench "copyS" $ whnf RL.benchCopyS rDataCopy
, bench "copyP" $ whnfIO (RL.benchCopyP rDataCopy)
, bench "extractS" $ whnf RL.benchExtractS rDataExtract
, bench "extractP" $ whnfIO (RL.benchExtractP rDataExtract)
, bench "appendS" $ whnf RL.benchAppendS rDataAppend
, bench "appendP" $ whnfIO (RL.benchAppendP rDataAppend)
, bench "backpermuteS" $ whnf RL.benchBckpermS rDataBckperm
, bench "backpermuteP" $ whnfIO (RL.benchBckpermP rDataBckperm)
, bench "mapS" $ whnf RL.benchMapS rDataMap
, bench "mapP" $ whnfIO (RL.benchMapP rDataMap)
, bench "traverseS" $ whnf RL.benchTraverseS rDataTraverse
, bench "traverseP" $ whnfIO (RL.benchTraverseP rDataTraverse)
]
]
benchConfig :: Config
benchConfig = defaultConfig {
cfgPerformGC = ljust True
}
-- Note [C/FFI criterion bug]
-- ~~~~~~~~~~~~~~~~~~~~~~~~~~
--
-- When benchmarking C bindings with criterion, the first benchmark returns a
-- correct result, but all subsequent benchmarks that use the FFI estimate their
-- run time to be longer. This does not always happen and seems to depend on the
-- CPU and the size of the processed data; these are possibly cache effects. The
-- bug does not occur on some machines. If you observe any of the following, your
-- results are affected by the bug:
--
-- a) time needed to run IDWT/C1 benchmark is significantly longer than DWT/C1
-- b) C1/Lattice takes longer than Vector1/Lattice
| jstolarek/lattice-structure-hs | bench/MainBenchmarkSuite.hs | bsd-3-clause | 8,573 | 0 | 14 | 2,655 | 1,915 | 963 | 952 | 146 | 1 |
{- PiForall language, OPLSS, Summer 2013 -}
{-# LANGUAGE TypeSynonymInstances,ExistentialQuantification,FlexibleInstances, UndecidableInstances, FlexibleContexts,
ViewPatterns, DefaultSignatures
#-}
{-# OPTIONS_GHC -Wall -fno-warn-unused-matches -fno-warn-name-shadowing #-}
-- | A Pretty Printer.
module PrettyPrint(Disp(..), D(..)) where
import Syntax
import Unbound.LocallyNameless hiding (empty,Data,Refl)
import Unbound.LocallyNameless.Alpha
import Unbound.LocallyNameless.Ops
import Control.Monad.Identity
import Control.Monad.Reader
import Text.PrettyPrint as PP
import Text.ParserCombinators.Parsec.Pos (SourcePos, sourceName, sourceLine, sourceColumn)
import Text.ParserCombinators.Parsec.Error (ParseError)
import Control.Applicative ((<$>), (<*>))
import qualified Data.Set as S
-- | The 'Disp' class governs types which can be turned into 'Doc's
class Disp d where
disp :: d -> Doc
default disp :: (Display d, Alpha d) => d -> Doc
disp = cleverDisp
-- This function tries to pretty-print terms using the lowest numbers in
-- the names of the variables (i.e. as close as possible to what the user
-- originally wrote).
cleverDisp :: (Display d, Alpha d) => d -> Doc
cleverDisp d =
runIdentity (runReaderT (display d) initDI)
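-- Example usage (illustrative, not part of the original module): any type
-- with a 'Disp' instance can be rendered to a 'String' with the underlying
-- pretty-printing library, e.g.
--
-- >>> render (disp (Just (3 :: Int)))
-- "Just 3"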
instance Disp Term
instance Rep a => Disp (Name a)
instance Disp Telescope
instance Disp Pattern
instance Disp Match
instance Disp String where
disp = text
instance Disp Int where
disp = text . show
instance Disp Integer where
disp = text . show
instance Disp Double where
disp = text . show
instance Disp Float where
disp = text . show
instance Disp Char where
disp = text . show
instance Disp Bool where
disp = text . show
instance Disp a => Disp (Maybe a) where
disp (Just a) = text "Just" <+> disp a
disp Nothing = text "Nothing"
instance (Disp a, Disp b) => Disp (Either a b) where
disp (Left a) = text "Left" <+> disp a
disp (Right a) = text "Right" <+> disp a
instance Disp ParseError where
disp = text . show
instance Disp SourcePos where
disp p = text (sourceName p) <> colon <> int (sourceLine p) <>
colon <> int (sourceColumn p) <> colon
-- | Error message quoting
data D = DS String -- ^ String literal
| forall a . Disp a => DD a -- ^ Displayable value
instance Disp D where
disp (DS s) = text s
disp (DD d) = nest 2 $ disp d
-- might be a hack to do the nesting here???
instance Disp [D] where
disp dl = sep $ map disp dl
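-- Example (illustrative): an error message interleaving literal text with
-- displayable values, e.g.
--
--   disp [DS "Expected type", DD expectedTy, DS "but found", DD actualTy]
--
-- where 'expectedTy' and 'actualTy' stand for hypothetical 'Term' values.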
-------------------------------------------------------------------------
-- Modules and Decls
-------------------------------------------------------------------------
instance Disp Module where
disp m = text "module" <+> disp (moduleName m) <+> text "where" $$
vcat (map disp (moduleImports m)) $$
disp (moduleEntries m)
instance Disp ModuleImport where
disp (ModuleImport i) = text "import" <+> disp i
instance Disp [Decl] where
disp = vcat . map disp
instance Disp Decl where
disp (Def n r@(Ind bnd _)) |
name2String(fst(fst(unsafeUnbind bnd)))==name2String n = disp r
disp (Def n term) = disp n <+> text "=" <+> disp term
disp (Sig n ty) =
disp n <+> text ":" <+> disp ty
disp (Axiom n ty) =
text "axiom"
<+> disp n <+> text ":" <+> disp ty
disp (Data n params lev constructors) =
hang (text "data" <+> disp n <+> disp params
<+> colon <+> text "Type" <+> text (show lev)
<+> text "where")
2 (vcat $ map disp constructors)
instance Disp ConstructorDef where
disp (ConstructorDef _ c Empty) = text c
disp (ConstructorDef _ c tele) = text c <+> text "of" <+> disp tele
-------------------------------------------------------------------------
-- The Display class
-------------------------------------------------------------------------
-- | The data structure for information about the display
--
data DispInfo = DI
{
showAnnots :: Bool, -- ^ should we show the annotations?
dispAvoid :: S.Set AnyName -- ^ names that have been used
}
instance LFresh (Reader DispInfo) where
lfresh nm = do
let s = name2String nm
di <- ask;
return $ head (filter (\x -> AnyName x `S.notMember` (dispAvoid di))
(map (makeName s) [0..]))
getAvoids = dispAvoid <$> ask
avoid names = local upd where
upd di = di { dispAvoid =
(S.fromList names) `S.union` (dispAvoid di) }
-- | An empty 'DispInfo' context
initDI :: DispInfo
initDI = DI False S.empty
type M a = (ReaderT DispInfo Identity) a
-- | The 'Display' class is like the 'Disp' class. It qualifies
-- types that can be turned into 'Doc'. The difference is that the
-- type might need the 'DispInfo' context to control the parameters
-- of pretty-printing
class (Alpha t) => Display t where
-- | Convert a value to a 'Doc'.
display :: t -> M Doc
instance Display String where
display = return . text
instance Display Int where
display = return . text . show
instance Display Integer where
display = return . text . show
instance Display Double where
display = return . text . show
instance Display Float where
display = return . text . show
instance Display Char where
display = return . text . show
instance Display Bool where
display = return . text . show
-------------------------------------------------------------------------
-------------------------------------------------------------------------
bindParens :: Doc -> Doc
bindParens d = d
mandatoryBindParens :: Doc -> Doc
mandatoryBindParens d = parens d
instance Display Annot where
display (Annot Nothing) = return $ empty
display (Annot (Just x)) = do
st <- ask
if (showAnnots st) then
(text ":" <+>) <$> (display x)
else return $ empty
instance Display Term where
display (Var n) = display n
display (isNumeral -> Just i) = display i
display (TCon n args) = do
dn <- display n
dargs <- mapM display args
return $ dn <+> hsep dargs
display (DCon n args annot) = do
dn <- display n
dargs <- mapM display args
dannot <- display annot
return $ dn <+> hsep dargs <+> dannot
display (Type n) = if n == 0 then
return $ text "Type"
else
return $ text "Type" <+> (text $ show n)
display (TySquash t) = do
dt <- display t
return $ text "[|" <+> dt <+> text "|]"
display (Quotient t r) = do
dt <- display t
dr <- display r
return $ dt <+> text "//" <+> dr
display (QBox x (Annot mty)) = do
dx <- display x
case mty of
Nothing -> return $ text "<" <+> dx <+> text ">"
Just ty -> do
dty <- display ty
return $ text "<" <+> dx <+> text ":" <+> dty <+> text ">"
display (QElim p s rsp x) = do
dp <- display p
ds <- display s
drsp <- display rsp
dx <- display x
return $ text "expose" <+> dx <+> text "under" <+> dp <+> text "with" <+> ds <+> text "by" <+> drsp
display (Pi bnd) = do
lunbind bnd $ \((n,a), b) -> do
da <- display (unembed a)
dn <- display n
db <- display b
let lhs = mandatoryBindParens $
if (n `elem` fv b) then
(dn <+> colon <+> da)
else
da
return $ lhs <+> text "->" <+> db
display (PiC bnd) = do
lunbind bnd $ \((n,a), (c, b)) -> do
da <- display (unembed a)
dn <- display n
db <- display b
dc <- display c
let lhs = mandatoryBindParens $
if (n `elem` fv b) then
(dn <+> colon <+> da)
else
da
return $ lhs <+> text "|" <+> dc <+> text "->" <+> db
display a@(Lam b) = do
(binds, body) <- gatherBinders a
return $ hang (sep binds) 2 body
display (Smaller a b) = do
da <- display a
db <- display b
return $ da <+> text "<" <+> db
display (Trivial _) = do
return $ text "trivial"
display (Induction _ xs) = do
return $ text "induction"
display (Refl ann evidence) = do
dev <- display evidence
return $ text "refl" <+> dev
display (Ind binding annot) =
lunbind binding $ \ ((n,x),body) -> do
dn <- display n
-- return dn
dx <- display x
db <- display body
dann <- display annot
return $ text "ind" <+> dn <+> bindParens dx <+> text "="
<+> db <+> dann
display (App f x) = do
df <- display f
dx <- display x
let wrapf f = case f of
Var _ -> id
App _ _ -> id
Pos _ a -> wrapf a
Ann _ _ -> id
TrustMe _ -> id
Hole _ _ -> braces
_ -> parens
return $ wrapf f df <+> dx
display (Pos _ e) = display e
display (Let bnd) = do
lunbind bnd $ \ ((x,a) , b) -> do
da <- display (unembed a)
dx <- display x
db <- display b
return $ sep [text "let" <+> bindParens dx
<+> text "=" <+> da
<+> text "in",
db]
display (Case scrut alts annot) = do
dscrut <- display scrut
dalts <- mapM display alts
dannot <- display annot
return $ text "case" <+> dscrut <+> text "of" $$
(nest 2 $ vcat $ dalts) <+> dannot
display (Subst a b mbnd) = do
da <- display a
db <- display b
dat <- maybe (return (text "")) (\ bnd -> do
lunbind bnd $ \(xs,c) -> do
dxs <- display xs
dc <- display c
return $ text "at" <+> dxs <+> text "." <+> dc) mbnd
return $ fsep [text "subst" <+> da,
text "by" <+> db,
dat]
display (TyEq a b s t) = do
let disp' (x, Annot Nothing) = display x
disp' (x, Annot (Just ty)) = do
dx <- display x
dty <- display ty
return $ dx <+> text ":" <+> dty
da <- disp' (a, s)
db <- disp' (b, t)
return $ da <+> text "=" <+> db
display (Contra ty mty) = do
dty <- display ty
da <- display mty
return $ text "contra" <+> dty <+> da
display (Ann a b) = do
da <- display a
db <- display b
return $ parens (da <+> text ":" <+> db)
display (TrustMe ma) = do
da <- display ma
return $ text "TRUSTME" <+> da
display (Hole n (Annot mTy)) = do
dn <- display n
da <- maybe (return $ text "??") display mTy
return $ text "{" <+> dn <+> text ":" <+> da <+> text "}"
display (Sigma bnd) =
lunbind bnd $ \ ((x,unembed->tyA),tyB) -> do
dx <- display x
dA <- display tyA
dB <- display tyB
return $ text "{" <+> dx <+> text ":" <+> dA
<+> text "|" <+> dB <+> text "}"
display (Prod a b ann) = do
da <- display a
db <- display b
dann <- display ann
return $ parens (da <+> text "," <+> db) <+> dann
display (Pcase a bnd ann) = do
da <- display a
dann <- display ann
lunbind bnd $ \ ((x,y), body) -> do
dx <- display x
dy <- display y
dbody <- display body
return $ text "pcase" <+> da <+> text "of"
<+> text "(" <+> dx <+> text "," <+> dy <+> text ")"
<+> text "->" <+> dbody <+> dann
display (TyUnit) = return $ text "One"
display (TyEmpty) = return $ text "Zero"
display (LitUnit) = return $ text "tt"
instance Display Match where
display (Match bd) =
lunbind bd $ \ (pat, ubd) -> do
dpat <- display pat
dubd <- display ubd
return $ hang (dpat <+> text "->") 2 dubd
instance Display Pattern where
display (PatCon c []) = (display c)
display (PatCon c args) =
parens <$> ((<+>) <$> (display c) <*> (hsep <$> (mapM display args)))
display (PatVar x) = display x
instance Display Telescope where
display Empty = return empty
display (Cons bnd) = goTele bnd
goTele :: (IsEmbed t, Alpha t, Display t1,
Display (Embedded t), Display t2) =>
Rebind (t1, t) t2 -> M Doc
goTele bnd = do
let ((n, unembed->ty), tele) = unrebind bnd
dn <- display n
dty <- display ty
dtele <- display tele
return $ mandatoryBindParens (dn <+> colon <+> dty) <+> dtele
gatherBinders :: Term -> M ([Doc], Doc)
gatherBinders (Lam b) =
lunbind b $ \((n,unembed->ma), body) -> do
dn <- display n
dt <- display ma
(rest, body) <- gatherBinders body
return $ (text "\\" <> bindParens (dn <+> dt) <+> text "." : rest, body)
gatherBinders (Ind binding ann) =
lunbind binding $ \ ((n,x),body) -> do
dn <- display n
dx <- display x
(rest,body) <- gatherBinders body
return (text "ind" <+> dn <+> bindParens dx <+> text "=" : rest,
body)
gatherBinders body = do
db <- display body
return ([], db)
-- Assumes that all terms were opened safely earlier.
instance Rep a => Display (Name a) where
display n = return $ (text . name2String) n
instance Disp [Term] where
disp = vcat . map disp
instance Disp [(Name Term,Term)] where
disp = vcat . map disp
instance Disp (TName,Term) where
disp (n,t) = parens $ (disp n) <> comma <+> disp t
| jonsterling/Luitzen | src/PrettyPrint.hs | bsd-3-clause | 13,162 | 0 | 23 | 3,881 | 5,034 | 2,455 | 2,579 | 341 | 1 |
-- |
-- Module      : NestedOps
-- Copyright : (c) 2018 Harendra Kumar
--
-- License : MIT
-- Maintainer : [email protected]
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE ScopedTypeVariables #-}
module NestedOps where
import Control.Exception (try)
import GHC.Exception (ErrorCall)
import qualified Streamly as S hiding (runStream)
import qualified Streamly.Prelude as S
linearCount :: Int
linearCount = 100000
-- double nested loop
nestedCount2 :: Int
-- nestedCount2 = round (fromIntegral linearCount**(1/2::Double))
nestedCount2 = 100
-- triple nested loop
nestedCount3 :: Int
nestedCount3 = round (fromIntegral linearCount**(1/3::Double))
-------------------------------------------------------------------------------
-- Stream generation and elimination
-------------------------------------------------------------------------------
type Stream m a = S.SerialT m a
{-# INLINE source #-}
source :: (S.MonadAsync m, S.IsStream t) => Int -> Int -> t m Int
source = sourceUnfoldrM
{-# INLINE sourceUnfoldrM #-}
sourceUnfoldrM :: (S.IsStream t, S.MonadAsync m) => Int -> Int -> t m Int
sourceUnfoldrM n value = S.serially $ S.unfoldrM step n
where
step cnt =
if cnt > n + value
then return Nothing
else return (Just (cnt, cnt + 1))
{-# INLINE sourceUnfoldr #-}
sourceUnfoldr :: (Monad m, S.IsStream t) => Int -> Int -> t m Int
sourceUnfoldr start n = S.unfoldr step start
where
step cnt =
if cnt > start + n
then Nothing
else Just (cnt, cnt + 1)
{-# INLINE runStream #-}
runStream :: Monad m => Stream m a -> m ()
runStream = S.drain
{-# INLINE runToList #-}
runToList :: Monad m => Stream m a -> m [a]
runToList = S.toList
-------------------------------------------------------------------------------
-- Benchmark ops
-------------------------------------------------------------------------------
{-# INLINE toNullAp #-}
toNullAp
:: (S.IsStream t, S.MonadAsync m, Monad (t m))
=> (t m Int -> S.SerialT m Int) -> Int -> m ()
toNullAp t start = runStream . t $
(+) <$> source start nestedCount2 <*> source start nestedCount2
{-# INLINE toNull #-}
toNull
:: (S.IsStream t, S.MonadAsync m, Monad (t m))
=> (t m Int -> S.SerialT m Int) -> Int -> m ()
toNull t start = runStream . t $ do
x <- source start nestedCount2
y <- source start nestedCount2
return $ x + y
{-# INLINE toNull3 #-}
toNull3
:: (S.IsStream t, S.MonadAsync m, Monad (t m))
=> (t m Int -> S.SerialT m Int) -> Int -> m ()
toNull3 t start = runStream . t $ do
x <- source start nestedCount3
y <- source start nestedCount3
z <- source start nestedCount3
return $ x + y + z
{-# INLINE toList #-}
toList
:: (S.IsStream t, S.MonadAsync m, Monad (t m))
=> (t m Int -> S.SerialT m Int) -> Int -> m [Int]
toList t start = runToList . t $ do
x <- source start nestedCount2
y <- source start nestedCount2
return $ x + y
{-# INLINE toListSome #-}
toListSome
:: (S.IsStream t, S.MonadAsync m, Monad (t m))
=> (t m Int -> S.SerialT m Int) -> Int -> m [Int]
toListSome t start =
runToList . t $ S.take 1000 $ do
x <- source start nestedCount2
y <- source start nestedCount2
return $ x + y
{-# INLINE filterAllOut #-}
filterAllOut
:: (S.IsStream t, S.MonadAsync m, Monad (t m))
=> (t m Int -> S.SerialT m Int) -> Int -> m ()
filterAllOut t start = runStream . t $ do
x <- source start nestedCount2
y <- source start nestedCount2
let s = x + y
if s < 0
then return s
else S.nil
{-# INLINE filterAllIn #-}
filterAllIn
:: (S.IsStream t, S.MonadAsync m, Monad (t m))
=> (t m Int -> S.SerialT m Int) -> Int -> m ()
filterAllIn t start = runStream . t $ do
x <- source start nestedCount2
y <- source start nestedCount2
let s = x + y
if s > 0
then return s
else S.nil
{-# INLINE filterSome #-}
filterSome
:: (S.IsStream t, S.MonadAsync m, Monad (t m))
=> (t m Int -> S.SerialT m Int) -> Int -> m ()
filterSome t start = runStream . t $ do
x <- source start nestedCount2
y <- source start nestedCount2
let s = x + y
if s > 1100000
then return s
else S.nil
{-# INLINE breakAfterSome #-}
breakAfterSome
:: (S.IsStream t, Monad (t IO))
=> (t IO Int -> S.SerialT IO Int) -> Int -> IO ()
breakAfterSome t start = do
(_ :: Either ErrorCall ()) <- try $ runStream . t $ do
x <- source start nestedCount2
y <- source start nestedCount2
let s = x + y
if s > 1100000
then error "break"
else return s
return ()
| harendra-kumar/asyncly | benchmark/NestedOps.hs | bsd-3-clause | 4,633 | 0 | 14 | 1,137 | 1,666 | 842 | 824 | 123 | 2 |
{-# LANGUAGE OverloadedStrings #-}
-- reference to L.isPrefixOf
-- see Data.List (delete, deleteBy)
module Data.Carbonara.LazyByteString where
import qualified Data.ByteString.Char8 as S (singleton)
import qualified Data.ByteString.Internal as S (w2c,c2w)
import Data.Int (Int64)
import qualified Data.ByteString.Lazy.Internal as L (ByteString(Chunk,Empty))
import qualified Data.ByteString.Lazy.Char8 as L (ByteString, any, append, cons, drop
, dropWhile, filter, isPrefixOf, pack
, singleton, snoc, splitWith, take, takeWhile)
-- Delete the first occurrence of a character, analogous to Data.List.delete.
delete :: Char -> L.ByteString -> L.ByteString
delete _ L.Empty = L.Empty
delete c s = L.takeWhile (/= c) s `L.append` L.drop 1 (L.dropWhile (/= c) s)
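-- Example (illustrative): like Data.List.delete, only the first occurrence
-- is removed:
--
-- >>> delete 'a' (L.pack "banana")
-- "bnana"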
| szehk/Haskell-Carbonara-Library | src/Data/LazyByteString.hs | bsd-3-clause | 757 | 0 | 8 | 144 | 202 | 129 | 73 | 13 | 2 |
{-# LANGUAGE OverloadedStrings, FlexibleContexts, PackageImports #-}
module Network.XMPiPe.Core.S2S.Client (
-- * Types and Values
Mpi(..), Jid(..), Tags(..), tagsNull, tagsType,
-- * Functions
starttls, sasl, begin, input, output,
) where
import "monads-tf" Control.Monad.State
import "monads-tf" Control.Monad.Error
import Data.Pipe
import Text.XML.Pipe
import qualified Data.ByteString as BS
import SaslClient hiding (sasl)
import qualified SaslClient as S
import Xmpp hiding (input, output)
input :: Monad m => [Xmlns] -> Pipe BS.ByteString Mpi m ()
input = inputMpi
output :: Monad m => Pipe Mpi BS.ByteString m ()
output = outputMpi
starttls :: Monad m =>
BS.ByteString -> BS.ByteString -> Pipe BS.ByteString BS.ByteString m ()
starttls fr to = inputP3 =$= processTls fr to =$= outputS
processTls :: Monad m => BS.ByteString -> BS.ByteString -> Pipe Xmpp Xmpp m ()
processTls fr to = do
yield XCDecl
yield $ XCBegin [(From, fr), (To, to), (TagRaw $ nullQ "version", "1.0")]
procTls
procTls :: Monad m => Pipe Xmpp Xmpp m ()
procTls = await >>= \mx -> case mx of
Just (XCBegin _as) -> procTls
Just (XCFeatures [FtStarttls _]) -> do
yield XCStarttls
procTls
Just XCProceed -> return ()
Just _ -> return ()
_ -> return ()
sasl :: (
MonadState m, SaslState (StateType m),
MonadError m, Error (ErrorType m) ) =>
BS.ByteString -> BS.ByteString -> Pipe BS.ByteString BS.ByteString m ()
sasl fr to = inputP3 =$= processSasl fr to =$= outputS
processSasl :: (
MonadState m, SaslState (StateType m),
MonadError m, Error (ErrorType m) ) =>
BS.ByteString -> BS.ByteString -> Pipe Xmpp Xmpp m ()
processSasl fr to = do
yield XCDecl
yield $ XCBegin [ (From, fr), (To, to), (TagRaw $ nullQ "version", "1.0")]
procSasl
procSasl :: (
MonadState m, SaslState (StateType m),
MonadError m, Error (ErrorType m)
) => Pipe Xmpp Xmpp m ()
procSasl = await >>= \mx -> case mx of
Just (XCBegin _as) -> procSasl
Just (XCFeatures [FtMechanisms ["EXTERNAL"]]) -> do
st <- lift $ gets getSaslState
lift . modify . putSaslState $ ("username", "") : st
S.sasl "EXTERNAL"
lift . modify $ putSaslState st
_ -> return ()
begin :: Monad m =>
BS.ByteString -> BS.ByteString -> Pipe BS.ByteString BS.ByteString m [Xmlns]
begin fr to = inputFeature =@= process fr to =$= outputS
process :: Monad m => BS.ByteString -> BS.ByteString -> Pipe Xmpp Xmpp m ()
process fr to = do
yield XCDecl
yield $ XCBegin [(From, fr), (To, to), (TagRaw $ nullQ "version", "1.0")]
Just (XCBegin _as) <- await
Just (XCFeatures []) <- await
_ <- await
return ()
| YoshikuniJujo/xmpipe | core/Network/XMPiPe/Core/S2S/Client.hs | bsd-3-clause | 2,569 | 28 | 15 | 475 | 1,112 | 574 | 538 | 69 | 5 |
module PyHint.Message (
Message(..),
) where
import Language.Py.SrcLocation (SrcSpan)
data Message = Message String String SrcSpan deriving (Show)
| codeq/pyhint | src/PyHint/Message.hs | bsd-3-clause | 151 | 0 | 6 | 22 | 47 | 29 | 18 | 4 | 0 |
{-# LANGUAGE PackageImports #-}
import "monads-tf" Control.Monad.Trans
import Control.Applicative
import Data.Conduit
import qualified Data.Conduit.List as CL
import Data.Conduit.Lazy
import Data.Time
times :: Int -> ConduitM i UTCTime IO ()
times 0 = return ()
times n = lift getCurrentTime >>= yield >> times (n - 1)
| YoshikuniJujo/simple-pipe | try/testConduitLazy.hs | bsd-3-clause | 321 | 0 | 8 | 48 | 101 | 56 | 45 | 10 | 1 |
module Text.Highlighter.Lexers.Modelica (lexer) where
import qualified Text.Highlighter.Lexers.Html as Html
import Text.Regex.PCRE.Light
import Text.Highlighter.Types
lexer :: Lexer
lexer = Lexer
{ lName = "Modelica"
, lAliases = ["modelica"]
, lExtensions = [".mo"]
, lMimetypes = ["text/x-modelica"]
, lStart = root'
, lFlags = [caseless, dotall]
}
functions' :: TokenMatcher
functions' =
[ tok "(abs|acos|acosh|asin|asinh|atan|atan2|atan3|ceil|cos|cosh|cross|div|exp|floor|log|log10|mod|rem|sign|sin|sinh|size|sqrt|tan|tanh|zeros)\\b" (Arbitrary "Name" :. Arbitrary "Function")
]
classes' :: TokenMatcher
classes' =
[ tok "(block|class|connector|function|model|package|record|type)\\b" (Arbitrary "Name" :. Arbitrary "Class")
]
statements' :: TokenMatcher
statements' =
[ tokNext "\"" (Arbitrary "Literal" :. Arbitrary "String") (GoTo string')
, tok "(\\d+\\.\\d*|\\.\\d+|\\d+|\\d.)[eE][+-]?\\d+[lL]?" (Arbitrary "Literal" :. Arbitrary "Number" :. Arbitrary "Float")
, tok "(\\d+\\.\\d*|\\.\\d+)" (Arbitrary "Literal" :. Arbitrary "Number" :. Arbitrary "Float")
, tok "\\d+[Ll]?" (Arbitrary "Literal" :. Arbitrary "Number" :. Arbitrary "Integer")
, tok "[\126!%^&*+=|?:<>/-]" (Arbitrary "Operator")
, tok "[()\\[\\]{},.;]" (Arbitrary "Punctuation")
, tok "(true|false|NULL|Real|Integer|Boolean)\\b" (Arbitrary "Name" :. Arbitrary "Builtin")
, tok "([a-zA-Z_][\\w]*|'[a-zA-Z_\\+\\-\\*\\/\\^][\\w]*')(\\.([a-zA-Z_][\\w]*|'[a-zA-Z_\\+\\-\\*\\/\\^][\\w]*'))+" (Arbitrary "Name" :. Arbitrary "Class")
, tok "('[\\w\\+\\-\\*\\/\\^]+'|\\w+)" (Arbitrary "Name")
]
whitespace' :: TokenMatcher
whitespace' =
[ tok "\\n" (Arbitrary "Text")
, tok "\\s+" (Arbitrary "Text")
, tok "\\\\\\n" (Arbitrary "Text")
, tok "//(\\n|(.|\\n)*?[^\\\\]\\n)" (Arbitrary "Comment")
, tok "/(\\\\\\n)?[*](.|\\n)*?[*](\\\\\\n)?/" (Arbitrary "Comment")
]
htmlContent' :: TokenMatcher
htmlContent' =
[ tokNext "<\\s*/\\s*html\\s*>" (Arbitrary "Name" :. Arbitrary "Tag") Pop
, tok ".+?(?=<\\s*/\\s*html\\s*>)" (Using Html.lexer)
]
keywords' :: TokenMatcher
keywords' =
[ tok "(algorithm|annotation|break|connect|constant|constrainedby|discrete|each|else|elseif|elsewhen|encapsulated|enumeration|end|equation|exit|expandable|extends|external|false|final|flow|for|if|import|in|inner|input|loop|nondiscrete|outer|output|parameter|partial|protected|public|redeclare|replaceable|stream|time|then|true|when|while|within)\\b" (Arbitrary "Keyword")
]
operators' :: TokenMatcher
operators' =
[ tok "(and|assert|cardinality|change|delay|der|edge|initial|noEvent|not|or|pre|reinit|return|sample|smooth|terminal|terminate)\\b" (Arbitrary "Name" :. Arbitrary "Builtin")
]
root' :: TokenMatcher
root' =
[ anyOf whitespace'
, anyOf keywords'
, anyOf functions'
, anyOf operators'
, anyOf classes'
, tokNext "(\"<html>|<html>)" (Arbitrary "Name" :. Arbitrary "Tag") (GoTo htmlContent')
, anyOf statements'
]
string' :: TokenMatcher
string' =
[ tokNext "\"" (Arbitrary "Literal" :. Arbitrary "String") Pop
, tok "\\\\([\\\\abfnrtv\"\\']|x[a-fA-F0-9]{2,4}|[0-7]{1,3})" (Arbitrary "Literal" :. Arbitrary "String" :. Arbitrary "Escape")
, tok "[^\\\\\"\\n]+" (Arbitrary "Literal" :. Arbitrary "String")
, tok "\\\\\\n" (Arbitrary "Literal" :. Arbitrary "String")
, tok "\\\\" (Arbitrary "Literal" :. Arbitrary "String")
]
| chemist/highlighter | src/Text/Highlighter/Lexers/Modelica.hs | bsd-3-clause | 3,475 | 0 | 10 | 516 | 751 | 392 | 359 | 62 | 1 |
module Spec.Tag where
data Tag = Tag{ tName :: String
, tAuthor :: String
, tContact :: String
}
deriving(Show)
| oldmanmike/vulkan | generate/src/Spec/Tag.hs | bsd-3-clause | 163 | 0 | 8 | 71 | 40 | 25 | 15 | 5 | 0 |
-- |
-- Module: Scheduling
-- Description: Rule scheduling
-- Copyright: (c) 2013 Tom Hawkins & Lee Pike
--
-- Algorithms for scheduling rules in Atom
module Language.Atom.Scheduling
( schedule
, Schedule
, reportSchedule
) where
import Text.Printf
import Data.List
import Language.Atom.Analysis
import Language.Atom.Elaboration
import Language.Atom.UeMap
-- | Schedule expressed as a 'UeMap' and a list of (period, phase, rules).
type Schedule = (UeMap, [(Int, Int, [Rule])])
schedule :: [Rule] -> UeMap -> Schedule
schedule rules' mp = (mp, concatMap spread periods)
where
rules = [ r | r@(Rule _ _ _ _ _ _ _ _) <- rules' ]
-- Algorithm for assigning rules to phases for a given period
-- (assuming they aren't given an exact phase):
-- 1. List the rules by their offsets, highest first.
-- 2. If the list is empty, stop.
-- 3. Otherwise, take the head of the list and assign its phase as follows:
-- find the set of phases containing the minimum number of rules such that
-- they are at least as large as the rule's offset. Then take the smallest
-- of those phases.
-- 4. Go to (2).
-- Algorithm properties: for each period,
-- A. Each rule is scheduled no earlier than its offset.
-- B. The number of rules in the most heavily loaded phase is the minimum
-- over all possible schedules that satisfy (A).
-- XXX Check if this is true.
-- C. The sum of the differences between each rule's offset and its
-- scheduled phase is the minimum over all schedules satisfying (A) and (B).
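-- A small worked example (illustrative): with period 4, no exact-phase
-- rules, and three rules whose minimum offsets are 2, 0 and 0, the rules
-- are ordered by offset, highest first. The offset-2 rule may only go in
-- phase 2 or 3; both are empty, so it gets phase 2. Each offset-0 rule
-- then goes in the smallest of the least-loaded phases, i.e. phases 0
-- and 1, so no phase ends up with more than one rule.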
spread :: (Int, [Rule]) -> [(Int, Int, [Rule])]
spread (period, rules_) =
placeRules (placeExactRules (replicate period []) exactRules)
orderedByPhase
where
(minRules,exactRules) = partition (\r -> case rulePhase r of
MinPhase _ -> True
ExactPhase _ -> False) rules_
placeExactRules :: [[Rule]] -> [Rule] -> [[Rule]]
placeExactRules ls [] = ls
placeExactRules ls (r:rst) = placeExactRules (insertAt (getPh r) r ls)
rst
orderedByPhase :: [Rule]
orderedByPhase = sortBy (\r0 r1 -> compare (getPh r1) (getPh r0)) minRules
getPh r = case rulePhase r of
MinPhase i -> i
ExactPhase i -> i
-- Initially, ls contains all the exactPhase rules. We put rules in those
-- lists according to the algorithm, and then filter out the phase-lists
-- with no rules.
placeRules :: [[Rule]] -> [Rule] -> [(Int, Int, [Rule])]
placeRules ls [] = filter (\(_,_,rls) -> not (null rls))
(zip3 (repeat period) [0..(period-1)] ls)
placeRules ls (r:rst) = placeRules (insertAt (lub r ls) r ls) rst
lub :: Rule -> [[Rule]] -> Int
lub r ls = let minI = getPh r
lub' i [] = i -- unreachable. Included to prevent missing
-- cases ghc warnings.
lub' i ls_ | (head ls_) == minimum ls_ = i
| otherwise = lub' (i+1) (tail ls_)
in lub' minI (drop minI $ map length ls)
-- Cons rule r onto the list at index i in ls.
insertAt :: Int -> Rule -> [[Rule]] -> [[Rule]]
insertAt i r ls = (take i ls) ++ ((r:(ls !! i)):(drop (i+1) ls))
periods = foldl grow [] [ (rulePeriod r, r) | r <- rules ]
grow :: [(Int, [Rule])] -> (Int, Rule) -> [(Int, [Rule])]
grow [] (a, b) = [(a, [b])]
grow ((a, bs):rest) (a', b) | a' == a = (a, b : bs) : rest
| otherwise = (a, bs) : grow rest (a', b)
-- | Generate a rule scheduling report for the given schedule.
reportSchedule :: Schedule -> String
reportSchedule (mp, schedule_) = concat
[ "Rule Scheduling Report\n\n"
, "Period Phase Exprs Rule\n"
, "------ ----- ----- ----\n"
, concatMap (reportPeriod mp) schedule_
, " -----\n"
, printf " %5i\n" $ sum $ map (ruleComplexity mp) rules
, "\n"
, "Hierarchical Expression Count\n\n"
, " Total Local Rule\n"
, " ------ ------ ----\n"
, reportUsage "" $ usage mp rules
, "\n"
]
where
rules = concat $ [ r | (_, _, r) <- schedule_ ]
reportPeriod :: UeMap -> (Int, Int, [Rule]) -> String
reportPeriod mp (period, phase, rules) = concatMap reportRule rules
where
reportRule :: Rule -> String
reportRule rule = printf "%6i %5i %5i %s\n" period phase (ruleComplexity mp rule) (show rule)
data Usage = Usage String Int [Usage] deriving Eq
instance Ord Usage where compare (Usage a _ _) (Usage b _ _) = compare a b
reportUsage :: String -> Usage -> String
reportUsage i node@(Usage name n subs) = printf " %6i %6i %s\n" (totalComplexity node) n (i ++ name) ++ concatMap (reportUsage (" " ++ i)) subs
totalComplexity :: Usage -> Int
totalComplexity (Usage _ n subs) = n + sum (map totalComplexity subs)
usage :: UeMap -> [Rule] -> Usage
usage mp = head . foldl insertUsage [] . map (usage' mp)
usage' :: UeMap -> Rule -> Usage
usage' mp rule = f $ split $ ruleName rule
where
f :: [String] -> Usage
f [] = undefined
f [name] = Usage name (ruleComplexity mp rule) []
f (name:names) = Usage name 0 [f names]
split :: String -> [String]
split "" = []
split s = a : if null b then [] else split (tail b) where (a,b) = span (/= '.') s
insertUsage :: [Usage] -> Usage -> [Usage]
insertUsage [] u = [u]
insertUsage (a@(Usage n1 i1 s1) : rest) b@(Usage n2 i2 s2) | n1 == n2 = Usage n1 (max i1 i2) (sort $ foldl insertUsage s1 s2) : rest
| otherwise = a : insertUsage rest b
| Copilot-Language/atom_for_copilot | Language/Atom/Scheduling.hs | bsd-3-clause | 5,686 | 0 | 18 | 1,642 | 1,811 | 982 | 829 | 86 | 7 |
-- munt - cryptographic function composition
import qualified Options.Applicative as Opts
import qualified Options.Applicative.Help.Chunk as OAHC
import qualified System.IO as IO
import qualified System.Process as Proc
import Data.List (intercalate)
import Options.Applicative ((<>))
import Text.Printf (printf)
import Munt.Types
import qualified Munt.App as App
readCliOpts :: IO Options
readCliOpts =
Opts.execParser $ Opts.info (Opts.helper <*> cliOpts)
( Opts.fullDesc
<> Opts.header "munt - cryptographic function composition"
<> Opts.progDesc "Transform input with cryptographic functions."
<> Opts.footerDoc (OAHC.unChunk (OAHC.stringChunk fnDoc)) )
where
cliOpts = Options
<$> Opts.argument Opts.str
( Opts.metavar "[ sources => ] action [ -> action -> ... ]"
<> Opts.value ""
<> Opts.help "Function expression to use for transformation."
)
<*> Opts.switch
( Opts.long "test"
<> Opts.short 't'
<> Opts.help "Run tests." )
ind = 10
fnDoc = printf "Available functions:\n%s" $ intercalate "" sections
list t xs = printf " %-7s %s\n" t $ drop ind (indentedList ind 80 xs)
sections = map (uncurry list)
[ ("Encode", ["b64e", "b64d"])
, ("Format", ["bin", "dec", "hex", "unbin", "undec", "unhex"])
, ("Math", ["+", "-", "*", "/", "%", "^"])
, ("Bitwise",["and", "or", "xor", "not", "rsh", "lsh"])
, ("Util", ["id", "trace"])
, ("List", ["append", "drop", "head", "init", "last", "len", "prepend",
"reverse", "tail", "take"])
, ("Stream", ["after", "before", "bytes", "concat", "consume", "count",
"dup", "filter", "flip", "lines", "repeat", "unlines",
"unwords", "words"])
, ("Cipher", ["aes128d", "aes128e", "aes192d", "aes192e", "aes256d",
"aes256e", "bfe", "bfd", "bf64e", "bf64d", "bf128e",
"bf128d", "bf256e", "bf256d", "bf448e", "bf448d", "dese",
"desd", "deseee3e", "deseee3d", "desede3e", "desede3d",
"deseee2e", "deseee2d", "desede2e", "desede2d", "cam128e",
"cam128d"])
, ("Hash", ["blake2s256", "blake2s224", "blake2sp256", "blake2sp224",
"blake2b512", "blake2bp512", "md2", "md4", "md5", "sha1",
"sha224", "sha256", "sha384", "sha512", "sha512t256",
"sha512t224", "sha3512", "sha3384", "sha3256", "sha3224",
"keccak512", "keccak384", "keccak256", "keccak224",
"ripemd160", "skein256256", "skein256224", "skein512512",
"skein512384", "skein512256", "skein512224", "whirlpool"])
]
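-- Example invocation (illustrative; "sha256" and "hex" are taken from the
-- function list above, and the pipeline syntax follows the metavar shown in
-- the help text):
--
--   $ echo -n "hello" | munt "sha256 -> hex"
--
-- which reads stdin, hashes it with SHA-256 and writes the hex-encoded digest
-- to stdout.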
-- | Display a list of strings indented and wrapped to fit the given width
indentedList :: Int -> Int -> [String] -> String
indentedList indentBy width =
intercalate "\n" . reverse. foldl addTerm [indent]
where addTerm (r:rs) x =
let r' = printf "%s %s" r x
in if length r' < width then (r':rs)
else addTerm (indent:r:rs) x
indent = take indentBy $ repeat ' '
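-- Example (illustrative):
--
--   indentedList 2 12 ["foo","bar","baz","quux"]
--
-- yields the two lines "   foo bar" and "   baz quux" (a 2-space indent plus
-- the separating space), wrapping so that each line stays under 12 characters.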
main :: IO ()
main = readCliOpts >>= \o ->
let
expression = optExpression o
testMode = optRunTests o
in do
IO.hSetBuffering IO.stdin IO.NoBuffering
IO.hSetBuffering IO.stdout IO.NoBuffering
App.evaluate expression IO.stdin IO.stdout
putStrLn ""
| shmookey/bc-tools | src/munt.hs | bsd-3-clause | 3,474 | 0 | 14 | 954 | 952 | 551 | 401 | 70 | 2 |
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE TypeOperators #-}
module Evaluator.Types where
import Protolude
import Evaluator.BuiltIns (builtIns)
import Evaluator.Object
import Parser.AST (Ident)
import Control.Monad.Trans.Class (MonadTrans(..))
newtype EvalError = EvalError Text
deriving (Show, Eq, Typeable)
instance Exception EvalError
newtype EvalState = EvalState EnvRef
getEnvRef :: Monad m => EvaluatorT m EnvRef
getEnvRef = do
EvalState ref <- get
return ref
setEnvRef :: Monad m => EnvRef -> EvaluatorT m ()
setEnvRef ref = put $ EvalState ref
createEmptyState :: IO EvalState
createEmptyState = EvalState <$> (emptyEnv >>= flip wrapEnv builtIns)
newtype EvaluatorT m a = EvaluatorT
{ runEvaluatorT :: StateT EvalState (ExceptT EvalError m) a }
instance Functor m => Functor (EvaluatorT m) where
fmap f (EvaluatorT e) = EvaluatorT $ fmap f e
instance Monad m => Applicative (EvaluatorT m) where
pure = EvaluatorT . pure
EvaluatorT mf <*> EvaluatorT ma = EvaluatorT $ mf <*> ma
instance Monad m => Monad (EvaluatorT m) where
EvaluatorT ma >>= f = EvaluatorT $ ma >>= runEvaluatorT . f
instance Monad m => MonadState EvalState (EvaluatorT m) where
get = EvaluatorT get
put = EvaluatorT . put
instance Monad m => MonadError EvalError (EvaluatorT m) where
throwError = EvaluatorT . throwError
EvaluatorT e `catchError` f = EvaluatorT $ e `catchError` (runEvaluatorT . f)
instance MonadTrans EvaluatorT where
lift = EvaluatorT . lift . lift
type Evaluator = EvaluatorT IO
execEvaluatorT :: Monad m => EvaluatorT m a -> EvalState -> m (Either EvalError (a, EvalState))
execEvaluatorT = (runExceptT .) . runStateT . runEvaluatorT
| noraesae/monkey-hs | lib/Evaluator/Types.hs | bsd-3-clause | 1,702 | 0 | 11 | 312 | 567 | 294 | 273 | 40 | 1 |
{-# LANGUAGE DeriveDataTypeable #-}
{- |
Module : ./Common/ProofTree.hs
Description : a simple proof tree
Copyright : (c) DFKI GmbH, Uni Bremen 2002-2008
License : GPLv2 or higher, see LICENSE.txt
Maintainer : [email protected]
Stability : provisional
Portability : portable
Datatype for storing of the proof tree
-}
module Common.ProofTree where
import Data.Data
{- |
Datatype for storing of the proof tree. The Show class is instantiated.
-}
data ProofTree = ProofTree String deriving (Eq, Ord, Typeable, Data)
instance Show ProofTree where
show (ProofTree st) = st
emptyProofTree :: ProofTree
emptyProofTree = ProofTree ""
| spechub/Hets | Common/ProofTree.hs | gpl-2.0 | 665 | 0 | 8 | 125 | 77 | 43 | 34 | 8 | 1 |
{-# LANGUAGE DeriveDataTypeable, DeriveGeneric #-}
-- |
-- Module : Statistics.Distribution.ChiSquared
-- Copyright : (c) 2010 Alexey Khudyakov
-- License : BSD3
--
-- Maintainer : [email protected]
-- Stability : experimental
-- Portability : portable
--
-- The chi-squared distribution. This is a continuous probability
-- distribution of sum of squares of k independent standard normal
-- distributions. It's commonly used in statistical tests
module Statistics.Distribution.ChiSquared (
ChiSquared
-- Constructors
, chiSquared
, chiSquaredNDF
) where
import Data.Aeson (FromJSON, ToJSON)
import Data.Binary (Binary)
import Data.Data (Data, Typeable)
import GHC.Generics (Generic)
import Numeric.SpecFunctions (
incompleteGamma,invIncompleteGamma,logGamma,digamma)
import qualified Statistics.Distribution as D
import qualified System.Random.MWC.Distributions as MWC
import Data.Binary (put, get)
-- | Chi-squared distribution
newtype ChiSquared = ChiSquared Int
deriving (Eq, Read, Show, Typeable, Data, Generic)
instance FromJSON ChiSquared
instance ToJSON ChiSquared
instance Binary ChiSquared where
get = fmap ChiSquared get
put (ChiSquared x) = put x
-- | Get number of degrees of freedom
chiSquaredNDF :: ChiSquared -> Int
chiSquaredNDF (ChiSquared ndf) = ndf
-- | Construct chi-squared distribution. Number of degrees of freedom
-- must be positive.
chiSquared :: Int -> ChiSquared
chiSquared n
| n <= 0 = error $
"Statistics.Distribution.ChiSquared.chiSquared: N.D.F. must be positive. Got " ++ show n
| otherwise = ChiSquared n
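-- Example (illustrative): for 2 degrees of freedom the chi-squared
-- distribution coincides with an exponential distribution of mean 2, so
--
-- > density (chiSquared 2) 1      -- exp(-1/2)/2 ~ 0.303
-- > cumulative (chiSquared 2) 1   -- 1 - exp(-1/2) ~ 0.393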
instance D.Distribution ChiSquared where
cumulative = cumulative
instance D.ContDistr ChiSquared where
density = density
quantile = quantile
instance D.Mean ChiSquared where
mean (ChiSquared ndf) = fromIntegral ndf
instance D.Variance ChiSquared where
variance (ChiSquared ndf) = fromIntegral (2*ndf)
instance D.MaybeMean ChiSquared where
maybeMean = Just . D.mean
instance D.MaybeVariance ChiSquared where
maybeStdDev = Just . D.stdDev
maybeVariance = Just . D.variance
instance D.Entropy ChiSquared where
entropy (ChiSquared ndf) =
let kHalf = 0.5 * fromIntegral ndf in
kHalf
+ log 2
+ logGamma kHalf
+ (1-kHalf) * digamma kHalf
instance D.MaybeEntropy ChiSquared where
maybeEntropy = Just . D.entropy
instance D.ContGen ChiSquared where
genContVar (ChiSquared n) = MWC.chiSquare n
cumulative :: ChiSquared -> Double -> Double
cumulative chi x
| x <= 0 = 0
| otherwise = incompleteGamma (ndf/2) (x/2)
where
ndf = fromIntegral $ chiSquaredNDF chi
density :: ChiSquared -> Double -> Double
density chi x
| x <= 0 = 0
| otherwise = exp $ log x * (ndf2 - 1) - x2 - logGamma ndf2 - log 2 * ndf2
where
ndf = fromIntegral $ chiSquaredNDF chi
ndf2 = ndf/2
x2 = x/2
quantile :: ChiSquared -> Double -> Double
quantile (ChiSquared ndf) p
| p == 0 = 0
| p == 1 = 1/0
| p > 0 && p < 1 = 2 * invIncompleteGamma (fromIntegral ndf / 2) p
| otherwise =
error $ "Statistics.Distribution.ChiSquared.quantile: p must be in [0,1] range. Got: "++show p
| fpco/statistics | Statistics/Distribution/ChiSquared.hs | bsd-2-clause | 3,226 | 0 | 12 | 698 | 867 | 454 | 413 | 72 | 1 |
{-# LANGUAGE TupleSections #-}
import CoreSyn
import CoreUtils
import Id
import Type
import MkCore
import CallArity (callArityRHS)
import MkId
import SysTools
import DynFlags
import ErrUtils
import Outputable
import TysWiredIn
import Literal
import GHC
import Control.Monad
import Control.Monad.IO.Class
import System.Environment( getArgs )
import VarSet
import PprCore
import Unique
import UniqFM
import CoreLint
import FastString
-- Build IDs: use mkSysLocal with mkBuiltinUnique, which is more predictable than proper uniques
go, go2, x, d, n, y, z, scrutf, scruta :: Id
[go, go2, x,d, n, y, z, scrutf, scruta, f] = mkTestIds
(words "go go2 x d n y z scrutf scruta f")
[ mkFunTys [intTy, intTy] intTy
, mkFunTys [intTy, intTy] intTy
, intTy
, mkFunTys [intTy] intTy
, mkFunTys [intTy] intTy
, intTy
, intTy
, mkFunTys [boolTy] boolTy
, boolTy
, mkFunTys [intTy, intTy] intTy -- protoypical external function
]
exprs :: [(String, CoreExpr)]
exprs =
[ ("go2",) $
mkRFun go [x]
(mkLet d (mkACase (Var go `mkVarApps` [x])
(mkLams [y] $ Var y)
) $ mkLams [z] $ Var d `mkVarApps` [x]) $
go `mkLApps` [0, 0]
, ("nested_go2",) $
mkRFun go [x]
(mkLet n (mkACase (Var go `mkVarApps` [x])
(mkLams [y] $ Var y)) $
mkACase (Var n) $
mkFun go2 [y]
(mkLet d
(mkACase (Var go `mkVarApps` [x])
(mkLams [y] $ Var y) ) $
mkLams [z] $ Var d `mkVarApps` [x] )$
Var go2 `mkApps` [mkLit 1] ) $
go `mkLApps` [0, 0]
, ("d0 (go 2 would be bad)",) $
mkRFun go [x]
(mkLet d (mkACase (Var go `mkVarApps` [x])
(mkLams [y] $ Var y)
) $
mkLams [z] $ Var f `mkApps` [ Var d `mkVarApps` [x], Var d `mkVarApps` [x] ]) $
go `mkLApps` [0, 0]
, ("go2 (in case crut)",) $
mkRFun go [x]
(mkLet d (mkACase (Var go `mkVarApps` [x])
(mkLams [y] $ Var y)
) $ mkLams [z] $ Var d `mkVarApps` [x]) $
Case (go `mkLApps` [0, 0]) z intTy
[(DEFAULT, [], Var f `mkVarApps` [z,z])]
, ("go2 (in function call)",) $
mkRFun go [x]
(mkLet d (mkACase (Var go `mkVarApps` [x])
(mkLams [y] $ Var y)
) $ mkLams [z] $ Var d `mkVarApps` [x]) $
f `mkLApps` [0] `mkApps` [go `mkLApps` [0, 0]]
, ("go2 (using surrounding interesting let)",) $
mkLet n (f `mkLApps` [0]) $
mkRFun go [x]
(mkLet d (mkACase (Var go `mkVarApps` [x])
(mkLams [y] $ Var y)
) $ mkLams [z] $ Var d `mkVarApps` [x]) $
Var f `mkApps` [n `mkLApps` [0], go `mkLApps` [0, 0]]
, ("go2 (using surrounding boring let)",) $
mkLet z (mkLit 0) $
mkRFun go [x]
(mkLet d (mkACase (Var go `mkVarApps` [x])
(mkLams [y] $ Var y)
) $ mkLams [z] $ Var d `mkVarApps` [x]) $
Var f `mkApps` [Var z, go `mkLApps` [0, 0]]
, ("two calls, one from let and from body (d 1 would be bad)",) $
mkLet d (mkACase (mkLams [y] $ mkLit 0) (mkLams [y] $ mkLit 0)) $
mkFun go [x,y] (mkVarApps (Var d) [x]) $
mkApps (Var d) [mkLApps go [1,2]]
, ("a thunk in a recursion (d 1 would be bad)",) $
mkRLet n (mkACase (mkLams [y] $ mkLit 0) (Var n)) $
mkRLet d (mkACase (mkLams [y] $ mkLit 0) (Var d)) $
Var n `mkApps` [d `mkLApps` [0]]
, ("two thunks, one called multiple times (both arity 1 would be bad!)",) $
mkLet n (mkACase (mkLams [y] $ mkLit 0) (f `mkLApps` [0])) $
mkLet d (mkACase (mkLams [y] $ mkLit 0) (f `mkLApps` [0])) $
Var n `mkApps` [Var d `mkApps` [Var d `mkApps` [mkLit 0]]]
, ("two functions, not thunks",) $
mkLet go (mkLams [x] (mkACase (mkLams [y] $ mkLit 0) (Var f `mkVarApps` [x]))) $
mkLet go2 (mkLams [x] (mkACase (mkLams [y] $ mkLit 0) (Var f `mkVarApps` [x]))) $
Var go `mkApps` [go2 `mkLApps` [0,1], mkLit 0]
, ("a thunk, called multiple times via a forking recursion (d 1 would be bad!)",) $
mkLet d (mkACase (mkLams [y] $ mkLit 0) (f `mkLApps` [0])) $
mkRLet go2 (mkLams [x] (mkACase (Var go2 `mkApps` [Var go2 `mkApps` [mkLit 0, mkLit 0]]) (Var d))) $
go2 `mkLApps` [0,1]
, ("a function, one called multiple times via a forking recursion",) $
mkLet go (mkLams [x] (mkACase (mkLams [y] $ mkLit 0) (Var f `mkVarApps` [x]))) $
mkRLet go2 (mkLams [x] (mkACase (Var go2 `mkApps` [Var go2 `mkApps` [mkLit 0, mkLit 0]]) (go `mkLApps` [0]))) $
go2 `mkLApps` [0,1]
, ("two functions (recursive)",) $
mkRLet go (mkLams [x] (mkACase (mkLams [y] $ mkLit 0) (Var go `mkVarApps` [x]))) $
mkRLet go2 (mkLams [x] (mkACase (mkLams [y] $ mkLit 0) (Var go2 `mkVarApps` [x]))) $
Var go `mkApps` [go2 `mkLApps` [0,1], mkLit 0]
, ("mutual recursion (thunks), called mutiple times (both arity 1 would be bad!)",) $
Let (Rec [ (n, mkACase (mkLams [y] $ mkLit 0) (Var d))
, (d, mkACase (mkLams [y] $ mkLit 0) (Var n))]) $
Var n `mkApps` [Var d `mkApps` [Var d `mkApps` [mkLit 0]]]
, ("mutual recursion (functions), but no thunks",) $
Let (Rec [ (go, mkLams [x] (mkACase (mkLams [y] $ mkLit 0) (Var go2 `mkVarApps` [x])))
, (go2, mkLams [x] (mkACase (mkLams [y] $ mkLit 0) (Var go `mkVarApps` [x])))]) $
Var go `mkApps` [go2 `mkLApps` [0,1], mkLit 0]
, ("mutual recursion (functions), one boring (d 1 would be bad)",) $
mkLet d (f `mkLApps` [0]) $
Let (Rec [ (go, mkLams [x, y] (Var d `mkApps` [go2 `mkLApps` [1,2]]))
, (go2, mkLams [x] (mkACase (mkLams [y] $ mkLit 0) (Var go `mkVarApps` [x])))]) $
Var d `mkApps` [go2 `mkLApps` [0,1]]
, ("a thunk (non-function-type), called twice, still calls once",) $
mkLet d (f `mkLApps` [0]) $
mkLet x (d `mkLApps` [1]) $
Var f `mkVarApps` [x, x]
, ("a thunk (function type), called multiple times, still calls once",) $
mkLet d (f `mkLApps` [0]) $
mkLet n (Var f `mkApps` [d `mkLApps` [1]]) $
mkLams [x] $ Var n `mkVarApps` [x]
, ("a thunk (non-function-type), in mutual recursion, still calls once (d 1 would be good)",) $
mkLet d (f `mkLApps` [0]) $
Let (Rec [ (x, Var d `mkApps` [go `mkLApps` [1,2]])
, (go, mkLams [x] $ mkACase (mkLams [z] $ Var x) (Var go `mkVarApps` [x]) ) ]) $
Var go `mkApps` [mkLit 0, go `mkLApps` [0,1]]
, ("a thunk (non-function-type), in mutual recursion, causes many calls (d 1 would be bad)",) $
mkLet d (f `mkLApps` [0]) $
Let (Rec [ (x, Var go `mkApps` [go `mkLApps` [1,2], go `mkLApps` [1,2]])
, (go, mkLams [x] $ mkACase (Var d) (Var go `mkVarApps` [x]) ) ]) $
Var go `mkApps` [mkLit 0, go `mkLApps` [0,1]]
, ("a thunk (function type), in mutual recursion, still calls once (d 1 would be good)",) $
mkLet d (f `mkLApps` [0]) $
Let (Rec [ (n, Var go `mkApps` [d `mkLApps` [1]])
, (go, mkLams [x] $ mkACase (Var n) (Var go `mkApps` [Var n `mkVarApps` [x]]) ) ]) $
Var go `mkApps` [mkLit 0, go `mkLApps` [0,1]]
, ("a thunk (non-function-type) co-calls with the body (d 1 would be bad)",) $
mkLet d (f `mkLApps` [0]) $
mkLet x (d `mkLApps` [1]) $
Var d `mkVarApps` [x]
]
main = do
[libdir] <- getArgs
runGhc (Just libdir) $ do
getSessionDynFlags >>= setSessionDynFlags . flip gopt_set Opt_SuppressUniques
dflags <- getSessionDynFlags
liftIO $ forM_ exprs $ \(n,e) -> do
case lintExpr dflags [f,scrutf,scruta] e of
Just msg -> putMsg dflags (msg $$ text "in" <+> text n)
Nothing -> return ()
putMsg dflags (text n <> char ':')
-- liftIO $ putMsg dflags (ppr e)
let e' = callArityRHS e
let bndrs = nonDetEltsUFM (allBoundIds e')
-- It should be OK to use nonDetEltsUFM here, if it becomes a
-- problem we should use DVarSet
-- liftIO $ putMsg dflags (ppr e')
forM_ bndrs $ \v -> putMsg dflags $ nest 4 $ ppr v <+> ppr (idCallArity v)
-- Utilities
mkLApps :: Id -> [Integer] -> CoreExpr
mkLApps v = mkApps (Var v) . map mkLit
mkACase = mkIfThenElse (mkVarApps (Var scrutf) [scruta])
mkTestId :: Int -> String -> Type -> Id
mkTestId i s ty = mkSysLocal (mkFastString s) (mkBuiltinUnique i) ty
mkTestIds :: [String] -> [Type] -> [Id]
mkTestIds ns tys = zipWith3 mkTestId [0..] ns tys
mkLet :: Id -> CoreExpr -> CoreExpr -> CoreExpr
mkLet v rhs body = Let (NonRec v rhs) body
mkRLet :: Id -> CoreExpr -> CoreExpr -> CoreExpr
mkRLet v rhs body = Let (Rec [(v, rhs)]) body
mkFun :: Id -> [Id] -> CoreExpr -> CoreExpr -> CoreExpr
mkFun v xs rhs body = mkLet v (mkLams xs rhs) body
mkRFun :: Id -> [Id] -> CoreExpr -> CoreExpr -> CoreExpr
mkRFun v xs rhs body = mkRLet v (mkLams xs rhs) body
mkLit :: Integer -> CoreExpr
mkLit i = Lit (mkLitInteger i intTy)
-- Collects all let-bound IDs
allBoundIds :: CoreExpr -> VarSet
allBoundIds (Let (NonRec v rhs) body) = allBoundIds rhs `unionVarSet` allBoundIds body `extendVarSet` v
allBoundIds (Let (Rec binds) body) =
allBoundIds body `unionVarSet` unionVarSets
[ allBoundIds rhs `extendVarSet` v | (v, rhs) <- binds ]
allBoundIds (App e1 e2) = allBoundIds e1 `unionVarSet` allBoundIds e2
allBoundIds (Case scrut _ _ alts) =
allBoundIds scrut `unionVarSet` unionVarSets
[ allBoundIds e | (_, _ , e) <- alts ]
allBoundIds (Lam _ e) = allBoundIds e
allBoundIds (Tick _ e) = allBoundIds e
allBoundIds (Cast e _) = allBoundIds e
allBoundIds _ = emptyVarSet
| olsner/ghc | testsuite/tests/callarity/unittest/CallArity1.hs | bsd-3-clause | 9,920 | 0 | 25 | 2,975 | 4,348 | 2,402 | 1,946 | 202 | 2 |
{-# LANGUAGE BangPatterns #-}
import Control.Monad
import Data.List
import StackTest
main :: IO ()
main = repl [] $ do
replCommand ":main"
line <- replGetLine
when (line /= "Hello World!")
$ error "Main module didn't load correctly."
| AndrewRademacher/stack | test/integration/tests/module-added-multiple-times/Main.hs | bsd-3-clause | 256 | 0 | 11 | 59 | 72 | 35 | 37 | 10 | 1 |
{-
Copyright 2015 Google Inc. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-}
{-# LANGUAGE PackageImports #-}
{-# LANGUAGE NoImplicitPrelude #-}
module GHC.Pack (module M) where
import "base" GHC.Pack as M
| Ye-Yong-Chi/codeworld | codeworld-base/src/GHC/Pack.hs | apache-2.0 | 731 | 0 | 4 | 136 | 23 | 17 | 6 | 4 | 0 |
module Main where
import Prelude hiding (lines)
import Control.Monad
import Data.IORef
import Data.Time.Clock
import Graphics.Rendering.OpenGL (($=))
import qualified Graphics.Rendering.OpenGL as GL
import qualified Graphics.UI.GLUT as GLUT
import Game
import Keyboard
import Matrix
fps :: (Fractional a) => a
fps = 1/25
initialWindowSize :: GL.Size
initialWindowSize = GL.Size 640 480
drawOneLine :: GL.Vertex2 Scalar -> GL.Vertex2 Scalar -> IO ()
drawOneLine p1 p2 = GL.renderPrimitive GL.Lines $ do GL.vertex p1; GL.vertex p2
drawLines :: [Line] -> IO ()
drawLines lines = do
GL.color (GL.Color3 1.0 1.0 1.0 :: GL.Color3 GL.GLfloat)
forM_ lines (\ (p0, p1) -> drawOneLine (v p0) (v p1))
where v = uncurry GL.Vertex2
initGL :: IO ()
initGL = do
GL.clearColor $= GL.Color4 0 0 0 0
GL.shadeModel $= GL.Flat
GL.depthFunc $= Nothing
reshape :: GLUT.ReshapeCallback
reshape size@(GL.Size w h) = do
GL.viewport $= (GL.Position 0 0, size)
GL.matrixMode $= GL.Projection
GL.loadIdentity
GL.ortho2D 0 (fromIntegral w) 0 (fromIntegral h)
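-- Fixed-timestep game loop: advance the game state by the time elapsed since
-- the previous frame, request a redraw, and schedule the next frame so that
-- frames are spaced roughly 'fps' (here 1/25 s) apart.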
frame :: UTCTime ->
IORef Keyboard -> IORef GameState ->
LogicStep ->
GLUT.TimerCallback
frame lastFrameTime keyboardRef stateRef logicStep = do
now <- getCurrentTime
let timeDiff = now `diffUTCTime` lastFrameTime
state <- readIORef stateRef
keyboard <- readIORef keyboardRef
let state' = logicStep timeDiff keyboard state
writeIORef stateRef state'
GLUT.postRedisplay Nothing
let nextFrameTime = fps `addUTCTime` lastFrameTime
waitTime = nextFrameTime `diffUTCTime` now
msWait = truncate (waitTime * 1000)
GLUT.addTimerCallback msWait (frame now keyboardRef stateRef logicStep)
displayCallback :: IORef GameState -> GLUT.DisplayCallback
displayCallback stateRef = do
state <- readIORef stateRef
GL.clear [GL.ColorBuffer]
drawLines $ getLines state
GLUT.swapBuffers
main :: IO ()
main = do
_ <- GLUT.getArgsAndInitialize
GLUT.initialDisplayMode $= [GLUT.DoubleBuffered, GLUT.RGBMode]
GLUT.initialWindowSize $= initialWindowSize
_ <- GLUT.createWindow "purewars"
initGL
GLUT.reshapeCallback $= Just reshape
now <- getCurrentTime
stateRef <- newIORef initialGameState
keyboardRef <- newIORef initialKeyboardState
GLUT.keyboardMouseCallback $= Just (keyboardCallback keyboardRef)
GLUT.displayCallback $= displayCallback stateRef
GLUT.addTimerCallback 1 (frame now keyboardRef stateRef logic)
GLUT.mainLoop
| sordina/purewars | Main.hs | bsd-3-clause | 2,469 | 0 | 12 | 435 | 833 | 411 | 422 | 70 | 1 |
module Tuura.Concept (
module Data.Monoid,
module Tuura.Concept.Abstract,
module Tuura.Concept.Circuit,
) where
import Data.Monoid
import Tuura.Concept.Abstract
import Tuura.Concept.Circuit
| tuura/concepts | src/Tuura/Concept.hs | bsd-3-clause | 207 | 0 | 5 | 34 | 46 | 31 | 15 | 7 | 0 |
-----------------------------------------------------------------------------
-- |
-- Module : XMonad.Actions.Plane
-- Copyright : (c) Marco Túlio Gontijo e Silva <[email protected]>,
-- Leonardo Serra <[email protected]>
-- License : BSD3-style (see LICENSE)
--
-- Maintainer : Marco Túlio Gontijo e Silva <[email protected]>
-- Stability : unstable
-- Portability : unportable
--
-- This module has functions to navigate through workspaces in a bidimensional
-- manner. It allows the organization of workspaces in lines, and provides
-- functions to move and shift windows in all four directions (left, up, right
-- and down) possible in a surface.
--
-- This functionality was inspired by GNOME (finite) and KDE (infinite)
-- keybindings for workspace navigation, and by "XMonad.Actions.CycleWS" for
-- the idea of applying this approach to XMonad.
-----------------------------------------------------------------------------
module XMonad.Actions.Plane
(
-- * Usage
-- $usage
-- * Data types
Direction (..)
, Limits (..)
, Lines (..)
-- * Key bindings
, planeKeys
-- * Navigating through workspaces
, planeShift
, planeMove
)
where
import Control.Monad
import Data.List
import Data.Map hiding (split)
import Data.Maybe
import XMonad
import XMonad.StackSet hiding (workspaces)
import XMonad.Util.Run
-- $usage
-- You can use this module with the following in your @~\/.xmonad\/xmonad.hs@ file:
--
-- > import XMonad.Actions.Plane
-- >
-- > main = xmonad defaultConfig {keys = myKeys}
-- >
-- > myKeys conf = union (keys defaultConfig conf) $ myNewKeys conf
-- >
-- > myNewKeys (XConfig {modMask = modm}) = planeKeys modm (Lines 3) Finite
--
-- For detailed instructions on editing your key bindings, see
-- "XMonad.Doc.Extending#Editing_key_bindings".
-- | Direction to go in the plane.
data Direction = ToLeft | ToUp | ToRight | ToDown deriving Enum
-- | Defines the behaviour when you're trying to move out of the limits.
data Limits
= Finite -- ^ Ignore the function call, and keep in the same workspace.
| Circular -- ^ Get on the other side, like in the Snake game.
| Linear -- ^ The plan comes as a row, so it goes to the next or prev if
-- the workspaces were numbered.
deriving Eq
-- | The number of lines in which the workspaces will be arranged. It's
-- possible to use a number of lines that is not a divisor of the number of
-- workspaces, but the results are better when using a divisor. If it's not a
-- divisor, the last line will have the remaining workspaces.
data Lines
= GConf -- ^ Use @gconftool-2@ to find out the number of lines.
| Lines Int -- ^ Specify the number of lines explicitly.
-- | This is the way most people would like to use this module. It attaches the
-- 'KeyMask' passed as a parameter with 'xK_Left', 'xK_Up', 'xK_Right' and
-- 'xK_Down', associating it with 'planeMove' to the corresponding 'Direction'.
-- It also associates these bindings with 'shiftMask' to 'planeShift'.
planeKeys :: KeyMask -> Lines -> Limits -> Map (KeyMask, KeySym) (X ())
planeKeys modm ln limits =
fromList $
[ ((keyMask, keySym), function ln limits direction)
| (keySym, direction) <- zip [xK_Left .. xK_Down] $ enumFrom ToLeft
, (keyMask, function) <- [(modm, planeMove), (shiftMask .|. modm, planeShift)]
]
-- | Shift a window to the next workspace in 'Direction'. Note that this will
-- also move to the next workspace. It's a good idea to use the same 'Lines'
-- and 'Limits' for all the bindings.
planeShift :: Lines -> Limits -> Direction -> X ()
planeShift = plane shift'
shift' ::
(Eq s, Eq i, Ord a) => i -> StackSet i l a s sd -> StackSet i l a s sd
shift' area = greedyView area . shift area
-- | Move to the next workspace in 'Direction'.
planeMove :: Lines -> Limits -> Direction -> X ()
planeMove = plane greedyView
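-- An illustrative sketch, not part of the original module: binding a single
-- direction by hand instead of using 'planeKeys'. The @modm@ mask and the
-- @Lines 3@ / 'Finite' arguments are assumptions chosen for the example.
--
-- > , ((modm, xK_Right), planeMove (Lines 3) Finite ToRight)
-- > , ((modm .|. shiftMask, xK_Right), planeShift (Lines 3) Finite ToRight)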
plane ::
(WorkspaceId -> WindowSet -> WindowSet) -> Lines -> Limits -> Direction ->
X ()
plane function numberLines_ limits direction = do
st <- get
xconf <- ask
numberLines <-
liftIO $
case numberLines_ of
Lines numberLines__ ->
return numberLines__
GConf ->
do
numberLines__ <-
runProcessWithInput gconftool parameters ""
case reads numberLines__ of
[(numberRead, _)] -> return numberRead
_ ->
do
trace $
"XMonad.Actions.Plane: Could not parse the output of " ++ gconftool ++
unwords parameters ++ ": " ++ numberLines__ ++ "; assuming 1."
return 1
let
notBorder :: Bool
notBorder = (replicate 2 (circular_ < currentWS) ++ replicate 2 (circular_ > currentWS)) !! fromEnum direction
circular_ :: Int
circular_ = circular currentWS
circular :: Int -> Int
circular =
[ onLine pred
, onColumn pred
, onLine succ
, onColumn succ
]
!! fromEnum direction
linear :: Int -> Int
linear =
[ onLine pred . onColumn pred
, onColumn pred . onLine pred
, onLine succ . onColumn succ
, onColumn succ . onLine succ
]
!! fromEnum direction
onLine :: (Int -> Int) -> Int -> Int
onLine f currentWS_
| line < areasLine = mod_ columns
| otherwise = mod_ areasColumn
where
line, column :: Int
(line, column) = split currentWS_
mod_ :: Int -> Int
mod_ columns_ = compose line $ mod (f column) columns_
onColumn :: (Int -> Int) -> Int -> Int
onColumn f currentWS_
| column < areasColumn || areasColumn == 0 = mod_ numberLines
| otherwise = mod_ $ pred numberLines
where
line, column :: Int
(line, column) = split currentWS_
mod_ :: Int -> Int
mod_ lines_ = compose (mod (f line) lines_) column
compose :: Int -> Int -> Int
compose line column = line * columns + column
split :: Int -> (Int, Int)
split currentWS_ =
(operation div, operation mod)
where
operation :: (Int -> Int -> Int) -> Int
operation f = f currentWS_ columns
areasLine :: Int
areasLine = div areas columns
areasColumn :: Int
areasColumn = mod areas columns
columns :: Int
columns =
if mod areas numberLines == 0 then preColumns else preColumns + 1
currentWS :: Int
currentWS = fromJust mCurrentWS
preColumns :: Int
preColumns = div areas numberLines
mCurrentWS :: Maybe Int
mCurrentWS = elemIndex (currentTag $ windowset st) areaNames
areas :: Int
areas = length areaNames
run :: (Int -> Int) -> X ()
run f = windows $ function $ areaNames !! f currentWS
areaNames :: [String]
areaNames = workspaces $ config xconf
when (isJust mCurrentWS) $
case limits of
Finite -> when notBorder $ run circular
Circular -> run circular
Linear -> if notBorder then run circular else run linear
gconftool :: String
gconftool = "gconftool-2"
parameters :: [String]
parameters = ["--get", "/apps/panel/applets/workspace_switcher_screen0/prefs/num_rows"] | markus1189/xmonad-contrib-710 | XMonad/Actions/Plane.hs | bsd-3-clause | 7,811 | 0 | 24 | 2,455 | 1,459 | 788 | 671 | 130 | 7 |
module Main where
import Load
main = testload
| abuiles/turbinado-blog | tmp/dependencies/hs-plugins-1.3.1/testsuite/pdynload/bayley1/prog/Main.hs | bsd-3-clause | 47 | 0 | 4 | 9 | 12 | 8 | 4 | 3 | 1 |
{-# LANGUAGE CPP, ForeignFunctionInterface #-}
module Network.Wai.Handler.Warp.SendFile (
sendFile
, readSendFile
, packHeader -- for testing
#ifndef WINDOWS
, positionRead
#endif
) where
import Control.Monad (void, when)
import Data.ByteString (ByteString)
import qualified Data.ByteString as BS
import Network.Socket (Socket)
import Network.Wai.Handler.Warp.Buffer
import Network.Wai.Handler.Warp.Types
#ifdef WINDOWS
import Data.ByteString.Internal (ByteString(..))
import Foreign.ForeignPtr (newForeignPtr_)
import Foreign.Ptr (plusPtr)
import qualified System.IO as IO
#else
# if __GLASGOW_HASKELL__ < 709
import Control.Applicative ((<$>))
# endif
import Control.Exception
import Foreign.C.Error (throwErrno)
import Foreign.C.Types
import Foreign.Ptr (Ptr, castPtr, plusPtr)
import Network.Sendfile
import Network.Wai.Handler.Warp.FdCache (openFile, closeFile)
import System.Posix.Types
#endif
----------------------------------------------------------------
-- | Function to send a file based on sendfile() for Linux\/Mac\/FreeBSD.
-- This makes use of the file descriptor cache.
-- For other OSes, this is identical to 'readSendFile'.
--
-- Since: 3.1.0
sendFile :: Socket -> Buffer -> BufSize -> (ByteString -> IO ()) -> SendFile
#ifdef SENDFILEFD
sendFile s _ _ _ fid off len act hdr = case mfid of
-- settingsFdCacheDuration is 0
Nothing -> sendfileWithHeader s path (PartOfFile off len) act hdr
Just fd -> sendfileFdWithHeader s fd (PartOfFile off len) act hdr
where
mfid = fileIdFd fid
path = fileIdPath fid
#else
sendFile _ = readSendFile
#endif
----------------------------------------------------------------
packHeader :: Buffer -> BufSize -> (ByteString -> IO ())
-> IO () -> [ByteString]
-> Int
-> IO Int
packHeader _ _ _ _ [] n = return n
packHeader buf siz send hook (bs:bss) n
| len < room = do
let dst = buf `plusPtr` n
void $ copy dst bs
packHeader buf siz send hook bss (n + len)
| otherwise = do
let dst = buf `plusPtr` n
(bs1, bs2) = BS.splitAt room bs
void $ copy dst bs1
bufferIO buf siz send
hook
packHeader buf siz send hook (bs2:bss) 0
where
len = BS.length bs
room = siz - n
mini :: Int -> Integer -> Int
mini i n
| fromIntegral i < n = i
| otherwise = fromIntegral n
-- | Function to send a file based on pread()\/send() for Unix.
-- This makes use of the file descriptor cache.
-- For Windows, this is emulated by 'Handle'.
--
-- Since: 3.1.0
#ifdef WINDOWS
readSendFile :: Buffer -> BufSize -> (ByteString -> IO ()) -> SendFile
readSendFile buf siz send fid off0 len0 hook headers = do
hn <- packHeader buf siz send hook headers 0
let room = siz - hn
buf' = buf `plusPtr` hn
IO.withBinaryFile path IO.ReadMode $ \h -> do
IO.hSeek h IO.AbsoluteSeek off0
n <- IO.hGetBufSome h buf' (mini room len0)
bufferIO buf (hn + n) send
hook
let n' = fromIntegral n
fptr <- newForeignPtr_ buf
loop h fptr (len0 - n')
where
path = fileIdPath fid
loop h fptr len
| len <= 0 = return ()
| otherwise = do
n <- IO.hGetBufSome h buf (mini siz len)
when (n /= 0) $ do
let bs = PS fptr 0 n
n' = fromIntegral n
send bs
hook
loop h fptr (len - n')
#else
readSendFile :: Buffer -> BufSize -> (ByteString -> IO ()) -> SendFile
readSendFile buf siz send fid off0 len0 hook headers =
bracket setup teardown $ \fd -> do
hn <- packHeader buf siz send hook headers 0
let room = siz - hn
buf' = buf `plusPtr` hn
n <- positionRead fd buf' (mini room len0) off0
bufferIO buf (hn + n) send
hook
let n' = fromIntegral n
loop fd (len0 - n') (off0 + n')
where
path = fileIdPath fid
setup = case fileIdFd fid of
Just fd -> return fd
Nothing -> openFile path
teardown fd = case fileIdFd fid of
Just _ -> return ()
Nothing -> closeFile fd
loop fd len off
| len <= 0 = return ()
| otherwise = do
n <- positionRead fd buf (mini siz len) off
bufferIO buf n send
let n' = fromIntegral n
hook
loop fd (len - n') (off + n')
positionRead :: Fd -> Buffer -> BufSize -> Integer -> IO Int
positionRead fd buf siz off = do
bytes <- fromIntegral <$> c_pread fd (castPtr buf) (fromIntegral siz) (fromIntegral off)
when (bytes < 0) $ throwErrno "positionRead"
return bytes
foreign import ccall unsafe "pread"
c_pread :: Fd -> Ptr CChar -> ByteCount -> FileOffset -> IO CSsize
#endif
| utdemir/wai | warp/Network/Wai/Handler/Warp/SendFile.hs | mit | 4,692 | 0 | 16 | 1,240 | 951 | 490 | 461 | 78 | 3 |
import Graphics.UI.Gtk
import Data.IORef
import System.Random (randomRIO)
main:: IO ()
main= do
initGUI
window <- windowNew
set window [ windowTitle := "Guess a Number",
windowDefaultWidth := 300, windowDefaultHeight := 250]
mb <- vBoxNew False 0
containerAdd window mb
info <- labelNew (Just "Press \"New\" for a random number")
boxPackStart mb info PackNatural 7
sep1 <- hSeparatorNew
boxPackStart mb sep1 PackNatural 7
scrwin <- scrolledWindowNew Nothing Nothing
boxPackStart mb scrwin PackGrow 0
table <- tableNew 10 10 True
scrolledWindowAddWithViewport scrwin table
buttonlist <- sequence (map numButton [1..100])
let places = cross [0..9] [0..9]
sequence_ (zipWith (attachButton table) buttonlist places)
sep2 <- hSeparatorNew
boxPackStart mb sep2 PackNatural 7
hb <- hBoxNew False 0
boxPackStart mb hb PackNatural 0
play <- buttonNewFromStock stockNew
quit <- buttonNewFromStock stockQuit
boxPackStart hb play PackNatural 0
boxPackEnd hb quit PackNatural 0
randstore <- newIORef 50
randomButton info randstore play
sequence_ (map (actionButton info randstore) buttonlist)
widgetShowAll window
onClicked quit (widgetDestroy window)
onDestroy window mainQuit
mainGUI
numButton :: Int -> IO Button
numButton n = do
button <- buttonNewWithLabel (show n)
return button
cross :: [Int] -> [Int] -> [(Int,Int)]
cross row col = do
x <- row
y <- col
return (x,y)
attachButton :: Table -> Button -> (Int,Int) -> IO ()
attachButton ta bu (x,y) =
tableAttachDefaults ta bu y (y+1) x (x+1)
actionButton :: ButtonClass b => Label -> IORef Int -> b -> IO (ConnectId b)
actionButton inf rst b =
onClicked b $ do label <- get b buttonLabel
let num = (read label):: Int
rand <- readIORef rst
case compare num rand of
GT -> do set inf [labelLabel := "Too High"]
widgetModifyFg inf StateNormal (Color 65535 0 0)
LT -> do set inf [labelLabel := "Too Low"]
widgetModifyFg inf StateNormal (Color 65535 0 0)
EQ -> do set inf [labelLabel := "Correct"]
widgetModifyFg inf StateNormal (Color 0 35000 0)
randomButton :: ButtonClass b => Label -> IORef Int -> b -> IO (ConnectId b)
randomButton inf rst b =
onClicked b $ do rand <- randomRIO (1::Int, 100)
writeIORef rst rand
set inf [labelLabel := "Ready"]
widgetModifyFg inf StateNormal (Color 0 0 65535)
| thiagoarrais/gtk2hs | docs/tutorial/Tutorial_Port/Example_Code/GtkChap6-1.hs | lgpl-2.1 | 2,777 | 0 | 15 | 894 | 944 | 439 | 505 | 67 | 3 |
{-
(c) The University of Glasgow 2006
(c) The GRASP/AQUA Project, Glasgow University, 1992-1998
-}
{-# LANGUAGE CPP #-}
module BuildTyCl (
buildSynonymTyCon,
buildFamilyTyCon,
buildAlgTyCon,
buildDataCon,
buildPatSyn,
TcMethInfo, buildClass,
distinctAbstractTyConRhs, totallyAbstractTyConRhs,
mkNewTyConRhs, mkDataTyConRhs,
newImplicitBinder
) where
#include "HsVersions.h"
import IfaceEnv
import FamInstEnv( FamInstEnvs )
import DataCon
import PatSyn
import Var
import VarSet
import BasicTypes
import Name
import MkId
import Class
import TyCon
import Type
import Id
import Coercion
import TcType
import DynFlags
import TcRnMonad
import UniqSupply
import Util
import Outputable
------------------------------------------------------
buildSynonymTyCon :: Name -> [TyVar] -> [Role]
-> Type
-> Kind -- ^ Kind of the RHS
-> TcRnIf m n TyCon
buildSynonymTyCon tc_name tvs roles rhs rhs_kind
= return (mkSynonymTyCon tc_name kind tvs roles rhs)
where kind = mkPiKinds tvs rhs_kind
buildFamilyTyCon :: Name -> [TyVar]
-> FamTyConFlav
-> Kind -- ^ Kind of the RHS
-> TyConParent
-> TcRnIf m n TyCon
buildFamilyTyCon tc_name tvs rhs rhs_kind parent
= return (mkFamilyTyCon tc_name kind tvs rhs parent)
where kind = mkPiKinds tvs rhs_kind
------------------------------------------------------
distinctAbstractTyConRhs, totallyAbstractTyConRhs :: AlgTyConRhs
distinctAbstractTyConRhs = AbstractTyCon True
totallyAbstractTyConRhs = AbstractTyCon False
mkDataTyConRhs :: [DataCon] -> AlgTyConRhs
mkDataTyConRhs cons
= DataTyCon {
data_cons = cons,
is_enum = not (null cons) && all is_enum_con cons
-- See Note [Enumeration types] in TyCon
}
where
is_enum_con con
| (_tvs, theta, arg_tys, _res) <- dataConSig con
= null theta && null arg_tys
mkNewTyConRhs :: Name -> TyCon -> DataCon -> TcRnIf m n AlgTyConRhs
-- ^ Monadic because it makes a Name for the coercion TyCon
-- We pass the Name of the parent TyCon, as well as the TyCon itself,
-- because the latter is part of a knot, whereas the former is not.
mkNewTyConRhs tycon_name tycon con
= do { co_tycon_name <- newImplicitBinder tycon_name mkNewTyCoOcc
; let co_tycon = mkNewTypeCo co_tycon_name tycon etad_tvs etad_roles etad_rhs
; traceIf (text "mkNewTyConRhs" <+> ppr co_tycon)
; return (NewTyCon { data_con = con,
nt_rhs = rhs_ty,
nt_etad_rhs = (etad_tvs, etad_rhs),
nt_co = co_tycon } ) }
-- Coreview looks through newtypes with a Nothing
-- for nt_co, or uses explicit coercions otherwise
where
tvs = tyConTyVars tycon
roles = tyConRoles tycon
inst_con_ty = applyTys (dataConUserType con) (mkTyVarTys tvs)
rhs_ty = ASSERT( isFunTy inst_con_ty ) funArgTy inst_con_ty
-- Instantiate the data con with the
-- type variables from the tycon
-- NB: a newtype DataCon has a type that must look like
-- forall tvs. <arg-ty> -> T tvs
-- Note that we *can't* use dataConInstOrigArgTys here because
-- the newtype arising from class Foo a => Bar a where {}
-- has a single argument (Foo a) that is a *type class*, so
-- dataConInstOrigArgTys returns [].
etad_tvs :: [TyVar] -- Matched lazily, so that mkNewTypeCo can
etad_roles :: [Role] -- return a TyCon without pulling on rhs_ty
etad_rhs :: Type -- See Note [Tricky iface loop] in LoadIface
(etad_tvs, etad_roles, etad_rhs) = eta_reduce (reverse tvs) (reverse roles) rhs_ty
eta_reduce :: [TyVar] -- Reversed
-> [Role] -- also reversed
-> Type -- Rhs type
-> ([TyVar], [Role], Type) -- Eta-reduced version
-- (tyvars in normal order)
eta_reduce (a:as) (_:rs) ty | Just (fun, arg) <- splitAppTy_maybe ty,
Just tv <- getTyVar_maybe arg,
tv == a,
not (a `elemVarSet` tyVarsOfType fun)
= eta_reduce as rs fun
eta_reduce tvs rs ty = (reverse tvs, reverse rs, ty)
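-- An illustrative example, not from the original source: for
--     newtype Wrap m a = MkWrap (m a)
-- the tycon variables are [m, a] and rhs_ty is (m a). eta_reduce strips the
-- trailing argument because it is exactly the last tyvar and does not occur
-- free in the remaining function part, giving etad_tvs = [m] and etad_rhs = m.
-- The newtype coercion then relates (Wrap m) to m directly, so it can also be
-- used when Wrap is only partially applied (as GeneralizedNewtypeDeriving
-- requires).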
------------------------------------------------------
buildDataCon :: FamInstEnvs
-> Name -> Bool
-> [HsBang]
-> [Name] -- Field labels
-> [TyVar] -> [TyVar] -- Univ and ext
-> [(TyVar,Type)] -- Equality spec
-> ThetaType -- Does not include the "stupid theta"
-- or the GADT equalities
-> [Type] -> Type -- Argument and result types
-> TyCon -- Rep tycon
-> TcRnIf m n DataCon
-- A wrapper for DataCon.mkDataCon that
-- a) makes the worker Id
-- b) makes the wrapper Id if necessary, including
-- allocating its unique (hence monadic)
buildDataCon fam_envs src_name declared_infix arg_stricts field_lbls
univ_tvs ex_tvs eq_spec ctxt arg_tys res_ty rep_tycon
= do { wrap_name <- newImplicitBinder src_name mkDataConWrapperOcc
; work_name <- newImplicitBinder src_name mkDataConWorkerOcc
-- This last one takes the name of the data constructor in the source
-- code, which (for Haskell source anyway) will be in the DataName name
-- space, and puts it into the VarName name space
; us <- newUniqueSupply
; dflags <- getDynFlags
; let
stupid_ctxt = mkDataConStupidTheta rep_tycon arg_tys univ_tvs
data_con = mkDataCon src_name declared_infix
arg_stricts field_lbls
univ_tvs ex_tvs eq_spec ctxt
arg_tys res_ty rep_tycon
stupid_ctxt dc_wrk dc_rep
dc_wrk = mkDataConWorkId work_name data_con
dc_rep = initUs_ us (mkDataConRep dflags fam_envs wrap_name data_con)
; return data_con }
-- The stupid context for a data constructor should be limited to
-- the type variables mentioned in the arg_tys
-- ToDo: Or functionally dependent on?
-- This whole stupid theta thing is, well, stupid.
mkDataConStupidTheta :: TyCon -> [Type] -> [TyVar] -> [PredType]
mkDataConStupidTheta tycon arg_tys univ_tvs
| null stupid_theta = [] -- The common case
| otherwise = filter in_arg_tys stupid_theta
where
tc_subst = zipTopTvSubst (tyConTyVars tycon) (mkTyVarTys univ_tvs)
stupid_theta = substTheta tc_subst (tyConStupidTheta tycon)
-- Start by instantiating the master copy of the
-- stupid theta, taken from the TyCon
arg_tyvars = tyVarsOfTypes arg_tys
in_arg_tys pred = not $ isEmptyVarSet $
tyVarsOfType pred `intersectVarSet` arg_tyvars
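-- An illustrative example, not from the original source: with the old
-- datatype-context form
--     data (Eq a, Show b) => T a b = MkT a
-- the constructor MkT mentions only @a@ in its argument types, so the stupid
-- theta kept here for MkT is just (Eq a); the (Show b) constraint is filtered
-- out by in_arg_tys.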
------------------------------------------------------
buildPatSyn :: Name -> Bool
-> (Id,Bool) -> Maybe (Id, Bool)
-> ([TyVar], ThetaType) -- ^ Univ and req
-> ([TyVar], ThetaType) -- ^ Ex and prov
-> [Type] -- ^ Argument types
-> Type -- ^ Result type
-> PatSyn
buildPatSyn src_name declared_infix matcher@(matcher_id,_) builder
(univ_tvs, req_theta) (ex_tvs, prov_theta) arg_tys pat_ty
= ASSERT((and [ univ_tvs == univ_tvs'
, ex_tvs == ex_tvs'
, pat_ty `eqType` pat_ty'
, prov_theta `eqTypes` prov_theta'
, req_theta `eqTypes` req_theta'
, arg_tys `eqTypes` arg_tys'
]))
mkPatSyn src_name declared_infix
(univ_tvs, req_theta) (ex_tvs, prov_theta)
arg_tys pat_ty
matcher builder
where
((_:univ_tvs'), req_theta', tau) = tcSplitSigmaTy $ idType matcher_id
([pat_ty', cont_sigma, _], _) = tcSplitFunTys tau
(ex_tvs', prov_theta', cont_tau) = tcSplitSigmaTy cont_sigma
(arg_tys', _) = tcSplitFunTys cont_tau
-- ------------------------------------------------------
type TcMethInfo = (Name, DefMethSpec, Type)
-- A temporary intermediate, to communicate between
-- tcClassSigs and buildClass.
buildClass :: Name -> [TyVar] -> [Role] -> ThetaType
-> [FunDep TyVar] -- Functional dependencies
-> [ClassATItem] -- Associated types
-> [TcMethInfo] -- Method info
-> ClassMinimalDef -- Minimal complete definition
-> RecFlag -- Info for type constructor
-> TcRnIf m n Class
buildClass tycon_name tvs roles sc_theta fds at_items sig_stuff mindef tc_isrec
= fixM $ \ rec_clas -> -- Only name generation inside loop
do { traceIf (text "buildClass")
; datacon_name <- newImplicitBinder tycon_name mkClassDataConOcc
-- The class name is the 'parent' for this datacon, not its tycon,
-- because one should import the class to get the binding for
-- the datacon
; op_items <- mapM (mk_op_item rec_clas) sig_stuff
-- Build the selector id and default method id
-- Make selectors for the superclasses
; sc_sel_names <- mapM (newImplicitBinder tycon_name . mkSuperDictSelOcc)
[1..length sc_theta]
; let sc_sel_ids = [ mkDictSelId sc_name rec_clas
| sc_name <- sc_sel_names]
-- We number off the Dict superclass selectors, 1, 2, 3 etc so that we
-- can construct names for the selectors. Thus
-- class (C a, C b) => D a b where ...
-- gives superclass selectors
-- D_sc1, D_sc2
-- (We used to call them D_C, but now we can have two different
-- superclasses both called C!)
; let use_newtype = isSingleton arg_tys
-- Use a newtype if the data constructor
-- (a) has exactly one value field
-- i.e. exactly one operation or superclass taken together
-- (b) that value is of lifted type (which they always are, because
-- we box equality superclasses)
-- See note [Class newtypes and equality predicates]
-- We treat the dictionary superclasses as ordinary arguments.
-- That means that in the case of
-- class C a => D a
-- we don't get a newtype with no arguments!
args = sc_sel_names ++ op_names
op_tys = [ty | (_,_,ty) <- sig_stuff]
op_names = [op | (op,_,_) <- sig_stuff]
arg_tys = sc_theta ++ op_tys
rec_tycon = classTyCon rec_clas
; dict_con <- buildDataCon (panic "buildClass: FamInstEnvs")
datacon_name
False -- Not declared infix
(map (const HsNoBang) args)
[{- No fields -}]
tvs [{- no existentials -}]
[{- No GADT equalities -}]
[{- No theta -}]
arg_tys
(mkTyConApp rec_tycon (mkTyVarTys tvs))
rec_tycon
; rhs <- if use_newtype
then mkNewTyConRhs tycon_name rec_tycon dict_con
else return (mkDataTyConRhs [dict_con])
; let { clas_kind = mkPiKinds tvs constraintKind
; tycon = mkClassTyCon tycon_name clas_kind tvs roles
rhs rec_clas tc_isrec
-- A class can be recursive, and in the case of newtypes
-- this matters. For example
-- class C a where { op :: C b => a -> b -> Int }
-- Because C has only one operation, it is represented by
-- a newtype, and it should be a *recursive* newtype.
-- [If we don't make it a recursive newtype, we'll expand the
-- newtype like a synonym, but that will lead to an infinite
-- type]
; result = mkClass tvs fds
sc_theta sc_sel_ids at_items
op_items mindef tycon
}
; traceIf (text "buildClass" <+> ppr tycon)
; return result }
where
mk_op_item :: Class -> TcMethInfo -> TcRnIf n m ClassOpItem
mk_op_item rec_clas (op_name, dm_spec, _)
= do { dm_info <- case dm_spec of
NoDM -> return NoDefMeth
GenericDM -> do { dm_name <- newImplicitBinder op_name mkGenDefMethodOcc
; return (GenDefMeth dm_name) }
VanillaDM -> do { dm_name <- newImplicitBinder op_name mkDefaultMethodOcc
; return (DefMeth dm_name) }
; return (mkDictSelId op_name rec_clas, dm_info) }
{-
Note [Class newtypes and equality predicates]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider
class (a ~ F b) => C a b where
op :: a -> b
We cannot represent this by a newtype, even though it's not
existential, because there are two value fields (the equality
predicate and op). See Trac #2238
Moreover,
class (a ~ F b) => C a b where {}
Here we can't use a newtype either, even though there is only
one field, because equality predicates are unboxed, and classes
are boxed.
-}
| forked-upstream-packages-for-ghcjs/ghc | compiler/iface/BuildTyCl.hs | bsd-3-clause | 14,205 | 0 | 17 | 5,114 | 2,228 | 1,231 | 997 | 196 | 4 |
-- provides a simple testing tool for using ApplyXmlDiff on a proper XML file
import Static.XSimplePath
import System.Environment
import Control.Monad
import Text.XML.Light
main :: IO ()
main = do
args <- getArgs
case args of
("-p" : p1 : ps) -> printDiff p1 ps
(p1 : ps) -> testDiff p1 ps
_ -> putStrLn "missing arguments: xml-file location and diff/xupdate files"
printDiff :: FilePath -> [FilePath] -> IO ()
printDiff p1 ps = do
xml <- readFile p1
case parseXMLDoc xml of
Just xml1 -> mapM_ (\ xup -> do
diff <- readFile xup
ef <- liftM snd $ changeXml xml1 diff
print ef) ps
_ -> fail "failed to parse xml-file"
testDiff :: FilePath -> [FilePath] -> IO ()
testDiff p1 ps = do
xml <- readFile p1
case parseXMLDoc xml of
Just xml1 -> do
xml2 <- foldM (\ xml' xup -> do
diff <- readFile xup
liftM fst $ changeXml xml' diff ) xml1 ps
writeFile (p1 ++ "-output") $ ppTopElement xml2
_ -> fail "failed to parse xml-file"
| keithodulaigh/Hets | Static/testApplyDiff.hs | gpl-2.0 | 1,080 | 0 | 19 | 328 | 356 | 168 | 188 | 30 | 3 |
<?xml version="1.0" encoding="UTF-8"?><!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<helpset version="2.0" xml:lang="pl-PL">
<title>Passive Scan Rules | ZAP Extension</title>
<maps>
<homeID>top</homeID>
<mapref location="map.jhm"/>
</maps>
<view>
<name>TOC</name>
<label>Zawartość</label>
<type>org.zaproxy.zap.extension.help.ZapTocView</type>
<data>toc.xml</data>
</view>
<view>
<name>Index</name>
<label>Indeks</label>
<type>javax.help.IndexView</type>
<data>index.xml</data>
</view>
<view>
<name>Search</name>
<label>Szukaj</label>
<type>javax.help.SearchView</type>
<data engine="com.sun.java.help.search.DefaultSearchEngine">
JavaHelpSearch
</data>
</view>
<view>
<name>Favorites</name>
<label>Ulubione</label>
<type>javax.help.FavoritesView</type>
</view>
</helpset> | kingthorin/zap-extensions | addOns/pscanrules/src/main/javahelp/org/zaproxy/zap/extension/pscanrules/resources/help_pl_PL/helpset_pl_PL.hs | apache-2.0 | 982 | 78 | 66 | 160 | 420 | 212 | 208 | -1 | -1 |
module Multi2 where
import Multi1
g = fib fib_gen 46
| RefactoringTools/HaRe | old/testing/introThreshold/Multi2_TokOut.hs | bsd-3-clause | 55 | 0 | 5 | 12 | 17 | 10 | 7 | 3 | 1 |
module ListSort () where
import Language.Haskell.Liquid.Prelude
append k [] ys = k:ys
append k (x:xs) ys = x:(append k xs ys)
takeL x [] = []
takeL x (y:ys) = if (y<x) then y:(takeL x ys) else takeL x ys
takeGE x [] = []
takeGE x (y:ys) = if (y>=x) then y:(takeGE x ys) else takeGE x ys
{-@ quicksort :: (Ord a) => xs:[a] -> [a]<{\fld v -> (v < fld)}> @-}
quicksort [] = []
quicksort (x:xs) = append x xsle xsge
where xsle = quicksort (takeL x xs)
xsge = quicksort (takeGE x xs)
chk [] = liquidAssertB True
chk (x1:xs) = case xs of
[] -> liquidAssertB True
x2:xs2 -> liquidAssertB (x1 <= x2) && chk xs
rlist = map choose [1 .. 10]
bar = quicksort rlist
prop0 = chk bar
| ssaavedra/liquidhaskell | tests/neg/ListQSort.hs | bsd-3-clause | 763 | 2 | 11 | 231 | 364 | 189 | 175 | 19 | 2 |
{-# LANGUAGE CPP, FlexibleContexts, OverloadedStrings, TupleSections, ScopedTypeVariables, ExtendedDefaultRules, LambdaCase #-}
module Main where
import Control.Applicative
import Control.Monad
import Control.Monad.IO.Class
import Control.Lens (over, _1)
import Control.Concurrent.MVar
import Control.Concurrent
import Data.Char (isLower, toLower, isDigit, isSpace)
import Data.IORef
import qualified Data.HashMap.Strict as HM
import Data.List (partition, isPrefixOf)
import Data.Maybe
import Data.Monoid
import Data.Traversable (sequenceA)
import qualified Data.ByteString as B
import Data.Text (Text)
import qualified Data.Text as T
import qualified Data.Text.IO as T
import qualified Data.Text.Lazy as TL
import Data.Time.Clock (getCurrentTime, diffUTCTime)
import Data.Time.Clock.POSIX (utcTimeToPOSIXSeconds)
import Data.Traversable (traverse)
import Filesystem (removeTree, isFile, getWorkingDirectory, createDirectory, copyFile)
import Filesystem.Path ( replaceExtension, basename, directory, extension, addExtension
, filename, addExtensions, dropExtensions)
import Filesystem.Path.CurrentOS (fromText, toText, encodeString)
import Prelude hiding (FilePath)
import qualified Prelude
import Shelly
import System.Directory (doesFileExist, getCurrentDirectory, findExecutable)
import System.Environment (getArgs, getEnv)
import System.Exit
import System.IO hiding (FilePath)
import System.IO.Error
import System.Process ( createProcess, proc, CreateProcess(..), StdStream(..)
, terminateProcess, waitForProcess, readProcessWithExitCode
, ProcessHandle )
import System.Random (randomRIO)
import System.Timeout (timeout)
import Test.Framework
import Test.Framework.Providers.HUnit (testCase)
import Test.HUnit.Base (assertBool, assertFailure, assertEqual, Assertion)
import Test.HUnit.Lang (HUnitFailure(..))
import qualified Data.Yaml as Yaml
import Data.Yaml (FromJSON(..), Value(..), (.:), (.:?), (.!=))
import Data.Default
import Foreign.C.Error (ePIPE, Errno(..))
import Control.DeepSeq
import GHC.IO.Exception(IOErrorType(..), IOException(..))
import qualified Control.Exception as C
import Text.Read (readMaybe)
import Options.Applicative
import Options.Applicative.Types
import Options.Applicative.Internal
import Options.Applicative.Help hiding ((</>), fullDesc)
import qualified Options.Applicative.Help as H
default (Text)
-- | path containing the test cases and data files
getTestDir :: FilePath -> IO FilePath
#ifdef STANDALONE
getTestDir ghcjs = do
(ec, libDir, _) <- readProcessWithExitCode (encodeString ghcjs) ["--print-libdir"] ""
when (ec /= ExitSuccess) (error "could not determine GHCJS installation directory")
let testDir = fromString (trim libDir) </> "test"
e <- doesFileExist (encodeString $ testDir </> "tests.yaml")
when (not e) (error $ "test suite not found in " ++ toStringIgnore testDir ++ ", GHCJS might have been installed without tests")
return testDir
#else
getTestDir _ = do
testDir <- (</> "test") . fromString <$> getCurrentDirectory
e <- doesFileExist (encodeString $ testDir </> "tests.yaml")
when (not e) (error $ "test suite not found in " ++ toStringIgnore testDir)
return testDir
#endif
main :: IO ()
main = shellyE . silently . withTmpDir $ liftIO . setupTests
setupTests :: FilePath -> IO ()
setupTests tmpDir = do
args <- getArgs
(testArgs, leftoverArgs) <-
case runP (runParser AllowOpts optParser args) (prefs idm) of
(Left err, _ctx) -> error ("error parsing arguments: " ++ show err)
(Right (a,l), _ctx) -> return (a,l)
when (taHelp testArgs) $ do
defaultMainWithArgs [] ["--help"] `C.catch` \(e::ExitCode) -> return ()
putStrLn $ renderHelp 80 (parserHelp (prefs idm) optParser)
exitSuccess
let ghcjs = fromString (taWithGhcjs testArgs)
ghcjsPkg = fromString (taWithGhcjsPkg testArgs)
runhaskell = fromString (taWithRunhaskell testArgs)
checkBooted ghcjs
testDir <- maybe (getTestDir ghcjs) (return . fromString) (taWithTests testArgs)
nodePgm <- checkProgram "node" (taWithNode testArgs) ["--help"]
smPgm <- checkProgram "js" (taWithSpiderMonkey testArgs) ["--help"]
jscPgm <- checkProgram "jsc" (taWithJavaScriptCore testArgs) ["--help"]
-- fixme use command line options instead
onlyOptEnv <- getEnvOpt "GHCJS_TEST_ONLYOPT"
onlyUnoptEnv <- getEnvOpt "GHCJS_TEST_ONLYUNOPT"
log <- newIORef []
let noProf = taNoProfiling testArgs
(symbs, base) <- prepareBaseBundle testDir ghcjs []
(profSymbs, profBase) <- if noProf then return (symbs, base)
else prepareBaseBundle testDir ghcjs ["-prof"]
let specFile = testDir </> if taBenchmark testArgs then "benchmarks.yaml" else "tests.yaml"
symbsFile = tmpDir </> "base.symbs"
profSymbsFile = tmpDir </> "base.p_symbs"
disUnopt = onlyOptEnv || taBenchmark testArgs
disOpt = onlyUnoptEnv
opts = TestOpts (onlyOptEnv || taBenchmark testArgs) onlyUnoptEnv noProf (taTravis testArgs) log testDir
symbsFile base
profSymbsFile profBase
ghcjs runhaskell nodePgm smPgm jscPgm
es <- doesFileExist (encodeString specFile)
when (not es) (error $ "test suite not found in " ++ toStringIgnore testDir)
ts <- B.readFile (encodeString specFile) >>=
\x -> case Yaml.decodeEither x of
Left err -> error ("error in test spec file: " ++ toStringIgnore specFile ++ "\n" ++ err)
Right t -> return t
groups <- forM (tsuiGroups ts) $ \(dir, name) ->
testGroup name <$> allTestsIn opts testDir dir
checkRequiredPackages (fromString $ taWithGhcjsPkg testArgs) (tsuiRequiredPackages ts)
B.writeFile (encodeString symbsFile) symbs
B.writeFile (encodeString profSymbsFile) profSymbs
when (disUnopt && disOpt) (putStrLn "nothing to do, optimized and unoptimized disabled")
putStrLn ("running tests in " <> toStringIgnore testDir)
defaultMainWithArgs groups leftoverArgs `C.catch` \(e::ExitCode) -> do
errs <- readIORef log
when (e /= ExitSuccess && not (null errs))
(putStrLn "\nFailed tests:" >> mapM_ putStrLn (reverse errs) >> putStrLn "")
when (e /= ExitSuccess) (C.throwIO e)
checkBooted :: FilePath -> IO ()
checkBooted ghcjs = check `C.catch` \(e::C.SomeException) -> cantRun e
where
cantRun e = do
#ifdef STANDALONE
putStrLn ("Error running GHCJS: " ++ show e)
exitFailure
#else
putStrLn ("Error running GHCJS, skipping tests:\n" ++ show e)
exitSuccess
#endif
check = do
(ec, _, _) <- readProcessWithExitCode (toStringIgnore ghcjs) ["-c", "x.hs"] ""
case ec of
(ExitFailure 87) -> do
putStrLn "GHCJS is not booted, skipping tests"
exitSuccess
_ -> return ()
-- find programs at the start so we don't try to run a nonexistent program over and over again
-- temporary workaround, process-1.2.0.0 leaks when trying to run a nonexistent program
checkProgram :: FilePath -> Maybe String -> [String] -> IO (Maybe FilePath)
checkProgram defName userName testArgs = do
let testProg p as = either (\(e::C.SomeException) -> False) (const True) <$>
C.try (readProcessWithExitCode' "/" p as "")
findExecutable (fromMaybe (encodeString defName) userName) >>= \case
Nothing | Just n <- userName -> error ("could not find program " ++ toStringIgnore defName ++ " at " ++ n)
Nothing -> return Nothing
Just p -> do
testProg p testArgs >>= \case
True -> return (Just $ fromString p)
False -> return Nothing
data TestArgs = TestArgs { taHelp :: Bool
, taWithGhcjs :: String
, taWithGhcjsPkg :: String
, taWithRunhaskell :: String
, taWithNode :: Maybe String
, taWithSpiderMonkey :: Maybe String
, taWithJavaScriptCore :: Maybe String
, taWithTests :: Maybe String
, taNoProfiling :: Bool
, taBenchmark :: Bool
, taTravis :: Bool
} deriving Show
optParser :: Parser TestArgs
optParser = TestArgs <$> switch (long "help" <> help "show help message")
<*> strOption (long "with-ghcjs" <> metavar "PROGRAM" <> value "ghcjs" <> help "ghcjs program to use")
<*> strOption (long "with-ghcjs-pkg" <> metavar "PROGRAM" <> value "ghcjs-pkg" <> help "ghcjs-pkg program to use")
<*> strOption (long "with-runhaskell" <> metavar "PROGRAM" <> value "runhaskell" <> help "runhaskell program to use")
<*> (optional . strOption) (long "with-node" <> metavar "PROGRAM" <> help "node.js program to use")
<*> (optional . strOption) (long "with-spidermonkey" <> metavar "PROGRAM" <> help "SpiderMonkey jsshell program to use")
<*> (optional . strOption) (long "with-javascriptcore" <> metavar "PROGRAM" <> help "JavaScriptCore jsc program to use")
<*> (optional . strOption) (long "with-tests" <> metavar "LOCATION" <> help "location of the test cases")
<*> switch (long "no-profiling" <> help "do not run profiling tests")
<*> switch (long "benchmark" <> help "run benchmarks instead of regression tests")
<*> switch (long "travis" <> help "use settings for running on Travis CI")
-- settings for the test suite
data TestOpts = TestOpts { disableUnopt :: Bool
, disableOpt :: Bool
, noProfiling :: Bool
, travisCI :: Bool
, failedTests :: IORef [String] -- yes it's ugly but i don't know how to get the data from test-framework
, testsuiteLocation :: FilePath
, baseSymbs :: FilePath
, baseJs :: B.ByteString
, profBaseSymbs :: FilePath
, profBaseJs :: B.ByteString
, ghcjsProgram :: FilePath
, runhaskellProgram :: FilePath
, nodeProgram :: Maybe FilePath
, spiderMonkeyProgram :: Maybe FilePath
, javaScriptCoreProgram :: Maybe FilePath
}
-- settings for a single test
data TestSettings =
TestSettings { tsDisableNode :: Bool
, tsDisableSpiderMonkey :: Bool
, tsDisableJavaScriptCore :: Bool
, tsDisableOpt :: Bool
, tsDisableUnopt :: Bool
, tsDisableTravis :: Bool
, tsDisabled :: Bool
, tsProf :: Bool -- ^ use profiling bundle
, tsCompArguments :: [String] -- ^ command line arguments to pass to compiler
, tsArguments :: [String] -- ^ command line arguments to pass to interpreter(node, js)
, tsCopyFiles :: [String] -- ^ copy these files to the dir where the test is run
} deriving (Eq, Show)
instance Default TestSettings where
def = TestSettings False False False False False False False False [] [] []
instance FromJSON TestSettings where
parseJSON (Object o) = TestSettings <$> o .:? "disableNode" .!= False
<*> o .:? "disableSpiderMonkey" .!= False
<*> o .:? "disableJavaScriptCore" .!= False
<*> o .:? "disableOpt" .!= False
<*> o .:? "disableUnopt" .!= False
<*> o .:? "disableTravis" .!= False
<*> o .:? "disabled" .!= False
<*> o .:? "prof" .!= False
<*> o .:? "compArguments" .!= []
<*> o .:? "arguments" .!= []
<*> o .:? "copyFiles" .!= []
parseJSON _ = mempty
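-- An illustrative per-test @.settings@ file matching the keys parsed above;
-- the values are invented for the example:
--
-- > disableUnopt: true
-- > prof: false
-- > compArguments: [ "-DQUICK" ]
-- > arguments: [ "--size", "100" ]
-- > copyFiles: [ "input.txt" ]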
-- testsuite description
data TestSuite =
TestSuite { tsuiGroups :: [(FilePath, String)]
, tsuiRequiredPackages :: [Text]
}
instance FromJSON TestSuite where
parseJSON (Object o) = TestSuite <$> (groups =<< o .: "groups") <*> o .: "requiredPackages"
where
groups (Object o) = sequenceA $ map (\(k,v) -> (,) <$> pure (fromText k) <*> parseJSON v) (HM.toList o)
groups _ = mempty
parseJSON _ = mempty
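-- A hypothetical @tests.yaml@ in the shape expected above; the group
-- directories, titles and package names are examples only:
--
-- > groups:
-- >   pkg: "Package tests"
-- >   ffi: "FFI tests"
-- > requiredPackages:
-- >   - base
-- >   - bytestring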
testCaseLog :: TestOpts -> TestName -> Assertion -> Test
testCaseLog opts name assertion = testCase name assertion'
where
assertion' = assertion `C.catch` \e@(HUnitFailure msg) -> do
let errMsg = listToMaybe (filter (not . null) (lines msg))
err = name ++ maybe "" (\x -> " (" ++ trunc (dropName x) ++ ")") errMsg
trunc xs | length xs > 43 = take 40 xs ++ "..."
| otherwise = xs
dropName xs | name `isPrefixOf` xs = drop (length name) xs
| otherwise = xs
modifyIORef (failedTests opts) (err:)
C.throwIO e
{-
run all files in path as stdio tests
tests are:
- .hs or .lhs files
- that start with a lowercase letter
-}
allTestsIn :: MonadIO m => TestOpts -> FilePath -> FilePath -> m [Test]
allTestsIn testOpts testDir groupDir = shelly $ do
cd testDir
map (stdioTest testOpts) <$> findWhen (return . isTestFile) groupDir
where
testFirstChar c = isLower c || isDigit c
isTestFile file =
(extension file == Just "hs" || extension file == Just "lhs") &&
((maybe False testFirstChar . listToMaybe . encodeString . basename $ file) ||
(basename file == "Main"))
{-
a stdio test tests two things:
stdout/stderr/exit output must be either:
- the same as filename.out/filename.err/filename.exit (if any exists)
- the same as runhaskell output (otherwise)
the javascript is run with `js' (SpiderMonkey) and `node` (v8)
if they're in $PATH.
-}
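{-
an illustrative on-disk layout for one test case; the file names are
hypothetical and only the extensions matter to stdioExpected and settingsFor:
  fib/fib.hs       -- the test program itself
  fib/fib.stdout   -- expected stdout (with no .exit/.stdout/.stderr file,
                      the runhaskell output is used instead)
  fib/fib.exit     -- expected exit code, e.g. 1 (missing means ExitSuccess)
  fib/fib.settings -- optional per-test settings, see TestSettings
-}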
data StdioResult = StdioResult { stdioExit :: ExitCode
, stdioOut :: Text
, stdioErr :: Text
}
instance Eq StdioResult where
(StdioResult e1 ou1 er1) == (StdioResult e2 ou2 er2) =
e1 == e2 && (T.strip ou1 == T.strip ou2) && (T.strip er1 == T.strip er2)
outputLimit :: Int
outputLimit = 4096
truncLimit :: Int -> Text -> Text
truncLimit n t | T.length t >= n = T.take n t <> "\n[output truncated]"
| otherwise = t
instance Show StdioResult where
show (StdioResult ex out err) =
"\n>>> exit: " ++ show ex ++ "\n>>> stdout >>>\n" ++
(T.unpack . T.strip) (truncLimit outputLimit out) ++
"\n<<< stderr >>>\n" ++ (T.unpack . T.strip) (truncLimit outputLimit err) ++ "\n<<<\n"
stdioTest :: TestOpts -> FilePath -> Test
stdioTest testOpts file = testCaseLog testOpts (encodeString file) (stdioAssertion testOpts file)
stdioAssertion :: TestOpts -> FilePath -> Assertion
stdioAssertion testOpts file = do
putStrLn ("running test: " ++ encodeString file)
mexpected <- stdioExpected testOpts file
case mexpected of
Nothing -> putStrLn "test disabled"
Just (expected, t) -> do
actual <- runGhcjsResult testOpts file
when (null actual) (putStrLn "warning: no test results")
case t of
Nothing -> return ()
Just ms -> putStrLn ((padTo 35 $ encodeString file) ++ " - " ++ (padTo 35 "runhaskell") ++ " " ++ show ms ++ "ms")
forM_ actual $ \((a,t),d) -> do
assertEqual (encodeString file ++ ": " ++ d) expected a
putStrLn ((padTo 35 $ encodeString file) ++ " - " ++ (padTo 35 d) ++ " " ++ show t ++ "ms")
padTo :: Int -> String -> String
padTo n xs | l < n = xs ++ replicate (n-l) ' '
| otherwise = xs
where l = length xs
stdioExpected :: TestOpts -> FilePath -> IO (Maybe (StdioResult, Maybe Integer))
stdioExpected testOpts file = do
settings <- settingsFor testOpts file
if tsDisabled settings
then return Nothing
else do
xs@[mex,mout,merr] <- mapM (readFilesIfExists.(map (replaceExtension (testsuiteLocation testOpts </> file))))
[["exit"], ["stdout", "out"], ["stderr","err"]]
if any isJust xs
then return . Just $ (StdioResult (fromMaybe ExitSuccess $ readExitCode =<< mex)
(fromMaybe "" mout) (fromMaybe "" merr), Nothing)
else do
mr <- runhaskellResult testOpts settings file
case mr of
Nothing -> assertFailure "cannot run `runhaskell'" >> return undefined
Just (r,t) -> return (Just (r, Just t))
readFileIfExists :: FilePath -> IO (Maybe Text)
readFileIfExists file = do
e <- isFile file
case e of
False -> return Nothing
True -> Just <$> T.readFile (encodeString file)
readFilesIfExists :: [FilePath] -> IO (Maybe Text)
readFilesIfExists [] = return Nothing
readFilesIfExists (x:xs) = do
r <- readFileIfExists x
if (isJust r)
then return r
else readFilesIfExists xs
-- test settings
settingsFor :: TestOpts -> FilePath -> IO TestSettings
settingsFor opts file = do
e <- isFile (testsuiteLocation opts </> settingsFile)
case e of
False -> return def
True -> do
cfg <- B.readFile settingsFile'
case Yaml.decodeEither cfg of
Left err -> errDef
Right t -> return t
where
errDef = do
putStrLn $ "error in test settings: " ++ settingsFile'
putStrLn "running test with default settings"
return def
settingsFile = replaceExtension file "settings"
settingsFile' = encodeString (testsuiteLocation opts </> settingsFile)
runhaskellResult :: TestOpts
-> TestSettings
-> FilePath
-> IO (Maybe (StdioResult, Integer))
runhaskellResult testOpts settings file = do
let args = tsArguments settings
r <- runProcess (testsuiteLocation testOpts </> directory file) (runhaskellProgram testOpts)
([ "-w", encodeString $ filename file] ++ args) ""
return r
extraJsFiles :: FilePath -> IO [String]
extraJsFiles file =
let jsFile = addExtensions (dropExtensions file) ["foreign", "js"]
in do
e <- isFile jsFile
return $ if e then [encodeString jsFile] else []
runGhcjsResult :: TestOpts -> FilePath -> IO [((StdioResult, Integer), String)]
runGhcjsResult opts file = do
settings <- settingsFor opts file
if tsDisabled settings || (tsProf settings && noProfiling opts) || (tsDisableTravis settings && travisCI opts)
then return []
else do
let unopt = if disableUnopt opts || tsDisableUnopt settings then [] else [False]
opt = if disableOpt opts || tsDisableOpt settings then [] else [True]
runs = unopt ++ opt
concat <$> mapM (run settings) runs
where
run settings optimize = do
output <- outputPath
extraFiles <- extraJsFiles file
cd <- getWorkingDirectory
-- compile test
let outputExe = cd </> output </> "a"
outputExe' = outputExe <.> "jsexe"
outputBuild = cd </> output </> "build"
outputRun = outputExe' </> ("all.js"::FilePath)
input = file
desc = ", optimization: " ++ show optimize
opt = if optimize then ["-O2"] else []
extraCompArgs = tsCompArguments settings
prof = tsProf settings
compileOpts = [ "-no-rts", "-no-stats"
, "-o", encodeString outputExe
, "-odir", encodeString outputBuild
, "-hidir", encodeString outputBuild
, "-use-base" , encodeString ((if prof then profBaseSymbs else baseSymbs) opts)
, encodeString (filename input)
] ++ opt ++ extraCompArgs ++ extraFiles
args = tsArguments settings
runTestPgm name disabled getPgm pgmArgs pgmArgs'
| Just p <- getPgm opts, not (disabled settings) =
fmap (,name ++ desc) <$>
runProcess outputExe' p (pgmArgs++encodeString outputRun:pgmArgs'++args) ""
| otherwise = return Nothing
C.bracket (createDirectory False output)
(\_ -> removeTree output) $ \_ -> do -- fixme this doesn't remove the output if the test program is stopped with ctrl-c
createDirectory False outputBuild
e <- liftIO $ runProcess (testsuiteLocation opts </> directory file) (ghcjsProgram opts) compileOpts ""
case e of
Nothing -> assertFailure "cannot find ghcjs"
Just (r,_) -> do
when (stdioExit r /= ExitSuccess) (print r)
assertEqual "compile error" ExitSuccess (stdioExit r)
-- copy data files for test
forM_ (tsCopyFiles settings) $ \cfile ->
let cfile' = fromText (T.pack cfile)
in copyFile (testsuiteLocation opts </> directory file </> cfile') (outputExe' </> cfile')
-- combine files with base bundle from incremental link
[out, lib] <- mapM (B.readFile . (\x -> encodeString (outputExe' </> x)))
["out.js", "lib.js"]
let runMain = "\nh$main(h$mainZCMainzimain);\n"
B.writeFile (encodeString outputRun) $
(if prof then profBaseJs else baseJs) opts <> lib <> out <> runMain
          -- run with node.js, SpiderMonkey and JavaScriptCore
nodeResult <- runTestPgm "node" tsDisableNode nodeProgram ["--use_strict"] []
smResult <- runTestPgm "SpiderMonkey" tsDisableSpiderMonkey spiderMonkeyProgram ["--strict"] []
jscResult <- over (traverse . _1 . _1) unmangleJscResult <$>
runTestPgm "JavaScriptCore" tsDisableJavaScriptCore javaScriptCoreProgram [] ["--"]
return $ catMaybes [nodeResult, smResult, jscResult]
-- jsc prefixes all stderr lines with "--> " and does not let us
-- return a nonzero exit status
unmangleJscResult :: StdioResult -> StdioResult
unmangleJscResult (StdioResult exit out err)
| (x:xs) <- reverse (T.lines err)
, Just code <- T.stripPrefix "--> GHCJS JSC exit status: " x
= StdioResult (parseExit code) out (T.unlines . reverse $ map unmangle xs)
| otherwise = StdioResult exit out (T.unlines . map unmangle . T.lines $ err)
where
unmangle xs = fromMaybe xs (T.stripPrefix "--> " xs)
parseExit x = case reads (T.unpack x) of
[(0,"")] -> ExitSuccess
[(n,"")] -> ExitFailure n
_ -> ExitFailure 999
outputPath :: IO FilePath
outputPath = do
t <- (show :: Integer -> String) . round . (*1000) . utcTimeToPOSIXSeconds <$> getCurrentTime
rnd <- show <$> randomRIO (1000000::Int,9999999)
return . fromString $ "ghcjs_test_" ++ t ++ "_" ++ rnd
-- | returns Nothing if the program cannot be run
runProcess :: MonadIO m => FilePath -> FilePath -> [String] -> String -> m (Maybe (StdioResult, Integer))
runProcess workingDir pgm args input = do
before <- liftIO getCurrentTime
r <- liftIO (C.try $ timeout 180000000 (readProcessWithExitCode' (encodeString workingDir) (encodeString pgm) args input))
case r of
Left (e::C.SomeException) -> return Nothing
Right Nothing -> return (Just (StdioResult ExitSuccess "" "process killed after timeout", 0))
Right (Just (ex, out, err)) -> do
after <- liftIO getCurrentTime
return $
case ex of -- fixme is this the right way to find out that a program does not exist?
(ExitFailure 127) -> Nothing
_ ->
Just ( StdioResult ex (T.pack out) (T.pack err)
, round $ 1000 * (after `diffUTCTime` before)
)
-- modified readProcessWithExitCode with working dir
readProcessWithExitCode'
:: Prelude.FilePath -- ^ Working directory
-> Prelude.FilePath -- ^ Filename of the executable (see 'proc' for details)
-> [String] -- ^ any arguments
-> String -- ^ standard input
-> IO (ExitCode,String,String) -- ^ exitcode, stdout, stderr
readProcessWithExitCode' workingDir cmd args input = do
let cp_opts = (proc cmd args) {
std_in = CreatePipe,
std_out = CreatePipe,
std_err = CreatePipe,
cwd = Just workingDir
}
withCreateProcess cp_opts $
\(Just inh) (Just outh) (Just errh) ph -> do
out <- hGetContents outh
err <- hGetContents errh
-- fork off threads to start consuming stdout & stderr
withForkWait (C.evaluate $ rnf out) $ \waitOut ->
withForkWait (C.evaluate $ rnf err) $ \waitErr -> do
-- now write any input
unless (null input) $
ignoreSigPipe $ hPutStr inh input
-- hClose performs implicit hFlush, and thus may trigger a SIGPIPE
ignoreSigPipe $ hClose inh
-- wait on the output
waitOut
waitErr
hClose outh
hClose errh
-- wait on the process
ex <- waitForProcess ph
return (ex, out, err)
withCreateProcess
:: CreateProcess
-> (Maybe Handle -> Maybe Handle -> Maybe Handle -> ProcessHandle -> IO a)
-> IO a
withCreateProcess c action =
C.bracketOnError (createProcess c) cleanupProcess
(\(m_in, m_out, m_err, ph) -> action m_in m_out m_err ph)
cleanupProcess :: (Maybe Handle, Maybe Handle, Maybe Handle, ProcessHandle)
-> IO ()
cleanupProcess (mb_stdin, mb_stdout, mb_stderr, ph) = do
terminateProcess ph
-- Note, it's important that other threads that might be reading/writing
-- these handles also get killed off, since otherwise they might be holding
-- the handle lock and prevent us from closing, leading to deadlock.
maybe (return ()) (ignoreSigPipe . hClose) mb_stdin
maybe (return ()) hClose mb_stdout
maybe (return ()) hClose mb_stderr
-- terminateProcess does not guarantee that it terminates the process.
-- Indeed on Unix it's SIGTERM, which asks nicely but does not guarantee
-- that it stops. If it doesn't stop, we don't want to hang, so we wait
-- asynchronously using forkIO.
_ <- forkIO (waitForProcess ph >> return ())
return ()
withForkWait :: IO () -> (IO () -> IO a) -> IO a
withForkWait async body = do
waitVar <- newEmptyMVar :: IO (MVar (Either C.SomeException ()))
C.mask $ \restore -> do
tid <- forkIO $ C.try (restore async) >>= putMVar waitVar
let wait = takeMVar waitVar >>= either C.throwIO return
restore (body wait) `C.onException` killThread tid
ignoreSigPipe :: IO () -> IO ()
ignoreSigPipe = C.handle $ \e -> case e of
IOError { ioe_type = ResourceVanished
, ioe_errno = Just ioe }
| Errno ioe == ePIPE -> return ()
_ -> C.throwIO e
-------------------
{-
a mocha test changes to the directory,
runs the action, then runs `mocha'
fails if mocha exits nonzero
-}
mochaTest :: FilePath -> IO a -> IO b -> Test
mochaTest dir pre post = do
undefined
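-- A hypothetical sketch of what an implementation could look like (it was
-- never written in the original); it reuses 'runProcess' above and assumes
-- a `mocha' executable on the PATH:
--
-- > mochaTest dir pre post = testCase (encodeString dir) $ do
-- >     _ <- pre
-- >     r <- runProcess dir "mocha" [] ""
-- >     _ <- post
-- >     maybe (assertFailure "cannot run mocha")
-- >           (\(res, _) -> assertEqual "mocha" ExitSuccess (stdioExit res))
-- >           r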
writeFileT :: FilePath -> Text -> IO ()
writeFileT fp t = T.writeFile (encodeString fp) t
readFileT :: FilePath -> IO Text
readFileT fp = T.readFile (encodeString fp)
readExitCode :: Text -> Maybe ExitCode
readExitCode = fmap convert . readMaybe . T.unpack
where
convert 0 = ExitSuccess
convert n = ExitFailure n
checkRequiredPackages :: FilePath -> [Text] -> IO ()
checkRequiredPackages ghcjsPkg requiredPackages = shelly . silently $ do
installedPackages <- T.words <$> run "ghcjs-pkg" ["list", "--simple-output"]
forM_ requiredPackages $ \pkg -> do
when (not $ any ((pkg <> "-") `T.isPrefixOf`) installedPackages) $ do
echo ("warning: package `" <> pkg <> "' is required by the test suite but is not installed")
prepareBaseBundle :: FilePath -> FilePath -> [Text] -> IO (B.ByteString, B.ByteString)
prepareBaseBundle testDir ghcjs extraArgs = shellyE . silently . sub . withTmpDir $ \tmp -> do
cp (testDir </> "TestLinkBase.hs") tmp
cp (testDir </> "TestLinkMain.hs") tmp
cd tmp
run_ ghcjs $ ["-generate-base", "TestLinkBase", "-o", "base", "TestLinkMain.hs"] ++ extraArgs
cd "base.jsexe"
[symbs, js, lib, rts] <- mapM readBinary
["out.base.symbs", "out.base.js", "lib.base.js", "rts.js"]
return (symbs, rts <> lib <> js)
getEnvMay :: String -> IO (Maybe String)
getEnvMay xs = fmap Just (getEnv xs)
`C.catch` \(_::C.SomeException) -> return Nothing
getEnvOpt :: MonadIO m => String -> m Bool
getEnvOpt xs = liftIO (maybe False ((`notElem` ["0","no"]).map toLower) <$> getEnvMay xs)
trim :: String -> String
trim = let f = dropWhile isSpace . reverse in f . f
shellyE :: Sh a -> IO a
shellyE m = do
r <- newIORef (Left undefined)
let wio r v = liftIO (writeIORef r v)
a <- shelly $ (wio r . Right =<< m) `catch_sh` \(e::C.SomeException) -> wio r (Left e)
readIORef r >>= \case
Left e -> C.throw e
Right a -> return a
toStringIgnore :: FilePath -> String
toStringIgnore = T.unpack . either id id . toText
fromString :: String -> FilePath
fromString = fromText . T.pack
| beni55/ghcjs | test/TestRunner.hs | mit | 30,626 | 1 | 38 | 9,358 | 8,347 | 4,261 | 4,086 | 525 | 8 |
module T11167 where
data SomeException
newtype ContT r m a = ContT {runContT :: (a -> m r) -> m r}
runContT' :: ContT r m a -> (a -> m r) -> m r
runContT' = runContT
catch_ :: IO a -> (SomeException -> IO a) -> IO a
catch_ = undefined
foo :: IO ()
foo = (undefined :: ContT () IO a)
`runContT` (undefined :: a -> IO ())
`catch_` (undefined :: SomeException -> IO ())
foo' :: IO ()
foo' = (undefined :: ContT () IO a)
`runContT'` (undefined :: a -> IO ())
`catch_` (undefined :: SomeException -> IO ())
| olsner/ghc | testsuite/tests/rename/should_compile/T11167.hs | bsd-3-clause | 542 | 0 | 10 | 145 | 256 | 141 | 115 | -1 | -1 |
{-# LANGUAGE StaticPointers #-}
module StaticPointers01 where
import GHC.StaticPtr
f0 :: StaticPtr (Int -> Int)
f0 = static g
f1 :: StaticPtr (Bool -> Bool -> Bool)
f1 = static (&&)
f2 :: StaticPtr (Bool -> Bool -> Bool)
f2 = static ((&&) . id)
g :: Int -> Int
g = id
| ghc-android/ghc | testsuite/tests/typecheck/should_compile/TcStaticPointers01.hs | bsd-3-clause | 274 | 0 | 8 | 59 | 112 | 63 | 49 | 11 | 1 |
{-# LANGUAGE Rank2Types #-}
-- Tests subsumption for infix operators (in this case (.))
-- Broke GHC 6.4!
-- Now it breaks the impredicativity story
-- (id {a}) . (id {a}) :: a -> a
-- And (forall m. Monad m => m a) /~ IO a
module Main(main) where
foo :: (forall m. Monad m => m a) -> IO a
foo = id . id
main :: IO ()
main = foo (return ())
| urbanslug/ghc | testsuite/tests/typecheck/should_run/tcrun035.hs | bsd-3-clause | 348 | 0 | 9 | 83 | 80 | 45 | 35 | 6 | 1 |
import Test.HUnit (Assertion, (@=?), runTestTT, Test(..))
import Control.Monad (void)
import DNA (hammingDistance)
testCase :: String -> Assertion -> Test
testCase label assertion = TestLabel label (TestCase assertion)
main :: IO ()
main = void $ runTestTT $ TestList
[ TestList hammingDistanceTests ]
hammingDistanceTests :: [Test]
hammingDistanceTests =
[ testCase "no difference between empty strands" $
0 @=? hammingDistance "" ""
, testCase "no difference between identical strands" $
0 @=? hammingDistance "GGACTGA" "GGACTGA"
, testCase "complete hamming distance in small strand" $
3 @=? hammingDistance "ACT" "GGA"
, testCase "hamming distance in off by one strand" $
19 @=? hammingDistance
"GGACGGATTCTGACCTGGACTAATTTTGGGG"
"AGGACGGATTCTGACCTGGACTAATTTTGGGG"
, testCase "small hamming distance in middle somewhere" $
1 @=? hammingDistance "GGACG" "GGTCG"
, testCase "larger distance" $
2 @=? hammingDistance "ACCAGGG" "ACTATGG"
, testCase "ignores extra length on other strand when longer" $
3 @=? hammingDistance "AAACTAGGGG" "AGGCTAGCGGTAGGAC"
, testCase "ignores extra length on original strand when longer" $
5 @=? hammingDistance "GACTACGGACAGGGTAGGGAAT" "GACATCGCACACC"
, TestLabel "does not actually shorten original strand" $
TestList $ map TestCase $
[ 1 @=? hammingDistance "AGACAACAGCCAGCCGCCGGATT" "AGGCAA"
, 1 @=? hammingDistance "AGACAACAGCCAGCCGCCGGATT" "AGGCAA"
, 4 @=? hammingDistance
"AGACAACAGCCAGCCGCCGGATT"
"AGACATCTTTCAGCCGCCGGATTAGGCAA"
, 1 @=? hammingDistance "AGACAACAGCCAGCCGCCGGATT" "AGG" ]
] | tfausak/exercism-solutions | haskell/point-mutations/point-mutations_test.hs | mit | 1,630 | 0 | 9 | 299 | 344 | 175 | 169 | 36 | 1 |
module TestSuites.ParserCSVSpec (spec) where
import Test.Hspec.Contrib.HUnit(fromHUnitTest)
import Test.HUnit
import HsPredictor.ParserCSV
import HsPredictor.Types
spec = fromHUnitTest $ TestList [
TestLabel ">>readMatches" test_readMatches
]
test_readMatches = TestCase $ do
let r1 = ["2012.08.24,Dortmund,Bremen,2,3,1.0,2.0,3.0"]
let r2 = ["20.08.24,Dortmund,Bremen,2,3,-1,-1,-1"]
let r3 = ["2012.08.24,Dortmund,Bremen,2,three,-1,-1,-1"]
let r4 = ["2012.08.24,Dortmund,Bremen,2,-1,-1"]
let r5 = ["2012.08.24,Dortmund,Bremen,-1,-1,1.0,2.0,3.0"]
let r6 = ["2012.08.24,Dortmund,Bremen,-1,1,1.0,2.0,3.0"]
let r7 = ["2012.08.24,Dortmund,Bremen,1,-1,-1,-1,-1"]
let r8 = ["2012.08.25,Dortmund,Bremen,1,-1,-1,-1,-1"]
assertEqual "Good input"
[Match 20120824 "Dortmund" "Bremen" 2 3 1 2 3]
(readMatches r1)
assertEqual "Wrong date format"
[]
(readMatches r2)
assertEqual "Wrong result format"
[]
(readMatches r3)
assertEqual "Not complete line"
[]
(readMatches r4)
assertEqual "Upcoming match good input"
[Match 20120824 "Dortmund" "Bremen" (-1) (-1) 1.0 2.0 3.0]
(readMatches r5)
assertEqual "Upcoming match bad1"
[]
(readMatches r6)
assertEqual "Upcoming match bad2"
[]
(readMatches r7)
assertEqual "Sort matches"
(readMatches $ r7++r8)
(readMatches $ r8++r7)
assertEqual "Sort matches"
((readMatches r7) ++ (readMatches r8))
(readMatches $ r8++r7)
| Taketrung/HsPredictor | tests/TestSuites/ParserCSVSpec.hs | mit | 1,458 | 0 | 12 | 258 | 407 | 203 | 204 | 43 | 1 |
-- |
-- Module: BigE.TextRenderer.Font
-- Copyright: (c) 2017 Patrik Sandahl
-- Licence: MIT
-- Maintainer: Patrik Sandahl <[email protected]>
-- Stability: experimental
-- Portability: portable
module BigE.TextRenderer.Font
( Font (..)
, fromFile
, enable
, disable
, delete
) where
import qualified BigE.TextRenderer.Parser as Parser
import BigE.TextRenderer.Types (Character (charId), Common, Info,
Page (..))
import BigE.Texture (TextureParameters (..),
defaultParams2D)
import qualified BigE.Texture as Texture
import BigE.Types (Texture, TextureFormat (..),
TextureWrap (..))
import Control.Exception (SomeException, try)
import Control.Monad.IO.Class (MonadIO, liftIO)
import Data.ByteString.Lazy.Char8 (ByteString)
import qualified Data.ByteString.Lazy.Char8 as BS
import Data.HashMap.Strict (HashMap)
import qualified Data.HashMap.Strict as HashMap
import System.FilePath
import Text.Megaparsec (parse)
-- | A loaded font.
data Font = Font
{ info :: !Info
, common :: !Common
, characters :: !(HashMap Int Character)
, fontAtlas :: !Texture
} deriving Show
-- | Read 'Font' data from file and read the referenced texture file.
fromFile :: MonadIO m => FilePath -> m (Either String Font)
fromFile filePath = do
eFnt <- liftIO $ readFontFromFile filePath
case eFnt of
Right fnt -> do
eFontAtlas <- readTextureFromFile filePath fnt
case eFontAtlas of
Right fontAtlas' ->
return $
Right Font
{ info = Parser.info fnt
, common = Parser.common fnt
, characters =
HashMap.fromList $ keyValueList (Parser.characters fnt)
, fontAtlas = fontAtlas'
}
Left err -> return $ Left err
Left err -> return $ Left err
where
keyValueList = map (\char -> (charId char, char))
-- | Enable the font. I.e. bind the texture to the given texture unit.
enable :: MonadIO m => Int -> Font -> m ()
enable unit = Texture.enable2D unit . fontAtlas
-- | Disable the font. I.e. disable the texture at the given texture unit.
disable :: MonadIO m => Int -> m ()
disable = Texture.disable2D
-- | Delete the font. I.e. delete its texture.
delete :: MonadIO m => Font -> m ()
delete = Texture.delete . fontAtlas
-- | Get a 'FontFile' from an external file.
readFontFromFile :: FilePath -> IO (Either String Parser.FontFile)
readFontFromFile filePath = do
eBs <- tryRead filePath
case eBs of
Right bs ->
case parse Parser.parseFontFile filePath bs of
Right fnt -> return $ Right fnt
Left err -> return $ Left (show err)
Left e -> return $ Left (show e)
where
tryRead :: FilePath -> IO (Either SomeException ByteString)
tryRead = try . BS.readFile
-- | Get a 'Texture' from an external file.
readTextureFromFile :: MonadIO m => FilePath -> Parser.FontFile
-> m (Either String Texture)
readTextureFromFile filePath fntFile = do
let fntDir = takeDirectory filePath
texFile = fntDir </> file (Parser.page fntFile)
Texture.fromFile2D texFile
defaultParams2D { format = RGBA8
, wrapS = WrapClampToEdge
, wrapT = WrapClampToEdge
}
| psandahl/big-engine | src/BigE/TextRenderer/Font.hs | mit | 3,787 | 0 | 23 | 1,330 | 842 | 458 | 384 | 80 | 3 |
{-# LANGUAGE FlexibleInstances, MultiParamTypeClasses, UndecidableInstances #-}
{- |
The HList library
(C) 2004-2006, Oleg Kiselyov, Ralf Laemmel, Keean Schupke
A model of label as needed for extensible records.
Record labels are simply type-level naturals.
   This model is as simple and as portable as it could be.
-}
module Data.HList.Label1 where
import Data.HList.FakePrelude
import Data.HList.Record (ShowLabel(..))
-- | Labels are type-level naturals
newtype Label x = Label x deriving Show
-- | Public constructors for labels
label :: HNat n => n -> Label n
label = Label
-- | Construct the first label
firstLabel :: Label HZero
firstLabel = label hZero
-- | Construct the next label
nextLabel :: (HNat t) => Label t -> Label (HSucc t)
nextLabel (Label n) = label (hSucc n)
-- | Equality on labels
instance HEq n n' b
=> HEq (Label n) (Label n') b
-- | Show label
instance Show n => ShowLabel (Label n)
where
showLabel (Label n) = show n
| bjornbm/HList-classic | Data/HList/Label1.hs | mit | 991 | 0 | 9 | 207 | 213 | 115 | 98 | 15 | 1 |
{-# LANGUAGE QuasiQuotes #-}
import Here
str :: String
str = [here|test
test
test test |]
main :: IO()
main = do putStrLn str
| Pnom/haskell-ast-pretty | Test/examples/QuasiQuoteLines.hs | mit | 132 | 0 | 7 | 30 | 40 | 23 | 17 | 6 | 1 |
module Shipper.Outputs (
startDebugOutput,
startZMQ4Output,
startRedisOutput,
) where
import Shipper.Outputs.Debug
import Shipper.Outputs.ZMQ4
import Shipper.Outputs.Redis
| christian-marie/pill-bug | Shipper/Outputs.hs | mit | 185 | 0 | 4 | 27 | 36 | 24 | 12 | 7 | 0 |
module Analysis where
data Criticality = Maximum | Minimum | Inflection
deriving (Eq, Show, Read)
data Extremum p = Extremum
{ exPoint :: p
, exType :: Criticality
} deriving (Eq, Show)
instance Functor Extremum where
fmap f (Extremum p c) = Extremum (f p) c
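-- Locate a critical point of f inside the given range: 'solve' finds a root
-- of f' with Newton's method, and the sign of f'' at that root decides whether
-- it is a maximum, a minimum or an inflection point (e serves as the tolerance
-- for both steps).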
extremum :: (Fractional t, Ord t) =>
(t -> t) -> (t -> t) -> (t -> t) -> t -> (t, t) -> Extremum (t, t)
extremum f f' f'' e r = Extremum (x, y) t
where x = solve f' f'' e r
y = f x
c = f'' x
t | c > e = Minimum
| c < (-e) = Maximum
| otherwise = Inflection
-- TODO: use bisection to ensure bounds
solve :: (Fractional t, Ord t) =>
(t -> t) -> (t -> t) -> t -> (t, t) -> t
solve f f' e (x0, _) = head . convergedBy e . iterate step $ x0
where step x = x - f x / f' x
dropWhile2 :: (t -> t -> Bool) -> [t] -> [t]
dropWhile2 p xs@(x : xs'@(x' : _)) = if not (p x x') then xs else dropWhile2 p xs'
dropWhile2 _ xs = xs
convergedBy :: (Num t, Ord t) => t -> [t] -> [t]
convergedBy e = dropWhile2 unconverging
where unconverging x x' = abs (x - x') >= e
| neilmayhew/Tides | Analysis.hs | mit | 1,094 | 0 | 12 | 322 | 563 | 299 | 264 | 28 | 2 |
module Game.Client where
import Network.Simple.TCP
import Control.Concurrent.MVar
import Control.Applicative
import Game.Position
import Game.Player
import qualified Game.GameWorld as G
import qualified Game.Resources as R
import qualified Game.Unit as U
-- | Client describes the state of the whole client application
data Client = Client {
resources :: R.Resources,
gameworld :: G.GameWorld,
mousePos :: Position,
selectedUnit :: Maybe U.Unit,
scroll :: (Float, Float),
player :: Player,
others :: [Player],
frame :: Int,
box :: MVar G.GameWorld,
socket :: Socket
}
playerNum :: Client -> Int
playerNum client = 1 + length (others client)
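-- | It is this client's turn when the global turn counter, taken modulo the
-- total number of players, equals the local player's team index.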
myTurn :: Client -> Bool
myTurn client = G.turn (gameworld client) `mod` playerNum client == teamIndex (player client)
myTeam :: Client -> Int
myTeam client = teamIndex (player client)
-- | Create a new client and load its resources
newClient :: MVar G.GameWorld -> Socket -> Int -> IO Client
newClient box sock idx = Client
<$> R.loadResources
<*> G.initialGameWorld
<*> return (0, 0)
<*> return Nothing
<*> return (-150, 0)
<*> return (Player "pelaaja" idx)
<*> return [Player "toinen" 3]
<*> return 0
<*> return box
<*> return sock
| maqqr/psycho-bongo-fight | Game/Client.hs | mit | 1,318 | 0 | 15 | 336 | 392 | 216 | 176 | 38 | 1 |
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
module Views.Pages.Error (errorView) where
import BasicPrelude
import Text.Blaze.Html5 (Html, toHtml, (!))
import qualified Text.Blaze.Html5 as H
import qualified Text.Blaze.Html5.Attributes as A
import Routes (Route)
import Views.Layout (layoutView)
errorView :: Route -> Text -> Html
errorView currentRoute err =
let pageTitle = "Error"
in layoutView currentRoute pageTitle $ do
    H.div ! A.class_ "page-header" $ H.h1 "Error"
    H.p $ "An unexpected error occurred."
    H.p $ toHtml err
| nicolashery/example-marvel-haskell | Views/Pages/Error.hs | mit | 572 | 0 | 13 | 94 | 160 | 91 | 69 | 16 | 1 |
import Control.Applicative
import Data.Char
import Data.Tuple
newtype Parser result = Parser { runParser :: String ->
[(String, result)] }
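-- A parser maps the remaining input to a list of (rest of input, result)
-- pairs: an empty list means the parse failed, several entries mean ambiguity.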
succeed :: r -> Parser r
succeed v = Parser $ \stream -> [(stream, v)]
instance Functor Parser where
fmap f (Parser pattern) = Parser $ (fmap . fmap . fmap) f pattern
instance Applicative Parser where
pure result = succeed result
Parser pattern_map <*> Parser pattern
= Parser $ \s -> [(u, f a) | (t, f) <- pattern_map s, (u, a) <- pattern t]
satisfy :: (Char -> Bool) -> Parser Char
satisfy p = Parser $ \s -> case s of
[] -> []
a:as
| p a -> [(as, a)]
| otherwise -> []
char :: Char -> Parser Char
char = satisfy . (==)
alpha = satisfy isAlpha
digit = satisfy isDigit
space = satisfy isSpace
charList :: String -> Parser Char
charList = satisfy . (flip elem)
string :: String -> Parser String
string [] = pure []
string (c:cs) = (:) <$> char c <*> string cs
instance Alternative Parser where
empty = Parser $ const []
Parser pattern1 <|> Parser pattern2 = Parser $ liftA2 (++) pattern1 pattern2
end :: Parser ()
end = Parser $ \stream -> [(stream, ()) | null stream]
just :: Parser r -> Parser r
just pattern = const <$> pattern <*> end
(<.>) :: Parser r1 -> Parser r2 -> Parser r2
parser1 <.> parser2 = fmap (flip const) parser1 <*> parser2
(<?>) :: (r -> Bool) -> Parser r -> Parser r
predicate <?> (Parser parser)
= Parser $ \s -> [(t, r) | (t, r) <- parser s, predicate r]
number = (fmap (:) digit) <*> (number <|> succeed [])
| markstoehr/cs161 | _site/fls/Lab5_flymake.hs | cc0-1.0 | 1,570 | 0 | 13 | 383 | 730 | 379 | 351 | 42 | 2 |
--------------------------------------------------------------------------------
-- |
-- Module : Graphics.Rendering.OpenGL.GL.Texturing.Queries
-- Copyright : (c) Sven Panne 2002-2009
-- License : BSD-style (see the file libraries/OpenGL/LICENSE)
--
-- Maintainer : [email protected]
-- Stability : stable
-- Portability : portable
--
-- This module offers various texture queries.
--
--------------------------------------------------------------------------------
module Graphics.Rendering.OpenGL.GL.Texturing.Queries (
TextureQuery, textureInternalFormat, textureSize1D, textureSize2D,
textureSize3D, textureBorder, textureRGBASizes, textureSharedSize,
textureIntensitySize, textureLuminanceSize, textureIndexSize,
textureDepthBits, textureCompressedImageSize, textureProxyOK
) where
import Control.Monad
import Data.StateVar
import Foreign.Marshal.Alloc
import Graphics.Rendering.OpenGL.GL.GLboolean
import Graphics.Rendering.OpenGL.GL.PeekPoke
import Graphics.Rendering.OpenGL.GL.PixelRectangles
import Graphics.Rendering.OpenGL.GL.Texturing.PixelInternalFormat
import Graphics.Rendering.OpenGL.GL.Texturing.Specification
import Graphics.Rendering.OpenGL.GL.Texturing.TextureTarget
import Graphics.Rendering.OpenGL.GL.VertexSpec
import Graphics.Rendering.OpenGL.Raw.ARB.Compatibility (
gl_TEXTURE_INTENSITY_SIZE, gl_TEXTURE_LUMINANCE_SIZE, gl_DEPTH_BITS )
import Graphics.Rendering.OpenGL.Raw.Core31
import Graphics.Rendering.OpenGL.Raw.EXT.PalettedTexture (
gl_TEXTURE_INDEX_SIZE )
--------------------------------------------------------------------------------
data TexLevelParameter =
TextureInternalFormat
| TextureWidth
| TextureHeight
| TextureDepth
| TextureBorder
| TextureRedSize
| TextureGreenSize
| TextureBlueSize
| TextureAlphaSize
| TextureIntensitySize
| TextureLuminanceSize
| TextureIndexSize
| DepthBits
| TextureCompressedImageSize
| TextureCompressed
| TextureSharedSize
marshalTexLevelParameter :: TexLevelParameter -> GLenum
marshalTexLevelParameter x = case x of
TextureInternalFormat -> gl_TEXTURE_INTERNAL_FORMAT
TextureWidth -> gl_TEXTURE_WIDTH
TextureHeight -> gl_TEXTURE_HEIGHT
TextureDepth -> gl_TEXTURE_DEPTH
TextureBorder -> gl_TEXTURE_BORDER
TextureRedSize -> gl_TEXTURE_RED_SIZE
TextureGreenSize -> gl_TEXTURE_GREEN_SIZE
TextureBlueSize -> gl_TEXTURE_BLUE_SIZE
TextureAlphaSize -> gl_TEXTURE_ALPHA_SIZE
TextureIntensitySize -> gl_TEXTURE_INTENSITY_SIZE
TextureLuminanceSize -> gl_TEXTURE_LUMINANCE_SIZE
TextureIndexSize -> gl_TEXTURE_INDEX_SIZE
DepthBits -> gl_DEPTH_BITS
TextureCompressedImageSize -> gl_TEXTURE_COMPRESSED_IMAGE_SIZE
TextureCompressed -> gl_TEXTURE_COMPRESSED
TextureSharedSize -> gl_TEXTURE_SHARED_SIZE
--------------------------------------------------------------------------------
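-- A texture query takes a texture target (or a cube map face) plus a mipmap
-- level and yields a gettable state variable for the corresponding per-level
-- value.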
type TextureQuery a = Either TextureTarget CubeMapTarget -> Level -> GettableStateVar a
textureInternalFormat :: TextureQuery PixelInternalFormat
textureInternalFormat t level =
makeGettableStateVar $
getTexLevelParameteri unmarshalPixelInternalFormat NoProxy t level TextureInternalFormat
textureSize1D :: TextureQuery TextureSize1D
textureSize1D t level =
makeGettableStateVar $
getTexLevelParameteri (TextureSize1D . fromIntegral) NoProxy t level TextureWidth
textureSize2D :: TextureQuery TextureSize2D
textureSize2D t level =
makeGettableStateVar $
liftM2 TextureSize2D
(getTexLevelParameteri fromIntegral NoProxy t level TextureWidth )
(getTexLevelParameteri fromIntegral NoProxy t level TextureHeight)
textureSize3D :: TextureQuery TextureSize3D
textureSize3D t level =
makeGettableStateVar $
liftM3 TextureSize3D
(getTexLevelParameteri fromIntegral NoProxy t level TextureWidth )
(getTexLevelParameteri fromIntegral NoProxy t level TextureHeight)
(getTexLevelParameteri fromIntegral NoProxy t level TextureDepth )
textureBorder :: TextureQuery Border
textureBorder t level =
makeGettableStateVar $
getTexLevelParameteri fromIntegral NoProxy t level TextureBorder
textureRGBASizes :: TextureQuery (Color4 GLsizei)
textureRGBASizes t level =
makeGettableStateVar $
liftM4 Color4
(getTexLevelParameteri fromIntegral NoProxy t level TextureRedSize )
(getTexLevelParameteri fromIntegral NoProxy t level TextureGreenSize)
(getTexLevelParameteri fromIntegral NoProxy t level TextureBlueSize )
(getTexLevelParameteri fromIntegral NoProxy t level TextureAlphaSize)
textureSharedSize :: TextureQuery GLsizei
textureSharedSize t level =
makeGettableStateVar $
getTexLevelParameteri fromIntegral NoProxy t level TextureSharedSize
textureIntensitySize :: TextureQuery GLsizei
textureIntensitySize t level =
makeGettableStateVar $
getTexLevelParameteri fromIntegral NoProxy t level TextureIntensitySize
textureLuminanceSize :: TextureQuery GLsizei
textureLuminanceSize t level =
makeGettableStateVar $
getTexLevelParameteri fromIntegral NoProxy t level TextureLuminanceSize
textureIndexSize :: TextureQuery GLsizei
textureIndexSize t level =
makeGettableStateVar $
getTexLevelParameteri fromIntegral NoProxy t level TextureIndexSize
textureDepthBits :: TextureQuery GLsizei
textureDepthBits t level =
makeGettableStateVar $
getTexLevelParameteri fromIntegral NoProxy t level DepthBits
textureCompressedImageSize :: TextureQuery (Maybe GLsizei)
textureCompressedImageSize t level =
makeGettableStateVar $ do
isCompressed <- getTexLevelParameteri unmarshalGLboolean NoProxy t level TextureCompressed
if isCompressed
then getTexLevelParameteri (Just . fromIntegral) NoProxy t level TextureCompressedImageSize
else return Nothing
textureProxyOK :: TextureQuery Bool
textureProxyOK t level =
makeGettableStateVar $
getTexLevelParameteri unmarshalGLboolean Proxy t level TextureWidth
getTexLevelParameteri :: (GLint -> a) -> Proxy -> Either TextureTarget CubeMapTarget -> Level -> TexLevelParameter -> IO a
getTexLevelParameteri f proxy t level p =
alloca $ \buf -> do
glGetTexLevelParameteriv (either (marshalProxyTextureTarget proxy) (\c -> if proxy == Proxy then marshalProxyTextureTarget Proxy TextureCubeMap else marshalCubeMapTarget c) t) level (marshalTexLevelParameter p) buf
peek1 f buf
| ducis/haAni | hs/common/Graphics/Rendering/OpenGL/GL/Texturing/Queries.hs | gpl-2.0 | 6,462 | 0 | 15 | 968 | 1,120 | 606 | 514 | 125 | 16 |
{- This module was generated from data in the Kate syntax
highlighting file bibtex.xml, version 1.17, by Jeroen Wijnhout ([email protected])+Thomas Braun ([email protected]) -}
module Text.Highlighting.Kate.Syntax.Bibtex
(highlight, parseExpression, syntaxName, syntaxExtensions)
where
import Text.Highlighting.Kate.Types
import Text.Highlighting.Kate.Common
import Text.ParserCombinators.Parsec hiding (State)
import Control.Monad.State
import Data.Char (isSpace)
import qualified Data.Set as Set
-- | Full name of language.
syntaxName :: String
syntaxName = "BibTeX"
-- | Filename extensions for this language.
syntaxExtensions :: String
syntaxExtensions = "*.bib"
-- | Highlight source code using this syntax definition.
highlight :: String -> [SourceLine]
highlight input = evalState (mapM parseSourceLine $ lines input) startingState
parseSourceLine :: String -> State SyntaxState SourceLine
parseSourceLine = mkParseSourceLine (parseExpression Nothing)
-- | Parse an expression using appropriate local context.
parseExpression :: Maybe (String,String)
-> KateParser Token
parseExpression mbcontext = do
(lang,cont) <- maybe currentContext return mbcontext
result <- parseRules (lang,cont)
optional $ do eof
updateState $ \st -> st{ synStPrevChar = '\n' }
pEndLine
return result
startingState = SyntaxState {synStContexts = [("BibTeX","Normal")], synStLineNumber = 0, synStPrevChar = '\n', synStPrevNonspace = False, synStContinuation = False, synStCaseSensitive = True, synStKeywordCaseSensitive = False, synStCaptures = []}
pEndLine = do
updateState $ \st -> st{ synStPrevNonspace = False }
context <- currentContext
contexts <- synStContexts `fmap` getState
st <- getState
if length contexts >= 2
then case context of
_ | synStContinuation st -> updateState $ \st -> st{ synStContinuation = False }
("BibTeX","Normal") -> return ()
("BibTeX","PreambleCommand") -> return ()
("BibTeX","StringCommand") -> return ()
("BibTeX","Entry") -> return ()
("BibTeX","Field") -> return ()
("BibTeX","CurlyBracket") -> return ()
("BibTeX","QuotedText") -> return ()
_ -> return ()
else return ()
withAttribute attr txt = do
when (null txt) $ fail "Parser matched no text"
updateState $ \st -> st { synStPrevChar = last txt
, synStPrevNonspace = synStPrevNonspace st || not (all isSpace txt) }
return (attr, txt)
list_kw'5fentry = Set.fromList $ words $ "@article @book @booklet @conference @collection @electronic @inbook @incollection @inproceedings @manual @mastersthesis @misc @online @patent @periodical @proceedings @report @phdthesis @set @thesis @techreport @unpublished @www @person @company @place"
regex_'5ba'2dzA'2dZ0'2d9'5c'2d'5d'2b = compileRegex True "[a-zA-Z0-9\\-]+"
regex_'5ba'2dzA'2dZ0'2d9'5f'40'5c'5c'2d'5c'5c'3a'5d'2b = compileRegex True "[a-zA-Z0-9_@\\\\-\\\\:]+"
regex_'5ba'2dzA'2dZ0'2d9'5c'2d'5f'5c'2e'5d'2b = compileRegex True "[a-zA-Z0-9\\-_\\.]+"
regex_'5b0'2d9'5d'2b = compileRegex True "[0-9]+"
regex_'2e = compileRegex True "."
regex_'5c'5c'28'5ba'2dzA'2dZ'40'5d'2b'7c'5b'5e_'5d'29 = compileRegex True "\\\\([a-zA-Z@]+|[^ ])"
regex_'7d'24 = compileRegex True "}$"
parseRules ("BibTeX","Normal") =
(((pKeyword " \n\t.():!+,-<=>%&*/;?[]^{|}~" list_kw'5fentry >>= withAttribute KeywordTok) >>~ pushContext ("BibTeX","Entry"))
<|>
((pString False "@string" >>= withAttribute FunctionTok) >>~ pushContext ("BibTeX","StringCommand"))
<|>
((pString False "@preamble" >>= withAttribute FunctionTok) >>~ pushContext ("BibTeX","PreambleCommand"))
<|>
((pString False "@comment" >>= withAttribute CommentTok))
<|>
(currentContext >>= \x -> guard (x == ("BibTeX","Normal")) >> pDefault >>= withAttribute CommentTok))
parseRules ("BibTeX","PreambleCommand") =
(((pDetectChar False '{' >>= withAttribute NormalTok) >>~ pushContext ("BibTeX","CurlyBracket"))
<|>
((popContext) >> currentContext >>= parseRules))
parseRules ("BibTeX","StringCommand") =
(((pDetectChar False '{' >>= withAttribute NormalTok) >>~ pushContext ("BibTeX","CurlyBracket"))
<|>
((pRegExpr regex_'5ba'2dzA'2dZ0'2d9'5c'2d'5d'2b >>= withAttribute StringTok) >>~ pushContext ("BibTeX","CurlyBracket"))
<|>
((popContext) >> currentContext >>= parseRules))
parseRules ("BibTeX","Entry") =
(((pDetectChar False '{' >>= withAttribute NormalTok))
<|>
((pRegExpr regex_'5ba'2dzA'2dZ0'2d9'5f'40'5c'5c'2d'5c'5c'3a'5d'2b >>= withAttribute OtherTok))
<|>
((pDetectChar False ',' >>= withAttribute NormalTok) >>~ pushContext ("BibTeX","Field"))
<|>
((pDetectChar False '}' >>= withAttribute NormalTok) >>~ (popContext))
<|>
(currentContext >>= \x -> guard (x == ("BibTeX","Entry")) >> pDefault >>= withAttribute NormalTok))
parseRules ("BibTeX","Field") =
(((pFirstNonSpace >> pRegExpr regex_'5ba'2dzA'2dZ0'2d9'5c'2d'5f'5c'2e'5d'2b >>= withAttribute DataTypeTok))
<|>
((pDetectSpaces >>= withAttribute NormalTok))
<|>
((pDetectChar False '=' >>= withAttribute NormalTok))
<|>
((pDetectSpaces >>= withAttribute NormalTok))
<|>
((pDetectChar False '{' >>= withAttribute NormalTok) >>~ pushContext ("BibTeX","CurlyBracket"))
<|>
((lookAhead (pDetectChar False '}') >> (popContext) >> currentContext >>= parseRules))
<|>
((pDetectChar False '"' >>= withAttribute NormalTok) >>~ pushContext ("BibTeX","QuotedText"))
<|>
((pDetectChar False ',' >>= withAttribute NormalTok))
<|>
((pDetectChar False '#' >>= withAttribute NormalTok))
<|>
((pRegExpr regex_'5b0'2d9'5d'2b >>= withAttribute NormalTok))
<|>
((pRegExpr regex_'5ba'2dzA'2dZ0'2d9'5c'2d'5d'2b >>= withAttribute StringTok))
<|>
((pDetectSpaces >>= withAttribute NormalTok))
<|>
((pRegExpr regex_'2e >>= withAttribute AlertTok))
<|>
(currentContext >>= \x -> guard (x == ("BibTeX","Field")) >> pDefault >>= withAttribute NormalTok))
parseRules ("BibTeX","CurlyBracket") =
(((pDetectChar False '{' >>= withAttribute NormalTok) >>~ pushContext ("BibTeX","CurlyBracket"))
<|>
((pRegExpr regex_'5c'5c'28'5ba'2dzA'2dZ'40'5d'2b'7c'5b'5e_'5d'29 >>= withAttribute CharTok))
<|>
((pRegExpr regex_'7d'24 >>= withAttribute NormalTok) >>~ (popContext >> popContext))
<|>
((pDetectChar False '}' >>= withAttribute NormalTok) >>~ (popContext))
<|>
(currentContext >>= \x -> guard (x == ("BibTeX","CurlyBracket")) >> pDefault >>= withAttribute NormalTok))
parseRules ("BibTeX","QuotedText") =
(((pDetectChar False '"' >>= withAttribute NormalTok) >>~ (popContext))
<|>
((pRegExpr regex_'5c'5c'28'5ba'2dzA'2dZ'40'5d'2b'7c'5b'5e_'5d'29 >>= withAttribute CharTok))
<|>
(currentContext >>= \x -> guard (x == ("BibTeX","QuotedText")) >> pDefault >>= withAttribute StringTok))
parseRules x = parseRules ("BibTeX","Normal") <|> fail ("Unknown context" ++ show x)
| ambiata/highlighting-kate | Text/Highlighting/Kate/Syntax/Bibtex.hs | gpl-2.0 | 6,973 | 0 | 23 | 1,106 | 1,931 | 1,025 | 906 | 131 | 10 |
{-# LANGUAGE CPP, ScopedTypeVariables, MultiParamTypeClasses #-}
-- | This module contains plain tree indexing code. The index itself is a
-- CACHE: you should only ever use it as an optimisation and never as a primary
-- storage. In practice, this means that when we change index format, the
-- application is expected to throw the old index away and build a fresh
-- index. Please note that tracking index validity is out of scope for this
-- library: this is responsibility of your application. It is advisable that in
-- your validity tracking code, you also check for format validity (see
-- 'indexFormatValid') and scrap and re-create index when needed.
--
-- The index is a binary file that overlays a hashed tree over the working
-- copy. This means that every working file and directory has an entry in the
-- index, that contains its path and hash and validity data. The validity data
-- is a timestamp plus the file size. The file hashes are sha256's of the
-- file's content. It also contains the fileid to track moved files.
--
-- There are two entry types, a file entry and a directory entry. Both have a
-- common binary format (see 'Item'). The on-disk format is best described by
-- the section /Index format/ below.
--
-- For each file, the index has a copy of the file's last modification
-- timestamp taken at the instant when the hash has been computed. This means
-- that when file size and timestamp of a file in working copy matches those in
-- the index, we assume that the hash stored in the index for given file is
-- valid. These hashes are then exposed in the resulting 'Tree' object, and can
-- be leveraged by eg. 'diffTrees' to compare many files quickly.
--
-- You may have noticed that we also keep hashes of directories. These are
-- assumed to be valid whenever the complete subtree has been valid. At any
-- point, as soon as a size or timestamp mismatch is found, the working file in
-- question is opened, its hash (and timestamp and size) is recomputed and
-- updated in-place in the index file (everything lives at a fixed offset and
-- is fixed size, so this isn't an issue). This is also true of directories:
-- when a file in a directory changes hash, this triggers recomputation of all
-- of its parent directory hashes; moreover this is done efficiently -- each
-- directory is updated at most once during an update run.
--
-- /Index format/
--
-- The Index is organised into \"lines\" where each line describes a single
-- indexed item. Cf. 'Item'.
--
-- Each index \"line\" starts with three fixed-size words: the size of the file
-- or directory, an "aux" word, which is used differently for directories and
-- for files, and the fileid (inode or fhandle) of the file. They are followed
-- by the length of the path descriptor, the fixed-length hash (sha256) of the
-- file in question, and finally the descriptor itself (an item-type marker
-- plus the path), which is the only variable-length part of the line.
--
-- With directories, this aux holds the offset of the next sibling line in the
-- index, so we can efficiently skip reading the whole subtree starting at a
-- given directory (by just seeking aux bytes forward). The lines are
-- pre-ordered with respect to directory structure -- the directory comes first
-- and after it come all its items. Cf. 'readIndex''.
--
-- For files, the aux field holds a timestamp.
module Storage.Hashed.Index( readIndex, updateIndexFrom, indexFormatValid
, updateIndex, listFileIDs, Index, filter
, getFileID )
where
import Prelude hiding ( lookup, readFile, writeFile, filter, catch )
import Storage.Hashed.Utils
import Storage.Hashed.Tree
import Storage.Hashed.AnchoredPath
import Data.Int( Int64, Int32 )
import Bundled.Posix( getFileStatusBS, modificationTime,
getFileStatus, fileSize, fileExists, isDirectory )
import System.IO.MMap( mmapFileForeignPtr, mmapFileByteString, Mode(..) )
import System.IO( )
import System.Directory( doesFileExist, getCurrentDirectory, doesDirectoryExist )
#if mingw32_HOST_OS
import System.Directory( renameFile )
import System.FilePath( (<.>) )
#else
import System.Directory( removeFile )
#endif
import System.FilePath( (</>) )
import System.Posix.Types ( FileID )
import Control.Monad( when )
import Control.Exception( catch, SomeException )
import Control.Applicative( (<$>) )
import qualified Data.ByteString as BS
import qualified Data.ByteString.Char8 as BSC
import Data.ByteString.Unsafe( unsafeHead, unsafeDrop )
import Data.ByteString.Internal( toForeignPtr, fromForeignPtr, memcpy
, nullForeignPtr, c2w )
import Data.IORef( )
import Data.Maybe( fromJust, isJust, fromMaybe )
import Data.Bits( Bits )
import Foreign.Storable
import Foreign.ForeignPtr
import Foreign.Ptr
import Storage.Hashed.Hash( sha256, rawHash )
#ifdef WIN32
import System.Win32.File ( createFile, getFileInformationByHandle, BY_HANDLE_FILE_INFORMATION(..),
fILE_SHARE_NONE, fILE_FLAG_BACKUP_SEMANTICS,
gENERIC_NONE, oPEN_EXISTING, closeHandle )
#else
import System.PosixCompat ( fileID, getSymbolicLinkStatus )
#endif
--------------------------
-- Indexed trees
--
-- | Description of a single indexed item. The structure itself does not
-- contain any data, just pointers to the underlying mmap (bytestring is a
-- pointer + offset + length).
--
-- The structure is recursive-ish (as opposed to flat-ish structure, which is
-- used by git...) It turns out that it's hard to efficiently read a flat index
-- with our internal data structures -- we need to turn the flat index into a
-- recursive Tree object, which is rather expensive... As a bonus, we can also
-- efficiently implement subtree queries this way (cf. 'readIndex').
data Item = Item { iBase :: !(Ptr ())
, iHashAndDescriptor :: !BS.ByteString
} deriving Show
size_magic :: Int
size_magic = 4 -- the magic word, first 4 bytes of the index
size_dsclen, size_hash, size_size, size_aux, size_fileid :: Int
size_size = 8 -- file/directory size (Int64)
size_aux = 8 -- aux (Int64)
size_fileid = 8 -- fileid (inode or fhandle FileID)
size_dsclen = 4 -- this many bytes store the length of the path
size_hash = 32 -- hash representation
off_size, off_aux, off_hash, off_dsc, off_dsclen, off_fileid :: Int
off_size = 0
off_aux = off_size + size_size
off_fileid = off_aux + size_aux
off_dsclen = off_fileid + size_fileid
off_hash = off_dsclen + size_dsclen
off_dsc = off_hash + size_hash
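-- Putting the sizes and offsets above together, a single index line is laid
-- out as: size (8) | aux (8) | fileid (8) | dsclen (4) | hash (32) | descriptor,
-- where the descriptor is a 'D'/'F' marker, the flattened path and a trailing
-- NUL byte, and the whole line is padded to a 4-byte boundary (cf. 'itemNext').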
itemAllocSize :: AnchoredPath -> Int
itemAllocSize apath =
align 4 $ size_hash + size_size + size_aux + size_fileid + size_dsclen + 2 + BS.length (flatten apath)
itemSize, itemNext :: Item -> Int
itemSize i = size_size + size_aux + size_fileid + size_dsclen + (BS.length $ iHashAndDescriptor i)
itemNext i = align 4 (itemSize i + 1)
iPath, iHash, iDescriptor :: Item -> BS.ByteString
iDescriptor = unsafeDrop size_hash . iHashAndDescriptor
iPath = unsafeDrop 1 . iDescriptor
iHash = BS.take size_hash . iHashAndDescriptor
iSize, iAux :: Item -> Ptr Int64
iSize i = plusPtr (iBase i) off_size
iAux i = plusPtr (iBase i) off_aux
iFileID :: Item -> Ptr FileID
iFileID i = plusPtr (iBase i) off_fileid
itemIsDir :: Item -> Bool
itemIsDir i = unsafeHead (iDescriptor i) == c2w 'D'
-- xlatePeek32 = fmap xlate32 . peek
xlatePeek64 :: (Storable a, Num a, Bits a) => Ptr a -> IO a
xlatePeek64 = fmap xlate64 . peek
-- xlatePoke32 ptr v = poke ptr (xlate32 v)
xlatePoke64 :: (Storable a, Num a, Bits a) => Ptr a -> a -> IO ()
xlatePoke64 ptr v = poke ptr (xlate64 v)
-- | Lay out the basic index item structure in memory. The memory location is
-- given by a 'ForeignPtr' and an offset. The path and type given are
-- written out, and a corresponding Item is given back. The remaining bits of
-- the item can be filled out using 'updateItem'.
createItem :: ItemType -> AnchoredPath -> ForeignPtr () -> Int -> IO Item
createItem typ apath fp off =
do let dsc = BS.concat [ BSC.singleton $ if typ == TreeType then 'D' else 'F'
, flatten apath
, BS.singleton 0 ]
(dsc_fp, dsc_start, dsc_len) = toForeignPtr dsc
withForeignPtr fp $ \p ->
withForeignPtr dsc_fp $ \dsc_p ->
do fileid <- fromMaybe 0 <$> getFileID apath
pokeByteOff p (off + off_fileid) (xlate64 $ fromIntegral fileid :: Int64)
pokeByteOff p (off + off_dsclen) (xlate32 $ fromIntegral dsc_len :: Int32)
memcpy (plusPtr p $ off + off_dsc)
(plusPtr dsc_p dsc_start)
(fromIntegral dsc_len)
peekItem fp off
-- | Read the on-disk representation into internal data structure.
--
-- See the module-level section /Index format/ for details on how the index
-- is structured.
peekItem :: ForeignPtr () -> Int -> IO Item
peekItem fp off =
withForeignPtr fp $ \p -> do
nl' :: Int32 <- xlate32 `fmap` peekByteOff p (off + off_dsclen)
when (nl' <= 2) $ fail "Descriptor too short in peekItem!"
let nl = fromIntegral nl'
dsc = fromForeignPtr (castForeignPtr fp) (off + off_hash) (size_hash + nl - 1)
return $! Item { iBase = plusPtr p off
, iHashAndDescriptor = dsc }
-- | Update an existing item in place with a new size and hash. Timestamps are
-- set separately via 'updateTime' (for directories the aux slot holds the
-- offset of the next sibling line instead).
updateItem :: Item -> Int64 -> Hash -> IO ()
updateItem item _ NoHash =
fail $ "Index.update NoHash: " ++ BSC.unpack (iPath item)
updateItem item size hash =
do xlatePoke64 (iSize item) size
unsafePokeBS (iHash item) (rawHash hash)
updateFileID :: Item -> FileID -> IO ()
updateFileID item fileid = xlatePoke64 (iFileID item) $ fromIntegral fileid
updateAux :: Item -> Int64 -> IO ()
updateAux item aux = xlatePoke64 (iAux item) $ aux
updateTime :: forall a.(Enum a) => Item -> a -> IO ()
updateTime item mtime = updateAux item (fromIntegral $ fromEnum mtime)
iHash' :: Item -> Hash
iHash' i = SHA256 (iHash i)
-- | Gives a ForeignPtr to mmapped index, which can be used for reading and
-- updates. The req_size parameter, if non-0, expresses the requested size of
-- the index file. mmapIndex will grow the index if it is smaller than this.
mmapIndex :: forall a. FilePath -> Int -> IO (ForeignPtr a, Int)
mmapIndex indexpath req_size = do
exist <- doesFileExist indexpath
act_size <- fromIntegral `fmap` if exist then fileSize `fmap` getFileStatus indexpath
else return 0
let size = case req_size > 0 of
True -> req_size
False | act_size >= size_magic -> act_size - size_magic
| otherwise -> 0
case size of
0 -> return (castForeignPtr nullForeignPtr, size)
_ -> do (x, _, _) <- mmapFileForeignPtr indexpath
ReadWriteEx (Just (0, size + size_magic))
return (x, size)
data IndexM m = Index { mmap :: (ForeignPtr ())
, basedir :: FilePath
, hashtree :: Tree m -> Hash
, predicate :: AnchoredPath -> TreeItem m -> Bool }
| EmptyIndex
type Index = IndexM IO
data State = State { dirlength :: !Int
, path :: !AnchoredPath
, start :: !Int }
data Result = Result { -- | marks if the item has changed since the last update to the index
changed :: !Bool
-- | next is the position of the next item, in bytes.
, next :: !Int
                       -- | treeitem is Nothing when the item doesn't exist in the tree
                       -- or is filtered out by a FilterTree, and Just the TreeItem otherwise.
, treeitem :: !(Maybe (TreeItem IO))
-- | resitem is the item extracted.
, resitem :: !Item }
data ResultF = ResultF { -- | nextF is the position of the next item, in bytes.
nextF :: !Int
-- | resitemF is the item extracted.
, resitemF :: !Item
                         -- | _fileIDs contains the fileids of the files and folders inside
                         -- a folder item (or just the item's own fileid for a file item).
, _fileIDs :: [((AnchoredPath, ItemType), FileID)] }
readItem :: Index -> State -> IO Result
readItem index state = do
item <- peekItem (mmap index) (start state)
res' <- if itemIsDir item
then readDir index state item
else readFile index state item
return res'
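-- Read a directory entry and, if the index predicate selects it, all of its
-- children. The directory hash is recomputed (and written back in place) when
-- any of its children changed or the stored hash has never been filled in.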
readDir :: Index -> State -> Item -> IO Result
readDir index state item =
do following <- fromIntegral <$> xlatePeek64 (iAux item)
st <- getFileStatusBS (iPath item)
let exists = fileExists st && isDirectory st
fileid <- fromIntegral <$> (xlatePeek64 $ iFileID item)
fileid' <- fromMaybe fileid <$> (getFileID' $ BSC.unpack $ iPath item)
when (fileid == 0) $ updateFileID item fileid'
let name it dirlen = Name $ (BS.drop (dirlen + 1) $ iDescriptor it) -- FIXME MAGIC
namelength = (BS.length $ iDescriptor item) - (dirlength state)
myname = name item (dirlength state)
substate = state { start = start state + itemNext item
, path = path state `appendPath` myname
, dirlength = if myname == Name (BSC.singleton '.')
then dirlength state
else dirlength state + namelength }
want = exists && (predicate index) (path substate) (Stub undefined NoHash)
oldhash = iHash' item
subs off | off < following = do
result <- readItem index $ substate { start = off }
rest <- subs $ next result
return $! (name (resitem result) $ dirlength substate, result) : rest
subs coff | coff == following = return []
| otherwise = fail $ "Offset mismatch at " ++ show coff ++
" (ends at " ++ show following ++ ")"
inferiors <- if want then subs $ start substate
else return []
let we_changed = or [ changed x | (_, x) <- inferiors ] || nullleaf
nullleaf = null inferiors && oldhash == nullsha
nullsha = SHA256 (BS.replicate 32 0)
tree' = makeTree [ (n, fromJust $ treeitem s) | (n, s) <- inferiors, isJust $ treeitem s ]
treehash = if we_changed then hashtree index tree' else oldhash
tree = tree' { treeHash = treehash }
when (exists && we_changed) $ updateItem item 0 treehash
return $ Result { changed = not exists || we_changed
, next = following
, treeitem = if want then Just $ SubTree tree
else Nothing
, resitem = item }
readFile :: Index -> State -> Item -> IO Result
readFile index state item =
do st <- getFileStatusBS (iPath item)
mtime <- fromIntegral <$> (xlatePeek64 $ iAux item)
size <- xlatePeek64 $ iSize item
fileid <- fromIntegral <$> (xlatePeek64 $ iFileID item)
fileid' <- fromMaybe fileid <$> (getFileID' $ BSC.unpack $ iPath item)
let mtime' = modificationTime st
size' = fromIntegral $ fileSize st
readblob = readSegment (basedir index </> BSC.unpack (iPath item), Nothing)
exists = fileExists st && not (isDirectory st)
we_changed = mtime /= mtime' || size /= size'
hash = iHash' item
when (exists && we_changed) $
do hash' <- sha256 `fmap` readblob
updateItem item size' hash'
updateTime item mtime'
when (fileid == 0) $ updateFileID item fileid'
return $ Result { changed = not exists || we_changed
, next = start state + itemNext item
, treeitem = if exists then Just $ File $ Blob readblob hash else Nothing
, resitem = item }
updateIndex :: Index -> IO (Tree IO)
updateIndex EmptyIndex = return emptyTree
updateIndex index =
do let initial = State { start = size_magic
, dirlength = 0
, path = AnchoredPath [] }
res <- readItem index initial
case treeitem res of
Just (SubTree tree) -> return $ filter (predicate index) tree
_ -> fail "Unexpected failure in updateIndex!"
-- | Return a list containing all the file/folder names in an index, with
-- their respective ItemType and FileID.
listFileIDs :: Index -> IO ([((AnchoredPath, ItemType), FileID)])
listFileIDs EmptyIndex = return []
listFileIDs index =
do let initial = State { start = size_magic
, dirlength = 0
, path = AnchoredPath [] }
res <- readItemFileIDs index initial
return $ _fileIDs res
readItemFileIDs :: Index -> State -> IO ResultF
readItemFileIDs index state = do
item <- peekItem (mmap index) (start state)
res' <- if itemIsDir item
then readDirFileIDs index state item
else readFileFileID index state item
return res'
readDirFileIDs :: Index -> State -> Item -> IO ResultF
readDirFileIDs index state item =
do fileid <- fromIntegral <$> (xlatePeek64 $ iFileID item)
following <- fromIntegral <$> xlatePeek64 (iAux item)
let name it dirlen = Name $ (BS.drop (dirlen + 1) $ iDescriptor it) -- FIXME MAGIC
namelength = (BS.length $ iDescriptor item) - (dirlength state)
myname = name item (dirlength state)
substate = state { start = start state + itemNext item
, path = path state `appendPath` myname
, dirlength = if myname == Name (BSC.singleton '.')
then dirlength state
else dirlength state + namelength }
subs off | off < following = do
result <- readItemFileIDs index $ substate { start = off }
rest <- subs $ nextF result
return $! (name (resitemF result) $ dirlength substate, result) : rest
subs coff | coff == following = return []
| otherwise = fail $ "Offset mismatch at " ++ show coff ++
" (ends at " ++ show following ++ ")"
inferiors <- subs $ start substate
return $ ResultF { nextF = following
, resitemF = item
, _fileIDs = (((path substate, TreeType), fileid):concatMap (_fileIDs . snd) inferiors) }
readFileFileID :: Index -> State -> Item -> IO ResultF
readFileFileID _ state item =
do fileid' <- fromIntegral <$> (xlatePeek64 $ iFileID item)
let name it dirlen = Name $ (BS.drop (dirlen + 1) $ iDescriptor it)
myname = name item (dirlength state)
return $ ResultF { nextF = start state + itemNext item
, resitemF = item
, _fileIDs = [((path state `appendPath` myname, BlobType), fileid')] }
-- | Read an index and build up a 'Tree' object from it, referring to current
-- working directory. The initial Index object returned by readIndex is not
-- directly useful. However, you can use 'Tree.filter' on it. Either way, to
-- obtain the actual Tree object, call 'updateIndex'.
--
-- The usual use pattern is this:
--
-- > do idx <- readIndex indexpath hashtree
-- >    tree <- updateIndex (filter predicate idx)
--
-- The resulting tree will be fully expanded.
readIndex :: FilePath -> (Tree IO -> Hash) -> IO Index
readIndex indexpath ht = do
(mmap_ptr, mmap_size) <- mmapIndex indexpath 0
base <- getCurrentDirectory
return $ if mmap_size == 0 then EmptyIndex
else Index { mmap = mmap_ptr
, basedir = base
, hashtree = ht
, predicate = \_ _ -> True }
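-- Write a fresh index for the given reference tree into the mmapped region.
-- Hashes are reused from the old index tree where available, while file sizes
-- and timestamps are taken from the working copy. A directory entry precedes
-- its children in the file and its aux field is back-patched with the offset
-- just past the subtree, which is what 'readDir' expects when skipping it.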
formatIndex :: ForeignPtr () -> Tree IO -> Tree IO -> IO ()
formatIndex mmap_ptr old reference =
do _ <- create (SubTree reference) (AnchoredPath []) size_magic
unsafePokeBS magic (BSC.pack "HSI5")
where magic = fromForeignPtr (castForeignPtr mmap_ptr) 0 4
create (File _) path' off =
do i <- createItem BlobType path' mmap_ptr off
let flatpath = BSC.unpack $ flatten path'
case find old path' of
Nothing -> return ()
-- TODO calling getFileStatus here is both slightly
-- inefficient and slightly race-prone
Just ti -> do st <- getFileStatus flatpath
let hash = itemHash ti
mtime = modificationTime st
size = fileSize st
updateItem i (fromIntegral size) hash
updateTime i mtime
return $ off + itemNext i
create (SubTree s) path' off =
do i <- createItem TreeType path' mmap_ptr off
case find old path' of
Nothing -> return ()
Just ti | itemHash ti == NoHash -> return ()
| otherwise -> updateItem i 0 $ itemHash ti
let subs [] = return $ off + itemNext i
subs ((name,x):xs) = do
let path'' = path' `appendPath` name
noff <- subs xs
create x path'' noff
lastOff <- subs (listImmediate s)
xlatePoke64 (iAux i) (fromIntegral lastOff)
return lastOff
create (Stub _ _) path' _ =
fail $ "Cannot create index from stubbed Tree at " ++ show path'
-- | Will add and remove files in index to make it match the 'Tree' object
-- given (it is an error for the 'Tree' to contain a file or directory that
-- does not exist in a plain form in current working directory).
updateIndexFrom :: FilePath -> (Tree IO -> Hash) -> Tree IO -> IO Index
updateIndexFrom indexpath hashtree' ref =
do old_idx <- updateIndex =<< readIndex indexpath hashtree'
reference <- expand ref
let len_root = itemAllocSize anchoredRoot
len = len_root + sum [ itemAllocSize p | (p, _) <- list reference ]
exist <- doesFileExist indexpath
#if mingw32_HOST_OS
when exist $ renameFile indexpath (indexpath <.> "old")
#else
when exist $ removeFile indexpath -- to avoid clobbering oldidx
#endif
(mmap_ptr, _) <- mmapIndex indexpath len
formatIndex mmap_ptr old_idx reference
readIndex indexpath hashtree'
-- | Check that a given file is an index file with a format we can handle. You
-- should remove and re-create the index whenever this is not true.
indexFormatValid :: FilePath -> IO Bool
indexFormatValid path' =
do magic <- mmapFileByteString path' (Just (0, size_magic))
return $ case BSC.unpack magic of
"HSI5" -> True
_ -> False
`catch` \(_::SomeException) -> return False
instance FilterTree IndexM IO where
filter _ EmptyIndex = EmptyIndex
filter p index = index { predicate = \a b -> predicate index a b && p a b }
-- | For a given file or folder path, get the corresponding fileID from the
-- filesystem.
getFileID :: AnchoredPath -> IO (Maybe FileID)
getFileID = getFileID' . anchorPath ""
getFileID' :: FilePath -> IO (Maybe FileID)
getFileID' fp = do file_exists <- doesFileExist fp
dir_exists <- doesDirectoryExist fp
if file_exists || dir_exists
#ifdef WIN32
then do h <- createFile fp gENERIC_NONE fILE_SHARE_NONE Nothing oPEN_EXISTING fILE_FLAG_BACKUP_SEMANTICS Nothing
fhnumber <- (Just . fromIntegral . bhfiFileIndex) <$> getFileInformationByHandle h
closeHandle h
return fhnumber
#else
then (Just . fileID) <$> getSymbolicLinkStatus fp
#endif
else return Nothing
| DavidAlphaFox/darcs | hashed-storage/Storage/Hashed/Index.hs | gpl-2.0 | 24,401 | 0 | 19 | 7,280 | 5,496 | 2,857 | 2,639 | 359 | 6 |
module Tree.AVL where
-- import Test.QuickCheck
-- import Test.QuickCheck.All
import Tree.BinarySearchTree as BST
-- Some examples of structure in code
t3 :: Tree Int
t3 = Node 10 (leaf 8) (leaf 15)
t4 :: Tree Int
t4 = Node 17 (Node 12 (Node 5 (leaf 4) (leaf 8))
(leaf 15))
(Node 115
(Node 32 (leaf 30)
(Node 46 (leaf 43)
(leaf 57)))
(Node 163 (leaf 161)
Empty))
-- *AVL> size t1
-- 3
-- *AVL> size t2
-- 14
-- maximum distance from any node to the root
height :: (Ord a, Num a) => Tree t -> a
height Empty = -1
height (Node _ l r) = 1 + max (height l) (height r)
-- *AVL> height t1
-- 2
-- *AVL> height t2
-- 5
empty :: Tree a -> Bool
empty Empty = True
empty (Node _ _ _) = False
-- *AVL> leaf 10
-- Node 10 Empty Empty
-- *AVL> empty Empty
-- True
-- *AVL> empty $ leaf 10
-- False
hFactor :: Tree a -> Tree a -> Int
hFactor l r = (height l) - (height r)
{--
Given a tree, compute its height factor, i.e. the height of the left subtree
minus the height of the right one (if it is -1, 0 or 1, the tree is well balanced)
--}
heightFactor :: Tree a -> Int
heightFactor Empty = 0
heightFactor (Node _ l r) = hFactor l r
-- *AVL> heightFactor t1
-- 0
-- *AVL> heightFactor t2
-- -1
{--
returns whether the given tree is h-balanced or not
--}
hBalanced :: Tree a -> Bool
hBalanced Empty = True
hBalanced n@(Node _ l r) = abs (heightFactor n) <= 1 && hBalanced l && hBalanced r
-- *AVL> hBalanced Empty
-- True
-- *AVL> t1
-- Node 10 (Node 8 Empty Empty) (Node 15 Empty Empty)
-- *AVL> hBalanced t1
-- True
-- *AVL> t2
-- Node 17 (Node 12 (Node 5 (Node 4 Empty Empty) (Node 8 Empty Empty)) (Node 15 Empty Empty)) (Node 115 (Node 32 (Node 30 Empty Empty) (Node 46 (Node 43 Empty Empty) (Node 57 Empty Empty))) (Node 163 (Node 161 Empty Empty) Empty))
-- *AVL> hBalanced t2
-- False
{--
Tells whether the given tree is an AVL or not.
--}
isAVL :: Ord a => Tree a -> Bool
isAVL t = isBSearchTree t && hBalanced t
left :: Tree a -> Tree a
left Empty = Empty
left (Node _ l _) = l
right :: Tree a -> Tree a
right Empty = Empty
right (Node _ _ r) = r
-- *AVL> isAVL t1
-- True
-- *AVL> isAVL t2
-- True
-- *AVL> isAVL $ Node 10 t1 Empty
-- False
rotateR :: Tree a -> Tree a
rotateR Empty = Empty
rotateR n@(Node _ Empty _) = n
rotateR (Node v (Node x ll lr) r) = (Node x ll (Node v lr r))
-- *AVL> t1
-- Node 4 (Node 3 Empty Empty) (Node 7 (Node 5 Empty Empty) (Node 10 Empty Empty))
-- *AVL> rotateR t1
-- Node 3 Empty (Node 4 Empty (Node 7 (Node 5 Empty Empty) (Node 10 Empty Empty)))
rotateL :: Tree a -> Tree a
rotateL Empty = Empty
rotateL n@(Node _ _ Empty) = n
rotateL (Node v l (Node x rl rr)) = (Node x (Node v l rl) rr)
-- *AVL> t1
-- Node 4 (Node 3 Empty Empty) (Node 7 (Node 5 Empty Empty) (Node 10 Empty Empty))
-- *AVL> rotateL t1
-- Node 7 (Node 4 (Node 3 Empty Empty) (Node 5 Empty Empty)) (Node 10 Empty Empty)
t5 :: Tree Int
t5 = Node 6 (Node 3
(Node 2
(leaf 1)
Empty)
(leaf 4))
(leaf 7)
t6 :: Tree Int
t6 = Node 3 (Node 2
(leaf 1)
Empty)
(Node 6
(leaf 4)
(leaf 7))
-- *AVL> rotateR t1
-- Node 15 (Node 10 (Node 8 Empty Empty) Empty) Empty
-- *AVL> (rotateL . rotateR) t1 == t1
-- True
-- *AVL> (rotateR . rotateL) t1 == t1
-- True
-- *AVL> pp t5
-- --6
-- |--3
-- | |--2
-- | | |--1
-- | | | |-- /-
-- | | | `-- /-
-- | | `-- /-
-- | `--4
-- | |-- /-
-- | `-- /-
-- `--7
-- |-- /-
-- `-- /-
-- *AVL> pp t6
-- --3
-- |--2
-- | |--1
-- | | |-- /-
-- | | `-- /-
-- | `-- /-
-- `--6
-- |--4
-- | |-- /-
-- | `-- /-
-- `--7
-- |-- /-
-- `-- /-
-- *AVL> rotateL t6 == t5
-- True
-- *AVL> rotateR t5 == t6
-- True
t8 :: Tree Int
t8 = Node 6 (Node 3 (leaf 2) (Node 4 Empty (leaf 5))) (leaf 7)
t9 :: Tree Int
t9 = Node 4 (Node 3 (leaf 2) Empty) (Node 6 (leaf 5) (leaf 7))
-- *AVL> rotateLRight t8 == t9
-- True
t7 :: Tree Int
t7 = Node 6 (Node 3 (Node 2 Empty Empty) (Node 4 Empty (Node 5 Empty Empty))) (Node 7 Empty Empty)
{--
Insert a new ordered value into the tree.
Note that it preserves the Binary Search tree and the H-balanced properties of an AVL.
Note: If an entry is already present, the same tree is returned unchanged.
--}
ins :: Ord a => Tree a -> a -> Tree a
ins Empty v = leaf v
ins n@(Node x l r) y
| x == y = n
| x < y = rebalance $ Node x l (ins r y)
| otherwise = rebalance $ Node x (ins l y) r
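-- Restore the AVL height invariant at the root of a tree whose subtrees are
-- already balanced: a left-left heavy node needs a single right rotation, a
-- left-right heavy one needs a left rotation of its left child followed by a
-- right rotation, and the right-heavy cases are symmetric.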
rebalance :: Ord a => Tree a -> Tree a
rebalance Empty = Empty
rebalance n@(Node x l r)
  | heightFactor n == 2  = if heightFactor l >= 0               -- left heavy
                           then rotateR n                       -- left-left: single right rotation
                           else rotateR (Node x (rotateL l) r)  -- left-right: double rotation
  | heightFactor n == -2 = if heightFactor r <= 0               -- right heavy
                           then rotateL n                       -- right-right: single left rotation
                           else rotateL (Node x l (rotateR r))  -- right-left: double rotation
  | otherwise            = n
-- *AVL> pp $ build [1..10]
-- --4
-- |--2
-- | |--1
-- | | |-- /-
-- | | `-- /-
-- | `--3
-- | |-- /-
-- | `-- /-
-- `--8
-- |--6
-- | |--5
-- | | |-- /-
-- | | `-- /-
-- | `--7
-- | |-- /-
-- | `-- /-
-- `--9
-- |-- /-
-- `--10
-- |-- /-
-- `-- /-
-- *AVL> isAVL $ build [1..10]
-- True
-- *AVL> isAVL $ build [1..100]
-- True
-- *AVL> isAVL $ build [1..1000]
-- True
t10 :: Tree Int
t10 = Node 6 (Node 3 (leaf 2) (Node 4 Empty Empty)) (leaf 7)
-- *AVL> pp $ ins t10 100
-- --6
-- |--3
-- | |--2
-- | | |-- /-
-- | | `-- /-
-- | `--4
-- | |-- /-
-- | `-- /-
-- `--7
-- |-- /-
-- `--100
-- |-- /-
-- `-- /-
-- build an AVL from a list
build :: (Num a, Ord a) => [a] -> Tree a
build = foldl ins Empty
--prop_avl = (\ t -> abs (heightFactor t) <= 1)
-- adding
--main = do
-- verboseCheckWith stdArgs { maxSuccess = 1000, maxSize = 5 } prop_avl
{--
Remove a node from the tree. Note that it preserves the AVL properties.
--}
remove :: (Ord a) => Tree a -> a -> Tree a
remove Empty _ = Empty
remove (Node x l r) y
  | x < y = rebalance $ Node x l (Tree.AVL.remove r y)
  | y < x = rebalance $ Node x (Tree.AVL.remove l y) r
| otherwise = case deleteMax l of
(Just z, t) -> rebalance $ Node z t r
(Nothing, _) -> Empty
-- *AVL> isAVL (BST.remove (ins (ins (ins (ins t1 1100) 1200) 1300) 1400) 1100)
-- False
-- *AVL> isAVL (AVL.remove (ins (ins (ins (ins t1 1100) 1200) 1300) 1400) 1100)
-- True
-- *AVL> pp $ AVL.remove (build [1..10]) 8
-- --4
-- |--2
-- | |--1
-- | | |-- /-
-- | | `-- /-
-- | `--3
-- | |-- /-
-- | `-- /-
-- `--7
-- |--6
-- | |--5
-- | | |-- /-
-- | | `-- /-
-- | `-- /-
-- `--9
-- |-- /-
-- `--10
-- |-- /-
-- `-- /-
-- *AVL> isAVL $ AVL.remove (build [1..10]) 8
-- True
-- *AVL> isAVL $ (AVL.remove (AVL.remove (build [1..10]) 2) 3)
-- True
-- *AVL> pp $ (AVL.remove (AVL.remove (build [1..10]) 2) 3)
-- --8
-- |--4
-- | |--1
-- | | |-- /-
-- | | `-- /-
-- | `--6
-- | |--5
-- | | |-- /-
-- | | `-- /-
-- | `--7
-- | |-- /-
-- | `-- /-
-- `--9
-- |-- /-
-- `--10
-- |-- /-
-- `-- /-
-- *AVL> pp $ (AVL.remove (AVL.remove (AVL.remove (build [1..10]) 2) 3) 1)
-- --8
-- |--6
-- | |--4
-- | | |-- /-
-- | | `--5
-- | | |-- /-
-- | | `-- /-
-- | `--7
-- | |-- /-
-- | `-- /-
-- `--9
-- |-- /-
-- `--10
-- |-- /-
-- `-- /-
-- *AVL> isAVL $ (AVL.remove (AVL.remove (AVL.remove (build [1..10]) 2) 3) 1)
-- True
{--
Breadth first traversal
--}
breadth :: Tree a -> [a]
breadth t =
reverse $ bf [t] []
where
bf :: [Tree a] -> [a] -> [a]
bf [] q = q
bf (Empty : ns) q = bf ns q
bf ((Node x l r) : ns) q = bf (ns ++ [l,r]) (x : q)
-- *AVL> pp (build [1..10])
-- --4
-- |--2
-- | |--1
-- | | |-- /-
-- | | `-- /-
-- | `--3
-- | |-- /-
-- | `-- /-
-- `--8
-- |--6
-- | |--5
-- | | |-- /-
-- | | `-- /-
-- | `--7
-- | |-- /-
-- | `-- /-
-- `--9
-- |-- /-
-- `--10
-- |-- /-
-- `-- /-
-- *AVL> breadth (build [1..10])
-- [4,2,8,1,3,6,9,5,7,10]
-- massyl
-- breadth :: [Tree a] -> [a]
-- breadth [] = []
-- breadth ts = concatMap (value []) ts ++ breadth (concatMap childs ts)
-- childs :: Tree a -> [Tree a]
-- childs Empty = []
-- childs (Node _ l r) = [l,r]
-- value :: [a] -> Tree a -> [a]
-- value acc Empty = acc
-- value xs (Node x _ _) = xs++[x]
{--
breadth first traversal based filtering.
returns the list of all elements satisfying the given predicate
--}
filterT :: (a -> Bool) -> Tree a -> [a]
filterT p = (filter p) . breadth
-- *AVL> filterT (<= 3) t1
-- [3]
-- *AVL> filterT (<= 1000) t1
-- [4,3,7,5,10]
-- *AVL> filterT (<= 1000) $ (ins (ins (ins (ins t1 1100) 1200) 1300) 1400)
-- [7,4,3,5,10]
-- *AVL> filterT (<= 1100) $ (ins (ins (ins (ins t1 1100) 1200) 1300) 1400)
-- [7,4,3,5,1100,10]
{--
Breadth first traversal based implementation of exist
--}
exist:: Eq a => a -> Tree a -> Bool
exist x = not . null . filterT (== x)
-- *AVL> t1
-- Node 4 (Node 3 Empty Empty) (Node 7 (Node 5 Empty Empty) (Node 10 Empty Empty))
-- *AVL> exist 1 t1
-- False
-- *AVL> exist 3 t1
-- True
mapT :: (a -> b) -> Tree a -> Tree b
mapT _ Empty = Empty
mapT f (Node x l r) = Node (f x) (mapT f l) (mapT f r)
-- *AVL> mapT show t1
-- Node "4" (Node "3" Empty Empty) (Node "7" (Node "5" Empty Empty) (Node "10" Empty Empty))
instance Functor Tree where
fmap = mapT
-- *AVL> fmap show t1
-- Node "4" (Node "3" Empty Empty) (Node "7" (Node "5" Empty Empty) (Node "10" Empty Empty))
| ardumont/haskell-lab | src/Tree/AVL.hs | gpl-2.0 | 10,327 | 0 | 13 | 3,460 | 2,362 | 1,303 | 1,059 | 110 | 3 |
import Control.Concurrent.MVar
import Control.Monad (void, when)
import Control.Monad.IO.Class (MonadIO, liftIO)
import Sound.SC3.UGen
import Sound.SC3.Server.State.Monad
import Sound.SC3.Server.State.Monad.Command
-- You need the hsc3-server-internal package in order to use the internal server
--import Sound.SC3.Server.Monad.Process.Internal (withDefaultInternal)
import Sound.SC3.Server.State.Monad.Process (withDefaultSynth)
import Sound.OSC (pauseThread, pauseThreadUntil)
import qualified Sound.OSC as OSC
import System.Posix.Signals (installHandler, keyboardSignal, Handler(Catch))
import System.Random
-- Simple sine grain synthdef with frequency and amplitude controls and an ASR envelope.
sine :: UGen
sine = out 0 $ pan2 x (sinOsc KR 1 0 * 0.6) 1
where x = sinOsc AR (control KR "freq" 440) 0
* control KR "amp" 1
* envGen KR (control KR "gate" 1) 1 0 1 RemoveSynth (envASR 0.02 1 0.1 EnvLin)
-- | Once a second ask for the server status and print it.
statusLoop :: Server ()
statusLoop = do
statusM >>= liftIO . print
pauseThread 1
statusLoop
-- | Latency imposed on packets sent to the server.
latency :: Double
latency = 0.03
-- | Random sine grain generator loop.
grainLoop :: MVar a -> SynthDef -> Double -> Double -> Double -> Server ()
grainLoop quit synthDef delta sustain t = do
-- Get a random frequency between 100 and 800 Hz
f <- liftIO $ randomRIO (100,800)
-- Get a random amplitude between 0.1 and 0.3
a <- liftIO $ randomRIO (0.1,0.3)
-- Get the root node
r <- rootNode
  -- Create a synth of the sine grain SynthDef with the random frequency and amplitude from above
-- Schedule the synth for execution in 'latency' seconds in order to avoid jitter
synth <- (t + latency) `exec` s_new synthDef AddToTail r [("freq", f), ("amp", a)]
-- Fork a thread for releasing the synth after 'sustain' seconds
fork $ do
-- Calculate the time at which to release the synth and pause
let t' = t + sustain
pauseThreadUntil t'
-- Release the synth, taking latency into account
(t' + latency) `exec` s_release 0 synth
-- Calculate the time for the next iteration and pause
let t' = t + delta
pauseThreadUntil t'
-- Check whether to exit the loop and recurse
b <- liftIO $ isEmptyMVar quit
when b $ grainLoop quit synthDef delta sustain t'
newBreakHandler :: IO (MVar ())
newBreakHandler = do
quit <- newEmptyMVar
void $ installHandler keyboardSignal
(Catch $ putStrLn "Quitting..." >> putMVar quit ())
Nothing
return quit
main :: IO ()
main = do
-- Install keyboard break handler
quit <- newBreakHandler
-- Run an scsynth process
-- You need the hsc3-server-internal package in order to use the internal server
-- withDefaultInternal $ do
withDefaultSynth $ do
-- Create a new SynthDef
sd <- exec_ $ d_recv "hsc3-server:sine" sine
-- Fork the status display loop
fork statusLoop
-- Enter the grain loop
grainLoop quit sd 0.03 0.06 =<< liftIO OSC.time
takeMVar quit
| kaoskorobase/hsc3-server | examples/sine-grains.hs | gpl-2.0 | 3,150 | 7 | 13 | 739 | 726 | 370 | 356 | 52 | 1 |
module Main where
import Test.DocTest
main :: IO ()
main = doctest ["-isrc", "src/Regex/Parser.hs", "src/Regex/Enumerator.hs"]
| tadeboro/reglang | test/doctest.hs | gpl-3.0 | 129 | 0 | 6 | 17 | 36 | 21 | 15 | 4 | 1 |
{-# OPTIONS_GHC -fno-warn-incomplete-patterns #-}
module Language.Bitcoin.Interpreter
-- export {{{1
(
run_interpreter, exec
) where
-- import {{{1
import Data.Bits (complement, (.|.), (.&.), xor)
import Data.Word (Word8)
import Data.Int (Int32)
import Control.Arrow ((***), Arrow)
import Language.Bitcoin.Types
import Language.Bitcoin.Utils (b2i, i2b, bsIsTrue)
import Language.Bitcoin.Text (print_result)
import qualified Data.ByteString as B
import qualified Data.List as List
run_interpreter :: Program -> Keyring -> Either String Result -- {{{1
run_interpreter program keyring =
case exec (Machine program keyring [] []) of
result@(Result (Error _) _) -> Left $ print_result result
result -> Right result
exec :: Machine -> Result
exec machine@(Machine [] _ stack _) =
if topIsTrue stack
then Result Success machine
else Result (Failure "top stack value is not True") machine
-- alt stack {{{2
exec machine@(Machine (OP_TOALTSTACK:_) _ [] _) =
Result (Error "OP_TOALTSTACK failed because the stack is empty") machine
exec (Machine (OP_TOALTSTACK:rest) keyring (top:rest') altStack) =
exec (Machine rest keyring rest' (top:altStack))
exec machine@(Machine (OP_FROMALTSTACK:_) _ _ []) =
Result (Error "OP_FROMALTSTACK failed because the alt stack is empty") machine
exec (Machine (OP_FROMALTSTACK:rest) keyring stack (top:rest')) =
exec (Machine rest keyring (top:stack) rest')
-- verify {{{2
exec machine@(Machine (OP_EQUALVERIFY:xs) _ _ _) =
exec $ machine { mchProgram = OP_EQUAL:OP_VERIFY:xs }
exec machine@(Machine (OP_NUMEQUALVERIFY:xs) _ _ _) =
exec $ machine { mchProgram = OP_NUMEQUAL:OP_VERIFY:xs }
exec machine@(Machine (OP_CHECKSIGVERIFY:xs) _ _ _) =
exec $ machine { mchProgram = OP_CHECKSIG:OP_VERIFY:xs }
exec machine@(Machine (OP_CHECKMULTISIGVERIFY:xs) _ _ _) =
exec $ machine { mchProgram = OP_CHECKMULTISIG:OP_VERIFY:xs }
-- flow control {{{2
exec machine@(Machine (OP_IF:_) _ _ _) = execIfBlock id machine
exec machine@(Machine (OP_NOTIF:_) _ _ _) = execIfBlock not machine
exec machine@(Machine (OP_ELSE:_) _ _ _) = Result (Error "OP_ELSE without if block") machine
exec machine@(Machine (OP_ENDIF:_) _ _ _) = Result (Error "OP_ENDIF without if block") machine
exec machine@(Machine (OP_NOP:xs) _ _ _) = exec (machine { mchProgram = xs })
exec machine@(Machine (OP_RETURN:_) _ _ _) = Result (Failure "script fails as requested by OP_RETURN.") machine
exec machine@(Machine (OP_VERIFY:xs) _ stack _) =
if topIsTrue stack
then exec (machine { mchProgram=xs, mchStack = tail stack } )
else Result (Failure "OP_VERIFY failed because top stack value is not True.") machine
-- simple ops {{{2
exec machine@(Machine (op:rest) keyring stack altStack) =
case simpleOp op stack of
Left code -> Result code machine
Right (stack') -> exec (Machine rest keyring stack' altStack)
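-- | Interpret an opcode that only manipulates the stack, yielding the new stack or an error.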
simpleOp :: Opcode -> Stack -> Either ResultCode Stack
-- constants -- {{{3
simpleOp OP_FALSE = pushOp (i2b 0)
simpleOp OP_TRUE = pushOp (i2b 1)
simpleOp OP_0 = pushOp (i2b 0)
simpleOp OP_1 = pushOp (i2b 1)
simpleOp OP_2 = pushOp (i2b 2)
simpleOp OP_3 = pushOp (i2b 3)
simpleOp OP_4 = pushOp (i2b 4)
simpleOp OP_5 = pushOp (i2b 5)
simpleOp OP_6 = pushOp (i2b 6)
simpleOp OP_7 = pushOp (i2b 7)
simpleOp OP_8 = pushOp (i2b 8)
simpleOp OP_9 = pushOp (i2b 9)
simpleOp OP_10 = pushOp (i2b 10)
simpleOp OP_11 = pushOp (i2b 11)
simpleOp OP_12 = pushOp (i2b 12)
simpleOp OP_13 = pushOp (i2b 13)
simpleOp OP_14 = pushOp (i2b 14)
simpleOp OP_15 = pushOp (i2b 15)
simpleOp OP_16 = pushOp (i2b 16)
-- stack -- {{{3
simpleOp OP_IFDUP = stackOp 1 (\(x:xs) -> if bsIsTrue x then x:x:xs else x:xs)
simpleOp OP_DEPTH = (\stack -> Right $ (i2b . fromIntegral . length) stack : stack)
simpleOp OP_DROP = stackOp 1 (\(_:xs) -> xs)
simpleOp OP_DUP = stackOp 1 (\(x:xs) -> x:x:xs)
simpleOp OP_NIP = stackOp 2 (\(x:_:xs) -> x:xs)
simpleOp OP_OVER = stackOp 2 (\(x1:x2:xs) -> x2:x1:x2:xs)
simpleOp OP_PICK = stackOp' 1 (\(x:xs) -> case b2i x of
Left e -> Left $ Error e
Right n -> let n' = fromIntegral n in
stackOp n' (\xs' -> head (take n' xs') : xs') xs)
simpleOp OP_ROLL = stackOp' 1 (\(x:xs) -> case b2i x of
Left e -> Left $ Error e
Right n -> let n' = fromIntegral n in
stackOp n' (\xs' -> take (n'-1) xs' ++ drop n' xs') xs)
simpleOp OP_ROT = stackOp 3 (\(x1:x2:x3:xs) -> x3:x1:x2:xs)
simpleOp OP_SWAP = stackOp 2 (\(x1:x2:xs) -> x2:x1:xs)
simpleOp OP_TUCK = stackOp 2 (\(x1:x2:xs) -> x1:x2:x1:xs)
simpleOp OP_2DROP = stackOp 2 (\(_:_:xs) -> xs)
simpleOp OP_2DUP = stackOp 2 (\(x1:x2:xs) -> x1:x2:x1:x2:xs)
simpleOp OP_3DUP = stackOp 3 (\(x1:x2:x3:xs) -> x1:x2:x3:x1:x2:x3:xs)
simpleOp OP_2OVER = stackOp 4 (\(x1:x2:x3:x4:xs) -> x3:x4:x1:x2:x3:x4:xs)
simpleOp OP_2ROT = stackOp 6 (\(x1:x2:x3:x4:x5:x6:xs) -> x5:x6:x1:x2:x3:x4:xs)
simpleOp OP_2SWAP = stackOp 4 (\(x1:x2:x3:x4:xs) -> x3:x4:x1:x2:xs)
-- splice -- {{{3
simpleOp OP_CAT = stackOp 2 (\(x1:x2:xs) -> (B.append x1 x2) : xs)
simpleOp OP_SUBSTR = stackOp' 3 (\(size:begin:bytes:xs) -> opSubstr (b2i size) (b2i begin) bytes xs)
where
opSubstr (Left e) _ _ _ = Left $ Error e
opSubstr _ (Left e) _ _ = Left $ Error e
opSubstr (Right size) (Right begin) bytes xs =
let (size', begin') = tmap fromIntegral (size, begin) in
if B.length bytes < begin' + size'
then Left $ Error "OP_SUBSTR goes beyond the end of the string"
else Right $ (B.take size' $ B.drop begin' bytes) : xs
simpleOp OP_LEFT = stackOp' 2 (\(size:bytes:xs) -> opLeft (b2i size) bytes xs)
where
opLeft (Left e) _ _ = Left $ Error e
opLeft (Right size) bytes xs =
let size' = fromIntegral size in
if B.length bytes < size'
then Left $ Error "OP_LEFT goes beyond the end of the string"
else Right $ (B.take size' bytes) : xs
simpleOp OP_RIGHT = stackOp' 2 (\(size:bytes:xs) -> opRight (b2i size) bytes xs)
where
opRight (Left e) _ _ = Left $ Error e
opRight (Right size) bytes xs =
let size' = fromIntegral size in
if B.length bytes < size'
then Left $ Error "OP_RIGHT goes beyond the end of the string"
else Right $ (B.drop size' bytes) : xs
simpleOp OP_SIZE = stackOp 1 (\(bytes:xs) -> (i2b . fromIntegral . B.length) bytes : xs)
-- Bitwise logic -- {{{3
simpleOp OP_INVERT = stackOp 1 (\(bytes:xs) -> B.map complement bytes : xs)
simpleOp OP_AND = binaryBitwiseOp (.&.)
simpleOp OP_OR = binaryBitwiseOp (.|.)
simpleOp OP_XOR = binaryBitwiseOp xor
simpleOp OP_EQUAL = stackOp 2 (\(x1:x2:xs) -> (if x1 == x2 then i2b 1 else i2b 0) : xs)
-- arithmetic -- {{{3
simpleOp OP_1ADD = unaryArithmeticOp (+1)
simpleOp OP_1SUB = unaryArithmeticOp (subtract 1)
simpleOp OP_2MUL = unaryArithmeticOp (*2)
simpleOp OP_2DIV = unaryArithmeticOp (`quot` 2)
simpleOp OP_NEGATE = unaryArithmeticOp (* (-1))
simpleOp OP_ABS = unaryArithmeticOp (\x -> if x >= 0 then x else -x)
simpleOp OP_NOT = unaryArithmeticOp (\x -> if x == 0 then 1 else 0)
simpleOp OP_0NOTEQUAL = unaryArithmeticOp (\x -> if x == 0 then 0 else 1)
simpleOp OP_ADD = binaryArithmeticOp (+)
simpleOp OP_SUB = binaryArithmeticOp (-)
simpleOp OP_MUL = binaryArithmeticOp (*)
simpleOp OP_DIV = binaryArithmeticOp quot
simpleOp OP_MOD = binaryArithmeticOp rem
simpleOp OP_LSHIFT = binaryArithmeticOp (\a b -> a * (2^b))
simpleOp OP_RSHIFT = binaryArithmeticOp (\a b -> a `quot` (2^b))
simpleOp OP_BOOLAND = binaryCondition (\a b -> a /= 0 && b /= 0)
simpleOp OP_BOOLOR = binaryCondition (\a b -> a /= 0 || b /= 0)
simpleOp OP_NUMEQUAL = binaryCondition (==)
simpleOp OP_NUMNOTEQUAL = binaryCondition (/=)
simpleOp OP_LESSTHAN = binaryCondition (<)
simpleOp OP_GREATERTHAN = binaryCondition (>)
simpleOp OP_LESSTHANOREQUAL = binaryCondition (<=)
simpleOp OP_GREATERTHANOREQUAL = binaryCondition (>=)
simpleOp OP_MIN = binaryArithmeticOp min
simpleOp OP_MAX = binaryArithmeticOp max
simpleOp OP_WITHIN = stackOp' 3 (\(x1:x2:x3:xs) ->
case (do n1 <- b2i x1; n2 <- b2i x2; n3 <- b2i x3; return (n1, n2, n3)) of
Left e -> Left $ Error e
Right (n1, n2, n3) -> Right $
i2b (if n1 >= n2 && n1 < n3 then 1 else 0) : xs)
-- crypto -- {{{3
simpleOp OP_RIPEMD160 = undefined
simpleOp OP_SHA1 = undefined
simpleOp OP_SHA256 = undefined
simpleOp OP_HASH160 = undefined
simpleOp OP_HASH256 = undefined
simpleOp OP_CODESEPARATOR = (\stack -> Right stack)
simpleOp OP_CHECKSIG = undefined
simpleOp OP_CHECKMULTISIG = undefined
-- pseude operations -- {{{3
simpleOp OP_PUBKEYHASH = pseudoOp OP_PUBKEYHASH
simpleOp OP_PUBKEY = pseudoOp OP_PUBKEY
simpleOp OP_INVALIDOPCODE = pseudoOp OP_INVALIDOPCODE
-- reserved operations -- {{{3
simpleOp OP_RESERVED = reservedOp OP_RESERVED
simpleOp OP_VER = reservedOp OP_VER
simpleOp OP_VERIF = reservedOp OP_VERIF
simpleOp OP_VERNOTIF = reservedOp OP_VERNOTIF
simpleOp OP_RESERVED1 = reservedOp OP_RESERVED1
simpleOp OP_RESERVED2 = reservedOp OP_RESERVED2
simpleOp OP_NOP1 = reservedOp OP_NOP1
simpleOp OP_NOP2 = reservedOp OP_NOP2
simpleOp OP_NOP3 = reservedOp OP_NOP3
simpleOp OP_NOP4 = reservedOp OP_NOP4
simpleOp OP_NOP5 = reservedOp OP_NOP5
simpleOp OP_NOP6 = reservedOp OP_NOP6
simpleOp OP_NOP7 = reservedOp OP_NOP7
simpleOp OP_NOP8 = reservedOp OP_NOP8
simpleOp OP_NOP9 = reservedOp OP_NOP9
simpleOp OP_NOP10 = reservedOp OP_NOP10
simpleOp (OP_PUSHDATA _ bytes) = pushOp bytes
simpleOp op = (\_ -> Left $ Error $ "sorry, opcode " ++ show op ++ " is not implemented yet.")
-- ops {{{2
pushOp :: B.ByteString -> Stack -> Either ResultCode Stack
pushOp x xs = Right $ x:xs
unaryArithmeticOp :: (Int32 -> Int32) -> Stack -> Either ResultCode Stack
unaryArithmeticOp operation = stackOp' 1 (\(x:xs) ->
case b2i x of
Left e -> Left $ Error e
Right n -> Right $ i2b (operation n) : xs)
binaryArithmeticOp :: (Int32 -> Int32 -> Int32) -> Stack -> Either ResultCode Stack
binaryArithmeticOp operation = stackOp' 2 (\(x1:x2:xs) ->
case (b2i x1, b2i x2) of
(Left e, _) -> Left $ Error e
(_, Left e) -> Left $ Error e
(Right n1, Right n2) -> Right $ i2b (operation n1 n2) : xs)
binaryCondition :: (Int32 -> Int32 -> Bool) -> Stack -> Either ResultCode Stack
binaryCondition condition = binaryArithmeticOp (\a b -> if condition a b then 1 else 0)
binaryBitwiseOp :: (Word8 -> Word8 -> Word8) -> Stack -> Either ResultCode Stack
binaryBitwiseOp byteOp = stackOp 2 (\(x1:x2:xs) ->
(B.pack $ map (uncurry byteOp) $ zip (B.unpack x1) (B.unpack x2)) : xs)
pseudoOp :: Opcode -> Stack -> Either ResultCode a
pseudoOp x _ = Left $ Error $ show x ++ " is a pseudo opcode. It can not be executed."
reservedOp :: Opcode -> Stack -> Either ResultCode a
reservedOp x _ = Left $ Error $ show x ++ " is a reserved opcode. It may not be used in scripts."
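-- | Run a stack transformation that needs at least 'count' elements;
-- 'stackOp'' additionally lets the transformation itself fail.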
stackOp :: Int -> (Stack -> Stack) -> Stack -> Either ResultCode Stack
stackOp count operation stack = stackOp' count (\stack' -> Right $ operation stack') stack
stackOp' :: Int -> (Stack -> Either ResultCode Stack) -> Stack -> Either ResultCode Stack
stackOp' count operation stack =
if length stack < count
then Left $ Error $ "operation failed because there are less than " ++ show count ++ " element(s) on the stack"
else operation stack
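-- | Handle OP_IF / OP_NOTIF: pop the condition from the stack and splice the taken branch back into the program.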
execIfBlock :: (Bool -> Bool) -> Machine -> Result
execIfBlock _ machine@(Machine _ _ [] _) =
Result (Error "operation failed because there is no element on the stack") machine
execIfBlock condOp machine@(Machine (_:xs) kr (y:ys) as) =
case inlineIfBlock condOp y xs of
Left rc -> Result rc machine
Right xs' -> exec (Machine xs' kr ys as)
inlineIfBlock :: (Bool -> Bool) -> B.ByteString -> Program -> Either ResultCode Program
inlineIfBlock condOp condition program =
let
    (ifblock, rest) = List.break isEndif program
    (ifPart, elsePart) = List.break isElse ifblock
in
if null rest
then Left (Error "OP_ENDIF is missing")
else let rest' = tail rest in
if condOp (bsIsTrue condition)
then Right $ (ifPart ++ rest')
else if not (null elsePart)
then Right $ (tail elsePart) ++ rest'
else Right $ rest'
isEndif :: Opcode -> Bool
isEndif OP_ENDIF = True
isEndif _ = False
isElse :: Opcode -> Bool
isElse OP_ELSE = True
isElse _ = False
-- utils {{{2
tmap :: Arrow a => a b c -> a (b, b) (c, c)
tmap f = f *** f
topIsTrue :: [B.ByteString] -> Bool
topIsTrue (x:_) = bsIsTrue x
topIsTrue _ = False
| copton/bitcoin-script-tools | src/Language/Bitcoin/Interpreter.hs | gpl-3.0 | 12,478 | 4 | 19 | 2,503 | 5,222 | 2,672 | 2,550 | 244 | 17 |
{-# LANGUAGE OverloadedStrings #-}
module ShellSpec (spec) where
import Control.Monad.Trans.Either
import qualified Data.Array as A
import Data.Monoid ((<>))
import Test.Hspec
import Shell
import Kiss
sampleDir :: [Char]
sampleDir = "tests/samples"
samplePalettes :: Palettes
samplePalettes = toArray ["color.kcf"]
where toArray l = A.listArray (0, length l) l
fakeCel :: CNFKissCel
fakeCel = CNFKissCel 0 "aurora" 0 [] 0
spec :: Spec
spec = do
describe "convertCel" $ do
it "converts a cel to png" $
runEitherT (convertCel samplePalettes 0 "aurora" (sampleDir <> "/aurora")) `shouldReturn` Right ("aurora", (0,0))
it "gives an error if the file isn't there" $
runEitherT (convertCel samplePalettes 0 "aurora.cel" (sampleDir <> "/aurora")) `shouldReturn`
Left "Error while converting cel tests/samples/aurora/aurora.cel.cel. Exit code: 1. Error: Read palette tests/samples/aurora/color.kcf \nNew style palette\nRead cel tests/samples/aurora/aurora.cel.cel \ntests/samples/aurora/aurora.cel.cel: No such file or directory\n"
describe "convertCels" $ do
it "finds the transparent color and converts all the cels" $
runEitherT (convertCels samplePalettes [fakeCel] (sampleDir <> "/aurora")) `shouldReturn` Right [("aurora", (0,0))]
it "returns an error if a cel is missing" $
runEitherT (convertCels samplePalettes [fakeCel { cnfCelName = "aurora.cel"}] (sampleDir <> "/aurora")) `shouldReturn`
Left "Error while converting cel tests/samples/aurora/aurora.cel.cel. Exit code: 1. Error: Read palette tests/samples/aurora/color.kcf \nNew style palette\nRead cel tests/samples/aurora/aurora.cel.cel \ntests/samples/aurora/aurora.cel.cel: No such file or directory\n"
describe "transColor" $ do
it "gets the transparent color from a palette" $
runEitherT (transColor (sampleDir <> "/aurora/color.kcf")) `shouldReturn` Right "rgb:ff/f7/ff"
it "returns an error if the palette isn't there" $
runEitherT (transColor "potato") `shouldReturn`
Left "Error while finding transparency color. Exit code: 1. Error: Read palette potato \npotato: No such file or directory\n"
describe "borderColor" $ do
it "gets the transparent color from a palette" $
runEitherT (colorByIndex 3 (sampleDir <> "/aurora/color.kcf")) `shouldReturn` Right "#ffff94"
it "returns an error if the palette isn't there" $
runEitherT (colorByIndex 15 "potato") `shouldReturn`
Left "Error while finding background color. Exit code: 1. Error: Read palette potato \npotato: No such file or directory\n"
| huggablemonad/smooch | app/tests/ShellSpec.hs | gpl-3.0 | 2,669 | 0 | 17 | 524 | 507 | 264 | 243 | 41 | 1 |
{-# LANGUAGE GADTs #-}
{-# LANGUAGE RecordWildCards #-}
module HEP.Physics.Analysis.Common.Merge where
import Control.Applicative
--
import HEP.Parser.LHCOAnalysis.PhysObj
--
tau2Jet :: PhyObj Tau -> PhyObj Jet
tau2Jet (ObjTau x _ _) = ObjJet x 1.777 1
bJet2Jet :: PhyObj BJet -> PhyObj Jet
bJet2Jet (ObjBJet x m n) = ObjJet x m n
-- | tau treated as jets
mergeTau :: PhyEventClassified -> PhyEventClassified
mergeTau ev@PhyEventClassified {..} =
ev { taulst = []
, jetlst = ptordering (jetlst ++ map ((,) <$> fst <*> tau2Jet.snd) taulst) }
mergeBJet :: PhyEventClassified -> PhyEventClassified
mergeBJet ev@PhyEventClassified {..} =
ev { jetlst = ptordering (jetlst ++ map ((,) <$> fst <*> bJet2Jet.snd) bjetlst)
, bjetlst = [] }
| wavewave/lhc-analysis-collection | lib/HEP/Physics/Analysis/Common/Merge.hs | gpl-3.0 | 765 | 0 | 14 | 147 | 252 | 138 | 114 | 17 | 1 |
-- HsParser: A Parsec builder, a toy for experimenting things:
-- 1) Generic parser to explore automated parser generation
-- 2) An email parsing tool
-- 3) Quantum Chemistry Basis sets parsing... (The Haskell HartreeFock Project)
-- @2013 Angel Alvarez, Felipe Zapata, from The ResMol Group
module Main where
import Data.Maybe ( fromMaybe )
import Control.Concurrent
import Control.Concurrent.Async
import Control.Monad.Trans.Either
import System.Environment ( getArgs )
import System.FilePath
import System.IO
import System.Cmd ( system )
import System.Console.GetOpt
-- Cabal imports
import Data.Version (showVersion)
import Distribution.Version
import Paths_HsParser as HsParser
import OptsCheck
import Tasks
import GenericParser
import BasisParser
import MolcasParser
-- import FastParser
program = "Universal Parser"
authors = "@2013 Angel Alvarez, Felipe Zapata"
-- default options
defaultOptions = Options
{ optDump = False
, optModules = [("generic",processGenericFiles),("basis",processBasisFiles),("molcas",processMolcasFiles)]
, optMode = Nothing
, optVerbose = False
, optShowVersion = False
, optOutput = Nothing
, optDataDir = Nothing
, optInput = []
}
-- currently supported options
acceptedOptions :: [OptsPolicy]
acceptedOptions =
[
Option ['h','?'] ["help"] (NoArg ( check_help )) "Show this help message."
, Option ['v'] ["verbose"] (NoArg ( check_verbosity )) "Verbose run on stderr"
, Option ['V'] ["Version"] (NoArg ( check_version )) "Show version number"
, Option ['D'] ["datadir"] (ReqArg ( check_data_dir ) "Dir") "Directory where files are located"
, Option ['m'] ["mode"] (ReqArg ( check_operation_mode ) "Mode") "Mode of Operation"
, Option [] ["dump"] (NoArg ( check_dump_options )) "Force args cmdline dump"
]
-- Option ['e'] ["error"] (NoArg (\ _opts -> return $ Left "forced error on args detected!")) "Force args checking error"
-- , Option ['i'] ["input"] (OptArg (\f opts -> check_input_file f opts) "FILE") "Input file"
main :: IO ()
main = do
args <- getArgs
cores <- getNumCapabilities
progHeader cores
result <- runEitherT $ progOpts args defaultOptions acceptedOptions
either somethingIsWrong doSomeStuff result
somethingIsWrong :: String -> IO ()
somethingIsWrong msg = do
putStrLn $ "\nError: " ++ msg ++ "\n"
putStrLn $ usageInfo header acceptedOptions
doSomeStuff :: Options -> IO ()
doSomeStuff optsR@Options { optMode = mode } = do
case mode of
Nothing -> printFiles optsR
Just fun -> fun optsR
-- Keep calm and curry on, we are the good guys....
progHeader :: Int -> IO ()
progHeader c =
putStrLn $ program ++ " V:" ++ currVersion ++ " " ++ authors ++ "\n\t" ++ show(c) ++ " processor " ++ (core2string c) ++ " detected."
where
currVersion :: String
currVersion = showVersion HsParser.version
core2string :: Int -> String
core2string c = case c > 1 of
True -> "cores"
False -> "core"
header :: String
header = "Usage: Options [OPTION...] files..."
-- | "Efects for dummies", this functions has no purpouses other than printng args
printFiles :: Options -> IO ()
printFiles opts@Options { optInput = files, optDataDir = datadir } = do
mapM_ printargs filepaths
where
dir = fromMaybe "" datadir
filepaths = zipWith (combine) (cycle [dir]) files
printargs :: String -> IO ()
printargs path = putStrLn $ "Processing path: " ++ path ++ "..."
processGenericFiles :: Options -> IO ()
processGenericFiles opts@Options { optInput = files, optDataDir = datadir } = do
mapM_ processGenericFile filepaths
where
dir = fromMaybe "" datadir
filepaths = zipWith (combine) (cycle [dir]) files
processBasisFiles :: Options -> IO ()
processBasisFiles opts@Options { optInput = files, optDataDir = datadir } = do
mapM_ processBasisFile filepaths
where
dir = fromMaybe "" datadir
filepaths = zipWith (combine) (cycle [dir]) files
processMolcasFiles :: Options -> IO ()
processMolcasFiles opts@Options { optInput = files, optDataDir = datadir } = do
mapM_ processMolcasOutputFile filepaths
where
dir = fromMaybe "" datadir
filepaths = zipWith (combine) (cycle [dir]) files
| AngelitoJ/HsParser | src/Main.hs | gpl-3.0 | 4,588 | 0 | 14 | 1,185 | 1,073 | 586 | 487 | 87 | 2 |
{-# LANGUAGE UnicodeSyntax, NoImplicitPrelude #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
module WithPlus
( WithPlus(..)
, fromList
, singleton
, toString
, parseString
) where
import BasePrelude hiding (toList, fromList, singleton)
import Prelude.Unicode
import Data.Monoid.Unicode ((∅))
import Util (HumanReadable, split)
import qualified Util as HR (HumanReadable(..))
import Data.Aeson (ToJSON, FromJSON, toJSON, parseJSON)
import Data.Aeson.Types (Value(String, Array), typeMismatch)
import Data.Foldable (toList)
import Data.Set (Set)
import qualified Data.Set as S (fromList, singleton)
import qualified Data.Text as T (pack, unpack)
newtype WithPlus α = WithPlus { getSet ∷ Set α }
deriving (Eq, Ord, Show, Read, Foldable, Semigroup, Monoid)
fromList ∷ Ord α ⇒ [α] → WithPlus α
fromList = WithPlus ∘ S.fromList
singleton ∷ α → WithPlus α
singleton = WithPlus ∘ S.singleton
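-- | Render the set as a '+'-separated list, or "None" when it is empty.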
toString ∷ HumanReadable α ⇒ WithPlus α → String
toString xs
| null xs = "None"
| otherwise = (intercalate "+" ∘ map HR.toString ∘ toList) xs
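-- | Parse a '+'-separated list; an empty string or "none" (case-insensitive) gives the empty set.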
parseString ∷ (Ord α, HumanReadable α, MonadFail m) ⇒ String → m (WithPlus α)
parseString s
| null s = pure (∅)
| map toLower s ≡ "none" = pure (∅)
| otherwise = fromList <$> traverse HR.parseString (toList (split (≡'+') s))
instance HumanReadable α ⇒ ToJSON (WithPlus α) where
toJSON = String ∘ T.pack ∘ toString
instance (Ord α, HumanReadable α) ⇒ FromJSON (WithPlus α) where
parseJSON (String s) = parseString (T.unpack s)
parseJSON (Array a) = fromList <$> traverse HR.hrParseJSON (toList a)
parseJSON v = typeMismatch "String or Array" v
| 39aldo39/klfc | src/WithPlus.hs | gpl-3.0 | 1,721 | 0 | 12 | 322 | 604 | 331 | 273 | 40 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.Compute.TargetPools.RemoveInstance
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Removes instance URL from a target pool.
--
-- /See:/ <https://developers.google.com/compute/docs/reference/latest/ Compute Engine API Reference> for @compute.targetPools.removeInstance@.
module Network.Google.Resource.Compute.TargetPools.RemoveInstance
(
-- * REST Resource
TargetPoolsRemoveInstanceResource
-- * Creating a Request
, targetPoolsRemoveInstance
, TargetPoolsRemoveInstance
-- * Request Lenses
, tpriProject
, tpriTargetPool
, tpriPayload
, tpriRegion
) where
import Network.Google.Compute.Types
import Network.Google.Prelude
-- | A resource alias for @compute.targetPools.removeInstance@ method which the
-- 'TargetPoolsRemoveInstance' request conforms to.
type TargetPoolsRemoveInstanceResource =
"compute" :>
"v1" :>
"projects" :>
Capture "project" Text :>
"regions" :>
Capture "region" Text :>
"targetPools" :>
Capture "targetPool" Text :>
"removeInstance" :>
QueryParam "alt" AltJSON :>
ReqBody '[JSON] TargetPoolsRemoveInstanceRequest :>
Post '[JSON] Operation
-- | Removes instance URL from a target pool.
--
-- /See:/ 'targetPoolsRemoveInstance' smart constructor.
data TargetPoolsRemoveInstance = TargetPoolsRemoveInstance'
{ _tpriProject :: !Text
, _tpriTargetPool :: !Text
, _tpriPayload :: !TargetPoolsRemoveInstanceRequest
, _tpriRegion :: !Text
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'TargetPoolsRemoveInstance' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'tpriProject'
--
-- * 'tpriTargetPool'
--
-- * 'tpriPayload'
--
-- * 'tpriRegion'
targetPoolsRemoveInstance
:: Text -- ^ 'tpriProject'
-> Text -- ^ 'tpriTargetPool'
-> TargetPoolsRemoveInstanceRequest -- ^ 'tpriPayload'
-> Text -- ^ 'tpriRegion'
-> TargetPoolsRemoveInstance
targetPoolsRemoveInstance pTpriProject_ pTpriTargetPool_ pTpriPayload_ pTpriRegion_ =
TargetPoolsRemoveInstance'
{ _tpriProject = pTpriProject_
, _tpriTargetPool = pTpriTargetPool_
, _tpriPayload = pTpriPayload_
, _tpriRegion = pTpriRegion_
}
-- | Project ID for this request.
tpriProject :: Lens' TargetPoolsRemoveInstance Text
tpriProject
= lens _tpriProject (\ s a -> s{_tpriProject = a})
-- | Name of the TargetPool resource to remove instances from.
tpriTargetPool :: Lens' TargetPoolsRemoveInstance Text
tpriTargetPool
= lens _tpriTargetPool
(\ s a -> s{_tpriTargetPool = a})
-- | Multipart request metadata.
tpriPayload :: Lens' TargetPoolsRemoveInstance TargetPoolsRemoveInstanceRequest
tpriPayload
= lens _tpriPayload (\ s a -> s{_tpriPayload = a})
-- | Name of the region scoping this request.
tpriRegion :: Lens' TargetPoolsRemoveInstance Text
tpriRegion
= lens _tpriRegion (\ s a -> s{_tpriRegion = a})
instance GoogleRequest TargetPoolsRemoveInstance
where
type Rs TargetPoolsRemoveInstance = Operation
type Scopes TargetPoolsRemoveInstance =
'["https://www.googleapis.com/auth/cloud-platform",
"https://www.googleapis.com/auth/compute"]
requestClient TargetPoolsRemoveInstance'{..}
= go _tpriProject _tpriRegion _tpriTargetPool
(Just AltJSON)
_tpriPayload
computeService
where go
= buildClient
(Proxy :: Proxy TargetPoolsRemoveInstanceResource)
mempty
| rueshyna/gogol | gogol-compute/gen/Network/Google/Resource/Compute/TargetPools/RemoveInstance.hs | mpl-2.0 | 4,472 | 0 | 18 | 1,055 | 547 | 324 | 223 | 89 | 1 |
{-# LANGUAGE OverloadedStrings, TupleSections #-}
module Main where
import Control.Applicative hiding ((<|>))
import Control.Monad
import Prelude hiding (interact)
import Data.Default
import Data.List
import Data.Maybe
import qualified Data.Text.Lazy as LT
import Data.ByteString.Lazy.Char8 (interact)
import Text.Parsec
import Text.Parsec.ByteString.Lazy
import Text.ICalendar
import qualified Data.Map as Map
import qualified Data.Set as Set
import Data.Time.Clock (UTCTime(..), DiffTime, secondsToDiffTime)
import Data.Time.Calendar (fromGregorian)
import Data.Time.LocalTime
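-- | Parser for remind-style REM lines (date, optional AT time and DURATION, and a MSG text),
-- producing one VEvent per reminder.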
remind :: Parser VCalendar
remind = do
es <- events
return $ def {
vcEvents = Map.fromList $ do
(n, event) <- zip [0..] es
let ident = (LT.pack $ show n, Nothing)
return (ident, event { veUID = UID (LT.pack $ show n) def
})
}
where
events :: Parser [VEvent]
events = (whitespace *> events)
<|>
(liftA2 (:) reminder events)
<|>
(ignored *> events)
<|>
(eof *> pure [])
whitespace = skipMany1 $ oneOf " \t\r"
ignored = do
string "INCLUDE "
skipMany (noneOf "\n")
char '\n'
reminder = do
string "REM"
whitespace
(mYear, month, day) <- date
mStartTime <- optionMaybe $ try $ do
whitespace
string "AT "
(h, m) <- time
return $ (h, m)
let startDay =
fromGregorian (fromMaybe 2000 mYear) (month + 1) day
stampTime =
secondsToDiffTime $
case mStartTime of
Nothing -> 0
Just (h, m) -> fromIntegral $ 60 * h + m
stamp :: DTStamp
stamp =
DTStamp (UTCTime startDay stampTime) def
start :: DTStart
start =
case mStartTime of
Nothing ->
DTStartDate (Date startDay) def
Just (h, m) ->
let time = TimeOfDay h m 0
in DTStartDateTime (FloatingDateTime $ LocalTime startDay time) def
recurs = case mYear of
Just _ -> Set.empty
Nothing -> Set.singleton $
RRule (Recur { recurFreq = Yearly
, recurUntilCount = Nothing
, recurInterval = 1
, recurBySecond = []
, recurByMinute = []
, recurByHour = []
, recurByDay = []
, recurByMonthDay = []
, recurByYearDay = []
, recurByWeekNo = []
, recurByMonth = []
, recurBySetPos = []
, recurWkSt = Monday
}) def
mDuration <- optionMaybe $ try $ do
whitespace
string "DURATION"
whitespace
(h, m) <- time
return $ Right $ DurationProp (DurationTime def h m 0) def
whitespace
string "MSG "
msg <- manyTill anyChar $ char '\n'
return $
VEvent { veDTStamp = stamp
, veUID = UID "" def
, veClass = def
, veDTStart = Just start
, veCreated = Nothing
, veDescription = Nothing -- Just $ Description (LT.pack msg) Nothing Nothing def
, veGeo = Nothing
, veLastMod = Nothing
, veLocation = Nothing
, veOrganizer = Nothing
, vePriority = def
, veSeq = def
, veStatus = Nothing
, veSummary = Just $ Summary (LT.pack msg) Nothing Nothing def
, veTransp = def
, veUrl = Nothing
, veRecurId = Nothing -- Maybe RecurrenceId
, veRRule = recurs
, veDTEndDuration = mDuration
, veAttach = mempty
, veAttendee = mempty
, veCategories = mempty
, veComment = mempty
, veContact = mempty
, veExDate = mempty
, veRStatus = mempty
, veRelated = mempty
, veResources = mempty
, veRDate = mempty
, veAlarms = mempty
, veOther = mempty
}
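    -- A date is a day, a month name and an optional year, accepted in any of the orders remind allows.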
date =
(try $ do
y <- Just <$> year
whitespace
m <- month
whitespace
d <- day
return (y, m, d)
) <|>
(try $ do
m <- month
whitespace
d <- day
whitespace
y <- Just <$> year
return (y, m, d)
) <|>
(try $ do
d <- day
whitespace
m <- month
whitespace
y <- Just <$> year
return (y, m, d)
) <|>
(try $ liftA3 (,,) (Just <$> year) (whitespace *> day) (whitespace *> month)) <|>
(try $ liftA2 (Nothing,,) month (whitespace *> day)) <|>
(try $ liftA2 (flip (Nothing,,)) day (whitespace *> month))
year =
read <$> forM [1..4] (const $ digit)
day =
read <$> many1 digit
month =
fromMaybe undefined .
(`elemIndex` months) <$>
foldl (\m n -> m <|> try (string n)) (try $ string $ head months) (tail months)
where months = [ "Jan", "Feb", "Mar"
, "Apr", "May", "Jun"
, "Jul" , "Aug", "Sep"
, "Oct" , "Nov", "Dec"
]
time :: Parser (Int, Int)
time = do
hour <- read <$> many1 digit
char ':'
min <- read <$> many1 digit
return (hour, min)
main =
interact $ \input ->
printICalendar def $
case parse remind "<stdin>" input of
Left e -> error $ show e
Right a -> a
| astro/remind2ics | src/Main.hs | agpl-3.0 | 6,367 | 0 | 20 | 3,034 | 1,648 | 896 | 752 | 176 | 4 |
{-# LANGUAGE OverloadedStrings #-}
{-
Copyright 2019 The CodeWorld Authors. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-}
import Data.Text (Text)
import RegexShim
import Test.Framework (Test, defaultMain, testGroup)
import Test.HUnit hiding (Test)
import Test.Framework.Providers.HUnit (testCase)
main :: IO ()
main = defaultMain [allTests]
allTests :: Test
allTests = testGroup "RegexShim"
[
testCase "replaces groups" $ testReplacesGroups,
testCase "replaces multiple occurrences" $ testReplacesMultiGroups
]
testReplacesGroups :: Assertion
testReplacesGroups = do
let result = replace "a(b*)c(d*)e" "x\\2y\\1z" "abbbcdddde"
assertEqual "result" "xddddybbbz" result
testReplacesMultiGroups :: Assertion
testReplacesMultiGroups = do
let result = replace "a(b*)c(d*)e" "x\\1y\\2y\\1z" "abbbcddde"
assertEqual "result" "xbbbydddybbbz" result
| alphalambda/codeworld | codeworld-error-sanitizer/test/Main.hs | apache-2.0 | 1,410 | 0 | 10 | 244 | 205 | 101 | 104 | 21 | 1 |
{-# LANGUAGE TypeOperators #-}
------------------------------------------------------------------------------
module OpenArms.App where
------------------------------------------------------------------------------
import Control.Monad.Reader
import Servant
import Network.Wai
import Control.Monad.Trans.Either
------------------------------------------------------------------------------
import OpenArms.Config
import OpenArms.Core
import OpenArms.API
------------------------------------------------------------------------------
-- | Application
app :: AppConfig -> Application
app cfg = serve (Proxy :: Proxy OpenArmsAPI) server
where
server :: Server OpenArmsAPI
server = enter runV handlers
runV :: OpenArms :~> EitherT ServantErr IO
runV = Nat $ bimapEitherT toErr id . flip runReaderT cfg . runOpenArms
toErr :: String -> ServantErr
toErr = undefined
handlers :: ServerT OpenArmsAPI OpenArms
handlers = apiEndpoints
| dmjio/openarms | src/OpenArms/App.hs | bsd-2-clause | 969 | 0 | 10 | 132 | 169 | 95 | 74 | 19 | 1 |
{-# LANGUAGE DataKinds, RecordWildCards, TypeOperators #-}
module Sprockell where
import CLaSH.Prelude
{-------------------------------------------------------------
| SPROCKELL: Simple PROCessor in hasKELL :-)
|
| [email protected]
| October 28, 2012
-------------------------------------------------------------}
-- Types
type Word = Signed 16
type RegBankSize = 8
type ProgMemSize = 128
type DataMemSize = 128
type RegBank = Vec RegBankSize Word
type ProgMem = Vec ProgMemSize Assembly
type DataMem = Vec DataMemSize Word
type RegBankAddr = Unsigned 3
type ProgMemAddr = Unsigned 7
type DataMemAddr = Unsigned 7
-- value to be put in Register Bank
data RegValue = RAddr DataMemAddr
| RImm Word
deriving (Eq,Show)
-- value to be put in data memory
data MemValue = MAddr RegBankAddr
| MImm Word
deriving (Eq,Show)
data LdCode = NoLoad
| LdImm
| LdAddr
| LdAlu
deriving (Eq,Show)
data StCode = NoStore
| StImm
| StReg
deriving (Eq,Show)
data SPCode = None
| Up
| Down
deriving (Eq,Show)
data JmpCode = NoJump -- No jump
| UA -- UnConditional - Absolute
| UR -- UnConditional - Relative
| CA -- Conditional - Absolute
| CR -- Conditional - Relative
| Back -- Back from subroutine
deriving (Eq,Show)
data MachCode = MachCode { ldCode :: LdCode -- 0/1: load from dmem to rbank?
, stCode :: StCode -- storeCode
, spCode :: SPCode
, opCode :: OpCode -- opCode
, immvalueR :: Word -- value from Immediate - to regbank
, immvalueS :: Word -- value from Immediate - to store
, fromreg0 :: RegBankAddr -- ibid, first parameter of Compute
, fromreg1 :: RegBankAddr -- ibid, second parameter of Compute
, fromaddr :: DataMemAddr -- address in dmem
, toreg :: RegBankAddr -- ibid, third parameter of Compute
, toaddr :: DataMemAddr -- address in dmem
, wen :: Bool -- enable signal for store
, jmpCode :: JmpCode -- 0/1: indicates a jump
, jumpN :: ProgMemAddr -- which instruction to jump to
}
deriving (Eq,Show)
data OpCode = NoOp | Id | Incr | Decr -- no corresponding functions in prog.language
| Neg | Not -- unary operations
| Add | Sub | Mul | Equal | NEq | Gt | Lt | And | Or -- binary operations
deriving (Eq,Show)
data Assembly = Compute OpCode RegBankAddr RegBankAddr RegBankAddr -- Compute opCode r0 r1 r2: go to "alu",
-- do "opCode" on regs r0, r1, and put result in reg r2
| Jump JmpCode ProgMemAddr -- JumpAbs n: set program counter to n
| Load RegValue RegBankAddr -- Load (Addr a) r : from "memory a" to "regbank r"
-- Load (Imm v) r : put "Int v" in "regbank r"
| Store MemValue DataMemAddr -- Store (Addr r) a: from "regbank r" to "memory a"
-- Store (Imm v) r: put "Int v" in "memory r"
| Push RegBankAddr -- push a value on the stack
| Pop RegBankAddr -- pop a value from the stack
| EndProg -- end of program, handled bij exec function
| Debug Word
deriving (Eq,Show)
--record type for internal state of processor
data PState = PState { regbank :: RegBank -- register bank
, dmem :: DataMem -- main memory, data memory
, cnd :: Bool -- condition register (whether condition was true)
, pc :: ProgMemAddr
, sp :: DataMemAddr
}
deriving (Eq, Show)
-- move reg0 reg1 = Compute Id reg0 zeroreg reg1
-- wait = Jump UR 0
nullcode = MachCode { ldCode = NoLoad
, stCode = NoStore
, spCode = None
, opCode = NoOp
, immvalueR = 0
, immvalueS = 0
, fromreg0 = 0
, fromreg1 = 0
, fromaddr = 0
, toreg = 0
, toaddr = 0
, wen = False
, jmpCode = NoJump
, jumpN = 0
}
-- {-------------------------------------------------------------
-- | some constants
-- -------------------------------------------------------------}
-- zeroreg = 0 :: RegBankAddr
-- regA = 1 :: RegBankAddr
-- regB = 2 :: RegBankAddr
-- endreg = 3 :: RegBankAddr -- for FOR-loop
-- stepreg = 4 :: RegBankAddr -- ibid
jmpreg = 5 :: RegBankAddr -- for jump instructions
-- pcreg = 7 :: RegBankAddr -- pc is added at the end of the regbank => regbank0
-- sp0 = 20 :: DataMemAddr -- TODO: get sp0 from compiler, add OS
tobit True = 1
tobit False = 0
oddB = (== 1) . lsb
-- wmax :: Word -> Word -> Word
-- wmax w1 w2 = if w1 > w2 then w1 else w2
-- (<~) :: RegBank -> (RegBankAddr, Word) -> RegBank
-- xs <~ (0, x) = xs
-- xs <~ (7, x) = xs
-- xs <~ (i, x) = xs'
-- where
-- addr = i
-- xs' = vreplace xs (fromUnsigned addr) x
-- (<~~) :: DataMem -> (Bool, DataMemAddr, Word) -> DataMem
-- xs <~~ (False, i, x) = xs
-- xs <~~ (True, i , x) = vreplace xs i x
{-------------------------------------------------------------
| The actual Sprockell
-------------------------------------------------------------}
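-- Decode one assembly instruction into its control signals (MachCode), given the current pc and sp.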
decode :: (ProgMemAddr, DataMemAddr) -> Assembly -> MachCode
decode (pc, sp) instr = case instr of
Compute c i0 i1 i2 -> nullcode {ldCode = LdAlu, opCode = c, fromreg0 = i0, fromreg1=i1, toreg=i2}
Jump jc n -> nullcode {jmpCode = jc, fromreg0 = jmpreg, jumpN = n}
Load (RImm n) j -> nullcode {ldCode = LdImm, immvalueR = n, toreg = j}
Load (RAddr i) j -> nullcode {ldCode = LdAddr, fromaddr = i, toreg = j}
Store (MAddr i) j -> nullcode {stCode = StReg, fromreg0 = i, toaddr = j, wen = True}
Store (MImm n) j -> nullcode {stCode = StImm, immvalueS = n, toaddr = j, wen = True}
Push r -> nullcode {stCode = StReg, fromreg0 = r, toaddr = sp + 1, spCode = Up, wen = True}
Pop r -> nullcode {ldCode = LdAddr, fromaddr = sp, toreg = r, spCode = Down}
EndProg -> nullcode
Debug _ -> nullcode
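-- The ALU: apply the operation to both operands; the condition flag is set when the result is odd.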
alu :: OpCode -> (Word, Word) -> (Word, Bool)
alu opCode (x, y) = (z, cnd)
where
(z, cnd) = (app opCode x y, oddB z)
app opCode = case opCode of
Id -> \x y -> x -- identity function on first argument
Incr -> \x y -> x + 1 -- increment first argument with 1
Decr -> \x y -> x - 1 -- decrement first argument with 1
Neg -> \x y -> -x
Add -> (+) -- goes without saying
Sub -> (-)
Mul -> (*)
Equal -> (tobit.).(==) -- test for equality; result 0 or 1
NEq -> (tobit.).(/=) -- test for inequality
Gt -> (tobit.).(>)
Lt -> (tobit.).(<)
And -> (*)
Or -> \x y -> 0
Not -> \x y -> 1-x
NoOp -> \x y -> 0 -- result will always be 0
-- load :: RegBank -> LdCode -> RegBankAddr -> (Word, Word, Word) -> RegBank
-- load regbank ldCode toreg (immvalueR, mval, z) = regbank'
-- where
-- v = case ldCode of
-- NoLoad -> 0
-- LdImm -> immvalueR
-- LdAddr -> mval
-- LdAlu -> z
-- regbank' = regbank <~ (toreg, v)
-- store :: DataMem -> StCode -> (Bool, DataMemAddr) -> (Word, Word) -> DataMem
-- store dmem stCode (wen, toaddr) (immvalueS, x) = dmem'
-- where
-- v = case stCode of
-- NoStore -> 0
-- StImm -> immvalueS
-- StReg -> x
-- dmem' = dmem <~~ (wen, toaddr, v)
-- pcUpd :: (JmpCode, Bool) -> (ProgMemAddr, ProgMemAddr, Word) -> ProgMemAddr
-- pcUpd (jmpCode, cnd) (pc, jumpN, x) = pc'
-- where
-- pc' = case jmpCode of
-- NoJump -> inc pc
-- UA -> jumpN
-- UR -> pc + jumpN
-- CA -> if cnd then jumpN else inc pc
-- CR -> if cnd then pc + jumpN else inc pc
-- Back -> bv2u (vdrop d9 (s2bv x))
-- inc i = i + 1
-- spUpd :: SPCode -> DataMemAddr -> DataMemAddr
-- spUpd spCode sp = case spCode of
-- Up -> sp + 1
-- Down -> sp - 1
-- None -> sp
-- -- ======================================================================================
-- -- Putting it all together
-- sprockell :: ProgMem -> (State PState) -> Bit -> (State PState, Bit)
-- sprockell prog (State state) inp = (State (PState {dmem = dmem',regbank = regbank',cnd = cnd',pc = pc',sp = sp'}), outp)
-- where
-- PState{..} = state
-- MachCode{..} = decode (pc,sp) (prog ! (fromUnsigned pc))
-- regbank0 = vreplace regbank (fromUnsigned pcreg) (pc2wrd pc)
-- (x,y) = (regbank0 ! (fromUnsigned fromreg0) , regbank0 ! (fromUnsigned fromreg1))
-- mval = dmem ! fromaddr
-- (z,cnd') = alu opCode (x,y)
-- regbank' = load regbank ldCode toreg (immvalueR,mval,z)
-- dmem' = store dmem stCode (wen,toaddr) (immvalueS,x)
-- pc' = pcUpd (jmpCode,cnd) (pc,jumpN,x)
-- sp' = spUpd spCode sp
-- outp = inp
-- pc2wrd pca = bv2s (u2bv (resizeUnsigned pca :: Unsigned 16))
-- prog1 = vcopy EndProg
-- initstate = PState {
-- regbank = vcopy 0,
-- dmem = vcopy 0,
-- cnd = False,
-- pc = 0,
-- sp = sp0
-- }
-- sprockellL = sprockell prog1 ^^^ initstate
topEntity = alu
| christiaanb/clash-compiler | examples/Sprockell.hs | bsd-2-clause | 11,221 | 4 | 11 | 4,775 | 1,461 | 919 | 542 | 123 | 15 |
-- 161667
import Data.List(sort, group)
nn = 1500000
-- generate all primitive pythagorean triples w/ Euclid's formula
-- a = m^2 - n^2, b = 2mn, c = m^2 + n^2
-- m - n is odd and m and n are coprime
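-- e.g. m=2, n=1 gives the (3,4,5) triple with perimeter p = 2*m*(m+n) = 12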
genTri x m n
| n >= m = genTri x (m+1) 1 -- invalid pair, next m
| n == 1 && p > x = [] -- perimeter too big, done
| p > x = genTri x (m+1) 1 -- perimeter too big, next m
| even (m-n) = genTri x m (n+1) -- m-n must be odd, next n
| gcd m n /= 1 = genTri x m (n+2) -- must be coprime, next n
| otherwise = p : genTri x m (n+2) -- keep, next n
where p = 2*m*(m+n)
-- generate all pythagorean triples by multiplying by constant factors
-- count how many of each there are and keep only the perimeters that occur exactly once
countTri p = length $ filter (==1) $ map length $ group $ sort $
concatMap (\x -> takeWhile (p>=) $ map (x*) [1..]) $ genTri p 1 1
main = putStrLn $ show $ countTri nn
| higgsd/euler | hs/75.hs | bsd-2-clause | 947 | 8 | 12 | 281 | 364 | 177 | 187 | 13 | 1 |
{-# LANGUAGE TypeSynonymInstances, TypeOperators, FlexibleInstances,
StandaloneDeriving, DeriveFunctor, DeriveFoldable,
DeriveTraversable #-}
-----------------------------------------------------------------------------
-- |
-- Module : Data.Xournal.Select
-- Copyright : (c) 2011, 2012 Ian-Woo Kim
--
-- License : BSD3
-- Maintainer : Ian-Woo Kim <[email protected]>
-- Stability : experimental
-- Portability : GHC
--
-- representing selection of xournal type
--
-----------------------------------------------------------------------------
module Data.Xournal.Select where
import Control.Applicative hiding (empty)
import Control.Compose
import Data.Foldable
import Data.Monoid
import Data.Sequence
import Data.Traversable
-- from this package
import Data.Xournal.Generic
--
import Prelude hiding (zipWith, length, splitAt)
-- | A sequence with a focused element: the current value together with the elements to its left and right.
newtype SeqZipper a = SZ { unSZ :: (a, (Seq a,Seq a)) }
-- |
deriving instance Functor SeqZipper
-- |
deriving instance Foldable SeqZipper
-- |
instance Applicative SeqZipper where
pure = singletonSZ
SZ (f,(f1s,f2s)) <*> SZ (x,(y1s,y2s)) = SZ (f x, (zipWith id f1s y1s, zipWith id f2s y2s))
-- |
deriving instance Traversable SeqZipper
-- |
singletonSZ :: a -> SeqZipper a
singletonSZ x = SZ (x, (empty,empty))
-- |
lengthSZ :: SeqZipper a -> Int
lengthSZ (SZ (_x, (x1s,x2s))) = length x1s + length x2s + 1
-- |
currIndex :: SeqZipper a -> Int
currIndex (SZ (_x, (x1s,_x2s))) = length x1s
-- | Append an element at the end and make it the current one.
appendGoLast :: SeqZipper a -> a -> SeqZipper a
appendGoLast (SZ (y,(y1s,y2s))) x = SZ (x, ((y1s |> y) >< y2s, empty))
-- | Drop the leftmost element; Nothing when it was the only element.
chopFirst :: SeqZipper a -> Maybe (SeqZipper a)
chopFirst (SZ (y,(y1s,y2s))) =
case viewl y1s of
EmptyL -> case viewl y2s of
EmptyL -> Nothing
z :< zs -> Just (SZ (z,(empty,zs)))
_z :< zs -> Just (SZ (y,(zs,y2s)))
-- |
moveLeft :: SeqZipper a -> Maybe (SeqZipper a)
moveLeft (SZ (x,(x1s,x2s))) =
case viewr x1s of
EmptyR -> Nothing
zs :> z -> Just (SZ (z,(zs,x<|x2s)))
-- |
moveRight :: SeqZipper a -> Maybe (SeqZipper a)
moveRight (SZ (x,(x1s,x2s))) =
case viewl x2s of
EmptyL -> Nothing
z :< zs -> Just (SZ (z,(x1s|>x,zs)))
-- | Move the focus to the given position (0-based); Nothing if it is out of range.
moveTo :: Int -> SeqZipper a -> Maybe (SeqZipper a)
moveTo n orig@(SZ (x,(x1s,x2s))) =
let n_x1s = length x1s
n_x2s = length x2s
res | n < 0 || n > n_x1s + n_x2s = Nothing
| n == n_x1s = Just orig
| n < n_x1s = let (x1s1, x1s2) = splitAt n x1s
el :< rm = viewl x1s2
in Just (SZ (el, (x1s1,(rm |> x) >< x2s)))
| n > n_x1s = let (x2s1,x2s2) = splitAt (n-n_x1s-1) x2s
el :< rm = viewl x2s2
in Just (SZ (el, ((x1s |> x) >< x2s1, rm)))
| otherwise = error "error in moveTo"
in res
-- |
goFirst :: SeqZipper a -> SeqZipper a
goFirst orig@(SZ (x,(x1s,x2s))) =
case viewl x1s of
EmptyL -> orig
z :< zs -> SZ (z,(empty, zs `mappend` (x <| x2s)))
-- |
goLast :: SeqZipper a -> SeqZipper a
goLast orig@(SZ (x,(x1s,x2s))) =
case viewr x2s of
EmptyR -> orig
zs :> z -> SZ (z,((x1s |> x) `mappend` zs , empty))
-- |
current :: SeqZipper a -> a
current (SZ (x,(_,_))) = x
-- |
prev :: SeqZipper a -> Maybe a
prev = fmap current . moveLeft
-- |
next :: SeqZipper a -> Maybe a
next = fmap current . moveRight
-- |
replace :: a -> SeqZipper a -> SeqZipper a
replace y (SZ (_x,zs)) = SZ (y,zs)
-- | Delete the focused element, focusing a neighbour; Nothing if it was the only element.
deleteCurrent :: SeqZipper a -> Maybe (SeqZipper a)
deleteCurrent (SZ (_,(xs,ys))) =
case viewl ys of
EmptyL -> case viewr xs of
EmptyR -> Nothing
zs :> z -> Just (SZ (z,(zs,ys)))
z :< zs -> Just (SZ (z,(xs,zs)))
-- | Either a plain list with nothing selected, or a zipper with a currently selected element.
data ZipperSelect a = NoSelect { allelems :: [a] }
| Select { zipper :: (Maybe :. SeqZipper) a }
-- |
deriving instance Functor ZipperSelect
-- | Focus the first element of the selection; an empty selection becomes @NoSelect []@.
selectFirst :: ZipperSelect a -> ZipperSelect a
selectFirst (NoSelect []) = NoSelect []
selectFirst (NoSelect lst@(_:_)) = Select . gFromList $ lst
selectFirst (Select (O Nothing)) = NoSelect []
selectFirst (Select (O msz)) = Select . O $ return . goFirst =<< msz
-- |
instance GListable (Maybe :. SeqZipper) where
gFromList [] = O Nothing
gFromList (x:xs) = O (Just (SZ (x, (empty,fromList xs))))
gToList (O Nothing) = []
gToList (O (Just (SZ (x,(xs,ys))))) = toList xs ++ (x : toList ys)
-- |
instance GListable ZipperSelect where
gFromList xs = NoSelect xs
gToList (NoSelect xs) = xs
gToList (Select xs) = gToList xs
-- |
deriving instance Foldable ZipperSelect
-- |
deriving instance Traversable ZipperSelect
| wavewave/xournal-types | src/Data/Xournal/Select.hs | bsd-2-clause | 4,701 | 0 | 19 | 1,194 | 1,935 | 1,041 | 894 | 102 | 3 |
module Day14_2 where
import Data.List
import Data.List.Split
type DeerInfo = (Int, String, Int, Int, Int)
main :: IO ()
main = do
f <- readFile "input.txt"
let deers = map parse $ map (splitOn " ") (lines f)
distAt = [distanceAtTime d 1 | d <- deers]
score = foldl calc deers [1..2503]
winner = head ((reverse . sort) score)
putStrLn $ "Optimal: " ++ show score
putStrLn (show winner)
putStrLn $ "Dist: " ++ (show distAt)
calc :: [DeerInfo] -> Int -> [DeerInfo]
calc deers sec = foldr incWinner deers winners
where winners = getWinners deers sec
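-- | Names of the deer(s) that have travelled the farthest at the given second.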
getWinners :: [DeerInfo] -> Int -> [String]
getWinners deers sec = map snd $ takeWhile (\x -> fst x == high) sorted
where mapped = [(distanceAtTime d sec, name) | d@(_, name, _, _, _) <- deers]
sorted = reverse $ sort mapped
high = fst $ head sorted
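-- | Add one point to the deer with the given name.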
incWinner :: String -> [DeerInfo] -> [DeerInfo]
incWinner name (x@(points, namex, a, b, c):xs)
| name == namex = (points+1, namex, a, b, c) : xs
| otherwise = (x:incWinner name xs)
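-- | Parse one line of the form
-- "<name> can fly <speed> km/s for <time> seconds, but then must rest for <rest> seconds."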
parse :: [String] -> DeerInfo
parse [name, _can, _fly, speed, _kms, _for, time, _seconds,
_but, _then, _must, _rest, _for2, rest, _sec2] =
(0, name, read speed, read time, read rest)
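-- | Distance covered after 'dur' seconds: the deer repeatedly flies at 'speed' km/s
-- for 'time' seconds and then rests for 'rest' seconds,
-- e.g. distanceAtTime (0, "x", 14, 10, 127) 12 == 140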
distanceAtTime :: DeerInfo -> Int -> Int
distanceAtTime deer@(_, _name, speed, time, rest) dur
= sum $ take dur $ cycle $ activePeriod ++ restingPeriod
where activePeriod = take time (repeat speed)
restingPeriod = take rest (repeat 0)
| ksallberg/adventofcode | 2015/src/Day14_2.hs | bsd-2-clause | 1,508 | 0 | 14 | 360 | 662 | 359 | 303 | 35 | 1 |
module Infinity.Util (
-- * Functions
unlessM, whenM, mkdate, mktime, mkdir, ci,
run,
-- * Types
User, Channel, Command, Nick, Cmds
) where
import Data.List
import System.IO
import System.Exit
import System.Time
import Control.Monad
import System.Process
import System.FilePath
import System.Directory
import Control.Concurrent
import Control.Exception as Ex
type Nick = String
type User = String
type Channel = String
type Command = String
type Cmds = [String]
-- | Runs an executable program, returning output and anything from stderr
run :: FilePath -> [String] -> Maybe String -> IO (String,String)
run file args input = do
(inp,out,err,pid) <- runInteractiveProcess file args Nothing Nothing
case input of
Just i -> hPutStr inp i >> hClose inp
Nothing -> return ()
-- get contents
output <- hGetContents out
errs <- hGetContents err
-- at this point we force their evaluation
-- since hGetContents is lazy.
oMVar <- newEmptyMVar
eMVar <- newEmptyMVar
forkIO (Ex.evaluate (length output) >> putMVar oMVar ())
forkIO (Ex.evaluate (length errs) >> putMVar eMVar ())
takeMVar oMVar >> takeMVar eMVar
-- wait and return
Prelude.catch (waitForProcess pid) $ const (return ExitSuccess)
return (output,errs)
-- | Makes the current date, i.e. 1-8-08
mkdate :: IO String
mkdate = do
time <- (getClockTime >>= toCalendarTime)
let date = ci "-" $ map show $ [(fromEnum $ ctMonth time)+1,ctDay time,ctYear time]
return date
-- | Makes the current time, i.e. '22:14'
mktime :: IO String
mktime = do
time <- (getClockTime >>= toCalendarTime)
let h = show $ ctHour time
m = show $ ctMin time
h' = if (length h) == 1 then "0"++h else h
m' = if (length m) == 1 then "0"++m else m
return (h'++":"++m')
-- | Creates a directory if it doesn't already
-- exist
mkdir :: FilePath -> IO ()
mkdir p = unlessM (doesDirectoryExist p) (createDirectory p)
-- | unless with its first parameter wrapped in
-- IO, i.e @unlessM b f = b >>= \x -> unless x f@
unlessM :: IO Bool -> IO () -> IO ()
unlessM b f = b >>= \x -> unless x f
-- | when with its first parameter wrapped in
-- IO, i.e. @whenM b f = b >>= \x -> when x f@
whenM :: IO Bool -> IO () -> IO ()
whenM b f = b >>= \x -> when x f
-- | Concatenates a list of Strings and
-- intersperses a character between each
-- element
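-- e.g. @ci "-" ["1","8","08"] == "1-8-08"@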
ci :: String -> [String] -> String
ci x s = concat $ intersperse x s
| thoughtpolice/infinity | src/Infinity/Util.hs | bsd-3-clause | 2,442 | 0 | 15 | 532 | 791 | 412 | 379 | 55 | 3 |
module Sword.Daemon where
import Prelude hiding (Either(..))
import qualified Data.Map as Map
import Data.Time (UTCTime, getCurrentTime, diffUTCTime)
import Network.Socket
import System.IO
import Control.Exception
import Control.Concurrent
import Control.Concurrent.Chan
import Control.Monad
import Control.Monad.Fix (fix)
import Sword.Utils
import Sword.World
import Sword.Hero
import Sword.Gui
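-- | (sender id, command or payload, argument): id 0 marks a world broadcast,
-- 5 the periodic tick; other ids identify clients.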
type Msg = (Int, String, String)
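-- | Load the level, start the game loop and the tick thread, then accept clients on port 4242.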
daemonStart :: IO ()
daemonStart = do
timeNow <- getCurrentTime
level <- readFile "src/levels/0A_level.txt"
let (world, worldMap) = loadLevel level timeNow
chan <- newChan
sock <- socket AF_INET Stream 0
setSocketOption sock ReuseAddr 1
bindSocket sock (SockAddrInet 4242 iNADDR_ANY)
-- allow a maximum of 2 outstanding connections
listen sock 2
forkIO (daemonGameLoop chan world worldMap)
newChan <- dupChan chan
forkIO (monsterAlert newChan)
daemonAcceptLoop worldMap sock chan 1
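-- | Emit a tick message every half second so the world keeps advancing between player inputs.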
monsterAlert :: Chan Msg -> IO ()
monsterAlert chan = do
threadDelay 500000
writeChan chan (5, "", "")
monsterAlert chan
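-- | Core game loop: consume messages from the channel, update the world and broadcast the new state.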
daemonGameLoop :: Chan Msg -> World -> WorldMap -> IO ()
daemonGameLoop chan world worldMap = do
(nr, input, arg) <- readChan chan
tnow <- getCurrentTime
case (nr, input, arg) of
(0, _, _) ->
daemonGameLoop chan (modifyWorld 0 None tnow worldMap world) worldMap
(5, "", "") -> do
let newxWorld = modifyWorld 0 None tnow worldMap world
writeChan chan (0, show newxWorld ++ "\n", "")
daemonGameLoop chan newxWorld worldMap
(x, "login", name) ->
daemonGameLoop chan (addHero name x tnow world) worldMap
(x, "quit", _) ->
daemonGameLoop chan (removeHero x world) worldMap
(x, input, "") -> do
let newWorld = modifyWorld x (convertInput input) tnow worldMap world
writeChan chan (0, show newWorld ++ "\n", "")
daemonGameLoop chan newWorld worldMap
otherwise -> daemonGameLoop chan world worldMap
daemonAcceptLoop :: WorldMap -> Socket -> Chan Msg -> Int -> IO ()
daemonAcceptLoop wldMap sock chan nr = do
conn <- accept sock
forkIO (runConn conn chan nr wldMap)
daemonAcceptLoop wldMap sock chan $! nr + 1
runConn :: (Socket, SockAddr) -> Chan Msg -> Int -> WorldMap -> IO ()
runConn (sock, _) chan nr worldMap = do
hdl <- socketToHandle sock ReadWriteMode
hSetBuffering hdl LineBuffering
name <- liftM init (hGetLine hdl)
hPrint hdl worldMap
hPrint hdl nr
chan' <- dupChan chan
writeChan chan' (nr, "login", name)
reader <- forkIO $ fix $ \loop -> do
(nr', line, _) <- readChan chan'
when (nr' == 0) $ hPutStrLn hdl line
hFlush hdl
loop
handle (\(SomeException _) -> return ()) $ fix $ \loop -> do
line <- hGetLine hdl
case line of
"quit" -> do
writeChan chan (nr, "quit", "")
hPutStrLn hdl "Bye!"
_ -> do
writeChan chan (nr, line, "")
loop
killThread reader
hClose hdl
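-- | Parse an ASCII level into the initial world and tile map
-- ('x' monster, '#' wall, '4' tree, '.' and '@' ground).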
loadLevel :: String -> UTCTime -> (World, WorldMap)
loadLevel str tnow = foldl consume (emptyWorld, Map.empty) elems
where lns = lines str
coords = [[(x,y) | x <- [0..]] | y <- [0..]]
elems = concat $ zipWith zip coords lns
consume (wld, wldMap) (c, elt) =
case elt of
'@' -> (wld, Map.insert c Ground wldMap)
'x' -> (wld{monster = Map.insert c emptyMonster{mlastMove = tnow} (monster wld)},
Map.insert c Ground wldMap)
'#' -> (wld, Map.insert c Wall wldMap)
'4' -> (wld, Map.insert c Tree wldMap)
'.' -> (wld, Map.insert c Ground wldMap)
otherwise -> error (show elt ++ " not recognized")
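-- | Map a key press to an action: hjkl moves, HJKL fights, q quits; anything else is None.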
convertInput :: String -> Input
convertInput [] = None
convertInput (char:xs) =
case char of
'k' -> Up
'j' -> Down
'h' -> Left
'l' -> Right
'K' -> FightUp
'J' -> FightDown
'H' -> FightLeft
'L' -> FightRight
'q' -> Quit
otherwise -> None
| kmerz/the_sword | src/Sword/Daemon.hs | bsd-3-clause | 3,924 | 0 | 18 | 958 | 1,474 | 745 | 729 | 114 | 10 |
-- Turnir -- a tool for tournament management.
--
-- Author : Ivan N. Veselov
-- Created: 20-Sep-2010
--
-- Copyright (C) 2010 Ivan N. Veselov
--
-- License: BSD3
--
-- | Pretty printing of miscellaneous data structures.
-- Uses wonderful HughesPJ pretty-printing combinator library.
--
module Pretty (
ppTable,
ppRounds
) where
import Text.PrettyPrint.HughesPJ
import Types
--
-- Helper functions
--
t :: String -> Doc
t = text
pp :: Show a => a -> Doc
pp = t . show
dash = char '-'
plus = char '+'
pipe = char '|'
vpunctuate :: Doc -> [Doc] -> [Doc]
vpunctuate p [] = []
vpunctuate p (d:ds) = go d ds
where
go d [] = [d]
go d (e:es) = (d $$ p) : go e es
--
-- Pretty-printing
--
-- | Pretty-prints the information for one round
ppRound :: Int -> [Player] -> Table -> Doc
ppRound r ps table = vcat [ t "Round" <+> int r
, nest o (ppGames games)
, nest o (ppByes byes)
, space
]
where ppGames = vcat . map ppGame
ppGame (Game gid _ p1 p2 res) =
hsep [int gid <> colon, pp p1, dash, pp p2, parens . pp $ res]
ppByes [] = empty
ppByes bs = hsep . (t "bye:" :) . map pp $ bs
games = roundGames r table
byes = roundByes r ps table
o = 2 -- outline of games
-- | Pretty-prints all the rounds, using players list
ppRounds :: [Player] -> Table -> Doc
ppRounds ps table =
vcat . map (\r -> ppRound r ps table) $ [1 .. maxRound table]
-- | Pretty-prints the crosstable of game results for the given players
ppTable :: [Player] -> Table -> Doc
ppTable ps t =
table (header : cells)
where
header = "name" : map show [1 .. n]
cells = map (\i -> playerName (ps !! i) : map (\j -> result i j t) [0 .. n - 1]) [0 .. n - 1]
n = length ps
result i j t = pp $ gameByPlayers (ps !! i) (ps !! j) t
pp Nothing = " "
pp (Just x) = show . gameResult $ x
--
-- Pretty-printing textual tables
--
-- | helper functions: enclose a doc in pluses or pipes
pluses xs = plus <> xs <> plus
pipes xs = pipe <+> xs <+> pipe
-- | return the widths of the columns; currently the width is unbounded
widths :: [[String]] -> [Int]
widths = map (+2) . foldl1 (zipWith max) . map (map length)
-- | makes separator doc
s :: [[String]] -> Doc
s = pluses . hcat . punctuate plus . map (t . flip replicate '-') . widths
-- | makes values doc (row with values, separated by "|")
v :: [Int] -- ^ list which contains width of every column
-> [String] -- ^ list with cells
-> Doc
v ws dt = pipes . hcat . punctuate (t " | ") $ zipWith fill ws dt
-- | `fills` a string to the given length by adding spaces;
-- currently spaces are added to the right, eventually alignment will be used
fill :: Int -> String -> Doc
fill n s
| length s < n = t s <> hcat (replicate (n - length s - 2) space)
| otherwise = t (take n s)
-- | pretty prints table with data (first list is header row, next ones are rows with data)
table dt = sepRow $$ (vcat . vpunctuate sepRow $ map (v ws) dt) $$ sepRow
where
sepRow = s dt
ws = widths dt
-- test data
headers = ["ID", "Name", "Price"]
dt1 = ["1", "iPad", "12.00"]
dt2 = ["2", "Cool laptop", "122.00"]
dt3 = ["3", "Yet another cool laptop", "12004.44"]
t1 = [headers, dt1, dt2, dt3]
| sphynx/turnir | src/Pretty.hs | bsd-3-clause | 3,320 | 0 | 14 | 935 | 1,130 | 609 | 521 | 64 | 2 |
{-# LANGUAGE OverloadedStrings #-}
module Main where
import Test.Framework
import Test.Framework.Providers.HUnit
import Test.HUnit
import Text.IPv6Addr
main :: IO ()
main = defaultMain $ hUnitTestToTests tests
tests :: Test.HUnit.Test
tests = TestList
[ (~?=) (maybeIPv6Addr ":") Nothing
, (~?=) (maybeIPv6Addr "::") (Just (IPv6Addr "::"))
, (~?=) (maybeIPv6Addr ":::") Nothing
, (~?=) (maybeIPv6Addr "::::") Nothing
, (~?=) (maybeIPv6Addr "::df0::") Nothing
, (~?=) (maybeIPv6Addr "0:0:0:0:0:0:0") Nothing
, (~?=) (maybeIPv6Addr "0:0:0:0:0:0:0:0") (Just (IPv6Addr "::"))
, (~?=) (maybeIPv6Addr "0:0:0:0:0:0:0:0:0") Nothing
, (~?=) (maybeIPv6Addr "1") Nothing
, (~?=) (maybeIPv6Addr "::1") (Just (IPv6Addr "::1"))
, (~?=) (maybeIPv6Addr ":::1") Nothing
, (~?=) (maybeIPv6Addr "::1:") Nothing
, (~?=) (maybeIPv6Addr "0000:0000:0000:0000:0000:0000:0000:0001") (Just (IPv6Addr "::1"))
, (~?=) (maybeIPv6Addr "0:0:0:0:0:0:0:1") (Just (IPv6Addr "::1"))
, (~?=) (maybeIPv6Addr "a") Nothing
, (~?=) (maybeIPv6Addr "ab") Nothing
, (~?=) (maybeIPv6Addr "abc") Nothing
, (~?=) (maybeIPv6Addr "abcd") Nothing
, (~?=) (maybeIPv6Addr "abcd:") Nothing
, (~?=) (maybeIPv6Addr "abcd::") (Just (IPv6Addr "abcd::"))
, (~?=) (maybeIPv6Addr "abcd:::") Nothing
, (~?=) (maybeIPv6Addr "abcde::") Nothing
, (~?=) (maybeIPv6Addr "a::") (Just (IPv6Addr "a::"))
, (~?=) (maybeIPv6Addr "0a::") (Just (IPv6Addr "a::"))
, (~?=) (maybeIPv6Addr "00a::") (Just (IPv6Addr "a::"))
, (~?=) (maybeIPv6Addr "000a::") (Just (IPv6Addr "a::"))
, (~?=) (maybeIPv6Addr "0000a::") Nothing
, (~?=) (maybeIPv6Addr "adb6") Nothing
, (~?=) (maybeIPv6Addr "adb6ce67") Nothing
, (~?=) (maybeIPv6Addr "adb6:ce67") Nothing
, (~?=) (maybeIPv6Addr "adb6::ce67") (Just (IPv6Addr "adb6::ce67"))
, (~?=) (maybeIPv6Addr "::1.2.3.4") (Just (IPv6Addr "::1.2.3.4"))
, (~?=) (maybeIPv6Addr "::ffff:1.2.3.4") (Just (IPv6Addr "::ffff:1.2.3.4"))
, (~?=) (maybeIPv6Addr "::ffff:0:1.2.3.4") (Just (IPv6Addr "::ffff:0:1.2.3.4"))
, (~?=) (maybeIPv6Addr "64:ff9b::1.2.3.4") (Just (IPv6Addr "64:ff9b::1.2.3.4"))
, (~?=) (maybeIPv6Addr "fe80::5efe:1.2.3.4") (Just (IPv6Addr "fe80::5efe:1.2.3.4"))
, (~?=) (maybeIPv6Addr "FE80:CD00:0000:0CDE:1257:0000:211E:729C") (Just (IPv6Addr "fe80:cd00:0:cde:1257:0:211e:729c"))
, (~?=) (maybeIPv6Addr "FE80:CD00:0000:0CDE:1257:0000:211E:729X") Nothing
, (~?=) (maybeIPv6Addr "FE80:CD00:0000:0CDE:1257:0000:211E:729CX") Nothing
, (~?=) (maybeIPv6Addr "FE80:CD00:0000:0CDE:0000:211E:729C") Nothing
, (~?=) (maybeIPv6Addr "FE80:CD00:0000:0CDE:FFFF:1257:0000:211E:729C") Nothing
, (~?=) (maybeIPv6Addr "1111:2222:3333:4444:5555:6666:7777:8888") (Just (IPv6Addr "1111:2222:3333:4444:5555:6666:7777:8888"))
, (~?=) (maybeIPv6Addr ":1111:2222:3333:4444:5555:6666:7777:8888") Nothing
, (~?=) (maybeIPv6Addr "1111:2222:3333:4444:5555:6666:7777:8888:") Nothing
, (~?=) (maybeIPv6Addr "1111::3333:4444:5555:6666::8888") Nothing
, (~?=) (maybeIPv6Addr "AAAA:BBBB:CCCC:DDDD:EEEE:FFFF:0000:0000") (Just (IPv6Addr "aaaa:bbbb:cccc:dddd:eeee:ffff::"))
, (~?=) (maybeIPv6Addr "2001:db8:aaaa:bbbb:cccc:dddd:eeee:0001") (Just (IPv6Addr "2001:db8:aaaa:bbbb:cccc:dddd:eeee:1"))
, (~?=) (maybeIPv6Addr "2001:db8:aaaa:bbbb:cccc:dddd:eeee:001") (Just (IPv6Addr "2001:db8:aaaa:bbbb:cccc:dddd:eeee:1"))
, (~?=) (maybeIPv6Addr "2001:db8:aaaa:bbbb:cccc:dddd:eeee:01") (Just (IPv6Addr "2001:db8:aaaa:bbbb:cccc:dddd:eeee:1"))
, (~?=) (maybeIPv6Addr "2001:db8:aaaa:bbbb:cccc:dddd:eeee:1") (Just (IPv6Addr "2001:db8:aaaa:bbbb:cccc:dddd:eeee:1"))
, (~?=) (maybeIPv6Addr "2001:db8:aaaa:bbbb:cccc:dddd::1") (Just (IPv6Addr "2001:db8:aaaa:bbbb:cccc:dddd:0:1"))
, (~?=) (maybeIPv6Addr "2001:db8:aaaa:bbbb:cccc:dddd:0:1") (Just (IPv6Addr "2001:db8:aaaa:bbbb:cccc:dddd:0:1"))
, (~?=) (maybeIPv6Addr "2001:db8:0:0:1:0:0:1") (Just (IPv6Addr "2001:db8::1:0:0:1"))
, (~?=) (maybeIPv6Addr "2001:db8:0:1:0:0:0:1") (Just (IPv6Addr "2001:db8:0:1::1"))
, (~?=) (maybeIPv6Addr "2001:DB8:0:0:0::1") (Just (IPv6Addr "2001:db8::1"))
, (~?=) (maybeIPv6Addr "2001:0DB8:0:0::1") (Just (IPv6Addr "2001:db8::1"))
, (~?=) (maybeIPv6Addr "2001:0dB8:0::1") (Just (IPv6Addr "2001:db8::1"))
, (~?=) (maybeIPv6Addr "2001:db8::1") (Just (IPv6Addr "2001:db8::1"))
, (~?=) (maybeIPv6Addr "2001:db8:0:1::1") (Just (IPv6Addr "2001:db8:0:1::1"))
, (~?=) (maybeIPv6Addr "2001:0db8:0:1:0:0:0:1") (Just (IPv6Addr "2001:db8:0:1::1"))
, (~?=) (maybeIPv6Addr "2001:DB8::1:1:1:1:1") (Just (IPv6Addr "2001:db8:0:1:1:1:1:1"))
, (~?=) (maybeIPv6Addr "2001:DB8::1:1:0:1:1") (Just (IPv6Addr "2001:db8:0:1:1:0:1:1"))
, (~?=) (maybeIPv6Addr "fe80") Nothing
, (~?=) (maybeIPv6Addr "fe80::") (Just (IPv6Addr "fe80::"))
, (~?=) (maybeIPv6Addr "0:0:0:0:0:ffff:192.0.2.1") (Just (IPv6Addr "::ffff:192.0.2.1"))
, (~?=) (maybeIPv6Addr "::192.0.2.1") (Just (IPv6Addr "::192.0.2.1"))
, (~?=) (maybeIPv6Addr "192.0.2.1::") Nothing
, (~?=) (maybeIPv6Addr "::ffff:192.0.2.1") (Just (IPv6Addr "::ffff:192.0.2.1"))
, (~?=) (maybeIPv6Addr "fe80:0:0:0:0:0:0:0") (Just (IPv6Addr "fe80::"))
, (~?=) (maybeIPv6Addr "fe80:0000:0000:0000:0000:0000:0000:0000") (Just (IPv6Addr "fe80::"))
, (~?=) (maybeIPv6Addr "2001:db8:Bad:0:0::0:1") (Just (IPv6Addr "2001:db8:bad::1"))
, (~?=) (maybeIPv6Addr "2001:0:0:1:b:0:0:A") (Just (IPv6Addr "2001::1:b:0:0:a"))
, (~?=) (maybeIPv6Addr "2001:0:0:1:000B:0:0:0") (Just (IPv6Addr "2001:0:0:1:b::"))
, (~?=) (maybeIPv6Addr "2001:0DB8:85A3:0000:0000:8A2E:0370:7334") (Just (IPv6Addr "2001:db8:85a3::8a2e:370:7334"))
, (~?=) (maybePureIPv6Addr "0:0:0:0:0:ffff:192.0.2.1") (Just (IPv6Addr "::ffff:c000:201"))
, (~?=) (maybePureIPv6Addr "::ffff:192.0.2.1") (Just (IPv6Addr "::ffff:c000:201"))
, (~?=) (maybeFullIPv6Addr "::") (Just (IPv6Addr "0000:0000:0000:0000:0000:0000:0000:0000"))
, (~?=) (maybeFullIPv6Addr "0:0:0:0:0:0:0:0") (Just (IPv6Addr "0000:0000:0000:0000:0000:0000:0000:0000"))
, (~?=) (maybeFullIPv6Addr "::1") (Just (IPv6Addr "0000:0000:0000:0000:0000:0000:0000:0001"))
, (~?=) (maybeFullIPv6Addr "2001:db8::1") (Just (IPv6Addr "2001:0db8:0000:0000:0000:0000:0000:0001"))
, (~?=) (maybeFullIPv6Addr "a:bb:ccc:dddd:1cDc::1") (Just (IPv6Addr "000a:00bb:0ccc:dddd:1cdc:0000:0000:0001"))
, (~?=) (maybeFullIPv6Addr "FE80::0202:B3FF:FE1E:8329") (Just (IPv6Addr "fe80:0000:0000:0000:0202:b3ff:fe1e:8329"))
, (~?=) (maybeFullIPv6Addr "aDb6::CE67") (Just (IPv6Addr "adb6:0000:0000:0000:0000:0000:0000:ce67"))
, (~?=) (toIP6ARPA (IPv6Addr "::1")) "1.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.IP6.ARPA."
, (~?=) (toIP6ARPA (IPv6Addr "2b02:0b08:0:7::0001")) "1.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.7.0.0.0.0.0.0.0.8.0.b.0.2.0.b.2.IP6.ARPA."
, (~?=) (toIP6ARPA (IPv6Addr "2b02:b08:0:7::1")) "1.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.7.0.0.0.0.0.0.0.8.0.b.0.2.0.b.2.IP6.ARPA."
, (~?=) (toIP6ARPA (IPv6Addr "fdda:5cc1:23:4::1f")) "f.1.0.0.0.0.0.0.0.0.0.0.0.0.0.0.4.0.0.0.3.2.0.0.1.c.c.5.a.d.d.f.IP6.ARPA."
, (~?=) (toIP6ARPA (IPv6Addr "2001:db8::")) "0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.8.b.d.0.1.0.0.2.IP6.ARPA."
, (~?=) (toIP6ARPA (IPv6Addr "4321:0:1:2:3:4:567:89ab")) "b.a.9.8.7.6.5.0.4.0.0.0.3.0.0.0.2.0.0.0.1.0.0.0.0.0.0.0.1.2.3.4.IP6.ARPA."
, (~?=) (toUNC (IPv6Addr "2001:0DB8:002a:1005:230:48ff:FE73:989d")) "2001-db8-2a-1005-230-48ff-fe73-989d.ipv6-literal.net"
, (~?=) (toUNC (IPv6Addr "2001:0db8:85a3:0000:0000:8a2e:0370:7334")) "2001-db8-85a3--8a2e-370-7334.ipv6-literal.net"
, (~?=) (macAddrToIPv6AddrTokens "fa:1d:58:cc:95:16") (Just [SixteenBit "fa1d", Colon, SixteenBit "58cc", Colon, SixteenBit "9516"])
]
| MichelBoucey/IPv6Addr | tests/Main.hs | bsd-3-clause | 7,625 | 0 | 11 | 808 | 2,263 | 1,230 | 1,033 | 102 | 1 |
{-# LANGUAGE TupleSections #-}
{-# LANGUAGE ViewPatterns #-}
{-# LANGUAGE RecordWildCards #-}
module Refact.Fixity (applyFixities) where
import SrcLoc
import Refact.Utils
import BasicTypes (Fixity(..), defaultFixity, compareFixity, negateFixity, FixityDirection(..))
import HsExpr
import RdrName
import OccName
import PlaceHolder
import Data.Generics hiding (Fixity)
import Data.Maybe
import Language.Haskell.GHC.ExactPrint.Types
import Control.Monad.State
import qualified Data.Map as Map
import Data.Tuple
-- | Rearrange infix expressions to account for fixity.
-- The set of fixities is wired in and includes all fixities in base.
applyFixities :: Anns -> Module -> (Anns, Module)
applyFixities as m = swap $ runState (everywhereM (mkM expFix) m) as
expFix :: LHsExpr RdrName -> M (LHsExpr RdrName)
expFix (L loc (OpApp l op _ r)) = do
newExpr <- mkOpAppRn baseFixities l op (findFixity baseFixities op) r
return (L loc newExpr)
expFix e = return e
getIdent :: Expr -> String
getIdent (unLoc -> HsVar n) = occNameString . rdrNameOcc $ n
getIdent _ = error "Must be HsVar"
moveDelta :: AnnKey -> AnnKey -> M ()
moveDelta old new = do
a@Ann{..} <- gets (fromMaybe annNone . Map.lookup old)
modify (Map.insert new (annNone { annEntryDelta = annEntryDelta, annPriorComments = annPriorComments }))
modify (Map.insert old (a { annEntryDelta = DP (0,0), annPriorComments = []}))
---------------------------
-- Modified from GHC Renamer
mkOpAppRn ::
[(String, Fixity)]
  -> LHsExpr RdrName -- Left operand; already rearranged
-> LHsExpr RdrName -> Fixity -- Operator and fixity
-> LHsExpr RdrName -- Right operand (not an OpApp, but might
-- be a NegApp)
-> M (HsExpr RdrName)
-- (e11 `op1` e12) `op2` e2
mkOpAppRn fs e1@(L _ (OpApp e11 op1 p e12)) op2 fix2 e2
| nofix_error
= return $ OpApp e1 op2 p e2
| associate_right = do
new_e <- L loc' <$> mkOpAppRn fs e12 op2 fix2 e2
moveDelta (mkAnnKey e12) (mkAnnKey new_e)
return $ OpApp e11 op1 p new_e
where
fix1 = findFixity fs op1
loc'= combineLocs e12 e2
(nofix_error, associate_right) = compareFixity fix1 fix2
---------------------------
-- (- neg_arg) `op` e2
mkOpAppRn fs e1@(L _ (NegApp neg_arg neg_name)) op2 fix2 e2
| nofix_error
= return $ OpApp e1 op2 PlaceHolder e2
| associate_right
= do
new_e <- L loc' <$> mkOpAppRn fs neg_arg op2 fix2 e2
moveDelta (mkAnnKey neg_arg) (mkAnnKey new_e)
return (NegApp new_e neg_name)
where
loc' = combineLocs neg_arg e2
(nofix_error, associate_right) = compareFixity negateFixity fix2
---------------------------
-- e1 `op` - neg_arg
mkOpAppRn _ e1 op1 fix1 e2@(L _ (NegApp _ _)) -- NegApp can occur on the right
| not associate_right -- We *want* right association
= return $ OpApp e1 op1 PlaceHolder e2
where
(_, associate_right) = compareFixity fix1 negateFixity
---------------------------
-- Default case
mkOpAppRn _ e1 op _ e2 -- Default case, no rearrangement
= return $ OpApp e1 op PlaceHolder e2
findFixity :: [(String, Fixity)] -> Expr -> Fixity
findFixity fs r = askFix fs (getIdent r)
askFix :: [(String, Fixity)] -> String -> Fixity
askFix xs = \k -> lookupWithDefault defaultFixity k xs
where
lookupWithDefault def k mp1 = fromMaybe def $ lookup k mp1
-- | All fixities defined in the Prelude.
preludeFixities :: [(String, Fixity)]
preludeFixities = concat
[infixr_ 9 ["."]
,infixl_ 9 ["!!"]
,infixr_ 8 ["^","^^","**"]
,infixl_ 7 ["*","/","quot","rem","div","mod",":%","%"]
,infixl_ 6 ["+","-"]
,infixr_ 5 [":","++"]
,infix_ 4 ["==","/=","<","<=",">=",">","elem","notElem"]
,infixr_ 3 ["&&"]
,infixr_ 2 ["||"]
,infixl_ 1 [">>",">>="]
,infixr_ 1 ["=<<"]
,infixr_ 0 ["$","$!","seq"]
]
-- | All fixities defined in the base package.
--
-- Note that the @+++@ operator appears in both Control.Arrow and
-- Text.ParserCombinators.ReadP. The precedence listed here for @+++@
-- is that of Control.Arrow.
baseFixities :: [(String, Fixity)]
baseFixities = preludeFixities ++ concat
[infixl_ 9 ["!","//","!:"]
,infixl_ 8 ["shift","rotate","shiftL","shiftR","rotateL","rotateR"]
,infixl_ 7 [".&."]
,infixl_ 6 ["xor"]
,infix_ 6 [":+"]
,infixl_ 5 [".|."]
,infixr_ 5 ["+:+","<++","<+>"] -- fixity conflict for +++ between ReadP and Arrow
,infix_ 5 ["\\\\"]
,infixl_ 4 ["<$>","<$","<*>","<*","*>","<**>"]
,infix_ 4 ["elemP","notElemP"]
,infixl_ 3 ["<|>"]
,infixr_ 3 ["&&&","***"]
,infixr_ 2 ["+++","|||"]
,infixr_ 1 ["<=<",">=>",">>>","<<<","^<<","<<^","^>>",">>^"]
,infixl_ 0 ["on"]
,infixr_ 0 ["par","pseq"]
]
infixr_, infixl_, infix_ :: Int -> [String] -> [(String,Fixity)]
infixr_ = fixity InfixR
infixl_ = fixity InfixL
infix_ = fixity InfixN
-- Internal: helper function for the definitions above.
fixity :: FixityDirection -> Int -> [String] -> [(String, Fixity)]
fixity a p = map (,Fixity p a)
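-- A minimal sketch (not part of the original module) of querying the wired-in
-- fixity table with the helpers above; the operator names are only examples.
_exampleFixities :: [Fixity]
_exampleFixities =
  [ askFix baseFixities "+"         -- infixl 6, from preludeFixities
  , askFix baseFixities ".&."       -- infixl 7, from baseFixities
  , askFix baseFixities "madeUpOp"  -- not listed, so it falls back to defaultFixity
  ]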
| bitemyapp/apply-refact | src/Refact/Fixity.hs | bsd-3-clause | 5,164 | 0 | 14 | 1,163 | 1,652 | 898 | 754 | 107 | 1 |
{-
Copyright (c) 2015, Joshua Brot
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above
copyright notice, this list of conditions and the following
disclaimer in the documentation and/or other materials provided
with the distribution.
* Neither the name of Joshua Brot nor the names of other
contributors may be used to endorse or promote products derived
from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-}
import Distribution.Simple
main = defaultMain
| Pamelloes/AAGenAlg | Setup.hs | bsd-3-clause | 1,572 | 0 | 4 | 284 | 12 | 7 | 5 | 2 | 1 |
module IptAdmin.AddChainPage where
import Control.Monad.Error
import Happstack.Server.SimpleHTTP
import IptAdmin.EditChainForm.Parse
import IptAdmin.EditChainForm.Render
import IptAdmin.Render
import IptAdmin.System
import IptAdmin.Types
import IptAdmin.Utils
import Iptables
import Iptables.Types
import Text.ParserCombinators.Parsec.Prim hiding (State (..))
import Text.Blaze.Renderer.Pretty (renderHtml)
pageHandlers :: IptAdmin Response
pageHandlers = msum [ methodSP GET pageHandlerGet
, methodSP POST pageHandlerPost
]
pageHandlerGet :: IptAdmin Response
pageHandlerGet = do
tableName <- getInputNonEmptyString "table"
return $ buildResponse $ renderHtml $ do
editChainForm (tableName, "") "" Nothing
pageHandlerPost :: IptAdmin Response
pageHandlerPost = do
tableName <- getInputNonEmptyString "table"
newChainName <- getInputString "newChainName"
let newChainNameE = parse parseChainName "chain name" newChainName
case newChainNameE of
Left e -> return $ buildResponse $ renderHtml $ do
editChainForm (tableName, "") newChainName $ Just $ "Parameter error: " ++ show e
Right newChainName' -> do
iptables <- getIptables
table <- case tableName of
"filter" -> return $ tFilter iptables
"nat" -> return $ tNat iptables
"mangle" -> return $ tMangle iptables
"raw" -> return $ tRaw iptables
a -> throwError $ "Invalid table parameter: " ++ a
let checkChainMay = getChainByName newChainName' table
case checkChainMay of
Just _ -> return $ buildResponse $ renderHtml $ do
editChainForm (tableName, "") newChainName' $ Just "A chain with the same name already exists"
Nothing -> do
submit <- getInputString "submit"
case submit of
"Check" -> return $ buildResponse $ renderHtml $ do
editChainForm (tableName, "") newChainName' $ Just "The name is valid"
"Submit" -> do
tryChange $ addChain tableName newChainName'
-- redir $ "/show?table=" ++ tableName ++ bookmarkForJump newChainName' Nothing
return $ buildResponse $ "ok:" ++ newChainName'
a -> throwError $ "Invalid value for 'submit' parameter: " ++ a
| etarasov/iptadmin | src/IptAdmin/AddChainPage.hs | bsd-3-clause | 2,515 | 0 | 25 | 773 | 545 | 272 | 273 | 50 | 9 |
{-# LANGUAGE RecordWildCards #-}
module Main (main) where
import Control.Monad
import Data.Binary.Get
import qualified Data.ByteString.Lazy as BL
import Data.List
import Text.Printf
import Codec.Tracker.S3M
import Codec.Tracker.S3M.Header
import Codec.Tracker.S3M.Instrument
import Codec.Tracker.S3M.Instrument.Adlib
import Codec.Tracker.S3M.Instrument.PCM
import Codec.Tracker.S3M.Pattern
pprintInstrument :: Instrument -> IO ()
pprintInstrument Instrument{..} = do
BL.putStrLn $ BL.pack fileName
forM_ pcmSample pprintPCMSample
forM_ adlibSample pprintAdlibSample
pprintAdlibSample :: AdlibSample -> IO ()
pprintAdlibSample AdlibSample{..} = do
putStr "Adlib: "
BL.putStrLn $ BL.pack title
pprintPCMSample :: PCMSample -> IO ()
pprintPCMSample PCMSample{..} = do
putStrLn "PCM: "
BL.putStrLn $ BL.pack title
pprintHeader :: Header -> IO ()
pprintHeader Header{..} = do
putStr "Song name.......: "
BL.putStrLn $ BL.pack songName
putStrLn $ "Orders..........: " ++ show songLength
putStrLn $ "Instruments.....: " ++ show numInstruments
putStrLn $ "Patterns........: " ++ show numPatterns
putStrLn $ "Version.........: " ++ show trackerVersion
putStrLn $ "Global volume...: " ++ show globalVolume
putStrLn $ "Initial speed...: " ++ show initialSpeed
putStrLn $ "Initial tempo...: " ++ show initialTempo
putStrLn $ "Mix volume......: " ++ show mixVolume
putStrLn $ "Channel settings: " ++ show channelSettings
pprintPattern :: Pattern -> IO ()
pprintPattern Pattern{..} = do
putStrLn $ "Packed length: " ++ show (packedSize Pattern{..})
    mapM_ (putStrLn . concat . intersperse " | " . map show) rows
main :: IO ()
main = do
file <- BL.getContents
let s3m = runGet getModule file
putStrLn "Header:"
putStrLn "======="
pprintHeader $ header s3m
putStrLn "<>"
print (orders s3m)
putStrLn "<>"
putStrLn "Instruments:"
putStrLn "============"
mapM_ pprintInstrument (instruments s3m)
putStrLn "<>"
putStrLn "Patterns:"
putStrLn "========="
mapM_ pprintPattern (patterns s3m)
putStrLn "<>"
| riottracker/modfile | examples/readS3M.hs | bsd-3-clause | 2,265 | 0 | 15 | 517 | 671 | 319 | 352 | 61 | 1 |
{-# LANGUAGE ScopedTypeVariables #-}
import Data.Typeable
import Control.Exception
import GHC.IO.Exception
import System.IO
import Network
main :: IO ()
main = test `catch` ioHandle
test :: IO ()
test = do
h <- connectTo "localhost" $ PortNumber 54492
hGetLine h >>= putStrLn
(hGetLine h >>= putStrLn) `catch` ioHandle
hGetLine h >>= putStrLn
ioHandle :: IOException -> IO ()
ioHandle e = do
print $ ioe_handle e
print $ ioe_type e
print $ ioe_location e
print $ ioe_description e
print $ ioe_errno e
print $ ioe_filename e
| YoshikuniJujo/xmpipe | test/exClient.hs | bsd-3-clause | 539 | 0 | 10 | 100 | 200 | 96 | 104 | 22 | 1 |
module Graphics.Pastel.WX
( module Graphics.Pastel.WX.Draw
, module Graphics.Pastel.WX.Test
) where
import Graphics.Pastel.WX.Draw
import Graphics.Pastel.WX.Test
| willdonnelly/pastel | Graphics/Pastel/WX.hs | bsd-3-clause | 175 | 0 | 5 | 27 | 39 | 28 | 11 | 5 | 0 |
module GameStage
( GameStage
, gameStage
) where
import Control.Applicative
import Control.Monad
import Data.Set
import qualified Data.Map as M
import qualified Data.List as L
import Data.Unique
import qualified Class.GameScene as GS
import Class.Sprite
import KeyBind
import GlobalValue
import qualified Sound as SO
import MissScene (missScene)
import ClearScene (clearScene)
import GameStage.GameObject
import qualified GameStage.Player as P
import qualified GameStage.Bullet as B
import qualified GameStage.Enemy as E
import qualified GameStage.EnemyManager as EM
import qualified GameStage.BGManager as BG
import GameStage.Collider
data GameStage = GameStage
{ player :: P.Player
, playerBullets :: B.PlayerBullets
, enemies :: M.Map Unique E.Enemy
, enemyList :: EM.EnemyList
, enemyBullets :: M.Map Unique B.Bullet
, bgStruct :: BG.BGStruct
, time :: Integer
}
data GameOver = Continue | Miss | Clear
instance GS.GameScene GameStage where
update gv@(GV {keyset = key}) scene = do
case member QUIT key of
True -> return GS.EndScene
False -> do
newScene <- ( update >=> shoot >=> spawnEnemy >=> hitEnemy >=> hitPlayer >=> shootEnemy ) scene
case gameOver newScene of
Continue -> return $ GS.Replace newScene
Miss -> GS.dispose newScene >> GS.Replace <$> missScene
Clear -> GS.dispose newScene >> GS.Replace <$> clearScene 0
where
gameOver :: GameStage -> GameOver
gameOver GameStage { player = p
, enemies = es
, enemyList = el
}
| P.gameOver p = Miss
| L.null el && M.null es = Clear
| otherwise = Continue
hitPlayer stage@GameStage { player = p
, enemies = es
, enemyBullets = ebs
}
= do let ds = (Prelude.map gameObject . M.elems) es ++
(Prelude.map gameObject . M.elems) ebs
hits = or $ Prelude.map (within (gameObject p)) ds
return $ stage { player = P.hit hits p
}
hitEnemy stage@(GameStage { playerBullets = pbs
, enemies = es
})
= do let list = collide (B.container pbs) es
kpbs = Prelude.map fst list
kes = Prelude.map snd list
return stage { playerBullets = pbs { B.container = Prelude.foldl
(flip M.delete)
(B.container pbs)
kpbs
}
, enemies = Prelude.foldl
(flip M.delete)
es
kes
}
spawnEnemy stage@(GameStage { enemies = es
, enemyList = el
, time = t
})
= do let (newEs, newEl) = EM.spawnEnemy t el
nes <- mapM (\x -> (,) <$> newUnique <*> pure x) newEs
return $ stage { enemies = Prelude.foldl
((flip . uncurry) M.insert)
es
nes
, enemyList = newEl
}
update :: GameStage -> IO GameStage
update (GameStage p pbs es el ebs bgs time)
= return $ GameStage (P.update key p)
(B.updatePB pbs)
(M.mapMaybe E.update es)
el
(M.mapMaybe B.update ebs)
(BG.update bgs)
(time + 1)
shoot :: GameStage -> IO GameStage
shoot stage@(GameStage { player = p
, playerBullets = pbs
})
= do let ppos = (pos.gameObject) p
(bt,newP) = P.shoot (member A key) p
newPbs <- case bt of
Nothing -> return pbs
Just t -> do
SO.writeChan (sound gv) (SO.Shoot)
B.spawnPB t ppos pbs
return $ stage { player = newP
, playerBullets = newPbs
}
shootEnemy :: GameStage -> IO GameStage
shootEnemy stage@GameStage { enemies = es
, enemyBullets = ebs
}
= do
let newB = concatMap E.getBullets (M.elems es)
nebs <- mapM (\x -> (,) <$> newUnique <*> pure x) newB
return $ stage { enemyBullets = Prelude.foldl
((flip . uncurry) M.insert)
ebs
nebs
}
render (GameStage { player = p
, playerBullets = pbs
, enemies = es
, enemyBullets = ebs
, bgStruct = bgs
}) = do
BG.render bgs
P.render p
render pbs
mapM_ (render.gameObject) $ M.elems es
mapM_ (render.gameObject) $ M.elems ebs
BG.renderRim bgs
return ()
dispose GameStage { bgStruct = bgs
}
= do BG.dispose bgs
gameStage :: IO GameStage
gameStage = GameStage
<$> P.player
<*> B.playerBullets
<*> pure M.empty
<*> pure EM.enemies
<*> pure M.empty
<*> BG.load
<*> pure 0
| c000/PaperPuppet | src/GameStage.hs | bsd-3-clause | 5,848 | 0 | 20 | 2,775 | 1,521 | 798 | 723 | 134 | 1 |
{-
(c) The University of Glasgow 2006
(c) The GRASP/AQUA Project, Glasgow University, 1992-1998
Utility functions on @Core@ syntax
-}
{-# LANGUAGE CPP #-}
-- | Commonly useful utilities for manipulating the Core language
module CoreUtils (
-- * Constructing expressions
mkCast,
mkTick, mkTicks, mkTickNoHNF, tickHNFArgs,
bindNonRec, needsCaseBinding,
mkAltExpr,
-- * Taking expressions apart
findDefault, addDefault, findAlt, isDefaultAlt,
mergeAlts, trimConArgs,
filterAlts, combineIdenticalAlts, refineDefaultAlt,
-- * Properties of expressions
exprType, coreAltType, coreAltsType,
exprIsDupable, exprIsTrivial, getIdFromTrivialExpr, exprIsBottom,
exprIsCheap, exprIsExpandable, exprIsCheap', CheapAppFun,
exprIsHNF, exprOkForSpeculation, exprOkForSideEffects, exprIsWorkFree,
exprIsBig, exprIsConLike,
rhsIsStatic, isCheapApp, isExpandableApp,
-- * Equality
cheapEqExpr, cheapEqExpr', eqExpr,
diffExpr, diffBinds,
-- * Eta reduction
tryEtaReduce,
-- * Manipulating data constructors and types
exprToType, exprToCoercion_maybe,
applyTypeToArgs, applyTypeToArg,
dataConRepInstPat, dataConRepFSInstPat,
isEmptyTy,
-- * Working with ticks
stripTicksTop, stripTicksTopE, stripTicksTopT,
stripTicksE, stripTicksT
) where
#include "HsVersions.h"
import CoreSyn
import PprCore
import CoreFVs( exprFreeVars )
import Var
import SrcLoc
import VarEnv
import VarSet
import Name
import Literal
import DataCon
import PrimOp
import Id
import IdInfo
import Type
import Coercion
import TyCon
import Unique
import Outputable
import TysPrim
import DynFlags
import FastString
import Maybes
import ListSetOps ( minusList )
import Platform
import Util
import Pair
import Data.Function ( on )
import Data.List
import Data.Ord ( comparing )
import OrdList
{-
************************************************************************
* *
\subsection{Find the type of a Core atom/expression}
* *
************************************************************************
-}
exprType :: CoreExpr -> Type
-- ^ Recover the type of a well-typed Core expression. Fails when
-- applied to the actual 'CoreSyn.Type' expression as it cannot
-- really be said to have a type
exprType (Var var) = idType var
exprType (Lit lit) = literalType lit
exprType (Coercion co) = coercionType co
exprType (Let bind body)
| NonRec tv rhs <- bind -- See Note [Type bindings]
, Type ty <- rhs = substTyWithUnchecked [tv] [ty] (exprType body)
| otherwise = exprType body
exprType (Case _ _ ty _) = ty
exprType (Cast _ co) = pSnd (coercionKind co)
exprType (Tick _ e) = exprType e
exprType (Lam binder expr) = mkPiType binder (exprType expr)
exprType e@(App _ _)
= case collectArgs e of
(fun, args) -> applyTypeToArgs e (exprType fun) args
exprType other = pprTrace "exprType" (pprCoreExpr other) alphaTy
coreAltType :: CoreAlt -> Type
-- ^ Returns the type of the alternatives right hand side
coreAltType (_,bs,rhs)
| any bad_binder bs = expandTypeSynonyms ty
| otherwise = ty -- Note [Existential variables and silly type synonyms]
where
ty = exprType rhs
free_tvs = tyCoVarsOfType ty
bad_binder b = b `elemVarSet` free_tvs
coreAltsType :: [CoreAlt] -> Type
-- ^ Returns the type of the first alternative, which should be the same as for all alternatives
coreAltsType (alt:_) = coreAltType alt
coreAltsType []      = panic "coreAltsType"
{-
Note [Type bindings]
~~~~~~~~~~~~~~~~~~~~
Core does allow type bindings, although such bindings are
not much used, except in the output of the desugarer.
Example:
let a = Int in (\x:a. x)
Given this, exprType must be careful to substitute 'a' in the
result type (Trac #8522).
Note [Existential variables and silly type synonyms]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider
data T = forall a. T (Funny a)
type Funny a = Bool
f :: T -> Bool
f (T x) = x
Now, the type of 'x' is (Funny a), where 'a' is existentially quantified.
That means that 'exprType' and 'coreAltsType' may give a result that *appears*
to mention an out-of-scope type variable. See Trac #3409 for a more real-world
example.
Various possibilities suggest themselves:
- Ignore the problem, and make Lint not complain about such variables
- Expand all type synonyms (or at least all those that discard arguments)
This is tricky, because at least for top-level things we want to
retain the type the user originally specified.
- Expand synonyms on the fly, when the problem arises. That is what
we are doing here. It's not too expensive, I think.
Note that there might be existentially quantified coercion variables, too.
-}
-- Not defined with applyTypeToArg because you can't print from CoreSyn.
applyTypeToArgs :: CoreExpr -> Type -> [CoreExpr] -> Type
-- ^ A more efficient version of 'applyTypeToArg' when we have several arguments.
-- The first argument is just for debugging, and gives some context
applyTypeToArgs e op_ty args
= go op_ty args
where
go op_ty [] = op_ty
go op_ty (Type ty : args) = go_ty_args op_ty [ty] args
go op_ty (Coercion co : args) = go_ty_args op_ty [mkCoercionTy co] args
go op_ty (_ : args) | Just (_, res_ty) <- splitFunTy_maybe op_ty
= go res_ty args
go _ _ = pprPanic "applyTypeToArgs" panic_msg
-- go_ty_args: accumulate type arguments so we can instantiate all at once
go_ty_args op_ty rev_tys (Type ty : args)
= go_ty_args op_ty (ty:rev_tys) args
go_ty_args op_ty rev_tys (Coercion co : args)
= go_ty_args op_ty (mkCoercionTy co : rev_tys) args
go_ty_args op_ty rev_tys args
= go (applyTysD panic_msg_w_hdr op_ty (reverse rev_tys)) args
panic_msg_w_hdr = hang (text "applyTypeToArgs") 2 panic_msg
panic_msg = vcat [ text "Expression:" <+> pprCoreExpr e
, text "Type:" <+> ppr op_ty
, text "Args:" <+> ppr args ]
{-
************************************************************************
* *
\subsection{Attaching notes}
* *
************************************************************************
-}
-- | Wrap the given expression in the coercion safely, dropping
-- identity coercions and coalescing nested coercions
mkCast :: CoreExpr -> Coercion -> CoreExpr
mkCast e co
| ASSERT2( coercionRole co == Representational
, text "coercion" <+> ppr co <+> ptext (sLit "passed to mkCast")
<+> ppr e <+> text "has wrong role" <+> ppr (coercionRole co) )
isReflCo co
= e
mkCast (Coercion e_co) co
| isCoercionType (pSnd (coercionKind co))
-- The guard here checks that g has a (~#) on both sides,
-- otherwise decomposeCo fails. Can in principle happen
-- with unsafeCoerce
= Coercion (mkCoCast e_co co)
mkCast (Cast expr co2) co
= WARN(let { Pair from_ty _to_ty = coercionKind co;
Pair _from_ty2 to_ty2 = coercionKind co2} in
not (from_ty `eqType` to_ty2),
vcat ([ text "expr:" <+> ppr expr
, text "co2:" <+> ppr co2
, text "co:" <+> ppr co ]) )
mkCast expr (mkTransCo co2 co)
mkCast (Tick t expr) co
= Tick t (mkCast expr co)
mkCast expr co
= let Pair from_ty _to_ty = coercionKind co in
WARN( not (from_ty `eqType` exprType expr),
text "Trying to coerce" <+> text "(" <> ppr expr
$$ text "::" <+> ppr (exprType expr) <> text ")"
$$ ppr co $$ ppr (coercionType co) )
(Cast expr co)
-- | Wraps the given expression in the source annotation, dropping the
-- annotation if possible.
mkTick :: Tickish Id -> CoreExpr -> CoreExpr
mkTick t orig_expr = mkTick' id id orig_expr
where
-- Some ticks (cost-centres) can be split in two, with the
-- non-counting part having laxer placement properties.
canSplit = tickishCanSplit t && tickishPlace (mkNoCount t) /= tickishPlace t
mkTick' :: (CoreExpr -> CoreExpr) -- ^ apply after adding tick (float through)
-> (CoreExpr -> CoreExpr) -- ^ apply before adding tick (float with)
-> CoreExpr -- ^ current expression
-> CoreExpr
mkTick' top rest expr = case expr of
-- Cost centre ticks should never be reordered relative to each
-- other. Therefore we can stop whenever two collide.
Tick t2 e
| ProfNote{} <- t2, ProfNote{} <- t -> top $ Tick t $ rest expr
-- Otherwise we assume that ticks of different placements float
-- through each other.
| tickishPlace t2 /= tickishPlace t -> mkTick' (top . Tick t2) rest e
-- For annotations this is where we make sure to not introduce
-- redundant ticks.
| tickishContains t t2 -> mkTick' top rest e
| tickishContains t2 t -> orig_expr
| otherwise -> mkTick' top (rest . Tick t2) e
-- Ticks don't care about types, so we just float all ticks
-- through them. Note that it's not enough to check for these
-- cases top-level. While mkTick will never produce Core with type
-- expressions below ticks, such constructs can be the result of
-- unfoldings. We therefore make an effort to put everything into
-- the right place no matter what we start with.
Cast e co -> mkTick' (top . flip Cast co) rest e
Coercion co -> Coercion co
Lam x e
-- Always float through type lambdas. Even for non-type lambdas,
-- floating is allowed for all but the most strict placement rule.
| not (isRuntimeVar x) || tickishPlace t /= PlaceRuntime
-> mkTick' (top . Lam x) rest e
-- If it is both counting and scoped, we split the tick into its
-- two components, often allowing us to keep the counting tick on
-- the outside of the lambda and push the scoped tick inside.
-- The point of this is that the counting tick can probably be
-- floated, and the lambda may then be in a position to be
-- beta-reduced.
| canSplit
-> top $ Tick (mkNoScope t) $ rest $ Lam x $ mkTick (mkNoCount t) e
App f arg
-- Always float through type applications.
| not (isRuntimeArg arg)
-> mkTick' (top . flip App arg) rest f
-- We can also float through constructor applications, placement
-- permitting. Again we can split.
| isSaturatedConApp expr && (tickishPlace t==PlaceCostCentre || canSplit)
-> if tickishPlace t == PlaceCostCentre
then top $ rest $ tickHNFArgs t expr
else top $ Tick (mkNoScope t) $ rest $ tickHNFArgs (mkNoCount t) expr
Var x
| notFunction && tickishPlace t == PlaceCostCentre
-> orig_expr
| notFunction && canSplit
-> top $ Tick (mkNoScope t) $ rest expr
where
-- SCCs can be eliminated on variables provided the variable
-- is not a function. In these cases the SCC makes no difference:
-- the cost of evaluating the variable will be attributed to its
-- definition site. When the variable refers to a function, however,
-- an SCC annotation on the variable affects the cost-centre stack
-- when the function is called, so we must retain those.
notFunction = not (isFunTy (idType x))
Lit{}
| tickishPlace t == PlaceCostCentre
-> orig_expr
-- Catch-all: Annotate where we stand
_any -> top $ Tick t $ rest expr
mkTicks :: [Tickish Id] -> CoreExpr -> CoreExpr
mkTicks ticks expr = foldr mkTick expr ticks
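-- Sketch (not in the original source): the ticks are wrapped right-to-left,
--      mkTicks [t1,t2] e  ==  mkTick t1 (mkTick t2 e)
-- so the head of the list ends up outermost.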
isSaturatedConApp :: CoreExpr -> Bool
isSaturatedConApp e = go e []
where go (App f a) as = go f (a:as)
go (Var fun) args
= isConLikeId fun && idArity fun == valArgCount args
go (Cast f _) as = go f as
go _ _ = False
mkTickNoHNF :: Tickish Id -> CoreExpr -> CoreExpr
mkTickNoHNF t e
| exprIsHNF e = tickHNFArgs t e
| otherwise = mkTick t e
-- push a tick into the arguments of a HNF (call or constructor app)
tickHNFArgs :: Tickish Id -> CoreExpr -> CoreExpr
tickHNFArgs t e = push t e
where
push t (App f (Type u)) = App (push t f) (Type u)
push t (App f arg) = App (push t f) (mkTick t arg)
push _t e = e
-- | Strip ticks satisfying a predicate from top of an expression
stripTicksTop :: (Tickish Id -> Bool) -> Expr b -> ([Tickish Id], Expr b)
stripTicksTop p = go []
where go ts (Tick t e) | p t = go (t:ts) e
go ts other = (reverse ts, other)
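-- For instance (sketch, not in the original source), if the predicate accepts
-- both ticks:
--      stripTicksTop p (Tick t1 (Tick t2 e))  ==  ([t1,t2], e)
-- i.e. the result list is outermost-first, hence the 'reverse'.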
-- | Strip ticks satisfying a predicate from top of an expression,
-- returning the remaining expression
stripTicksTopE :: (Tickish Id -> Bool) -> Expr b -> Expr b
stripTicksTopE p = go
where go (Tick t e) | p t = go e
go other = other
-- | Strip ticks satisfying a predicate from top of an expression,
-- returning the ticks
stripTicksTopT :: (Tickish Id -> Bool) -> Expr b -> [Tickish Id]
stripTicksTopT p = go []
where go ts (Tick t e) | p t = go (t:ts) e
go ts _ = ts
-- | Completely strip ticks satisfying a predicate from an
-- expression. Note this is O(n) in the size of the expression!
stripTicksE :: (Tickish Id -> Bool) -> Expr b -> Expr b
stripTicksE p expr = go expr
where go (App e a) = App (go e) (go a)
go (Lam b e) = Lam b (go e)
go (Let b e) = Let (go_bs b) (go e)
go (Case e b t as) = Case (go e) b t (map go_a as)
go (Cast e c) = Cast (go e) c
go (Tick t e)
| p t = go e
| otherwise = Tick t (go e)
go other = other
go_bs (NonRec b e) = NonRec b (go e)
go_bs (Rec bs) = Rec (map go_b bs)
go_b (b, e) = (b, go e)
go_a (c,bs,e) = (c,bs, go e)
stripTicksT :: (Tickish Id -> Bool) -> Expr b -> [Tickish Id]
stripTicksT p expr = fromOL $ go expr
where go (App e a) = go e `appOL` go a
go (Lam _ e) = go e
go (Let b e) = go_bs b `appOL` go e
go (Case e _ _ as) = go e `appOL` concatOL (map go_a as)
go (Cast e _) = go e
go (Tick t e)
| p t = t `consOL` go e
| otherwise = go e
go _ = nilOL
go_bs (NonRec _ e) = go e
go_bs (Rec bs) = concatOL (map go_b bs)
go_b (_, e) = go e
go_a (_, _, e) = go e
{-
************************************************************************
* *
\subsection{Other expression construction}
* *
************************************************************************
-}
bindNonRec :: Id -> CoreExpr -> CoreExpr -> CoreExpr
-- ^ @bindNonRec x r b@ produces either:
--
-- > let x = r in b
--
-- or:
--
-- > case r of x { _DEFAULT_ -> b }
--
-- depending on whether we have to use a @case@ or @let@
-- binding for the expression (see 'needsCaseBinding').
-- It's used by the desugarer to avoid building bindings
-- that give Core Lint a heart attack, although actually
-- the simplifier deals with them perfectly well. See
-- also 'MkCore.mkCoreLet'
bindNonRec bndr rhs body
| needsCaseBinding (idType bndr) rhs = Case rhs bndr (exprType body) [(DEFAULT, [], body)]
| otherwise = Let (NonRec bndr rhs) body
-- | Tests whether we have to use a @case@ rather than @let@ binding for this expression
-- as per the invariants of 'CoreExpr': see "CoreSyn#let_app_invariant"
needsCaseBinding :: Type -> CoreExpr -> Bool
needsCaseBinding ty rhs = isUnliftedType ty && not (exprOkForSpeculation rhs)
-- Make a case expression instead of a let
-- These can arise either from the desugarer,
-- or from beta reductions: (\x.e) (x +# y)
mkAltExpr :: AltCon -- ^ Case alternative constructor
-> [CoreBndr] -- ^ Things bound by the pattern match
-> [Type] -- ^ The type arguments to the case alternative
-> CoreExpr
-- ^ This guy constructs the value that the scrutinee must have
-- given that you are in one particular branch of a case
mkAltExpr (DataAlt con) args inst_tys
= mkConApp con (map Type inst_tys ++ varsToCoreExprs args)
mkAltExpr (LitAlt lit) [] []
= Lit lit
mkAltExpr (LitAlt _) _ _ = panic "mkAltExpr LitAlt"
mkAltExpr DEFAULT _ _ = panic "mkAltExpr DEFAULT"
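-- A small sketch (not in the original source): for a case alternative
--      (DataAlt Just, [x], rhs)
-- over a scrutinee of type (Maybe Int), mkAltExpr rebuilds the value the
-- scrutinee must have in that branch, namely (Just @Int x).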
{-
************************************************************************
* *
                    Operations over case alternatives
* *
************************************************************************
The default alternative must be first, if it exists at all.
This makes it easy to find, though it makes matching marginally harder.
-}
-- | Extract the default case alternative
findDefault :: [(AltCon, [a], b)] -> ([(AltCon, [a], b)], Maybe b)
findDefault ((DEFAULT,args,rhs) : alts) = ASSERT( null args ) (alts, Just rhs)
findDefault alts = (alts, Nothing)
addDefault :: [(AltCon, [a], b)] -> Maybe b -> [(AltCon, [a], b)]
addDefault alts Nothing = alts
addDefault alts (Just rhs) = (DEFAULT, [], rhs) : alts
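-- A small sketch (not in the original source) of how the two fit together:
--      findDefault [(DEFAULT,[],d), (LitAlt l,[],e)]  ==  ([(LitAlt l,[],e)], Just d)
--      addDefault  [(LitAlt l,[],e)] (Just d)         ==  [(DEFAULT,[],d), (LitAlt l,[],e)]
-- i.e. addDefault re-attaches the default alternative that findDefault peeled off.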
isDefaultAlt :: (AltCon, a, b) -> Bool
isDefaultAlt (DEFAULT, _, _) = True
isDefaultAlt _ = False
-- | Find the case alternative corresponding to a particular
-- constructor: panics if no such constructor exists
findAlt :: AltCon -> [(AltCon, a, b)] -> Maybe (AltCon, a, b)
-- A "Nothing" result *is* legitimate
-- See Note [Unreachable code]
findAlt con alts
= case alts of
(deflt@(DEFAULT,_,_):alts) -> go alts (Just deflt)
_ -> go alts Nothing
where
go [] deflt = deflt
go (alt@(con1,_,_) : alts) deflt
= case con `cmpAltCon` con1 of
LT -> deflt -- Missed it already; the alts are in increasing order
EQ -> Just alt
GT -> ASSERT( not (con1 == DEFAULT) ) go alts deflt
{- Note [Unreachable code]
~~~~~~~~~~~~~~~~~~~~~~~~~~
It is possible (although unusual) for GHC to find a case expression
that cannot match. For example:
data Col = Red | Green | Blue
x = Red
f v = case x of
Red -> ...
_ -> ...(case x of { Green -> e1; Blue -> e2 })...
Suppose that for some silly reason, x isn't substituted in the case
expression. (Perhaps there's a NOINLINE on it, or profiling SCC stuff
gets in the way; cf Trac #3118.) Then the full-laziness pass might produce
this
x = Red
lvl = case x of { Green -> e1; Blue -> e2 })
f v = case x of
Red -> ...
_ -> ...lvl...
Now if x gets inlined, we won't be able to find a matching alternative
for 'Red'. That's because 'lvl' is unreachable. So rather than crashing
we generate (error "Inaccessible alternative").
Similar things can happen (augmented by GADTs) when the Simplifier
filters down the matching alternatives in Simplify.rebuildCase.
-}
---------------------------------
mergeAlts :: [(AltCon, a, b)] -> [(AltCon, a, b)] -> [(AltCon, a, b)]
-- ^ Merge alternatives preserving order; alternatives in
-- the first argument shadow ones in the second
mergeAlts [] as2 = as2
mergeAlts as1 [] = as1
mergeAlts (a1:as1) (a2:as2)
= case a1 `cmpAlt` a2 of
LT -> a1 : mergeAlts as1 (a2:as2)
EQ -> a1 : mergeAlts as1 as2 -- Discard a2
GT -> a2 : mergeAlts (a1:as1) as2
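-- Sketch (not in the original source), assuming constructor A is declared
-- before B so both argument lists are already in increasing order:
--      mergeAlts [(DataAlt A,[],e1)] [(DataAlt A,[],e2), (DataAlt B,[],e3)]
--        ==  [(DataAlt A,[],e1), (DataAlt B,[],e3)]
-- the A-alternative from the first argument shadows the one from the second.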
---------------------------------
trimConArgs :: AltCon -> [CoreArg] -> [CoreArg]
-- ^ Given:
--
-- > case (C a b x y) of
-- > C b x y -> ...
--
-- We want to drop the leading type argument of the scrutinee
-- leaving the arguments to match against the pattern
trimConArgs DEFAULT args = ASSERT( null args ) []
trimConArgs (LitAlt _) args = ASSERT( null args ) []
trimConArgs (DataAlt dc) args = dropList (dataConUnivTyVars dc) args
filterAlts :: TyCon -- ^ Type constructor of scrutinee's type (used to prune possibilities)
-> [Type] -- ^ And its type arguments
-> [AltCon] -- ^ 'imposs_cons': constructors known to be impossible due to the form of the scrutinee
-> [(AltCon, [Var], a)] -- ^ Alternatives
-> ([AltCon], [(AltCon, [Var], a)])
-- Returns:
-- 1. Constructors that will never be encountered by the
-- *default* case (if any). A superset of imposs_cons
-- 2. The new alternatives, trimmed by
-- a) remove imposs_cons
-- b) remove constructors which can't match because of GADTs
-- and with the DEFAULT expanded to a DataAlt if there is exactly
        --      one remaining constructor that can match
--
-- NB: the final list of alternatives may be empty:
-- This is a tricky corner case. If the data type has no constructors,
-- which GHC allows, or if the imposs_cons covers all constructors (after taking
-- account of GADTs), then no alternatives can match.
--
-- If callers need to preserve the invariant that there is always at least one branch
-- in a "case" statement then they will need to manually add a dummy case branch that just
-- calls "error" or similar.
filterAlts _tycon inst_tys imposs_cons alts
= (imposs_deflt_cons, addDefault trimmed_alts maybe_deflt)
where
(alts_wo_default, maybe_deflt) = findDefault alts
alt_cons = [con | (con,_,_) <- alts_wo_default]
trimmed_alts = filterOut (impossible_alt inst_tys) alts_wo_default
imposs_deflt_cons = nub (imposs_cons ++ alt_cons)
-- "imposs_deflt_cons" are handled
-- EITHER by the context,
-- OR by a non-DEFAULT branch in this case expression.
impossible_alt :: [Type] -> (AltCon, a, b) -> Bool
impossible_alt _ (con, _, _) | con `elem` imposs_cons = True
impossible_alt inst_tys (DataAlt con, _, _) = dataConCannotMatch inst_tys con
impossible_alt _ _ = False
refineDefaultAlt :: [Unique] -> TyCon -> [Type]
                     -> [AltCon]     -- Constructors that cannot match the DEFAULT (if any)
-> [CoreAlt]
-> (Bool, [CoreAlt])
-- Refine the default alternative to a DataAlt,
-- if there is a unique way to do so
refineDefaultAlt us tycon tys imposs_deflt_cons all_alts
| (DEFAULT,_,rhs) : rest_alts <- all_alts
, isAlgTyCon tycon -- It's a data type, tuple, or unboxed tuples.
, not (isNewTyCon tycon) -- We can have a newtype, if we are just doing an eval:
-- case x of { DEFAULT -> e }
-- and we don't want to fill in a default for them!
, Just all_cons <- tyConDataCons_maybe tycon
, let imposs_data_cons = [con | DataAlt con <- imposs_deflt_cons] -- We now know it's a data type
impossible con = con `elem` imposs_data_cons || dataConCannotMatch tys con
= case filterOut impossible all_cons of
-- Eliminate the default alternative
-- altogether if it can't match:
[] -> (False, rest_alts)
-- It matches exactly one constructor, so fill it in:
[con] -> (True, mergeAlts rest_alts [(DataAlt con, ex_tvs ++ arg_ids, rhs)])
-- We need the mergeAlts to keep the alternatives in the right order
where
(ex_tvs, arg_ids) = dataConRepInstPat us con tys
-- It matches more than one, so do nothing
_ -> (False, all_alts)
| debugIsOn, isAlgTyCon tycon, null (tyConDataCons tycon)
, not (isFamilyTyCon tycon || isAbstractTyCon tycon)
-- Check for no data constructors
-- This can legitimately happen for abstract types and type families,
-- so don't report that
= (False, all_alts)
| otherwise -- The common case
= (False, all_alts)
{- Note [Combine identical alternatives]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
If several alternatives are identical, merge them into a single
DEFAULT alternative. I've occasionally seen this making a big
difference:
case e of =====> case e of
C _ -> f x D v -> ....v....
D v -> ....v.... DEFAULT -> f x
DEFAULT -> f x
The point is that we merge common RHSs, at least for the DEFAULT case.
[One could do something more elaborate but I've never seen it needed.]
To avoid an expensive test, we just merge branches equal to the *first*
alternative; this picks up the common cases
a) all branches equal
b) some branches equal to the DEFAULT (which occurs first)
The case where Combine Identical Alternatives transformation showed up
was like this (base/Foreign/C/Err/Error.hs):
x | p `is` 1 -> e1
| p `is` 2 -> e2
...etc...
where @is@ was something like
p `is` n = p /= (-1) && p == n
This gave rise to a horrible sequence of cases
case p of
(-1) -> $j p
1 -> e1
DEFAULT -> $j p
and similarly in cascade for all the join points!
NB: it's important that all this is done in [InAlt], *before* we work
on the alternatives themselves, because Simplify.simplAlt may zap the
occurrence info on the binders in the alternatives, which in turn
defeats combineIdenticalAlts (see Trac #7360).
Note [Care with impossible-constructors when combining alternatives]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Suppose we have (Trac #10538)
data T = A | B | C | D
case x::T of (Imposs-default-cons {A,B})
DEFAULT -> e1
A -> e2
B -> e1
When calling combineIdenticalAlts, we'll have computed that the
"impossible constructors" for the DEFAULT alt is {A,B}, since if x is
A or B we'll take the other alternatives. But suppose we combine B
into the DEFAULT, to get
case x::T of (Imposs-default-cons {A})
DEFAULT -> e1
A -> e2
Then we must be careful to trim the impossible constructors to just {A},
else we risk compiling 'e1' wrong!
Not only that, but we take care when there is no DEFAULT beforehand,
because we are introducing one. Consider
case x of (Imposs-default-cons {A,B,C})
A -> e1
B -> e2
C -> e1
Then when combining the A and C alternatives we get
case x of (Imposs-default-cons {B})
DEFAULT -> e1
B -> e2
Note that we have a new DEFAULT branch that we didn't have before. So
we need delete from the "impossible-default-constructors" all the
known-con alternatives that we have eliminated. (In Trac #11172 we
missed the first one.)
-}
combineIdenticalAlts :: [AltCon] -- Constructors that cannot match DEFAULT
-> [CoreAlt]
-> (Bool, -- True <=> something happened
                        [AltCon],     -- New constructors that cannot match DEFAULT
[CoreAlt]) -- New alternatives
-- See Note [Combine identical alternatives]
-- True <=> we did some combining, result is a single DEFAULT alternative
combineIdenticalAlts imposs_deflt_cons ((con1,bndrs1,rhs1) : rest_alts)
| all isDeadBinder bndrs1 -- Remember the default
, not (null elim_rest) -- alternative comes first
= (True, imposs_deflt_cons', deflt_alt : filtered_rest)
where
(elim_rest, filtered_rest) = partition identical_to_alt1 rest_alts
deflt_alt = (DEFAULT, [], mkTicks (concat tickss) rhs1)
-- See Note [Care with impossible-constructors when combining alternatives]
imposs_deflt_cons' = imposs_deflt_cons `minusList` elim_cons
elim_cons = elim_con1 ++ map fstOf3 elim_rest
elim_con1 = case con1 of -- Don't forget con1!
                  DEFAULT -> []   -- See Note [Care with impossible-constructors when combining alternatives]
_ -> [con1]
cheapEqTicked e1 e2 = cheapEqExpr' tickishFloatable e1 e2
identical_to_alt1 (_con,bndrs,rhs)
= all isDeadBinder bndrs && rhs `cheapEqTicked` rhs1
tickss = map (stripTicksT tickishFloatable . thdOf3) elim_rest
combineIdenticalAlts imposs_cons alts
= (False, imposs_cons, alts)
{- *********************************************************************
* *
exprIsTrivial
* *
************************************************************************
Note [exprIsTrivial]
~~~~~~~~~~~~~~~~~~~~
@exprIsTrivial@ is true of expressions we are unconditionally happy to
duplicate; simple variables and constants, and type
applications. Note that primop Ids aren't considered
trivial unless
Note [Variables are trivial]
~~~~~~~~~~~~~~~~~~~~~~~~~~~
There used to be a gruesome test for (hasNoBinding v) in the
Var case:
exprIsTrivial (Var v) | hasNoBinding v = idArity v == 0
The idea here is that a constructor worker, like \$wJust, is
really short for (\x -> \$wJust x), because \$wJust has no binding.
So it should be treated like a lambda. Ditto unsaturated primops.
But now constructor workers are not "have-no-binding" Ids. And
completely un-applied primops and foreign-call Ids are sufficiently
rare that I plan to allow them to be duplicated and put up with
saturating them.
Note [Tick trivial]
~~~~~~~~~~~~~~~~~~~
Ticks are only trivial if they are pure annotations. If we treat
"tick<n> x" as trivial, it will be inlined inside lambdas and the
entry count will be skewed, for example. Furthermore "scc<n> x" will
turn into just "x" in mkTick.
Note [Empty case is trivial]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
The expression (case (x::Int) Bool of {}) is just a type-changing
case used when we are sure that 'x' will not return. See
Note [Empty case alternatives] in CoreSyn.
If the scrutinee is trivial, then so is the whole expression; and the
CoreToSTG pass in fact drops the case expression leaving only the
scrutinee.
Having more trivial expressions is good. Moreover, if we don't treat
it as trivial we may land up with let-bindings like
let v = case x of {} in ...
and after CoreToSTG that gives
let v = x in ...
and that confuses the code generator (Trac #11155). So best to kill
it off at source.
-}
exprIsTrivial :: CoreExpr -> Bool
exprIsTrivial (Var _) = True -- See Note [Variables are trivial]
exprIsTrivial (Type _) = True
exprIsTrivial (Coercion _) = True
exprIsTrivial (Lit lit) = litIsTrivial lit
exprIsTrivial (App e arg) = not (isRuntimeArg arg) && exprIsTrivial e
exprIsTrivial (Tick t e) = not (tickishIsCode t) && exprIsTrivial e
-- See Note [Tick trivial]
exprIsTrivial (Cast e _) = exprIsTrivial e
exprIsTrivial (Lam b body) = not (isRuntimeVar b) && exprIsTrivial body
exprIsTrivial (Case e _ _ []) = exprIsTrivial e -- See Note [Empty case is trivial]
exprIsTrivial _ = False
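-- A few concrete instances of the equations above (sketch, not in the
-- original source), where f and x are variables:
--      f @Int        -- trivial: a type application of a variable
--      x |> co       -- trivial: a cast around a trivial expression
--      f x           -- NOT trivial: a value application
--      scc<cc> x     -- NOT trivial: the tick generates code (Note [Tick trivial])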
{-
When substituting in a breakpoint we need to strip away the type cruft
from a trivial expression and get back to the Id. The invariant is
that the expression we're substituting was originally trivial
according to exprIsTrivial.
-}
getIdFromTrivialExpr :: CoreExpr -> Id
getIdFromTrivialExpr e = go e
where go (Var v) = v
go (App f t) | not (isRuntimeArg t) = go f
go (Tick t e) | not (tickishIsCode t) = go e
go (Cast e _) = go e
go (Lam b e) | not (isRuntimeVar b) = go e
go e = pprPanic "getIdFromTrivialExpr" (ppr e)
{-
exprIsBottom is a very cheap and cheerful function; it may return
False for bottoming expressions, but it never costs much to ask. See
also CoreArity.exprBotStrictness_maybe, but that's a bit more
expensive.
-}
exprIsBottom :: CoreExpr -> Bool
-- See Note [Bottoming expressions]
exprIsBottom e
| isEmptyTy (exprType e)
= True
| otherwise
= go 0 e
where
go n (Var v) = isBottomingId v && n >= idArity v
go n (App e a) | isTypeArg a = go n e
| otherwise = go (n+1) e
go n (Tick _ e) = go n e
go n (Cast e _) = go n e
go n (Let _ e) = go n e
go n (Lam v e) | isTyVar v = go n e
go _ (Case _ _ _ alts) = null alts
-- See Note [Empty case alternatives] in CoreSyn
go _ _ = False
{- Note [Bottoming expressions]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
A bottoming expression is guaranteed to diverge, or raise an
exception. We can test for it in two different ways, and exprIsBottom
checks for both of these situations:
* Visibly-bottom computations. For example
(error Int "Hello")
is visibly bottom. The strictness analyser also finds out if
a function diverges or raises an exception, and puts that info
in its strictness signature.
* Empty types. If a type is empty, its only inhabitant is bottom.
For example:
data T
f :: T -> Bool
f = \(x:t). case x of Bool {}
Since T has no data constructors, the case alternatives are of course
empty. However note that 'x' is not bound to a visibly-bottom value;
it's the *type* that tells us it's going to diverge.
A GADT may also be empty even though it has constructors:
data T a where
T1 :: a -> T Bool
T2 :: T Int
...(case (x::T Char) of {})...
Here (T Char) is uninhabited. A more realistic case is (Int ~ Bool),
which is likewise uninhabited.
************************************************************************
* *
exprIsDupable
* *
************************************************************************
Note [exprIsDupable]
~~~~~~~~~~~~~~~~~~~~
@exprIsDupable@ is true of expressions that can be duplicated at a modest
cost in code size. This will only happen in different case
branches, so there's no issue about duplicating work.
That is, exprIsDupable returns True of (f x) even if
f is very very expensive to call.
Its only purpose is to avoid fruitless let-binding
and then inlining of case join points
-}
exprIsDupable :: DynFlags -> CoreExpr -> Bool
exprIsDupable dflags e
= isJust (go dupAppSize e)
where
go :: Int -> CoreExpr -> Maybe Int
go n (Type {}) = Just n
go n (Coercion {}) = Just n
go n (Var {}) = decrement n
go n (Tick _ e) = go n e
go n (Cast e _) = go n e
go n (App f a) | Just n' <- go n a = go n' f
go n (Lit lit) | litIsDupable dflags lit = decrement n
go _ _ = Nothing
decrement :: Int -> Maybe Int
decrement 0 = Nothing
decrement n = Just (n-1)
dupAppSize :: Int
dupAppSize = 8 -- Size of term we are prepared to duplicate
-- This is *just* big enough to make test MethSharing
-- inline enough join points. Really it should be
-- smaller, and could be if we fixed Trac #4960.
{-
************************************************************************
* *
exprIsCheap, exprIsExpandable
* *
************************************************************************
Note [exprIsWorkFree]
~~~~~~~~~~~~~~~~~~~~~
exprIsWorkFree is used when deciding whether to inline something; we
don't inline it if doing so might duplicate work, by peeling off a
complete copy of the expression. Here we do not want even to
duplicate a primop (Trac #5623):
eg let x = a #+ b in x +# x
we do not want to inline/duplicate x
Previously we were a bit more liberal, which led to the primop-duplicating
problem. However, being more conservative did lead to a big regression in
one nofib benchmark, wheel-sieve1. The situation looks like this:
let noFactor_sZ3 :: GHC.Types.Int -> GHC.Types.Bool
noFactor_sZ3 = case s_adJ of _ { GHC.Types.I# x_aRs ->
case GHC.Prim.<=# x_aRs 2 of _ {
GHC.Types.False -> notDivBy ps_adM qs_adN;
GHC.Types.True -> lvl_r2Eb }}
go = \x. ...(noFactor (I# y))....(go x')...
The function 'noFactor' is heap-allocated and then called. Turns out
that 'notDivBy' is strict in its THIRD arg, but that is invisible to
the caller of noFactor, which therefore cannot do w/w and
heap-allocates noFactor's argument. At the moment (May 12) we are just
going to put up with this, because the previous more aggressive inlining
(which treated 'noFactor' as work-free) was duplicating primops, which
in turn was making inner loops of array calculations runs slow (#5623)
-}
exprIsWorkFree :: CoreExpr -> Bool
-- See Note [exprIsWorkFree]
exprIsWorkFree e = go 0 e
where -- n is the number of value arguments
go _ (Lit {}) = True
go _ (Type {}) = True
go _ (Coercion {}) = True
go n (Cast e _) = go n e
go n (Case scrut _ _ alts) = foldl (&&) (exprIsWorkFree scrut)
[ go n rhs | (_,_,rhs) <- alts ]
-- See Note [Case expressions are work-free]
go _ (Let {}) = False
go n (Var v) = isCheapApp v n
go n (Tick t e) | tickishCounts t = False
| otherwise = go n e
go n (Lam x e) | isRuntimeVar x = n==0 || go (n-1) e
| otherwise = go n e
go n (App f e) | isRuntimeArg e = exprIsWorkFree e && go (n+1) f
| otherwise = go n f
{-
Note [Case expressions are work-free]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Are case-expressions work-free? Consider
let v = case x of (p,q) -> p
go = \y -> ...case v of ...
Should we inline 'v' at its use site inside the loop? At the moment
we do. I experimented with saying that case are *not* work-free, but
that increased allocation slightly. It's a fairly small effect, and at
the moment we go for the slightly more aggressive version which treats
(case x of ....) as work-free if the alternatives are.
Note [exprIsCheap] See also Note [Interaction of exprIsCheap and lone variables]
~~~~~~~~~~~~~~~~~~ in CoreUnfold.hs
@exprIsCheap@ looks at a Core expression and returns \tr{True} if
it is obviously in weak head normal form, or is cheap to get to WHNF.
[Note that that's not the same as exprIsDupable; an expression might be
big, and hence not dupable, but still cheap.]
By ``cheap'' we mean a computation we're willing to:
push inside a lambda, or
inline at more than one place
That might mean it gets evaluated more than once, instead of being
shared. The main examples of things which aren't WHNF but are
``cheap'' are:
* case e of
pi -> ei
(where e, and all the ei are cheap)
* let x = e in b
(where e and b are cheap)
* op x1 ... xn
(where op is a cheap primitive operator)
* error "foo"
(because we are happy to substitute it inside a lambda)
Notice that a variable is considered 'cheap': we can push it inside a lambda,
because sharing will make sure it is only evaluated once.
Note [exprIsCheap and exprIsHNF]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Note that exprIsHNF does not imply exprIsCheap. Eg
let x = fac 20 in Just x
This responds True to exprIsHNF (you can discard a seq), but
False to exprIsCheap.
-}
exprIsCheap :: CoreExpr -> Bool
exprIsCheap = exprIsCheap' isCheapApp
exprIsExpandable :: CoreExpr -> Bool
exprIsExpandable = exprIsCheap' isExpandableApp -- See Note [CONLIKE pragma] in BasicTypes
exprIsCheap' :: CheapAppFun -> CoreExpr -> Bool
exprIsCheap' _ (Lit _) = True
exprIsCheap' _ (Type _) = True
exprIsCheap' _ (Coercion _) = True
exprIsCheap' _ (Var _) = True
exprIsCheap' good_app (Cast e _) = exprIsCheap' good_app e
exprIsCheap' good_app (Lam x e) = isRuntimeVar x
|| exprIsCheap' good_app e
exprIsCheap' good_app (Case e _ _ alts) = exprIsCheap' good_app e &&
and [exprIsCheap' good_app rhs | (_,_,rhs) <- alts]
-- Experimentally, treat (case x of ...) as cheap
-- (and case __coerce x etc.)
-- This improves arities of overloaded functions where
-- there is only dictionary selection (no construction) involved
exprIsCheap' good_app (Tick t e)
| tickishCounts t = False
| otherwise = exprIsCheap' good_app e
-- never duplicate counting ticks. If we get this wrong, then
-- HPC's entry counts will be off (check test in
-- libraries/hpc/tests/raytrace)
exprIsCheap' good_app (Let (NonRec _ b) e)
= exprIsCheap' good_app b && exprIsCheap' good_app e
exprIsCheap' good_app (Let (Rec prs) e)
= all (exprIsCheap' good_app . snd) prs && exprIsCheap' good_app e
exprIsCheap' good_app other_expr -- Applications and variables
= go other_expr []
where
-- Accumulate value arguments, then decide
go (Cast e _) val_args = go e val_args
go (App f a) val_args | isRuntimeArg a = go f (a:val_args)
| otherwise = go f val_args
go (Var _) [] = True
-- Just a type application of a variable
-- (f t1 t2 t3) counts as WHNF
-- This case is probably handled by the good_app case
-- below, which should have a case for n=0, but putting
-- it here too is belt and braces; and it's such a common
-- case that checking for null directly seems like a
-- good plan
go (Var f) args
| good_app f (length args) -- Typically holds of data constructor applications
= go_pap args -- E.g. good_app = isCheapApp below
| otherwise
= case idDetails f of
RecSelId {} -> go_sel args
ClassOpId {} -> go_sel args
PrimOpId op -> go_primop op args
_ | isBottomingId f -> True
| otherwise -> False
-- Application of a function which
-- always gives bottom; we treat this as cheap
-- because it certainly doesn't need to be shared!
go (Tick t e) args
| not (tickishCounts t) -- don't duplicate counting ticks, see above
= go e args
go _ _ = False
--------------
go_pap args = all (exprIsCheap' good_app) args
-- Used to be "all exprIsTrivial args" due to concerns about
-- duplicating nested constructor applications, but see #4978.
-- The principle here is that
-- let x = a +# b in c *# x
-- should behave equivalently to
-- c *# (a +# b)
-- Since lets with cheap RHSs are accepted,
-- so should paps with cheap arguments
--------------
go_primop op args = primOpIsCheap op && all (exprIsCheap' good_app) args
-- In principle we should worry about primops
-- that return a type variable, since the result
-- might be applied to something, but I'm not going
-- to bother to check the number of args
--------------
go_sel [arg] = exprIsCheap' good_app arg -- I'm experimenting with making record selection
go_sel _ = False -- look cheap, so we will substitute it inside a
-- lambda. Particularly for dictionary field selection.
-- BUT: Take care with (sel d x)! The (sel d) might be cheap, but
-- there's no guarantee that (sel d x) will be too. Hence (n_val_args == 1)
-------------------------------------
type CheapAppFun = Id -> Int -> Bool
-- Is an application of this function to n *value* args
-- always cheap, assuming the arguments are cheap?
-- Mainly true of partial applications, data constructors,
-- and of course true if the number of args is zero
isCheapApp :: CheapAppFun
isCheapApp fn n_val_args
= isDataConWorkId fn
|| n_val_args == 0
|| n_val_args < idArity fn
isExpandableApp :: CheapAppFun
isExpandableApp fn n_val_args
= isConLikeId fn
|| n_val_args < idArity fn
|| go n_val_args (idType fn)
where
-- See if all the arguments are PredTys (implicit params or classes)
-- If so we'll regard it as expandable; see Note [Expandable overloadings]
-- This incidentally picks up the (n_val_args = 0) case
go 0 _ = True
go n_val_args ty
| Just (bndr, ty) <- splitPiTy_maybe ty
= caseBinder bndr
(\_tv -> go n_val_args ty)
(\bndr_ty -> isPredTy bndr_ty && go (n_val_args-1) ty)
| otherwise
= False
{-
Note [Expandable overloadings]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Suppose the user wrote this
{-# RULE forall x. foo (negate x) = h x #-}
f x = ....(foo (negate x))....
He'd expect the rule to fire. But since negate is overloaded, we might
get this:
f = \d -> let n = negate d in \x -> ...foo (n x)...
So we treat the application of a function (negate in this case) to a
*dictionary* as expandable. In effect, every function is CONLIKE when
it's applied only to dictionaries.
************************************************************************
* *
exprOkForSpeculation
* *
************************************************************************
-}
-----------------------------
-- | 'exprOkForSpeculation' returns True of an expression that is:
--
-- * Safe to evaluate even if normal order eval might not
-- evaluate the expression at all, or
--
-- * Safe /not/ to evaluate even if normal order would do so
--
-- It is usually called on arguments of unlifted type, but not always
-- In particular, Simplify.rebuildCase calls it on lifted types
-- when a 'case' is a plain 'seq'. See the example in
-- Note [exprOkForSpeculation: case expressions] below
--
-- Precisely, it returns @True@ iff:
-- a) The expression guarantees to terminate,
-- b) soon,
-- c) without causing a write side effect (e.g. writing a mutable variable)
-- d) without throwing a Haskell exception
-- e) without risking an unchecked runtime exception (array out of bounds,
-- divide by zero)
--
-- For @exprOkForSideEffects@ the list is the same, but omitting (e).
--
-- Note that
-- exprIsHNF implies exprOkForSpeculation
-- exprOkForSpeculation implies exprOkForSideEffects
--
-- See Note [PrimOp can_fail and has_side_effects] in PrimOp
-- and Note [Implementation: how can_fail/has_side_effects affect transformations]
--
-- As an example of the considerations in this test, consider:
--
-- > let x = case y# +# 1# of { r# -> I# r# }
-- > in E
--
-- being translated to:
--
-- > case y# +# 1# of { r# ->
-- > let x = I# r#
-- > in E
-- > }
--
-- We can only do this if the @y + 1@ is ok for speculation: it has no
-- side effects, and can't diverge or raise an exception.
exprOkForSpeculation, exprOkForSideEffects :: Expr b -> Bool
exprOkForSpeculation = expr_ok primOpOkForSpeculation
exprOkForSideEffects = expr_ok primOpOkForSideEffects
-- Polymorphic in binder type
-- There is one call at a non-Id binder type, in SetLevels
expr_ok :: (PrimOp -> Bool) -> Expr b -> Bool
expr_ok _ (Lit _) = True
expr_ok _ (Type _) = True
expr_ok _ (Coercion _) = True
expr_ok primop_ok (Var v) = app_ok primop_ok v []
expr_ok primop_ok (Cast e _) = expr_ok primop_ok e
-- Tick annotations that *tick* cannot be speculated, because these
-- are meant to identify whether or not (and how often) the particular
-- source expression was evaluated at runtime.
expr_ok primop_ok (Tick tickish e)
| tickishCounts tickish = False
| otherwise = expr_ok primop_ok e
expr_ok primop_ok (Case e _ _ alts)
= expr_ok primop_ok e -- Note [exprOkForSpeculation: case expressions]
&& all (\(_,_,rhs) -> expr_ok primop_ok rhs) alts
&& altsAreExhaustive alts -- Note [Exhaustive alts]
expr_ok primop_ok other_expr
= case collectArgs other_expr of
(expr, args) | Var f <- stripTicksTopE (not . tickishCounts) expr
-> app_ok primop_ok f args
_ -> False
-----------------------------
app_ok :: (PrimOp -> Bool) -> Id -> [Expr b] -> Bool
app_ok primop_ok fun args
= case idDetails fun of
DFunId new_type -> not new_type
-- DFuns terminate, unless the dict is implemented
-- with a newtype in which case they may not
DataConWorkId {} -> True
-- The strictness of the constructor has already
-- been expressed by its "wrapper", so we don't need
-- to take the arguments into account
PrimOpId op
| isDivOp op -- Special case for dividing operations that fail
, [arg1, Lit lit] <- args -- only if the divisor is zero
-> not (isZeroLit lit) && expr_ok primop_ok arg1
-- Often there is a literal divisor, and this
-- can get rid of a thunk in an inner loop
| DataToTagOp <- op -- See Note [dataToTag speculation]
-> True
| otherwise
-> primop_ok op -- A bit conservative: we don't really need
&& all (expr_ok primop_ok) args -- to care about lazy arguments, but this is easy
_other -> isUnliftedType (idType fun) -- c.f. the Var case of exprIsHNF
|| idArity fun > n_val_args -- Partial apps
|| (n_val_args == 0 &&
isEvaldUnfolding (idUnfolding fun)) -- Let-bound values
where
n_val_args = valArgCount args
-----------------------------
altsAreExhaustive :: [Alt b] -> Bool
-- True <=> the case alternatives are definitely exhaustive
-- False <=> they may or may not be
altsAreExhaustive []
= False -- Should not happen
altsAreExhaustive ((con1,_,_) : alts)
= case con1 of
DEFAULT -> True
LitAlt {} -> False
DataAlt c -> 1 + length alts == tyConFamilySize (dataConTyCon c)
-- It is possible to have an exhaustive case that does not
-- enumerate all constructors, notably in a GADT match, but
-- we behave conservatively here -- I don't think it's important
-- enough to deserve special treatment
-- | True of dyadic operators that can fail only if the second arg is zero!
isDivOp :: PrimOp -> Bool
-- This function probably belongs in PrimOp, or even in
-- an automagically generated file.. but it's such a
-- special case I thought I'd leave it here for now.
isDivOp IntQuotOp = True
isDivOp IntRemOp = True
isDivOp WordQuotOp = True
isDivOp WordRemOp = True
isDivOp FloatDivOp = True
isDivOp DoubleDivOp = True
isDivOp _ = False
{-
Note [exprOkForSpeculation: case expressions]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
It's always sound for exprOkForSpeculation to return False, and we
don't want it to take too long, so it bales out on complicated-looking
terms. Notably lets, which can be stacked very deeply; and in any
case the argument of exprOkForSpeculation is usually in a strict context,
so any lets will have been floated away.
However, we keep going on case-expressions. An example like this one
showed up in DPH code (Trac #3717):
foo :: Int -> Int
foo 0 = 0
foo n = (if n < 5 then 1 else 2) `seq` foo (n-1)
If exprOkForSpeculation doesn't look through case expressions, you get this:
T.$wfoo =
\ (ww :: GHC.Prim.Int#) ->
case ww of ds {
__DEFAULT -> case (case <# ds 5 of _ {
GHC.Types.False -> lvl1;
GHC.Types.True -> lvl})
of _ { __DEFAULT ->
T.$wfoo (GHC.Prim.-# ds_XkE 1) };
0 -> 0
}
The inner case is redundant, and should be nuked.
Note [Exhaustive alts]
~~~~~~~~~~~~~~~~~~~~~~
We might have something like
case x of {
A -> ...
_ -> ...(case x of { B -> ...; C -> ... })...
Here, the inner case is fine, because the A alternative
can't happen, but it's not ok to float the inner case outside
the outer one (even if we know x is evaluated outside), because
then it would be non-exhaustive. See Trac #5453.
Similarly, this is a valid program (albeit a slightly dodgy one)
let v = case x of { B -> ...; C -> ... }
in case x of
A -> ...
_ -> ...v...v....
But we don't want to speculate the v binding.
One could try to be clever, but the easy fix is simply to regard
a non-exhaustive case as *not* okForSpeculation.
Note [dataToTag speculation]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Is this OK?
f x = let v::Int# = dataToTag# x
in ...
We say "yes", even though 'x' may not be evaluated. Reasons
* dataToTag#'s strictness means that its argument often will be
evaluated, but FloatOut makes that temporarily untrue
case x of y -> let v = dataToTag# y in ...
-->
case x of y -> let v = dataToTag# x in ...
Note that we look at 'x' instead of 'y' (this is to improve
floating in FloatOut). So Lint complains.
Moreover, it really *might* improve floating to let the
v-binding float out
* CorePrep makes sure dataToTag#'s argument is evaluated, just
before code gen. Until then, it's not guaranteed
************************************************************************
* *
exprIsHNF, exprIsConLike
* *
************************************************************************
-}
-- Note [exprIsHNF] See also Note [exprIsCheap and exprIsHNF]
-- ~~~~~~~~~~~~~~~~
-- | exprIsHNF returns true for expressions that are certainly /already/
-- evaluated to /head/ normal form. This is used to decide whether it's ok
-- to change:
--
-- > case x of _ -> e
--
-- into:
--
-- > e
--
-- and to decide whether it's safe to discard a 'seq'.
--
-- So, it does /not/ treat variables as evaluated, unless they say they are.
-- However, it /does/ treat partial applications and constructor applications
-- as values, even if their arguments are non-trivial, provided the argument
-- type is lifted. For example, both of these are values:
--
-- > (:) (f x) (map f xs)
-- > map (...redex...)
--
-- because 'seq' on such things completes immediately.
--
-- For unlifted argument types, we have to be careful:
--
-- > C (f x :: Int#)
--
-- Suppose @f x@ diverges; then @C (f x)@ is not a value. However this can't
-- happen: see "CoreSyn#let_app_invariant". This invariant states that arguments of
-- unboxed type must be ok-for-speculation (or trivial).
exprIsHNF :: CoreExpr -> Bool -- True => Value-lambda, constructor, PAP
exprIsHNF = exprIsHNFlike isDataConWorkId isEvaldUnfolding
-- | Similar to 'exprIsHNF' but includes CONLIKE functions as well as
-- data constructors. Conlike arguments are considered interesting by the
-- inliner.
exprIsConLike :: CoreExpr -> Bool -- True => lambda, conlike, PAP
exprIsConLike = exprIsHNFlike isConLikeId isConLikeUnfolding
-- | Returns true for values or value-like expressions. These are lambdas,
-- constructors / CONLIKE functions (as determined by the function argument)
-- or PAPs.
--
exprIsHNFlike :: (Var -> Bool) -> (Unfolding -> Bool) -> CoreExpr -> Bool
exprIsHNFlike is_con is_con_unf = is_hnf_like
where
is_hnf_like (Var v) -- NB: There are no value args at this point
= is_con v -- Catches nullary constructors,
-- so that [] and () are values, for example
|| idArity v > 0 -- Catches (e.g.) primops that don't have unfoldings
|| is_con_unf (idUnfolding v)
-- Check the thing's unfolding; it might be bound to a value
-- We don't look through loop breakers here, which is a bit conservative
-- but otherwise I worry that if an Id's unfolding is just itself,
-- we could get an infinite loop
is_hnf_like (Lit _) = True
is_hnf_like (Type _) = True -- Types are honorary Values;
-- we don't mind copying them
is_hnf_like (Coercion _) = True -- Same for coercions
is_hnf_like (Lam b e) = isRuntimeVar b || is_hnf_like e
is_hnf_like (Tick tickish e) = not (tickishCounts tickish)
&& is_hnf_like e
-- See Note [exprIsHNF Tick]
is_hnf_like (Cast e _) = is_hnf_like e
is_hnf_like (App e a)
| isValArg a = app_is_value e 1
| otherwise = is_hnf_like e
is_hnf_like (Let _ e) = is_hnf_like e -- Lazy let(rec)s don't affect us
is_hnf_like _ = False
-- There is at least one value argument
-- 'n' is number of value args to which the expression is applied
app_is_value :: CoreExpr -> Int -> Bool
app_is_value (Var fun) n_val_args
= idArity fun > n_val_args -- Under-applied function
|| is_con fun -- or constructor-like
app_is_value (Tick _ f) nva = app_is_value f nva
app_is_value (Cast f _) nva = app_is_value f nva
app_is_value (App f a) nva
| isValArg a = app_is_value f (nva + 1)
| otherwise = app_is_value f nva
app_is_value _ _ = False
{-
Note [exprIsHNF Tick]
We can discard source annotations on HNFs as long as they aren't
tick-like:
scc c (\x . e) => \x . e
scc c (C x1..xn) => C x1..xn
So we regard these as HNFs. Tick annotations that tick are not
regarded as HNF even if the expression they surround is HNF, because the
tick is there to tell us that the expression was evaluated, so we
don't want to discard a seq on it.
-}
{-
************************************************************************
* *
Instantiating data constructors
* *
************************************************************************
These InstPat functions go here to avoid circularity between DataCon and Id
-}
dataConRepInstPat :: [Unique] -> DataCon -> [Type] -> ([TyVar], [Id])
dataConRepFSInstPat :: [FastString] -> [Unique] -> DataCon -> [Type] -> ([TyVar], [Id])
dataConRepInstPat = dataConInstPat (repeat ((fsLit "ipv")))
dataConRepFSInstPat = dataConInstPat
dataConInstPat :: [FastString] -- A long enough list of FSs to use for names
-> [Unique] -- An equally long list of uniques, at least one for each binder
-> DataCon
-> [Type] -- Types to instantiate the universally quantified tyvars
-> ([TyVar], [Id]) -- Return instantiated variables
-- dataConInstPat fss us con inst_tys returns a pair
-- (ex_tvs, arg_ids),
--
-- ex_tvs are intended to be used as binders for existential type args
--
-- arg_ids are intended to be used as binders for value arguments,
-- and their types have been instantiated with inst_tys and ex_tys
-- The arg_ids include both evidence and
-- programmer-specified arguments (both after rep-ing)
--
-- Example.
-- The following constructor T1
--
-- data T a where
-- T1 :: forall b. Int -> b -> T(a,b)
-- ...
--
-- has representation type
-- forall a. forall a1. forall b. (a ~ (a1,b)) =>
-- Int -> b -> T a
--
-- dataConInstPat fss us T1 (a1',b') will return
--
-- ([a1'', b''], [c :: (a1', b')~(a1'', b''), x :: Int, y :: b''])
--
-- where the double-primed variables are created with the FastStrings and
-- Uniques given as fss and us
dataConInstPat fss uniqs con inst_tys
= ASSERT( univ_tvs `equalLength` inst_tys )
(ex_bndrs, arg_ids)
where
univ_tvs = dataConUnivTyVars con
ex_tvs = dataConExTyVars con
arg_tys = dataConRepArgTys con
arg_strs = dataConRepStrictness con -- 1-1 with arg_tys
n_ex = length ex_tvs
-- split the Uniques and FastStrings
(ex_uniqs, id_uniqs) = splitAt n_ex uniqs
(ex_fss, id_fss) = splitAt n_ex fss
-- Make the instantiating substitution for universals
univ_subst = zipTvSubst univ_tvs inst_tys
-- Make existential type variables, applying and extending the substitution
(full_subst, ex_bndrs) = mapAccumL mk_ex_var univ_subst
(zip3 ex_tvs ex_fss ex_uniqs)
mk_ex_var :: TCvSubst -> (TyVar, FastString, Unique) -> (TCvSubst, TyVar)
mk_ex_var subst (tv, fs, uniq) = (Type.extendTCvSubst subst tv
(mkTyVarTy new_tv)
, new_tv)
where
new_tv = mkTyVar (mkSysTvName uniq fs) kind
kind = Type.substTyUnchecked subst (tyVarKind tv)
-- Make value vars, instantiating types
arg_ids = zipWith4 mk_id_var id_uniqs id_fss arg_tys arg_strs
mk_id_var uniq fs ty str
= mkLocalIdOrCoVarWithInfo name (Type.substTyUnchecked full_subst ty) info
where
name = mkInternalName uniq (mkVarOccFS fs) noSrcSpan
info | isMarkedStrict str = vanillaIdInfo `setUnfoldingInfo` evaldUnfolding
| otherwise = vanillaIdInfo
-- See Note [Mark evaluated arguments]
{-
Note [Mark evaluated arguments]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
When pattern matching on a constructor with strict fields, the binder
can have an 'evaldUnfolding'. Moreover, it *should* have one, so that
when loading an interface file unfolding like:
data T = MkT !Int
f x = case x of { MkT y -> let v::Int# = case y of I# n -> n+1
in ... }
we don't want Lint to complain. The 'y' is evaluated, so the
case in the RHS of the binding for 'v' is fine. But only if we
*know* that 'y' is evaluated.
c.f. add_evals in Simplify.simplAlt
************************************************************************
* *
Equality
* *
************************************************************************
-}
-- | A cheap equality test which bales out fast!
-- If it returns @True@ the arguments are definitely equal,
-- otherwise, they may or may not be equal.
--
-- See also 'exprIsBig'
cheapEqExpr :: Expr b -> Expr b -> Bool
cheapEqExpr = cheapEqExpr' (const False)
-- | Cheap expression equality test, can ignore ticks by type.
cheapEqExpr' :: (Tickish Id -> Bool) -> Expr b -> Expr b -> Bool
cheapEqExpr' ignoreTick = go_s
where go_s = go `on` stripTicksTopE ignoreTick
go (Var v1) (Var v2) = v1 == v2
go (Lit lit1) (Lit lit2) = lit1 == lit2
go (Type t1) (Type t2) = t1 `eqType` t2
go (Coercion c1) (Coercion c2) = c1 `eqCoercion` c2
go (App f1 a1) (App f2 a2)
= f1 `go_s` f2 && a1 `go_s` a2
go (Cast e1 t1) (Cast e2 t2)
= e1 `go_s` e2 && t1 `eqCoercion` t2
go (Tick t1 e1) (Tick t2 e2)
= t1 == t2 && e1 `go_s` e2
go _ _ = False
{-# INLINE go #-}
{-# INLINE cheapEqExpr' #-}
exprIsBig :: Expr b -> Bool
-- ^ Returns @True@ of expressions that are too big to be compared by 'cheapEqExpr'
exprIsBig (Lit _) = False
exprIsBig (Var _) = False
exprIsBig (Type _) = False
exprIsBig (Coercion _) = False
exprIsBig (Lam _ e) = exprIsBig e
exprIsBig (App f a) = exprIsBig f || exprIsBig a
exprIsBig (Cast e _) = exprIsBig e -- Hopefully coercions are not too big!
exprIsBig (Tick _ e) = exprIsBig e
exprIsBig _ = True
eqExpr :: InScopeSet -> CoreExpr -> CoreExpr -> Bool
-- Compares for equality, modulo alpha
eqExpr in_scope e1 e2
= go (mkRnEnv2 in_scope) e1 e2
where
go env (Var v1) (Var v2)
| rnOccL env v1 == rnOccR env v2
= True
go _ (Lit lit1) (Lit lit2) = lit1 == lit2
go env (Type t1) (Type t2) = eqTypeX env t1 t2
go env (Coercion co1) (Coercion co2) = eqCoercionX env co1 co2
go env (Cast e1 co1) (Cast e2 co2) = eqCoercionX env co1 co2 && go env e1 e2
go env (App f1 a1) (App f2 a2) = go env f1 f2 && go env a1 a2
go env (Tick n1 e1) (Tick n2 e2) = eqTickish env n1 n2 && go env e1 e2
go env (Lam b1 e1) (Lam b2 e2)
= eqTypeX env (varType b1) (varType b2) -- False for Id/TyVar combination
&& go (rnBndr2 env b1 b2) e1 e2
go env (Let (NonRec v1 r1) e1) (Let (NonRec v2 r2) e2)
= go env r1 r2 -- No need to check binder types, since RHSs match
&& go (rnBndr2 env v1 v2) e1 e2
go env (Let (Rec ps1) e1) (Let (Rec ps2) e2)
= length ps1 == length ps2
&& all2 (go env') rs1 rs2 && go env' e1 e2
where
(bs1,rs1) = unzip ps1
(bs2,rs2) = unzip ps2
env' = rnBndrs2 env bs1 bs2
go env (Case e1 b1 t1 a1) (Case e2 b2 t2 a2)
| null a1 -- See Note [Empty case alternatives] in TrieMap
= null a2 && go env e1 e2 && eqTypeX env t1 t2
| otherwise
= go env e1 e2 && all2 (go_alt (rnBndr2 env b1 b2)) a1 a2
go _ _ _ = False
-----------
go_alt env (c1, bs1, e1) (c2, bs2, e2)
= c1 == c2 && go (rnBndrs2 env bs1 bs2) e1 e2
eqTickish :: RnEnv2 -> Tickish Id -> Tickish Id -> Bool
eqTickish env (Breakpoint lid lids) (Breakpoint rid rids)
= lid == rid && map (rnOccL env) lids == map (rnOccR env) rids
eqTickish _ l r = l == r
-- | Finds differences between core expressions, modulo alpha and
-- renaming. Setting @top@ means that the @IdInfo@ of bindings will be
-- checked for differences as well.
diffExpr :: Bool -> RnEnv2 -> CoreExpr -> CoreExpr -> [SDoc]
diffExpr _ env (Var v1) (Var v2) | rnOccL env v1 == rnOccR env v2 = []
diffExpr _ _ (Lit lit1) (Lit lit2) | lit1 == lit2 = []
diffExpr _ env (Type t1) (Type t2) | eqTypeX env t1 t2 = []
diffExpr _ env (Coercion co1) (Coercion co2)
| eqCoercionX env co1 co2 = []
diffExpr top env (Cast e1 co1) (Cast e2 co2)
| eqCoercionX env co1 co2 = diffExpr top env e1 e2
diffExpr top env (Tick n1 e1) e2
| not (tickishIsCode n1) = diffExpr top env e1 e2
diffExpr top env e1 (Tick n2 e2)
| not (tickishIsCode n2) = diffExpr top env e1 e2
diffExpr top env (Tick n1 e1) (Tick n2 e2)
| eqTickish env n1 n2 = diffExpr top env e1 e2
-- The error message of failed pattern matches will contain
-- generated names, which are allowed to differ.
diffExpr _ _ (App (App (Var absent) _) _)
(App (App (Var absent2) _) _)
| isBottomingId absent && isBottomingId absent2 = []
diffExpr top env (App f1 a1) (App f2 a2)
= diffExpr top env f1 f2 ++ diffExpr top env a1 a2
diffExpr top env (Lam b1 e1) (Lam b2 e2)
| eqTypeX env (varType b1) (varType b2) -- False for Id/TyVar combination
= diffExpr top (rnBndr2 env b1 b2) e1 e2
diffExpr top env (Let bs1 e1) (Let bs2 e2)
= let (ds, env') = diffBinds top env (flattenBinds [bs1]) (flattenBinds [bs2])
in ds ++ diffExpr top env' e1 e2
diffExpr top env (Case e1 b1 t1 a1) (Case e2 b2 t2 a2)
| length a1 == length a2 && not (null a1) || eqTypeX env t1 t2
-- See Note [Empty case alternatives] in TrieMap
= diffExpr top env e1 e2 ++ concat (zipWith diffAlt a1 a2)
where env' = rnBndr2 env b1 b2
diffAlt (c1, bs1, e1) (c2, bs2, e2)
| c1 /= c2 = [text "alt-cons " <> ppr c1 <> text " /= " <> ppr c2]
| otherwise = diffExpr top (rnBndrs2 env' bs1 bs2) e1 e2
diffExpr _ _ e1 e2
= [fsep [ppr e1, text "/=", ppr e2]]
-- | Finds differences between core bindings, see @diffExpr@.
--
-- The main problem here is that while we expect the binds to have the
-- same order in both lists, this is not guaranteed. To do this
-- properly we'd either have to do some sort of unification or check
-- all possible mappings, which would be seriously expensive. So
-- instead we simply match single bindings as far as we can. This
-- leaves us just with mutually recursive and/or mismatching bindings,
-- which we then speculatively match by ordering them. It's by no means
-- perfect, but gets the job done well enough.
diffBinds :: Bool -> RnEnv2 -> [(Var, CoreExpr)] -> [(Var, CoreExpr)]
-> ([SDoc], RnEnv2)
diffBinds top env binds1 = go (length binds1) env binds1
where go _ env [] []
= ([], env)
go fuel env binds1 binds2
-- No binds left to compare? Bail out early.
| null binds1 || null binds2
= (warn env binds1 binds2, env)
-- Iterated over all binds without finding a match? Then
-- try speculatively matching binders by order.
| fuel == 0
= if not $ env `inRnEnvL` fst (head binds1)
then let env' = uncurry (rnBndrs2 env) $ unzip $
zip (sort $ map fst binds1) (sort $ map fst binds2)
in go (length binds1) env' binds1 binds2
-- If we have already tried that, give up
else (warn env binds1 binds2, env)
go fuel env ((bndr1,expr1):binds1) binds2
| let matchExpr (bndr,expr) =
(not top || null (diffIdInfo env bndr bndr1)) &&
null (diffExpr top (rnBndr2 env bndr1 bndr) expr1 expr)
, (binds2l, (bndr2,_):binds2r) <- break matchExpr binds2
= go (length binds1) (rnBndr2 env bndr1 bndr2)
binds1 (binds2l ++ binds2r)
| otherwise -- No match, so push back (FIXME O(n^2))
= go (fuel-1) env (binds1++[(bndr1,expr1)]) binds2
go _ _ _ _ = panic "diffBinds: impossible" -- GHC isn't smart enough
-- We have tried everything, but couldn't find a good match. So
-- now we just return the comparison results when we pair up
-- the binds in a pseudo-random order.
warn env binds1 binds2 =
concatMap (uncurry (diffBind env)) (zip binds1' binds2') ++
unmatched "unmatched left-hand:" (drop l binds1') ++
unmatched "unmatched right-hand:" (drop l binds2')
where binds1' = sortBy (comparing fst) binds1
binds2' = sortBy (comparing fst) binds2
l = min (length binds1') (length binds2')
unmatched _ [] = []
unmatched txt bs = [text txt $$ ppr (Rec bs)]
diffBind env (bndr1,expr1) (bndr2,expr2)
| ds@(_:_) <- diffExpr top env expr1 expr2
= locBind "in binding" bndr1 bndr2 ds
| otherwise
= diffIdInfo env bndr1 bndr2
-- | Find differences in @IdInfo@. We will especially check whether
-- the unfoldings match, if present (see @diffUnfold@).
diffIdInfo :: RnEnv2 -> Var -> Var -> [SDoc]
diffIdInfo env bndr1 bndr2
| arityInfo info1 == arityInfo info2
&& cafInfo info1 == cafInfo info2
&& oneShotInfo info1 == oneShotInfo info2
&& inlinePragInfo info1 == inlinePragInfo info2
&& occInfo info1 == occInfo info2
&& demandInfo info1 == demandInfo info2
&& callArityInfo info1 == callArityInfo info2
= locBind "in unfolding of" bndr1 bndr2 $
diffUnfold env (unfoldingInfo info1) (unfoldingInfo info2)
| otherwise
= locBind "in Id info of" bndr1 bndr2
[fsep [pprBndr LetBind bndr1, text "/=", pprBndr LetBind bndr2]]
where info1 = idInfo bndr1; info2 = idInfo bndr2
-- | Find differences in unfoldings. Note that we will not check for
-- differences of @IdInfo@ in unfoldings, as this is generally
-- redundant, and can lead to an exponential blow-up in complexity.
diffUnfold :: RnEnv2 -> Unfolding -> Unfolding -> [SDoc]
diffUnfold _ NoUnfolding NoUnfolding = []
diffUnfold _ (OtherCon cs1) (OtherCon cs2) | cs1 == cs2 = []
diffUnfold env (DFunUnfolding bs1 c1 a1)
(DFunUnfolding bs2 c2 a2)
| c1 == c2 && length bs1 == length bs2
= concatMap (uncurry (diffExpr False env')) (zip a1 a2)
where env' = rnBndrs2 env bs1 bs2
diffUnfold env (CoreUnfolding t1 _ _ v1 cl1 wf1 x1 g1)
(CoreUnfolding t2 _ _ v2 cl2 wf2 x2 g2)
| v1 == v2 && cl1 == cl2
&& wf1 == wf2 && x1 == x2 && g1 == g2
= diffExpr False env t1 t2
diffUnfold _ uf1 uf2
= [fsep [ppr uf1, text "/=", ppr uf2]]
-- | Add location information to diff messages
locBind :: String -> Var -> Var -> [SDoc] -> [SDoc]
locBind loc b1 b2 diffs = map addLoc diffs
where addLoc d = d $$ nest 2 (parens (text loc <+> bindLoc))
bindLoc | b1 == b2 = ppr b1
| otherwise = ppr b1 <> char '/' <> ppr b2
{-
************************************************************************
* *
Eta reduction
* *
************************************************************************
Note [Eta reduction conditions]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
We try for eta reduction here, but *only* if we get all the way to a
trivial expression. We don't want to remove extra lambdas unless we
are going to avoid allocating this thing altogether.
There are some particularly delicate points here:
* We want to eta-reduce if doing so leaves a trivial expression,
*including* a cast. For example
\x. f |> co --> f |> co
(provided co doesn't mention x)
* Eta reduction is not valid in general:
\x. bot /= bot
This matters, partly for old-fashioned correctness reasons but,
worse, getting it wrong can yield a seg fault. Consider
f = \x.f x
h y = case (case y of { True -> f `seq` True; False -> False }) of
True -> ...; False -> ...
If we (unsoundly) eta-reduce f to get f=f, the strictness analyser
says f=bottom, and replaces the (f `seq` True) with just
(f `cast` unsafe-co). BUT, as things stand, 'f' got arity 1, and it
*keeps* arity 1 (perhaps also wrongly). So CorePrep eta-expands
the definition again, so that it does not terminate after all.
Result: seg-fault because the boolean case actually gets a function value.
See Trac #1947.
So it's important to do the right thing.
* Note [Arity care]: we need to be careful if we just look at f's
arity. Currently (Dec07), f's arity is visible in its own RHS (see
Note [Arity robustness] in SimplEnv) so we must *not* trust the
arity when checking that 'f' is a value. Otherwise we will
eta-reduce
f = \x. f x
to
f = f
Which might change a terminating program (think (f `seq` e)) to a
non-terminating one. So we check for being a loop breaker first.
However for GlobalIds we can look at the arity; and for primops we
must, since they have no unfolding.
* Regardless of whether 'f' is a value, we always want to
reduce (/\a -> f a) to f
This came up in a RULE: foldr (build (/\a -> g a))
did not match foldr (build (/\b -> ...something complex...))
The type checker can insert these eta-expanded versions,
with both type and dictionary lambdas; hence the slightly
ad-hoc isDictId
* Never *reduce* arity. For example
f = \xy. g x y
Then if g has arity 1 we don't want to eta-reduce because then
f's arity would decrease, and that is bad
These delicacies are why we don't use exprIsTrivial and exprIsHNF here.
Alas.
Note [Eta reduction with casted arguments]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider
(\(x:t3). f (x |> g)) :: t3 -> t2
where
f :: t1 -> t2
g :: t3 ~ t1
This should be eta-reduced to
f |> (sym g -> t2)
So we need to accumulate a coercion, pushing it inward (past
variable arguments only) thus:
f (x |> co_arg) |> co --> (f |> (sym co_arg -> co)) x
f (x:t) |> co --> (f |> (t -> co)) x
f @ a |> co --> (f |> (forall a.co)) @ a
f @ (g:t1~t2) |> co --> (f |> (t1~t2 => co)) @ (g:t1~t2)
These are the equations for ok_arg.
It's true that we could also hope to eta reduce these:
(\xy. (f x |> g) y)
(\xy. (f x y) |> g)
But the simplifier pushes those casts outwards, so we don't
need to address that here.
-}
tryEtaReduce :: [Var] -> CoreExpr -> Maybe CoreExpr
tryEtaReduce bndrs body
= go (reverse bndrs) body (mkRepReflCo (exprType body))
where
incoming_arity = count isId bndrs
go :: [Var] -- Binders, innermost first, types [a3,a2,a1]
-> CoreExpr -- Of type tr
-> Coercion -- Of type tr ~ ts
-> Maybe CoreExpr -- Of type a1 -> a2 -> a3 -> ts
-- See Note [Eta reduction with casted arguments]
-- for why we have an accumulating coercion
go [] fun co
| ok_fun fun
, let used_vars = exprFreeVars fun `unionVarSet` tyCoVarsOfCo co
, not (any (`elemVarSet` used_vars) bndrs)
= Just (mkCast fun co) -- Check for any of the binders free in the result
-- including the accumulated coercion
go bs (Tick t e) co
| tickishFloatable t
= fmap (Tick t) $ go bs e co
-- Float app ticks: \x -> Tick t (e x) ==> Tick t e
go (b : bs) (App fun arg) co
| Just (co', ticks) <- ok_arg b arg co
= fmap (flip (foldr mkTick) ticks) $ go bs fun co'
-- Float arg ticks: \x -> e (Tick t x) ==> Tick t e
go _ _ _ = Nothing -- Failure!
---------------
-- Note [Eta reduction conditions]
ok_fun (App fun (Type {})) = ok_fun fun
ok_fun (Cast fun _) = ok_fun fun
ok_fun (Tick _ expr) = ok_fun expr
ok_fun (Var fun_id) = ok_fun_id fun_id || all ok_lam bndrs
ok_fun _fun = False
---------------
ok_fun_id fun = fun_arity fun >= incoming_arity
---------------
fun_arity fun -- See Note [Arity care]
| isLocalId fun
, isStrongLoopBreaker (idOccInfo fun) = 0
| arity > 0 = arity
| isEvaldUnfolding (idUnfolding fun) = 1
-- See Note [Eta reduction of an eval'd function]
| otherwise = 0
where
arity = idArity fun
---------------
ok_lam v = isTyVar v || isEvVar v
---------------
ok_arg :: Var -- Of type bndr_t
-> CoreExpr -- Of type arg_t
-> Coercion -- Of kind (t1~t2)
-> Maybe (Coercion -- Of type (arg_t -> t1 ~ bndr_t -> t2)
-- (and similarly for tyvars, coercion args)
, [Tickish Var])
-- See Note [Eta reduction with casted arguments]
ok_arg bndr (Type ty) co
| Just tv <- getTyVar_maybe ty
, bndr == tv = Just (mkHomoForAllCos [tv] co, [])
ok_arg bndr (Var v) co
| bndr == v = let reflCo = mkRepReflCo (idType bndr)
in Just (mkFunCo Representational reflCo co, [])
ok_arg bndr (Cast e co_arg) co
| (ticks, Var v) <- stripTicksTop tickishFloatable e
, bndr == v
= Just (mkFunCo Representational (mkSymCo co_arg) co, ticks)
-- The simplifier combines multiple casts into one,
-- so we can have a simple-minded pattern match here
ok_arg bndr (Tick t arg) co
| tickishFloatable t, Just (co', ticks) <- ok_arg bndr arg co
= Just (co', t:ticks)
ok_arg _ _ _ = Nothing
{-
Note [Eta reduction of an eval'd function]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
In Haskell it is not true that f = \x. f x
because f might be bottom, and 'seq' can distinguish them.
But it *is* true that f = f `seq` \x. f x
and we'd like to simplify the latter to the former. This amounts
to the rule that
* when there is just *one* value argument,
* f is not bottom
we can eta-reduce \x. f x ===> f
This turned up in Trac #7542.
************************************************************************
* *
\subsection{Determining non-updatable right-hand-sides}
* *
************************************************************************
Top-level constructor applications can usually be allocated
statically, but they can't if the constructor, or any of the
arguments, come from another DLL (because we can't refer to static
labels in other DLLs).
If this happens we simply make the RHS into an updatable thunk,
and 'execute' it rather than allocating it statically.
-}
-- | This function is called only on *top-level* right-hand sides.
-- Returns @True@ if the RHS can be allocated statically in the output,
-- with no thunks involved at all.
rhsIsStatic :: Platform
-> (Name -> Bool) -- Which names are dynamic
-> (Integer -> CoreExpr) -- Desugaring for integer literals (disgusting)
-- C.f. Note [Disgusting computation of CafRefs]
-- in TidyPgm
-> CoreExpr -> Bool
-- It's called (i) in TidyPgm.hasCafRefs to decide if the rhs is, or
-- refers to, CAFs; (ii) in CoreToStg to decide whether to put an
-- update flag on it and (iii) in DsExpr to decide how to expand
-- list literals
--
-- The basic idea is that rhsIsStatic returns True only if the RHS is
-- (a) a value lambda
-- (b) a saturated constructor application with static args
--
-- BUT watch out for
-- (i) Any cross-DLL references kill static-ness completely
-- because they must be 'executed' not statically allocated
-- ("DLL" here really only refers to Windows DLLs, on other platforms,
-- this is not necessary)
--
-- (ii) We treat partial applications as redexes, because in fact we
-- make a thunk for them that runs and builds a PAP
-- at run-time. The only applications that are treated as
-- static are *saturated* applications of constructors.
-- We used to try to be clever with nested structures like this:
-- ys = (:) w ((:) w [])
-- on the grounds that CorePrep will flatten (ANF-ise) it later.
-- But supporting this special case made the function much more
-- complicated, because the special case only applies if there are no
-- enclosing type lambdas:
-- ys = /\ a -> Foo (Baz ([] a))
-- Here the nested (Baz []) won't float out to top level in CorePrep.
--
-- But in fact, even without -O, nested structures at top level are
-- flattened by the simplifier, so we don't need to be super-clever here.
--
-- Examples
--
-- f = \x::Int. x+7 TRUE
-- p = (True,False) TRUE
--
-- d = (fst p, False) FALSE because there's a redex inside
-- (this particular one doesn't happen but...)
--
-- h = D# (1.0## /## 2.0##) FALSE (redex again)
-- n = /\a. Nil a TRUE
--
-- t = /\a. (:) (case w a of ...) (Nil a) FALSE (redex)
--
--
-- This is a bit like CoreUtils.exprIsHNF, with the following differences:
-- a) scc "foo" (\x -> ...) is updatable (so we catch the right SCC)
--
-- b) (C x xs), where C is a constructor, is updatable if the application is
-- dynamic
--
-- c) don't look through unfolding of f in (f x).
rhsIsStatic platform is_dynamic_name cvt_integer rhs = is_static False rhs
where
is_static :: Bool -- True <=> in a constructor argument; must be atomic
-> CoreExpr -> Bool
is_static False (Lam b e) = isRuntimeVar b || is_static False e
is_static in_arg (Tick n e) = not (tickishIsCode n)
&& is_static in_arg e
is_static in_arg (Cast e _) = is_static in_arg e
is_static _ (Coercion {}) = True -- Behaves just like a literal
is_static in_arg (Lit (LitInteger i _)) = is_static in_arg (cvt_integer i)
is_static _ (Lit (MachLabel {})) = False
is_static _ (Lit _) = True
-- A MachLabel (foreign import "&foo") in an argument
-- prevents a constructor application from being static. The
-- reason is that it might give rise to unresolvable symbols
-- in the object file: under Linux, references to "weak"
-- symbols from the data segment give rise to "unresolvable
-- relocation" errors at link time This might be due to a bug
-- in the linker, but we'll work around it here anyway.
-- SDM 24/2/2004
is_static in_arg other_expr = go other_expr 0
where
go (Var f) n_val_args
| (platformOS platform /= OSMinGW32) ||
not (is_dynamic_name (idName f))
= saturated_data_con f n_val_args
|| (in_arg && n_val_args == 0)
-- A naked un-applied variable is *not* deemed a static RHS
-- E.g. f = g
-- Reason: better to update so that the indirection gets shorted
-- out, and the true value will be seen
-- NB: if you change this, you'll break the invariant that THUNK_STATICs
-- are always updatable. If you do so, make sure that non-updatable
-- ones have enough space for their static link field!
go (App f a) n_val_args
| isTypeArg a = go f n_val_args
| not in_arg && is_static True a = go f (n_val_args + 1)
-- The (not in_arg) checks that we aren't in a constructor argument;
-- if we are, we don't allow (value) applications of any sort
--
-- NB. In case you wonder, args are sometimes not atomic. eg.
-- x = D# (1.0## /## 2.0##)
-- can't float because /## can fail.
go (Tick n f) n_val_args = not (tickishIsCode n) && go f n_val_args
go (Cast e _) n_val_args = go e n_val_args
go _ _ = False
saturated_data_con f n_val_args
= case isDataConWorkId_maybe f of
Just dc -> n_val_args == dataConRepArity dc
Nothing -> False
{-
************************************************************************
* *
\subsection{Type utilities}
* *
************************************************************************
-}
-- | True if the type has no non-bottom elements, e.g. when it is an empty
-- datatype, or a GADT with non-satisfiable type parameters, e.g. Int :~: Bool.
-- See Note [Bottoming expressions]
--
-- See Note [No alternatives lint check] for another use of this function.
isEmptyTy :: Type -> Bool
isEmptyTy ty
-- Data types where, given the particular type parameters, no data
-- constructor matches, are empty.
-- This includes data types with no constructors, e.g. Data.Void.Void.
| Just (tc, inst_tys) <- splitTyConApp_maybe ty
, Just dcs <- tyConDataCons_maybe tc
, all (dataConCannotMatch inst_tys) dcs
= True
| otherwise
= False
| nushio3/ghc | compiler/coreSyn/CoreUtils.hs | bsd-3-clause | 88,132 | 0 | 21 | 25,774 | 14,831 | 7,644 | 7,187 | 846 | 14 |
module Main where
import Data.Lens.Common ((^.), (^=))
import Prelude hiding (Either(..))
import System.Console.ANSI
import System.IO
import Console
import Level
import Types
-- operator to add 2 coordinates together
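-- e.g. (1, 2) |+| (3, 4) == (4, 6)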
(|+|) :: Coord -> Coord -> Coord
(|+|) (x1, y1) (x2, y2) = (x1 + x2, y1 + y2)
-- receive a character and return our Input data structure,
-- recursing on invalid input
getInput :: IO Input
getInput = do
char <- getChar
case char of
'q' -> return Exit
'w' -> return (Dir Up)
's' -> return (Dir Down)
'a' -> return (Dir Left)
'd' -> return (Dir Right)
_ -> getInput
-- translate a direction to a coordinate so it can be added to
-- the hero's coordinate to move the hero around
dirToCoord :: Direction -> Coord
dirToCoord Up = (0, -1)
dirToCoord Down = (0, 1)
dirToCoord Left = (-1, 0)
dirToCoord Right = (1, 0)
-- add the supplied direction to the hero's position,
-- and set that to be the hero's new position, making
-- sure to limit it between 0 and 80 in either direction
handleDir :: World -> Direction -> IO ()
handleDir w dir
| isWall coord lvl ||
isClosedDoor coord lvl = gameLoop ((^=) posL (w ^. posL) w)
| otherwise = gameLoop ((^=) posL coord w)
where
h = wHero w
lvl = wLevel w
coord = (newX, newY)
newX = hConst heroX
newY = hConst heroY
(heroX, heroY) = hCurrPos h |+| dirToCoord dir
hConst i = max 0 (min i 80)
-- when the user wants to exit we give them a thank you
-- message and then reshow the cursor
handleExit :: IO ()
handleExit = do
clearScreen
setCursorPosition 0 0
showCursor
setSGR [Reset]
putStrLn "Thank you for playing!"
-- draw the hero, process input, and either recur or exit
gameLoop :: World -> IO ()
gameLoop world = do
drawHero world
input <- getInput
case input of
Exit -> handleExit
Dir dir -> handleDir world dir
main :: IO ()
main = do
hSetEcho stdin False
hSetBuffering stdin NoBuffering
hSetBuffering stdout NoBuffering
hideCursor
setTitle "Thieflike"
clearScreen
let world = genesis { wLevel = level1, wLevels = [level1] }
drawWorld world
gameLoop world
| jamiltron/Thieflike | src/Main.hs | bsd-3-clause | 2,227 | 0 | 12 | 579 | 667 | 346 | 321 | 62 | 6 |
{-# LANGUAGE EmptyDataDecls, TypeSynonymInstances #-}
{-# OPTIONS_GHC -fcontext-stack=47 #-}
module Games.Chaos2010.Database.Spells_with_order where
import Games.Chaos2010.Database.Fields
import Database.HaskellDB.DBLayout
type Spells_with_order =
Record
(HCons (LVPair Spell_category (Expr (Maybe String)))
(HCons (LVPair Spell_name (Expr (Maybe String)))
(HCons (LVPair Base_chance (Expr (Maybe Int)))
(HCons (LVPair Alignment (Expr (Maybe Int)))
(HCons (LVPair Description (Expr (Maybe String)))
(HCons (LVPair Section_order (Expr (Maybe Int)))
(HCons (LVPair Alignment_order (Expr (Maybe Int))) HNil)))))))
spells_with_order :: Table Spells_with_order
spells_with_order = baseTable "spells_with_order" | JakeWheat/Chaos-2010 | Games/Chaos2010/Database/Spells_with_order.hs | bsd-3-clause | 826 | 0 | 25 | 192 | 244 | 128 | 116 | 16 | 1 |
module Euler.E2
( fib
, every
)
where
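-- Fibonacci sequence (1, 2, 3, 5, 8, 13, ...), defined corecursively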
fib :: [Int]
fib = scanl (+) 1 (1:fib)
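-- keep every n-th element, starting with the first;
-- e.g. every 3 [1..10] == [1,4,7,10]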
every :: Int -> [a] -> [a]
every _ [] = []
every n (x:xs) = x : every n (drop (n-1) xs)
| lslah/euler | src/Euler/E2.hs | bsd-3-clause | 184 | 0 | 10 | 59 | 117 | 65 | 52 | 8 | 1 |
module Language.GDL.Unify
( Substitution
, unify
) where
import qualified Data.Map as M
import Language.GDL.Syntax
type Substitution = M.Map Identifier Term
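-- Occurs check: does the identifier occur inside the term?
-- Used by 'extend' to avoid constructing infinite terms.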
occurs :: Identifier -> Term -> Bool
occurs _ (Atom _) = False
occurs ident (Var identr) = ident == identr
occurs ident (Compound children) = any (occurs ident) children
occurs _ _ = False
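-- Extend the substitution with a binding for ident, unifying with any
-- existing binding; returns Nothing if the occurs check fails.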
extend :: Substitution -> Identifier -> Term -> Maybe Substitution
extend sub ident value = case M.lookup ident sub of
Just struct -> unify sub struct value
Nothing -> extvar value
where extvar (Var identr) = case M.lookup identr sub of
Just struct -> unify sub (Var ident) struct
Nothing -> if ident == identr then Just sub else Just extsub
extvar struct = if occurs ident struct then Nothing else Just extsub
extsub = M.insert ident value sub
unify :: Substitution -> Term -> Term -> Maybe Substitution
unify sub (Atom x) (Atom y)
| x == y = Just sub
| otherwise = Nothing
unify sub (Var ident) right = extend sub ident right
unify sub left (Var ident) = extend sub ident left
unify sub (Compound []) (Compound []) = Just sub
unify sub (Compound (x:xs)) (Compound (y:ys)) = case unify sub x y of
Just sub' -> unify sub' (Compound xs) (Compound ys)
Nothing -> Nothing
unify _ _ _ = Nothing
| ian-ross/ggp | Language/GDL/Unify.hs | bsd-3-clause | 1,333 | 0 | 15 | 318 | 549 | 273 | 276 | 31 | 6 |
module WASH.CGI.AbstractSelector
-- the public interface
-- ( as_rows, as_cols, table_io, getText, selectionGroup, selectionButton, selectionDisplay)
where
import WASH.CGI.BaseCombinators (unsafe_io, once)
import WASH.CGI.CGIInternals (HTMLField, INVALID, ValidationError (..))
import WASH.CGI.CGIMonad hiding (lift)
import WASH.CGI.HTMLWrapper
import WASH.CGI.RawCGIInternal hiding (CGIEnv (..))
import WASH.Utility.JavaScript
import Data.Char (isSpace)
import Data.List ((\\))
import Data.Maybe (isJust, fromMaybe)
-- |abstract table (two-dimensional)
data AT =
AT { as_raw :: [[String]]
, as_rows :: Int
, as_cols :: Int
}
instance Show AT where
showsPrec i as = showsPrec i (as_rows as, as_cols as)
instance Read AT where
readsPrec i inp =
[ (AT { as_raw = [], as_rows = r, as_cols = c }, str')
| ((r,c), str') <- readsPrec i inp
]
-- |abstract row
data AR = AR [String]
deriving (Eq, Show)
instance Read AR where
readsPrec i inp =
case dropWhile isSpace inp of
'A':'R':xs ->
[(AR xss, rest) | (xss, rest) <- reads xs]
_ -> []
readList inp =
case dropWhile isSpace inp of
'+':xs ->
[ (ar:ars, xs2)| (ar, xs1) <- reads xs, (ars, xs2) <- readList xs1 ]
'-':xs ->
[ (ars\\[ar], xs2)| (ar, xs1) <- reads xs, (ars, xs2) <- readList xs1 ]
"" ->
[([],[])]
_ -> []
getAR :: AT -> Int -> AR
getAR at r =
AR (getRow (as_raw at) r)
unAR :: AR -> [String]
unAR (AR x) = x
-- |Transform an IO action that produces a table in list form into a CGI action
-- that returns an abstract table.
table_io :: IO [[String]] -> CGI AT
table_io io =
once $
do raw <- unsafe_io io
let r = length raw
c = length (Prelude.head raw)
return (AT { as_raw = raw
, as_rows = r
, as_cols = c
})
-- |Access abstract table by row and column. Produces a text node in the
-- document monad.
getText :: Monad m => AT -> Int -> Int -> WithHTML x m ()
getText as r c =
text (getEntry (as_raw as) r c)
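-- Total accessors: an out-of-range row yields [], an out-of-range column "".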
getRow xss r
| 0 <= r && r < length xss = xss !! r
| otherwise = []
getCol xs c
| 0 <= c && c < length xs = xs !! c
| otherwise = ""
getEntry xss r c =
getCol (getRow xss r) c
-- |a selection group is a virtual field that never appears on the screen, but
-- gives rise to a hidden input field!
data SelectionGroup a x =
SelectionGroup { selectionName :: String
, selectionToken :: CGIFieldName
, selectionString :: Maybe String
, selectionValue :: Maybe a
, selectionBound :: Bool
}
validateSelectionGroup rg =
case selectionValue rg of
Nothing | selectionBound rg ->
Left [ValidationError (selectionName rg) (selectionToken rg) (selectionString rg)]
_ ->
Right SelectionGroup { selectionName = selectionName rg
, selectionToken = selectionToken rg
, selectionString = selectionString rg
, selectionValue = selectionValue rg
, selectionBound = selectionBound rg
}
valueSelectionGroup rg =
case selectionValue rg of
Nothing -> error ("SelectionGroup { " ++
"selectionName = " ++ show (selectionName rg) ++ ", " ++
"selectionString = " ++ show (selectionString rg) ++ ", " ++
"selectionBound = " ++ show (selectionBound rg) ++
" }")
Just vl -> vl
-- |Create a selection group for a table. Selects one row.
selectionGroup :: (CGIMonad cgi) => WithHTML y cgi (SelectionGroup AR INVALID)
selectionGroup =
do token <- lift nextName
let fieldName = show token
info <- lift getInfo
lift $ addField fieldName False
let bds = bindings info
maybeString = bds >>= assocParm fieldName
-- experimental
isBound = fromMaybe False (do "UNSET" <- maybeString
return True)
maybeVal = maybeString >>= (g . reads)
g ((a,""):_) = Just a
g _ = Nothing
input (do attr "type" "hidden"
attr "name" fieldName
attr "value" "UNSET")
return $
SelectionGroup { selectionName = fieldName
, selectionToken = token
, selectionString = maybeString
, selectionValue = maybeVal
, selectionBound = isBound
}
-- |Create a selection button for an abstract table
selectionButton :: (CGIMonad cgi) =>
SelectionGroup AR INVALID -> AT -> Int -> HTMLField cgi x y ()
selectionButton sg at row buttonAttrs =
input (do attr "type" "radio"
attr "name" (fieldName++"_")
attr "onclick" ("var ff=this.form."++fieldName++
";ff.value=" ++ jsShow (show (getAR at row))++
";if(ff.getAttribute('onchange'))"++
"{WASHSubmit(ff.name);"++
"};")
buttonAttrs)
where
fieldName = selectionName sg
-- |Create a labelled selection display for an abstract table. The display
-- function takes the button element and a list of text nodes corresponding to
-- the selected row and is expected to perform the layout.
selectionDisplay :: (CGIMonad cgi) =>
SelectionGroup AR INVALID -> AT -> Int ->
(WithHTML x cgi () -> [WithHTML x cgi ()] -> WithHTML x cgi a) ->
WithHTML x cgi a
selectionDisplay sg at row displayFun =
displayFun (selectionButton sg at row empty)
(Prelude.map text $ getRow (as_raw at) row)
-- |Create a choice group for a table (0-*).
choiceGroup :: (CGIMonad cgi) => WithHTML x cgi (SelectionGroup [AR] INVALID)
choiceGroup =
do token <- lift nextName
let fieldName = show token
info <- lift getInfo
lift $ addField fieldName False
let bds = bindings info
maybeString = bds >>= assocParm fieldName
maybeVal = maybeString >>= (g . reads)
g ((a,""):_) = Just a
g _ = Nothing
input (do attr "type" "hidden"
attr "name" fieldName
attr "value" "")
return $
SelectionGroup { selectionName = fieldName
, selectionToken = token
, selectionString = maybeString
, selectionValue = maybeVal
, selectionBound = isJust bds
}
-- |Create one choice button for an abstract table
choiceButton :: (CGIMonad cgi) =>
SelectionGroup [AR] INVALID -> AT -> Int -> HTMLField cgi x y ()
choiceButton sg at row buttonAttrs =
do script_T (rawtext $
"SubmitAction[SubmitAction.length]=" ++
"function(){"++
"var f=document.forms[0];" ++
"if(f."++buttonFieldName++".checked){" ++
"f."++fieldName++".value=" ++ jsShow ('+':show (getAR at row)) ++
"+f."++fieldName++".value;" ++
"};return true};")
input_T
(do attr "type" "checkbox"
attr "name" buttonFieldName
buttonAttrs)
where
fieldName = selectionName sg
buttonFieldName = fieldName++'_':show row
-- |Create a labelled choice display for an abstract table. The display
-- function takes the button element and a list of text nodes corresponding to
-- the selected row and is expected to perform the layout.
choiceDisplay :: (CGIMonad cgi) =>
SelectionGroup [AR] INVALID -> AT -> Int ->
(WithHTML x cgi () -> [WithHTML x cgi ()] -> WithHTML x cgi a) ->
WithHTML x cgi a
choiceDisplay sg at row displayFun =
displayFun (choiceButton sg at row empty)
(Prelude.map text $ getRow (as_raw at) row)
| nh2/WashNGo | WASH/CGI/AbstractSelector.hs | bsd-3-clause | 7,097 | 86 | 22 | 1,736 | 2,310 | 1,197 | 1,113 | 171 | 2 |
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE Rank2Types #-}
-----------------------------------------------------------------------------
-- |
-- Module : Data.Bits.Lens
-- Copyright : (C) 2012-14 Edward Kmett
-- License : BSD-style (see the file LICENSE)
-- Maintainer : Edward Kmett <[email protected]>
-- Stability : experimental
-- Portability : LiberalTypeSynonyms
--
----------------------------------------------------------------------------
module Data.Bits.Lens
( (.|.~), (.&.~), (<.|.~), (<.&.~), (<<.|.~), (<<.&.~)
, (.|.=), (.&.=), (<.|.=), (<.&.=), (<<.|.=), (<<.&.=)
, bitAt
, bits
, byteAt
) where
import Control.Lens
import Control.Monad.State
import Data.Bits
import Data.Functor
import Data.Word
-- $setup
-- >>> :set -XNoOverloadedStrings
-- >>> import Data.Word
infixr 4 .|.~, .&.~, <.|.~, <.&.~, <<.|.~, <<.&.~
infix 4 .|.=, .&.=, <.|.=, <.&.=, <<.|.=, <<.&.=
-- | Bitwise '.|.' the target(s) of a 'Lens' or 'Setter'.
--
-- >>> _2 .|.~ 6 $ ("hello",3)
-- ("hello",7)
--
-- @
-- ('.|.~') :: 'Bits' a => 'Setter' s t a a -> a -> s -> t
-- ('.|.~') :: 'Bits' a => 'Iso' s t a a -> a -> s -> t
-- ('.|.~') :: 'Bits' a => 'Lens' s t a a -> a -> s -> t
-- ('.|.~') :: ('Data.Monoid.Monoid' a, 'Bits' a) => 'Traversal' s t a a -> a -> s -> t
-- @
(.|.~):: Bits a => ASetter s t a a -> a -> s -> t
l .|.~ n = over l (.|. n)
{-# INLINE (.|.~) #-}
-- | Bitwise '.&.' the target(s) of a 'Lens' or 'Setter'.
--
-- >>> _2 .&.~ 7 $ ("hello",254)
-- ("hello",6)
--
-- @
-- ('.&.~') :: 'Bits' a => 'Setter' s t a a -> a -> s -> t
-- ('.&.~') :: 'Bits' a => 'Iso' s t a a -> a -> s -> t
-- ('.&.~') :: 'Bits' a => 'Lens' s t a a -> a -> s -> t
-- ('.&.~') :: ('Data.Monoid.Monoid' a, 'Bits' a) => 'Traversal' s t a a -> a -> s -> t
-- @
(.&.~) :: Bits a => ASetter s t a a -> a -> s -> t
l .&.~ n = over l (.&. n)
{-# INLINE (.&.~) #-}
-- | Modify the target(s) of a 'Lens'', 'Setter'' or 'Traversal'' by computing its bitwise '.&.' with another value.
--
-- >>> execState (do _1 .&.= 15; _2 .&.= 3) (7,7)
-- (7,3)
--
-- @
-- ('.&.=') :: ('MonadState' s m, 'Bits' a) => 'Setter'' s a -> a -> m ()
-- ('.&.=') :: ('MonadState' s m, 'Bits' a) => 'Iso'' s a -> a -> m ()
-- ('.&.=') :: ('MonadState' s m, 'Bits' a) => 'Lens'' s a -> a -> m ()
-- ('.&.=') :: ('MonadState' s m, 'Bits' a) => 'Traversal'' s a -> a -> m ()
-- @
(.&.=):: (MonadState s m, Bits a) => ASetter' s a -> a -> m ()
l .&.= a = modify (l .&.~ a)
{-# INLINE (.&.=) #-}
-- | Modify the target(s) of a 'Lens'', 'Setter' or 'Traversal' by computing its bitwise '.|.' with another value.
--
-- >>> execState (do _1 .|.= 15; _2 .|.= 3) (7,7)
-- (15,7)
--
-- @
-- ('.|.=') :: ('MonadState' s m, 'Bits' a) => 'Setter'' s a -> a -> m ()
-- ('.|.=') :: ('MonadState' s m, 'Bits' a) => 'Iso'' s a -> a -> m ()
-- ('.|.=') :: ('MonadState' s m, 'Bits' a) => 'Lens'' s a -> a -> m ()
-- ('.|.=') :: ('MonadState' s m, 'Bits' a) => 'Traversal'' s a -> a -> m ()
-- @
(.|.=) :: (MonadState s m, Bits a) => ASetter' s a -> a -> m ()
l .|.= a = modify (l .|.~ a)
{-# INLINE (.|.=) #-}
-- | Bitwise '.|.' the target(s) of a 'Lens' (or 'Traversal'), returning the result
-- (or a monoidal summary of all of the results).
--
-- >>> _2 <.|.~ 6 $ ("hello",3)
-- (7,("hello",7))
--
-- @
-- ('<.|.~') :: 'Bits' a => 'Iso' s t a a -> a -> s -> (a, t)
-- ('<.|.~') :: 'Bits' a => 'Lens' s t a a -> a -> s -> (a, t)
-- ('<.|.~') :: ('Bits' a, 'Data.Monoid.Monoid' a) => 'Traversal' s t a a -> a -> s -> (a, t)
-- @
(<.|.~):: Bits a => LensLike ((,) a) s t a a -> a -> s -> (a, t)
l <.|.~ n = l <%~ (.|. n)
{-# INLINE (<.|.~) #-}
-- | Bitwise '.&.' the target(s) of a 'Lens' or 'Traversal', returning the result
-- (or a monoidal summary of all of the results).
--
-- >>> _2 <.&.~ 7 $ ("hello",254)
-- (6,("hello",6))
--
-- @
-- ('<.&.~') :: 'Bits' a => 'Iso' s t a a -> a -> s -> (a, t)
-- ('<.&.~') :: 'Bits' a => 'Lens' s t a a -> a -> s -> (a, t)
-- ('<.&.~') :: ('Bits' a, 'Data.Monoid.Monoid' a) => 'Traversal' s t a a -> a -> s -> (a, t)
-- @
(<.&.~) :: Bits a => LensLike ((,) a) s t a a -> a -> s -> (a, t)
l <.&.~ n = l <%~ (.&. n)
{-# INLINE (<.&.~) #-}
-- | Modify the target(s) of a 'Lens'' (or 'Traversal'') by computing its bitwise '.&.' with another value,
-- returning the result (or a monoidal summary of all of the results traversed).
--
-- >>> runState (_1 <.&.= 15) (31,0)
-- (15,(15,0))
--
-- @
-- ('<.&.=') :: ('MonadState' s m, 'Bits' a) => 'Lens'' s a -> a -> m a
-- ('<.&.=') :: ('MonadState' s m, 'Bits' a, 'Data.Monoid.Monoid' a) => 'Traversal'' s a -> a -> m a
-- @
(<.&.=):: (MonadState s m, Bits a) => LensLike' ((,)a) s a -> a -> m a
l <.&.= b = l <%= (.&. b)
{-# INLINE (<.&.=) #-}
-- | Modify the target(s) of a 'Lens'', (or 'Traversal') by computing its bitwise '.|.' with another value,
-- returning the result (or a monoidal summary of all of the results traversed).
--
-- >>> runState (_1 <.|.= 7) (28,0)
-- (31,(31,0))
--
-- @
-- ('<.|.=') :: ('MonadState' s m, 'Bits' a) => 'Lens'' s a -> a -> m a
-- ('<.|.=') :: ('MonadState' s m, 'Bits' a, 'Data.Monoid.Monoid' a) => 'Traversal'' s a -> a -> m a
-- @
(<.|.=) :: (MonadState s m, Bits a) => LensLike' ((,)a) s a -> a -> m a
l <.|.= b = l <%= (.|. b)
{-# INLINE (<.|.=) #-}
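-- | Bitwise '.&.' the target(s) of a 'Lens' or 'Traversal', returning the /old/ value
-- (or a monoidal summary of all of the old values).
--
-- >>> _2 <<.&.~ 7 $ ("hello",254)
-- (254,("hello",6))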
(<<.&.~) :: Bits a => Optical' (->) q ((,)a) s a -> a -> q s (a, s)
l <<.&.~ b = l $ \a -> (a, a .&. b)
{-# INLINE (<<.&.~) #-}
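-- | Bitwise '.|.' the target(s) of a 'Lens' or 'Traversal', returning the /old/ value
-- (or a monoidal summary of all of the old values).
--
-- >>> _2 <<.|.~ 6 $ ("hello",3)
-- (3,("hello",7))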
(<<.|.~) :: Bits a => Optical' (->) q ((,)a) s a -> a -> q s (a, s)
l <<.|.~ b = l $ \a -> (a, a .|. b)
{-# INLINE (<<.|.~) #-}
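-- | Modify the target(s) of a 'Lens'' (or 'Traversal'') by computing its bitwise '.&.'
-- with another value, returning the /old/ value (or a monoidal summary of the old values).
--
-- >>> runState (_1 <<.&.= 15) (31,0)
-- (31,(15,0))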
(<<.&.=) :: (MonadState s m, Bits a) => LensLike' ((,) a) s a -> a -> m a
l <<.&.= b = l %%= \a -> (a, a .&. b)
{-# INLINE (<<.&.=) #-}
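-- | Modify the target(s) of a 'Lens'' (or 'Traversal'') by computing its bitwise '.|.'
-- with another value, returning the /old/ value (or a monoidal summary of the old values).
--
-- >>> runState (_1 <<.|.= 7) (28,0)
-- (28,(31,0))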
(<<.|.=) :: (MonadState s m, Bits a) => LensLike' ((,) a) s a -> a -> m a
l <<.|.= b = l %%= \a -> (a, a .|. b)
{-# INLINE (<<.|.=) #-}
-- | This 'Lens' can be used to access the value of the nth bit in a number.
--
-- @'bitAt' n@ is only a legal 'Lens' into @b@ if @0 '<=' n '<' 'bitSize' ('undefined' :: b)@.
--
-- >>> 16^.bitAt 4
-- True
--
-- >>> 15^.bitAt 4
-- False
--
-- >>> 15 & bitAt 4 .~ True
-- 31
--
-- >>> 16 & bitAt 4 .~ False
-- 0
bitAt :: Bits b => Int -> IndexedLens' Int b Bool
bitAt n f b = indexed f n (testBit b n) <&> \x -> if x then setBit b n else clearBit b n
{-# INLINE bitAt #-}
-- | Get the nth byte, counting from the low end.
--
-- @'byteAt' n@ is a legal 'Lens' into @b@ iff @0 '<=' n '<' 'div' ('bitSize' ('undefined' :: b)) 8@
--
-- >>> (0xff00 :: Word16)^.byteAt 0
-- 0
--
-- >>> (0xff00 :: Word16)^.byteAt 1
-- 255
--
-- >>> byteAt 1 .~ 0 $ 0xff00 :: Word16
-- 0
--
-- >>> byteAt 0 .~ 0xff $ 0 :: Word16
-- 255
byteAt :: (Integral b, Bits b) => Int -> IndexedLens' Int b Word8
byteAt i f b = back <$> indexed f i (forward b) where
back w8 = (fromIntegral w8 `shiftL` (i * 8))
.|. (complement (255 `shiftL` (i * 8)) .&. b)
forward = fromIntegral . (.&.) 0xff . flip shiftR (i * 8)
-- | Traverse over all bits in a numeric type.
--
-- The bit position is available as the index.
--
-- >>> toListOf bits (5 :: Word8)
-- [True,False,True,False,False,False,False,False]
--
-- If you supply this an 'Integer', the result will be an infinite 'Traversal', which
-- can be productively consumed, but not reassembled.
bits :: (Num b, Bits b) => IndexedTraversal' Int b Bool
bits f b = Prelude.foldr step 0 <$> traverse g bs where
g n = (,) n <$> indexed f n (testBit b n)
bs = Prelude.takeWhile hasBit [0..]
hasBit n = complementBit b n /= b -- test to make sure that complementing this bit actually changes the value
step (n,True) r = setBit r n
step _ r = r
{-# INLINE bits #-}
| hvr/lens | src/Data/Bits/Lens.hs | bsd-3-clause | 7,823 | 0 | 14 | 1,921 | 1,528 | 897 | 631 | 67 | 2 |
import XMonad
import XMonad.Hooks.DynamicLog
import XMonad.Hooks.ManageDocks
import XMonad.Layout.NoBorders
import XMonad.Util.Run(spawnPipe)
import XMonad.Util.EZConfig(additionalKeys)
import System.IO
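-- Spawn xmobar and trayer, then run xmonad with dock/strut handling, smart
-- borders, the window title piped to xmobar, and Super (mod4) as the mod key.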
main = do
xmobarProcess <- spawnPipe "start-xmobar"
trayerProcess <- spawnPipe "start-trayer"
xmonad $ defaultConfig {
manageHook = manageDocks <+> manageHook defaultConfig
, layoutHook = avoidStruts $ smartBorders $ layoutHook defaultConfig
, handleEventHook = docksEventHook <+> handleEventHook defaultConfig
, logHook = dynamicLogWithPP xmobarPP {
ppOutput = hPutStrLn xmobarProcess
, ppTitle = xmobarColor "green" "" . shorten 50
}
, modMask = mod4Mask
} `additionalKeys` [
((mod4Mask, xK_b), sendMessage ToggleStruts)
]
| justinlynn/monadix | src/Main.hs | bsd-3-clause | 949 | 1 | 15 | 304 | 197 | 108 | 89 | 20 | 1 |
module ClassInContext where
class FFF a where
fff :: a -> a
data S a = S a
instance FFF Int where
fff x = x
instance (Eq a, FFF a) => Eq (S a) where
(S x) == (S y) = (fff x) == (fff y)
cmpr :: S Int -> S Int -> Bool
cmpr = (==)
| phischu/fragnix | tests/quick/ClassInContext/ClassInContext.hs | bsd-3-clause | 246 | 0 | 8 | 77 | 142 | 74 | 68 | 10 | 1 |
{-# Language BangPatterns #-}
{-|
Module : Irc.Modes
Description : Operations for interpreting mode changes
Copyright : (c) Eric Mertens, 2016
License : ISC
Maintainer : [email protected]
This module provides support for interpreting the modes changed by
a MODE command.
-}
module Irc.Modes
(
-- * Interpretation of modes
ModeTypes(..)
, modesLists
, modesAlwaysArg
, modesSetArg
, modesNeverArg
, modesPrefixModes
, defaultModeTypes
, defaultUmodeTypes
-- * Operations for working with MODE command parameters
, splitModes
, unsplitModes
) where
import Data.Text (Text)
import qualified Data.Text as Text
import View
-- | Settings that describe how to interpret channel modes
data ModeTypes = ModeTypes
{ _modesLists :: [Char] -- ^ modes for channel lists (e.g. ban)
, _modesAlwaysArg :: [Char] -- ^ modes that always have an argument
, _modesSetArg :: [Char] -- ^ modes that have an argument when set
, _modesNeverArg :: [Char] -- ^ modes that never have arguments
, _modesPrefixModes :: [(Char,Char)] -- ^ modes requiring a nickname argument (mode,sigil)
}
deriving Show
-- | Lens for '_modesLists'
modesLists :: Functor f => ([Char] -> f [Char]) -> ModeTypes -> f ModeTypes
modesLists f m = (\x -> m { _modesLists = x }) <$> f (_modesLists m)
-- | Lens for '_modesAlwaysArg'
modesAlwaysArg :: Functor f => ([Char] -> f [Char]) -> ModeTypes -> f ModeTypes
modesAlwaysArg f m = (\x -> m { _modesAlwaysArg = x }) <$> f (_modesAlwaysArg m)
-- | Lens for '_modesSetArg'
modesSetArg :: Functor f => ([Char] -> f [Char]) -> ModeTypes -> f ModeTypes
modesSetArg f m = (\x -> m { _modesSetArg = x }) <$> f (_modesSetArg m)
-- | Lens for '_modesNeverArg'
modesNeverArg :: Functor f => ([Char] -> f [Char]) -> ModeTypes -> f ModeTypes
modesNeverArg f m = (\x -> m { _modesNeverArg = x }) <$> f (_modesNeverArg m)
-- | Lens for '_modesPrefixModes'
modesPrefixModes :: Functor f => ([(Char,Char)] -> f [(Char,Char)]) -> ModeTypes -> f ModeTypes
modesPrefixModes f m = (\x -> m { _modesPrefixModes = x }) <$> f (_modesPrefixModes m)
-- | The channel modes used by Solanum
defaultModeTypes :: ModeTypes
defaultModeTypes = ModeTypes
{ _modesLists = "eIbq"
, _modesAlwaysArg = "k"
, _modesSetArg = "flj"
, _modesNeverArg = "CFLMPQScgimnprstz"
, _modesPrefixModes = [('o','@'),('v','+')]
}
-- | The default UMODE used by Solanum
defaultUmodeTypes :: ModeTypes
defaultUmodeTypes = ModeTypes
{ _modesLists = ""
, _modesAlwaysArg = ""
, _modesSetArg = "s"
, _modesNeverArg = "DQRZgiow"
, _modesPrefixModes = []
}
-- | Split up a mode change command and arguments into individual changes
-- given a configuration.
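--
-- For example, with 'defaultModeTypes':
--
-- > splitModes defaultModeTypes "+o-v" ["x","y"] == Just [(True,'o',"x"),(False,'v',"y")]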
splitModes ::
ModeTypes {- ^ mode interpretation -} ->
Text {- ^ modes -} ->
[Text] {- ^ arguments -} ->
Maybe [(Bool,Char,Text)] {- ^ (set, mode, parameter) -}
splitModes !icm = computeMode True . Text.unpack
where
computeMode ::
Bool {- current polarity -} ->
[Char] {- remaining modes -} ->
[Text] {- remaining arguments -} ->
Maybe [(Bool,Char,Text)]
computeMode polarity modes args =
case modes of
[] | null args -> Just []
| otherwise -> Nothing
'+':ms -> computeMode True ms args
'-':ms -> computeMode False ms args
m:ms
| m `elem` view modesAlwaysArg icm
|| polarity && m `elem` view modesSetArg icm
|| m `elem` map fst (view modesPrefixModes icm)
|| m `elem` view modesLists icm ->
let (arg,args') =
case args of
[] -> (Text.empty,[])
x:xs -> (x,xs)
in ((polarity,m,arg):) <$> computeMode polarity ms args'
| not polarity && m `elem` view modesSetArg icm
|| m `elem` view modesNeverArg icm ->
do res <- computeMode polarity ms args
return ((polarity,m,Text.empty) : res)
| otherwise -> Nothing
-- | Construct the arguments to a MODE command corresponding to the given
-- mode changes.
unsplitModes ::
[(Bool,Char,Text)] {- ^ (set,mode,parameter) -} ->
[Text]
unsplitModes modes
= Text.pack (foldr combineModeChars (const "") modes True)
: args
where
args = [arg | (_,_,arg) <- modes, not (Text.null arg)]
combineModeChars (q,m,_) rest p
| p == q = m : rest p
| q = '+' : m : rest True
| otherwise = '-' : m : rest False
| glguy/irc-core | lib/src/Irc/Modes.hs | isc | 4,540 | 0 | 21 | 1,192 | 1,267 | 703 | 564 | 90 | 5 |
{-# LANGUAGE CPP #-}
{-# LANGUAGE PatternGuards #-}
module AddHandler (addHandler) where
import Prelude hiding (readFile)
import System.IO (hFlush, stdout)
import Data.Char (isLower, toLower, isSpace)
import Data.List (isPrefixOf, isSuffixOf, stripPrefix)
import Data.Maybe (fromMaybe, listToMaybe)
import qualified Data.Text as T
import qualified Data.Text.IO as TIO
#if MIN_VERSION_Cabal(2, 2, 0)
import Distribution.PackageDescription.Parsec (readGenericPackageDescription)
#elif MIN_VERSION_Cabal(2, 0, 0)
import Distribution.PackageDescription.Parse (readGenericPackageDescription)
#else
import Distribution.PackageDescription.Parse (readPackageDescription)
#endif
import Distribution.PackageDescription.Configuration (flattenPackageDescription)
import Distribution.PackageDescription (allBuildInfo, hsSourceDirs)
import Distribution.Verbosity (normal)
import System.Directory (getDirectoryContents, doesFileExist)
import Control.Monad (unless)
data RouteError = EmptyRoute
| RouteCaseError
| RouteExists FilePath
deriving Eq
instance Show RouteError where
show EmptyRoute = "No name entered. Quitting ..."
show RouteCaseError = "Name must start with an upper case letter"
show (RouteExists file) = "File already exists: " ++ file
-- strict readFile
readFile :: FilePath -> IO String
readFile = fmap T.unpack . TIO.readFile
cmdLineArgsError :: String
cmdLineArgsError = "You have to specify a route name if you want to add handler with command line arguments."
addHandler :: Maybe String -> Maybe String -> [String] -> IO ()
addHandler (Just route) pat met = do
cabal <- getCabal
checked <- checkRoute route cabal
let routePair = case checked of
Left err@EmptyRoute -> (error . show) err
Left err@RouteCaseError -> (error . show) err
Left err@(RouteExists _) -> (error . show) err
Right p -> p
addHandlerFiles cabal routePair pattern methods
where
pattern = fromMaybe "" pat -- pattern defaults to ""
methods = unwords met -- methods default to none
addHandler Nothing (Just _) _ = error cmdLineArgsError
addHandler Nothing _ (_:_) = error cmdLineArgsError
addHandler _ _ _ = addHandlerInteractive
addHandlerInteractive :: IO ()
addHandlerInteractive = do
cabal <- getCabal
let routeInput = do
putStr "Name of route (without trailing R): "
hFlush stdout
name <- getLine
checked <- checkRoute name cabal
case checked of
Left err@EmptyRoute -> (error . show) err
Left err@RouteCaseError -> print err >> routeInput
Left err@(RouteExists _) -> do
print err
putStrLn "Try another name or leave blank to exit"
routeInput
Right p -> return p
routePair <- routeInput
putStr "Enter route pattern (ex: /entry/#EntryId): "
hFlush stdout
pattern <- getLine
putStr "Enter space-separated list of methods (ex: GET POST): "
hFlush stdout
methods <- getLine
addHandlerFiles cabal routePair pattern methods
getRoutesFilePath :: IO FilePath
getRoutesFilePath = do
let oldPath = "config/routes"
oldExists <- doesFileExist oldPath
pure $ if oldExists
then oldPath
else "config/routes.yesodroutes"
addHandlerFiles :: FilePath -> (String, FilePath) -> String -> String -> IO ()
addHandlerFiles cabal (name, handlerFile) pattern methods = do
src <- getSrcDir cabal
let applicationFile = concat [src, "/Application.hs"]
modify applicationFile $ fixApp name
modify cabal $ fixCabal name
routesPath <- getRoutesFilePath
modify routesPath $ fixRoutes name pattern methods
writeFile handlerFile $ mkHandler name pattern methods
specExists <- doesFileExist specFile
unless specExists $
writeFile specFile $ mkSpec name pattern methods
where
specFile = "test/Handler/" ++ name ++ "Spec.hs"
modify fp f = readFile fp >>= writeFile fp . f
getCabal :: IO FilePath
getCabal = do
allFiles <- getDirectoryContents "."
case filter (".cabal" `isSuffixOf`) allFiles of
[x] -> return x
[] -> error "No cabal file found"
_ -> error "Too many cabal files found"
checkRoute :: String -> FilePath -> IO (Either RouteError (String, FilePath))
checkRoute name cabal =
case name of
[] -> return $ Left EmptyRoute
c:_
| isLower c -> return $ Left RouteCaseError
| otherwise -> do
-- Check that the handler file doesn't already exist
src <- getSrcDir cabal
let handlerFile = concat [src, "/Handler/", name, ".hs"]
exists <- doesFileExist handlerFile
if exists
then (return . Left . RouteExists) handlerFile
else return $ Right (name, handlerFile)
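-- | Add an @import Handler.<Name>@ line to Application.hs, immediately after
-- the last existing @Handler.*@ import.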
fixApp :: String -> String -> String
fixApp name =
unlines . reverse . go . reverse . lines
where
l spaces = "import " ++ spaces ++ "Handler." ++ name
go [] = [l ""]
go (x:xs)
| Just y <- stripPrefix "import " x, "Handler." `isPrefixOf` dropWhile (== ' ') y = l (takeWhile (== ' ') y) : x : xs
| otherwise = x : go xs
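-- | Add the new @Handler.<Name>@ module to the cabal file, next to the last
-- existing @Handler.*@ entry (re-using its indentation) and before any
-- executable, test-suite or benchmark sections.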
fixCabal :: String -> String -> String
fixCabal name orig =
unlines $ (reverse $ go $ reverse libraryLines) ++ restLines
where
origLines = lines orig
(libraryLines, restLines) = break isExeTestBench origLines
isExeTestBench x = any
(\prefix -> prefix `isPrefixOf` x)
[ "executable"
, "test-suite"
, "benchmark"
]
l = " Handler." ++ name
go [] = [l]
go (x:xs)
| "Handler." `isPrefixOf` x' = (spaces ++ "Handler." ++ name) : x : xs
| otherwise = x : go xs
where
(spaces, x') = span isSpace x
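-- | Append the new route (pattern, resource constructor and methods) to the
-- routes file, inserting a leading newline if the file does not end with one.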
fixRoutes :: String -> String -> String -> String -> String
fixRoutes name pattern methods fileContents =
fileContents ++ l
where
l = concat
[ startingCharacter
, pattern
, " "
, name
, "R "
, methods
, "\n"
]
startingCharacter = if "\n" `isSuffixOf` fileContents then "" else "\n"
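-- | Generate a skeleton Hspec module for the new handler, with one @describe@
-- block per requested method that simply errors out until implemented.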
mkSpec :: String -> String -> String -> String
mkSpec name _ methods = unlines
$ ("module Handler." ++ name ++ "Spec (spec) where")
: ""
: "import TestImport"
: ""
: "spec :: Spec"
: "spec = withApp $ do"
: concatMap go (words methods)
where
go method =
[ ""
, " describe \"" ++ func ++ "\" $ do"
, " error \"Spec not implemented: " ++ func ++ "\""
, ""]
where
func = concat [map toLower method, name, "R"]
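-- | Generate the handler module itself, with one stub function per requested
-- method; the typed pieces of the route pattern become function arguments.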
mkHandler :: String -> String -> String -> String
mkHandler name pattern methods = unlines
$ ("module Handler." ++ name ++ " where")
: ""
: "import Import"
: concatMap go (words methods)
where
go method =
[ ""
, concat $ func : " :: " : map toArrow types ++ ["Handler Html"]
, concat
[ func
, " "
, concatMap toArgument types
, "= error \"Not yet implemented: "
, func
, "\""
]
]
where
func = concat [map toLower method, name, "R"]
types = getTypes pattern
toArrow t = concat [t, " -> "]
toArgument t = concat [uncapitalize t, " "]
getTypes "" = []
getTypes ('/':rest) = getTypes rest
getTypes (c:rest) | c `elem` "#*" =
typ : getTypes rest'
where
(typ, rest') = break (== '/') rest
getTypes rest = getTypes $ dropWhile (/= '/') rest
uncapitalize :: String -> String
uncapitalize (x:xs) = toLower x : xs
uncapitalize "" = ""
getSrcDir :: FilePath -> IO FilePath
getSrcDir cabal = do
#if MIN_VERSION_Cabal(2, 0, 0)
pd <- flattenPackageDescription <$> readGenericPackageDescription normal cabal
#else
pd <- flattenPackageDescription <$> readPackageDescription normal cabal
#endif
let buildInfo = allBuildInfo pd
srcDirs = concatMap hsSourceDirs buildInfo
return $ fromMaybe "." $ listToMaybe srcDirs
| geraldus/yesod | yesod-bin/AddHandler.hs | mit | 8,191 | 0 | 18 | 2,293 | 2,280 | 1,160 | 1,120 | 194 | 4 |
module QualGenerator where
f :: Int -> [Int]
f x = [ a | a <- x ]
| roberth/uu-helium | test/typeerrors/Examples/QualGenerator.hs | gpl-3.0 | 67 | 0 | 7 | 19 | 35 | 20 | 15 | 3 | 1 |
module RecursiveRef where
{-# ANN module "HLint: ignore Eta reduce" #-}
-- Recursive function call without type signature targets the monomorphic
-- binding. This verifies that we handle the case.
-- - @recNoSig defines/binding FunRNS
recNoSig x =
-- - @recNoSig ref FunRNS
recNoSig x
-- - @localRecNoSig ref FunLRNS
dummy = localRecNoSig
where
-- - @localRecNoSig defines/binding FunLRNS
localRecNoSig x =
-- - @localRecNoSig ref FunLRNS
localRecNoSig x
-- Recursive call to function with type signature targets the polymorphic
-- binding.
recWithSig :: Int -> Int
-- - @recWithSig defines/binding FunRWS
recWithSig x =
-- - @recWithSig ref FunRWS
recWithSig x
-- - @mutualNoSigA defines/binding FunMA
-- - @mutualNoSigB ref FunMB
mutualNoSigA = mutualNoSigB
-- - @mutualNoSigB defines/binding FunMB
-- - @mutualNoSigA ref FunMA
mutualNoSigB = mutualNoSigA
-- - @etaNoSig defines/binding FunENS
etaNoSig =
-- - @etaNoSig ref FunENS
etaNoSig
| google/haskell-indexer | kythe-verification/testdata/basic/RecursiveRef.hs | apache-2.0 | 997 | 0 | 7 | 196 | 86 | 54 | 32 | 14 | 1 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE ViewPatterns #-}
-- |
-- Module : Documentation.Haddock.Parser
-- Copyright : (c) Mateusz Kowalczyk 2013-2014,
-- Simon Hengel 2013
-- License : BSD-like
--
-- Maintainer : [email protected]
-- Stability : experimental
-- Portability : portable
--
-- Parser used for Haddock comments. For external users of this
-- library, the most commonly used combination of functions is going
-- to be
--
-- @'toRegular' . '_doc' . 'parseParas'@
module Documentation.Haddock.Parser ( parseString, parseParas
, overIdentifier, toRegular, Identifier
) where
import Control.Applicative
import Control.Arrow (first)
import Control.Monad
import qualified Data.ByteString.Char8 as BS
import Data.Char (chr, isAsciiUpper)
import Data.List (stripPrefix, intercalate, unfoldr, elemIndex)
import Data.Maybe (fromMaybe, mapMaybe)
import Data.Monoid
import qualified Data.Set as Set
import Documentation.Haddock.Doc
import Documentation.Haddock.Parser.Monad hiding (take, endOfLine)
import Documentation.Haddock.Parser.Util
import Documentation.Haddock.Types
import Documentation.Haddock.Utf8
import Prelude hiding (takeWhile)
import qualified Prelude as P
-- $setup
-- >>> :set -XOverloadedStrings
-- | Identifier string surrounded with opening and closing quotes/backticks.
type Identifier = (Char, String, Char)
-- | Drops the quotes/backticks around all identifiers, as if they
-- were valid but still 'String's.
toRegular :: DocH mod Identifier -> DocH mod String
toRegular = fmap (\(_, x, _) -> x)
-- | Maps over 'DocIdentifier's over 'String' with potentially failing
-- conversion using user-supplied function. If the conversion fails,
-- the identifier is deemed to not be valid and is treated as a
-- regular string.
overIdentifier :: (String -> Maybe a)
-> DocH mod Identifier
-> DocH mod a
overIdentifier f d = g d
where
g (DocIdentifier (o, x, e)) = case f x of
Nothing -> DocString $ o : x ++ [e]
Just x' -> DocIdentifier x'
g DocEmpty = DocEmpty
g (DocAppend x x') = DocAppend (g x) (g x')
g (DocString x) = DocString x
g (DocParagraph x) = DocParagraph $ g x
g (DocIdentifierUnchecked x) = DocIdentifierUnchecked x
g (DocModule x) = DocModule x
g (DocWarning x) = DocWarning $ g x
g (DocEmphasis x) = DocEmphasis $ g x
g (DocMonospaced x) = DocMonospaced $ g x
g (DocBold x) = DocBold $ g x
g (DocUnorderedList x) = DocUnorderedList $ fmap g x
g (DocOrderedList x) = DocOrderedList $ fmap g x
g (DocDefList x) = DocDefList $ fmap (\(y, z) -> (g y, g z)) x
g (DocCodeBlock x) = DocCodeBlock $ g x
g (DocHyperlink x) = DocHyperlink x
g (DocPic x) = DocPic x
g (DocMathInline x) = DocMathInline x
g (DocMathDisplay x) = DocMathDisplay x
g (DocAName x) = DocAName x
g (DocProperty x) = DocProperty x
g (DocExamples x) = DocExamples x
g (DocHeader (Header l x)) = DocHeader . Header l $ g x
g (DocTable (Table h b)) = DocTable (Table (map (fmap g) h) (map (fmap g) b))
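-- | Run a parser over the whole input, calling 'error' if it does not parse.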
parse :: Parser a -> BS.ByteString -> (ParserState, a)
parse p = either err id . parseOnly (p <* endOfInput)
where
err = error . ("Haddock.Parser.parse: " ++)
-- | Main entry point to the parser. Appends the newline character
-- to the input string.
parseParas :: String -- ^ String to parse
-> MetaDoc mod Identifier
parseParas input = case parseParasState input of
(state, a) -> MetaDoc { _meta = Meta { _version = parserStateSince state }
, _doc = a
}
parseParasState :: String -> (ParserState, DocH mod Identifier)
parseParasState =
parse (p <* skipSpace) . encodeUtf8 . (++ "\n") . filter (/= '\r')
where
p :: Parser (DocH mod Identifier)
p = docConcat <$> paragraph `sepBy` many (skipHorizontalSpace *> "\n")
parseParagraphs :: String -> Parser (DocH mod Identifier)
parseParagraphs input = case parseParasState input of
(state, a) -> setParserState state >> return a
-- | Parse a text paragraph. Actually just a wrapper over 'parseStringBS' which
-- drops leading whitespace and encodes the string to UTF8 first.
parseString :: String -> DocH mod Identifier
parseString = parseStringBS . encodeUtf8 . dropWhile isSpace . filter (/= '\r')
parseStringBS :: BS.ByteString -> DocH mod Identifier
parseStringBS = snd . parse p
where
p :: Parser (DocH mod Identifier)
p = docConcat <$> many (monospace <|> anchor <|> identifier <|> moduleName
<|> picture <|> mathDisplay <|> mathInline
<|> markdownImage
<|> hyperlink <|> bold
<|> emphasis <|> encodedChar <|> string'
<|> skipSpecialChar)
-- | Parses and processes
-- <https://en.wikipedia.org/wiki/Numeric_character_reference Numeric character references>
--
-- >>> parseString "&#65;"
-- DocString "A"
encodedChar :: Parser (DocH mod a)
encodedChar = "&#" *> c <* ";"
where
c = DocString . return . chr <$> num
num = hex <|> decimal
hex = ("x" <|> "X") *> hexadecimal
-- | List of characters that we use to delimit any special markup.
-- Once we have checked for any of these and tried to parse the
-- relevant markup, we can assume they are used as regular text.
specialChar :: [Char]
specialChar = "_/<@\"&'`# "
-- | Plain, regular parser for text. Called as one of the last parsers
-- to ensure that we have already given a chance to more meaningful parsers
-- before capturing their characters.
string' :: Parser (DocH mod a)
string' = DocString . unescape . decodeUtf8 <$> takeWhile1_ (notInClass specialChar)
where
unescape "" = ""
unescape ('\\':x:xs) = x : unescape xs
unescape (x:xs) = x : unescape xs
-- | Skips a single special character and treats it as a plain string.
-- This is done to skip over any special characters belonging to other
-- elements but which were not deemed meaningful at their positions.
skipSpecialChar :: Parser (DocH mod a)
skipSpecialChar = DocString . return <$> satisfy (inClass specialChar)
-- | Emphasis parser.
--
-- >>> parseString "/Hello world/"
-- DocEmphasis (DocString "Hello world")
emphasis :: Parser (DocH mod Identifier)
emphasis = DocEmphasis . parseStringBS <$>
mfilter ('\n' `BS.notElem`) ("/" *> takeWhile1_ (/= '/') <* "/")
-- | Bold parser.
--
-- >>> parseString "__Hello world__"
-- DocBold (DocString "Hello world")
bold :: Parser (DocH mod Identifier)
bold = DocBold . parseStringBS <$> disallowNewline ("__" *> takeUntil "__")
disallowNewline :: Parser BS.ByteString -> Parser BS.ByteString
disallowNewline = mfilter ('\n' `BS.notElem`)
-- | Like `takeWhile`, but unconditionally take escaped characters.
takeWhile_ :: (Char -> Bool) -> Parser BS.ByteString
takeWhile_ p = scan False p_
where
p_ escaped c
| escaped = Just False
| not $ p c = Nothing
| otherwise = Just (c == '\\')
-- | Like `takeWhile1`, but unconditionally take escaped characters.
takeWhile1_ :: (Char -> Bool) -> Parser BS.ByteString
takeWhile1_ = mfilter (not . BS.null) . takeWhile_
-- | Text anchors to allow for jumping around the generated documentation.
--
-- >>> parseString "#Hello world#"
-- DocAName "Hello world"
anchor :: Parser (DocH mod a)
anchor = DocAName . decodeUtf8 <$>
disallowNewline ("#" *> takeWhile1_ (/= '#') <* "#")
-- | Monospaced strings.
--
-- >>> parseString "@cruel@"
-- DocMonospaced (DocString "cruel")
monospace :: Parser (DocH mod Identifier)
monospace = DocMonospaced . parseStringBS
<$> ("@" *> takeWhile1_ (/= '@') <* "@")
-- | Module names: we try our reasonable best to only allow valid
-- Haskell module names, with caveat about not matching on technically
-- valid unicode symbols.
moduleName :: Parser (DocH mod a)
moduleName = DocModule <$> (char '"' *> modid <* char '"')
where
modid = intercalate "." <$> conid `sepBy1` "."
conid = (:)
<$> satisfy isAsciiUpper
-- NOTE: According to Haskell 2010 we should actually only
-- accept {small | large | digit | ' } here. But as we can't
-- match on unicode characters, this is currently not possible.
      -- Note that we allow ‘#’ to support anchors.
<*> (decodeUtf8 <$> takeWhile (notInClass " .&[{}(=*)+]!|@/;,^?\"\n"))
-- | Picture parser, surrounded by \<\< and \>\>. It's possible to specify
-- a title for the picture.
--
-- >>> parseString "<<hello.png>>"
-- DocPic (Picture {pictureUri = "hello.png", pictureTitle = Nothing})
-- >>> parseString "<<hello.png world>>"
-- DocPic (Picture {pictureUri = "hello.png", pictureTitle = Just "world"})
picture :: Parser (DocH mod a)
picture = DocPic . makeLabeled Picture . decodeUtf8
<$> disallowNewline ("<<" *> takeUntil ">>")
-- | Inline math parser, surrounded by \\( and \\).
--
-- >>> parseString "\\(\\int_{-\\infty}^{\\infty} e^{-x^2/2} = \\sqrt{2\\pi}\\)"
-- DocMathInline "\\int_{-\\infty}^{\\infty} e^{-x^2/2} = \\sqrt{2\\pi}"
mathInline :: Parser (DocH mod a)
mathInline = DocMathInline . decodeUtf8
<$> disallowNewline ("\\(" *> takeUntil "\\)")
-- | Display math parser, surrounded by \\[ and \\].
--
-- >>> parseString "\\[\\int_{-\\infty}^{\\infty} e^{-x^2/2} = \\sqrt{2\\pi}\\]"
-- DocMathDisplay "\\int_{-\\infty}^{\\infty} e^{-x^2/2} = \\sqrt{2\\pi}"
mathDisplay :: Parser (DocH mod a)
mathDisplay = DocMathDisplay . decodeUtf8
<$> ("\\[" *> takeUntil "\\]")
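-- | Markdown image: an exclamation mark followed by a markdown-style link,
-- e.g. @![label](url)@, parsed into 'DocPic'.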
markdownImage :: Parser (DocH mod a)
markdownImage = fromHyperlink <$> ("!" *> linkParser)
where
fromHyperlink (Hyperlink url label) = DocPic (Picture url label)
-- | Paragraph parser, called by 'parseParas'.
paragraph :: Parser (DocH mod Identifier)
paragraph = examples <|> table <|> do
indent <- takeIndent
choice
[ since
, unorderedList indent
, orderedList indent
, birdtracks
, codeblock
, property
, header
, textParagraphThatStartsWithMarkdownLink
, definitionList indent
, docParagraph <$> textParagraph
]
-- | Provides support for grid tables.
--
-- Tables are composed of an optional header and a body. The header is
-- a single row. The body is a non-empty list of rows.
--
-- Example table with header:
--
-- > +----------+----------+
-- > | /32bit/ | 64bit |
-- > +==========+==========+
-- > | 0x0000 | @0x0000@ |
-- > +----------+----------+
--
-- Algorithms loosely follows ideas in
-- http://docutils.sourceforge.net/docutils/parsers/rst/tableparser.py
--
table :: Parser (DocH mod Identifier)
table = do
-- first we parse the first row, which determines the width of the table
firstRow <- parseFirstRow
let len = BS.length firstRow
  -- then we parse all consecutive rows starting and ending with + or |,
-- of the width `len`.
restRows <- many (parseRestRows len)
  -- Now that we have gathered the table block, the next step is to split
  -- it into cells.
DocTable <$> tableStepTwo len (firstRow : restRows)
where
parseFirstRow :: Parser BS.ByteString
parseFirstRow = do
skipHorizontalSpace
-- upper-left corner is +
c <- char '+'
cs <- many1 (char '-' <|> char '+')
-- upper right corner is + too
guard (last cs == '+')
-- trailing space
skipHorizontalSpace
_ <- char '\n'
return (BS.cons c $ BS.pack cs)
parseRestRows :: Int -> Parser BS.ByteString
parseRestRows l = do
skipHorizontalSpace
c <- char '|' <|> char '+'
bs <- scan (l - 2) predicate
c2 <- char '|' <|> char '+'
-- trailing space
skipHorizontalSpace
_ <- char '\n'
return (BS.cons c (BS.snoc bs c2))
where
predicate n c
| n <= 0 = Nothing
| c == '\n' = Nothing
| otherwise = Just (n - 1)
-- Second step searches for a row of '+' and '=' characters, records its index
-- and changes '=' to '-'.
tableStepTwo
:: Int -- ^ width
-> [BS.ByteString] -- ^ rows
-> Parser (Table (DocH mod Identifier))
tableStepTwo width = go 0 [] where
go _ left [] = tableStepThree width (reverse left) Nothing
go n left (r : rs)
| BS.all (`elem` ['+', '=']) r =
tableStepThree width (reverse left ++ r' : rs) (Just n)
| otherwise =
go (n + 1) (r : left) rs
where
r' = BS.map (\c -> if c == '=' then '-' else c) r
-- Third step recognises cells in the table area, returning a list of 'TC' cells.
tableStepThree
:: Int -- ^ width
-> [BS.ByteString] -- ^ rows
-> Maybe Int -- ^ index of header separator
-> Parser (Table (DocH mod Identifier))
tableStepThree width rs hdrIndex = do
cells <- loop (Set.singleton (0, 0))
tableStepFour rs hdrIndex cells
where
height = length rs
loop :: Set.Set (Int, Int) -> Parser [TC]
loop queue = case Set.minView queue of
Nothing -> return []
Just ((y, x), queue')
| y + 1 >= height || x + 1 >= width -> loop queue'
| otherwise -> case scanRight x y of
Nothing -> loop queue'
Just (x2, y2) -> do
let tc = TC y x y2 x2
fmap (tc :) $ loop $ queue' `Set.union` Set.fromList
[(y, x2), (y2, x), (y2, x2)]
-- scan right looking for +, then try scan down
--
-- do we need to record + saw on the way left and down?
scanRight :: Int -> Int -> Maybe (Int, Int)
scanRight x y = go (x + 1) where
bs = rs !! y
go x' | x' >= width = fail "overflow right "
| BS.index bs x' == '+' = scanDown x y x' <|> go (x' + 1)
| BS.index bs x' == '-' = go (x' + 1)
| otherwise = fail $ "not a border (right) " ++ show (x,y,x')
-- scan down looking for +
scanDown :: Int -> Int -> Int -> Maybe (Int, Int)
scanDown x y x2 = go (y + 1) where
go y' | y' >= height = fail "overflow down"
| BS.index (rs !! y') x2 == '+' = scanLeft x y x2 y' <|> go (y' + 1)
| BS.index (rs !! y') x2 == '|' = go (y' + 1)
| otherwise = fail $ "not a border (down) " ++ show (x,y,x2,y')
    -- check that at row y2, columns x..x2 are '+' or '-'
scanLeft :: Int -> Int -> Int -> Int -> Maybe (Int, Int)
scanLeft x y x2 y2
| all (\x' -> BS.index bs x' `elem` ['+', '-']) [x..x2] = scanUp x y x2 y2
| otherwise = fail $ "not a border (left) " ++ show (x,y,x2,y2)
where
bs = rs !! y2
    -- check that in column x, rows y..y2 are '+' or '|'
scanUp :: Int -> Int -> Int -> Int -> Maybe (Int, Int)
scanUp x y x2 y2
| all (\y' -> BS.index (rs !! y') x `elem` ['+', '|']) [y..y2] = return (x2, y2)
| otherwise = fail $ "not a border (up) " ++ show (x,y,x2,y2)
-- | table cell: top left bottom right
data TC = TC !Int !Int !Int !Int
deriving Show
tcXS :: TC -> [Int]
tcXS (TC _ x _ x2) = [x, x2]
tcYS :: TC -> [Int]
tcYS (TC y _ y2 _) = [y, y2]
-- | Fourth step. Given the locations of cells, forms 'Table' structure.
tableStepFour :: [BS.ByteString] -> Maybe Int -> [TC] -> Parser (Table (DocH mod Identifier))
tableStepFour rs hdrIndex cells = case hdrIndex of
Nothing -> return $ Table [] rowsDoc
Just i -> case elemIndex i yTabStops of
Nothing -> return $ Table [] rowsDoc
Just i' -> return $ uncurry Table $ splitAt i' rowsDoc
where
xTabStops = sortNub $ concatMap tcXS cells
yTabStops = sortNub $ concatMap tcYS cells
sortNub :: Ord a => [a] -> [a]
sortNub = Set.toList . Set.fromList
init' :: [a] -> [a]
init' [] = []
init' [_] = []
init' (x : xs) = x : init' xs
rowsDoc = (fmap . fmap) parseStringBS rows
rows = map makeRow (init' yTabStops)
where
makeRow y = TableRow $ mapMaybe (makeCell y) cells
makeCell y (TC y' x y2 x2)
| y /= y' = Nothing
| otherwise = Just $ TableCell xts yts (extract (x + 1) (y + 1) (x2 - 1) (y2 - 1))
where
xts = length $ P.takeWhile (< x2) $ dropWhile (< x) xTabStops
yts = length $ P.takeWhile (< y2) $ dropWhile (< y) yTabStops
-- extract cell contents given boundaries
extract :: Int -> Int -> Int -> Int -> BS.ByteString
extract x y x2 y2 = BS.intercalate "\n"
[ BS.take (x2 - x + 1) $ BS.drop x $ rs !! y'
| y' <- [y .. y2]
]
-- | Parse \@since annotations.
since :: Parser (DocH mod a)
since = ("@since " *> version <* skipHorizontalSpace <* endOfLine) >>= setSince >> return DocEmpty
where
version = decimal `sepBy1'` "."
-- | Headers inside the comment denoted with @=@ signs, up to 6 levels
-- deep.
--
-- >>> snd <$> parseOnly header "= Hello"
-- Right (DocHeader (Header {headerLevel = 1, headerTitle = DocString "Hello"}))
-- >>> snd <$> parseOnly header "== World"
-- Right (DocHeader (Header {headerLevel = 2, headerTitle = DocString "World"}))
header :: Parser (DocH mod Identifier)
header = do
let psers = map (string . encodeUtf8 . concat . flip replicate "=") [6, 5 .. 1]
pser = foldl1 (<|>) psers
delim <- decodeUtf8 <$> pser
line <- skipHorizontalSpace *> nonEmptyLine >>= return . parseString
rest <- paragraph <|> return DocEmpty
return $ DocHeader (Header (length delim) line) `docAppend` rest
textParagraph :: Parser (DocH mod Identifier)
textParagraph = parseString . intercalate "\n" <$> many1 nonEmptyLine
textParagraphThatStartsWithMarkdownLink :: Parser (DocH mod Identifier)
textParagraphThatStartsWithMarkdownLink = docParagraph <$> (docAppend <$> markdownLink <*> optionalTextParagraph)
where
optionalTextParagraph :: Parser (DocH mod Identifier)
optionalTextParagraph = (docAppend <$> whitespace <*> textParagraph) <|> pure DocEmpty
whitespace :: Parser (DocH mod a)
whitespace = DocString <$> (f <$> takeHorizontalSpace <*> optional "\n")
where
f :: BS.ByteString -> Maybe BS.ByteString -> String
f xs (fromMaybe "" -> x)
| BS.null (xs <> x) = ""
| otherwise = " "
-- | Parses unordered (bullet) lists.
unorderedList :: BS.ByteString -> Parser (DocH mod Identifier)
unorderedList indent = DocUnorderedList <$> p
where
p = ("*" <|> "-") *> innerList indent p
-- | Parses ordered lists (numbered or dashed).
orderedList :: BS.ByteString -> Parser (DocH mod Identifier)
orderedList indent = DocOrderedList <$> p
where
p = (paren <|> dot) *> innerList indent p
dot = (decimal :: Parser Int) <* "."
paren = "(" *> decimal <* ")"
-- | Generic function collecting any further lines belonging to the
-- list entry and recursively collecting any further lists in the
-- same paragraph. Usually used as
--
-- > someListFunction = listBeginning *> innerList someListFunction
innerList :: BS.ByteString -> Parser [DocH mod Identifier]
-> Parser [DocH mod Identifier]
innerList indent item = do
c <- takeLine
(cs, items) <- more indent item
let contents = docParagraph . parseString . dropNLs . unlines $ c : cs
return $ case items of
Left p -> [contents `docAppend` p]
Right i -> contents : i
-- | Parses definition lists.
definitionList :: BS.ByteString -> Parser (DocH mod Identifier)
definitionList indent = DocDefList <$> p
where
p = do
label <- "[" *> (parseStringBS <$> takeWhile1_ (notInClass "]\n")) <* ("]" <* optional ":")
c <- takeLine
(cs, items) <- more indent p
let contents = parseString . dropNLs . unlines $ c : cs
return $ case items of
Left x -> [(label, contents `docAppend` x)]
Right i -> (label, contents) : i
-- | Drops all trailing newlines.
dropNLs :: String -> String
dropNLs = reverse . dropWhile (== '\n') . reverse
-- | Main worker for 'innerList' and 'definitionList'.
-- We need the 'Either' here to be able to tell in the respective functions
-- whether we're dealing with the next list or a nested paragraph.
more :: Monoid a => BS.ByteString -> Parser a
-> Parser ([String], Either (DocH mod Identifier) a)
more indent item = innerParagraphs indent
<|> moreListItems indent item
<|> moreContent indent item
<|> pure ([], Right mempty)
-- | Used by 'innerList' and 'definitionList' to parse any nested paragraphs.
innerParagraphs :: BS.ByteString
-> Parser ([String], Either (DocH mod Identifier) a)
innerParagraphs indent = (,) [] . Left <$> ("\n" *> indentedParagraphs indent)
-- | Attempts to fetch the next list if possibly. Used by 'innerList' and
-- 'definitionList' to recursively grab lists that aren't separated by a whole
-- paragraph.
moreListItems :: BS.ByteString -> Parser a
-> Parser ([String], Either (DocH mod Identifier) a)
moreListItems indent item = (,) [] . Right <$> indentedItem
where
indentedItem = string indent *> skipSpace *> item
-- | Helper for 'innerList' and 'definitionList' which simply takes
-- a line of text and attempts to parse more list content with 'more'.
moreContent :: Monoid a => BS.ByteString -> Parser a
-> Parser ([String], Either (DocH mod Identifier) a)
moreContent indent item = first . (:) <$> nonEmptyLine <*> more indent item
-- | Parses an indented paragraph.
-- The indentation is 4 spaces.
indentedParagraphs :: BS.ByteString -> Parser (DocH mod Identifier)
indentedParagraphs indent =
(concat <$> dropFrontOfPara indent') >>= parseParagraphs
where
indent' = string $ BS.append indent " "
-- | Grab as many fully indented paragraphs as we can.
dropFrontOfPara :: Parser BS.ByteString -> Parser [String]
dropFrontOfPara sp = do
currentParagraph <- some (sp *> takeNonEmptyLine)
followingParagraphs <-
skipHorizontalSpace *> nextPar -- we have more paragraphs to take
<|> skipHorizontalSpace *> nlList -- end of the ride, remember the newline
<|> endOfInput *> return [] -- nothing more to take at all
return (currentParagraph ++ followingParagraphs)
where
nextPar = (++) <$> nlList <*> dropFrontOfPara sp
nlList = "\n" *> return ["\n"]
nonSpace :: BS.ByteString -> Parser BS.ByteString
nonSpace xs
| not $ any (not . isSpace) $ decodeUtf8 xs = fail "empty line"
| otherwise = return xs
-- | Takes a non-empty, not fully whitespace line.
--
-- Doesn't discard the trailing newline.
takeNonEmptyLine :: Parser String
takeNonEmptyLine = do
(++ "\n") . decodeUtf8 <$> (takeWhile1 (/= '\n') >>= nonSpace) <* "\n"
-- | Takes indentation of first non-empty line.
--
-- More precisely: skips all whitespace-only lines and returns indentation
-- (horizontal space, might be empty) of that non-empty line.
takeIndent :: Parser BS.ByteString
takeIndent = do
indent <- takeHorizontalSpace
"\n" *> takeIndent <|> return indent
-- | Blocks of text of the form:
--
-- >> foo
-- >> bar
-- >> baz
--
birdtracks :: Parser (DocH mod a)
birdtracks = DocCodeBlock . DocString . intercalate "\n" . stripSpace <$> many1 line
where
line = skipHorizontalSpace *> ">" *> takeLine
stripSpace :: [String] -> [String]
stripSpace = fromMaybe <*> mapM strip'
where
strip' (' ':xs') = Just xs'
strip' "" = Just ""
strip' _ = Nothing
-- | Parses examples. Examples are a paragraph-level entity (separated by an empty line).
-- Consecutive examples are accepted.
examples :: Parser (DocH mod a)
examples = DocExamples <$> (many (skipHorizontalSpace *> "\n") *> go)
where
go :: Parser [Example]
go = do
prefix <- decodeUtf8 <$> takeHorizontalSpace <* ">>>"
expr <- takeLine
(rs, es) <- resultAndMoreExamples
return (makeExample prefix expr rs : es)
where
resultAndMoreExamples :: Parser ([String], [Example])
resultAndMoreExamples = moreExamples <|> result <|> pure ([], [])
where
moreExamples :: Parser ([String], [Example])
moreExamples = (,) [] <$> go
result :: Parser ([String], [Example])
result = first . (:) <$> nonEmptyLine <*> resultAndMoreExamples
makeExample :: String -> String -> [String] -> Example
makeExample prefix expression res =
Example (strip expression) result
where
result = map (substituteBlankLine . tryStripPrefix) res
tryStripPrefix xs = fromMaybe xs (stripPrefix prefix xs)
substituteBlankLine "<BLANKLINE>" = ""
substituteBlankLine xs = xs
nonEmptyLine :: Parser String
nonEmptyLine = mfilter (any (not . isSpace)) takeLine
takeLine :: Parser String
takeLine = decodeUtf8 <$> takeWhile (/= '\n') <* endOfLine
endOfLine :: Parser ()
endOfLine = void "\n" <|> endOfInput
-- | Property parser.
--
-- >>> snd <$> parseOnly property "prop> hello world"
-- Right (DocProperty "hello world")
property :: Parser (DocH mod a)
property = DocProperty . strip . decodeUtf8 <$> ("prop>" *> takeWhile1 (/= '\n'))
-- |
-- Paragraph level codeblock. Anything between the two delimiting \@ is parsed
-- for markup.
codeblock :: Parser (DocH mod Identifier)
codeblock =
DocCodeBlock . parseStringBS . dropSpaces
<$> ("@" *> skipHorizontalSpace *> "\n" *> block' <* "@")
where
dropSpaces xs =
let rs = decodeUtf8 xs
in case splitByNl rs of
[] -> xs
ys -> case last ys of
' ':_ -> case mapM dropSpace ys of
Nothing -> xs
Just zs -> encodeUtf8 $ intercalate "\n" zs
_ -> xs
    -- This is necessary because ‘lines’ swallows up a trailing newline, so we
    -- lose the information about whether the last line belongs to the @ block
    -- or to the text that follows it, which we need in order to decide whether
    -- we actually want to drop anything at all.
splitByNl = unfoldr (\x -> case x of
'\n':s -> Just (span (/= '\n') s)
_ -> Nothing)
. ('\n' :)
dropSpace "" = Just ""
dropSpace (' ':xs) = Just xs
dropSpace _ = Nothing
block' = scan False p
where
p isNewline c
| isNewline && c == '@' = Nothing
| isNewline && isSpace c = Just isNewline
| otherwise = Just $ c == '\n'
hyperlink :: Parser (DocH mod a)
hyperlink = DocHyperlink . makeLabeled Hyperlink . decodeUtf8
<$> disallowNewline ("<" *> takeUntil ">")
<|> autoUrl
<|> markdownLink
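-- | Markdown-style hyperlink of the form @[label](url)@.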
markdownLink :: Parser (DocH mod a)
markdownLink = DocHyperlink <$> linkParser
linkParser :: Parser Hyperlink
linkParser = flip Hyperlink <$> label <*> (whitespace *> url)
where
label :: Parser (Maybe String)
label = Just . strip . decode <$> ("[" *> takeUntil "]")
whitespace :: Parser ()
whitespace = skipHorizontalSpace <* optional ("\n" *> skipHorizontalSpace)
url :: Parser String
url = rejectWhitespace (decode <$> ("(" *> takeUntil ")"))
rejectWhitespace :: MonadPlus m => m String -> m String
rejectWhitespace = mfilter (all (not . isSpace))
decode :: BS.ByteString -> String
decode = removeEscapes . decodeUtf8
-- | Looks for URL-like things to automatically hyperlink even if they
-- weren't marked as links.
autoUrl :: Parser (DocH mod a)
autoUrl = mkLink <$> url
where
url = mappend <$> ("http://" <|> "https://" <|> "ftp://") <*> takeWhile1 (not . isSpace)
mkLink :: BS.ByteString -> DocH mod a
mkLink s = case unsnoc s of
Just (xs, x) | inClass ",.!?" x -> DocHyperlink (Hyperlink (decodeUtf8 xs) Nothing) `docAppend` DocString [x]
_ -> DocHyperlink (Hyperlink (decodeUtf8 s) Nothing)
-- | Parses strings between identifier delimiters. Consumes all input that it
-- deems to be valid in an identifier. Note that it simply blindly consumes
-- characters and does no actual validation itself.
parseValid :: Parser String
parseValid = p some
where
idChar =
satisfy (\c -> isAlpha_ascii c
|| isDigit c
-- N.B. '-' is placed first otherwise attoparsec thinks
-- it belongs to a character class
|| inClass "-_.!#$%&*+/<=>?@\\|~:^" c)
p p' = do
vs' <- p' $ utf8String "⋆" <|> return <$> idChar
let vs = concat vs'
c <- peekChar'
case c of
'`' -> return vs
'\'' -> (\x -> vs ++ "'" ++ x) <$> ("'" *> p many') <|> return vs
_ -> fail "outofvalid"
-- | Parses UTF8 strings from ByteString streams.
utf8String :: String -> Parser String
utf8String x = decodeUtf8 <$> string (encodeUtf8 x)
-- | Parses identifiers with help of 'parseValid'. Asks GHC for
-- 'String' from the string it deems valid.
identifier :: Parser (DocH mod Identifier)
identifier = do
o <- idDelim
vid <- parseValid
e <- idDelim
return $ DocIdentifier (o, vid, e)
where
idDelim = satisfy (\c -> c == '\'' || c == '`')
| Fuuzetsu/haddock | haddock-library/src/Documentation/Haddock/Parser.hs | bsd-2-clause | 29,240 | 0 | 22 | 7,519 | 7,925 | 4,119 | 3,806 | 473 | 25 |
module Settings.Packages.Base (basePackageArgs) where
import Expression
import Settings
basePackageArgs :: Args
basePackageArgs = package base ? do
integerLibraryName <- pkgName <$> getIntegerPackage
mconcat [ builder GhcCabal ? arg ("--flags=" ++ integerLibraryName)
-- This fixes the 'unknown symbol stat' issue.
-- See: https://github.com/snowleopard/hadrian/issues/259.
, builder (Ghc CompileCWithGhc) ? arg "-optc-O2" ]
| bgamari/shaking-up-ghc | src/Settings/Packages/Base.hs | bsd-3-clause | 471 | 0 | 13 | 96 | 93 | 49 | 44 | 8 | 1 |
module Graphics.Gnuplot.Frame (
Frame.T,
cons, simple, empty,
) where
import qualified Graphics.Gnuplot.Frame.OptionSet as OptionSet
import qualified Graphics.Gnuplot.Private.Frame as Frame
import qualified Graphics.Gnuplot.Private.Plot as Plot
import qualified Graphics.Gnuplot.Private.GraphEmpty as Empty
import qualified Graphics.Gnuplot.Private.Graph as Graph
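-- | Combine a set of graph options with a plot into a complete frame.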
cons :: OptionSet.T graph -> Plot.T graph -> Frame.T graph
cons = Frame.Cons
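-- | A frame that uses the default options.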
simple :: Graph.C graph => Plot.T graph -> Frame.T graph
simple = cons OptionSet.deflt
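-- | A frame with default options that contains no graphs.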
empty :: Frame.T Empty.T
empty = simple $ Plot.pure []
| wavewave/gnuplot | src/Graphics/Gnuplot/Frame.hs | bsd-3-clause | 597 | 0 | 8 | 85 | 175 | 105 | 70 | 14 | 1 |
{-# LANGUAGE DeriveDataTypeable, PatternGuards #-}
module Tim.Smallpt.Render(
Context(..),
Refl(..),
Sphere(..),
Vec(..),
Work(..),
(|*|),
(|+|),
(|-|),
clamp,
cross,
dot,
line,
makeWork,
norm,
vmult) where
import Control.Applicative
import Control.Monad.State
import Data.Data
import Data.Ord
import Data.List
import Data.Typeable
import Random
data Vec a = Vec a a a
deriving (Data, Typeable)
instance Functor Vec where
fmap f (Vec x y z) = Vec (f x) (f y) (f z)
(|+|) :: Num a => Vec a -> Vec a -> Vec a
(Vec x1 y1 z1) |+| (Vec x2 y2 z2) = Vec (x1 + x2) (y1 + y2) (z1 + z2)
(|-|) :: Num a => Vec a -> Vec a -> Vec a
(Vec x1 y1 z1) |-| (Vec x2 y2 z2) = Vec (x1 - x2) (y1 - y2) (z1 - z2)
(|*|) :: Num a => Vec a -> a -> Vec a
v |*| n = fmap (* n) v
vmult :: Num a => Vec a -> Vec a -> Vec a
(Vec x1 y1 z1) `vmult` (Vec x2 y2 z2) = Vec (x1 * x2) (y1 * y2) (z1 * z2)
norm :: Floating a => Vec a -> Vec a
norm v = let Vec x y z = v in v |*| (1 / sqrt ((x * x) + (y * y) + (z * z)))
dot :: Num a => Vec a -> Vec a -> a
(Vec x1 y1 z1) `dot` (Vec x2 y2 z2) = (x1 * x2) + (y1 * y2) + (z1 * z2)
cross :: Num a => Vec a -> Vec a -> Vec a
(Vec x1 y1 z1) `cross` (Vec x2 y2 z2) = Vec (y1 * z2 - z1 * y2) (z1 * x2 - x1 * z2) (x1 * y2 - y1 * x2)
infixl 6 |+|
infixl 6 |-|
infixl 7 |*|
data Ray a = Ray (Vec a) (Vec a)
data Refl = DIFF
| SPEC
| REFR
deriving (Data, Typeable)
data Sphere a = Sphere { radius :: a,
position :: Vec a,
emission :: Vec a,
colour :: Vec a,
refl :: Refl }
deriving (Data, Typeable)
intersectSphere :: (Floating a, Ord a) => Ray a -> Sphere a -> Maybe a
intersectSphere (Ray o d) s | det < 0 = Nothing
| t > eps = Just t
| t' > eps = Just t'
| otherwise = Nothing
where op = position s |-| o -- Solve t^2*d.d + 2*t*(o-p).d + (o-p).(o-p)-R^2 = 0
eps = 1e-4
b = op `dot` d
det = (b * b) - (op `dot` op) + (radius s * radius s)
det' = sqrt det
t = b - det'
t' = b + det'
maybeMinimumBy :: (a -> a -> Ordering) -> [a] -> Maybe a
maybeMinimumBy _ [] = Nothing
maybeMinimumBy f l = Just (minimumBy f l)
intersectScene :: (Floating a, Ord a) => [Sphere a] -> Ray a -> Maybe (Sphere a, a)
intersectScene scene r = maybeMinimumBy (comparing snd) [(s, t) | (s, Just t) <- map ((,) <*> intersectSphere r) scene]
radiance' :: (Floating a, Ord a, Random a, RandomGen g) => [Sphere a] -> Ray a -> Int -> Sphere a -> a -> State g (Vec a)
radiance' scene r depth obj t | depth >= 5 = return (emission obj) --R.R.
| otherwise = do p' <- State (randomR (0, 1))
if p' >= p
then return (emission obj) --R.R.
else let f = colour obj |*| (1.0 / p) in ((emission obj) |+|) . (f `vmult`) <$> reflect (refl obj)
where Ray raypos raydir = r
x = raypos |+| (raydir |*| t)
n = norm (x |-| position obj)
nl | (n `dot` raydir) < 0 = n
| otherwise = n |*| (-1)
p = let Vec fx fy fz = colour obj in maximum [fx, fy, fz]
reflRay = Ray x (raydir |-| (n |*| (2 * (n `dot` raydir))))
reflect DIFF = let w = nl -- Ideal DIFFUSE reflection
Vec wx _ _ = w
u | abs wx > 0.1 = norm (Vec 0 1 0 `cross` w)
| otherwise = norm (Vec 1 0 0 `cross` w)
v = w `cross` u
in do r1 <- State (randomR (0, 2 * pi))
r2 <- State (randomR (0, 1))
let r2s = sqrt r2
d = norm ((u |*| (cos r1 * r2s)) |+|
(v |*| (sin r1 * r2s)) |+|
(w |*| sqrt (1 - r2)))
radiance scene (Ray x d) (depth + 1)
reflect SPEC = radiance scene reflRay (depth + 1) -- Ideal SPECULAR reflection
reflect REFR | cos2t < 0 = radiance scene reflRay (depth + 1) -- Total internal reflection
| depth >= 2 = do pp' <- State (randomR (0, 1))
if pp' < pp
then (|*| rp) <$> radiance scene reflRay (depth + 1)
else (|*| tp) <$> radiance scene (Ray x tdir) (depth + 1)
| otherwise = do re' <- (|*| re) <$> radiance scene reflRay (depth + 1)
tr' <- (|*| tr) <$> radiance scene (Ray x tdir) (depth + 1)
return (re' |+| tr') -- Ideal dielectric REFRACTION
where into = (n `dot` nl) > 0 -- Ray from outside going in?
nc = 1
nt = 1.5
nnt | into = nc / nt
| otherwise = nt / nc
ddn = raydir `dot` nl
cos2t = 1 - (nnt * nnt * (1 - (ddn * ddn)))
tdir = norm ((raydir |*| nnt) |-| (n |*| ((if into then 1 else (-1)) * (ddn * nnt + sqrt cos2t))))
a = nt - nc
b = nt + nc
r0 = a * a / (b * b)
c | into = 1 + ddn
| otherwise = 1 - tdir `dot` n
re = r0 + ((1 - r0) * c * c * c * c * c)
tr = 1 - re
pp = 0.25 + (0.5 * re)
rp = re / p
tp = tr / (1 - pp)
radiance :: (Floating a, Ord a, Random a, RandomGen g) => [Sphere a] -> Ray a -> Int -> State g (Vec a)
radiance scene r depth | Just (obj, t) <- intersectScene scene r = radiance' scene r depth obj t
| otherwise = return (Vec 0 0 0)
data Context a = Context { ctxw :: Int,
ctxh :: Int,
ctxsamp :: Int,
ctxcx :: Vec a,
ctxcy :: Vec a,
ctxcamdir :: Vec a,
ctxcampos :: Vec a,
ctxscene :: [Sphere a] }
deriving (Data, Typeable)
clamp :: (Num a, Ord a) => a -> a
clamp x | x < 0 = 0
| x > 1 = 1
| otherwise = x
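-- | Render one scan line of the image: each pixel averages a 2x2 subpixel
-- grid, and each subpixel is the mean of @ctxsamp@ randomly jittered camera
-- rays traced through the scene.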
line :: (Floating a, Ord a, Random a) => Context a -> Int -> [Vec a]
line context y = evalState (mapM (pixel . subtract 1) [1..w]) (mkStdGen (y * y * y))
where Context { ctxw = w, ctxh = h, ctxsamp = samp, ctxcx = cx, ctxcy = cy, ctxcamdir = camdir, ctxcampos = campos, ctxscene = scene } = context
pixel x = (|*| 0.25) . foldl1 (|+|) <$> sequence [subpixel x sx sy | sy <- [0 :: Int, 1], sx <- [0 :: Int, 1]]
subpixel x sx sy = fmap clamp . (|*| (1 / fromIntegral samp)) . foldl1 (|+|) <$> replicateM samp (sample x sx sy)
sample x sx sy = do r1 <- State (randomR (0, 4))
r2 <- State (randomR (0, 4))
let dx | r1 < 2 = sqrt r1 - 2
| otherwise = 2 - sqrt (4 - r1)
dy | r2 < 2 = sqrt r2 - 2
| otherwise = 2 - sqrt (4 - r2)
d = (cx |*| ((((fromIntegral sx + 0.5 + dx) / 2 + fromIntegral x) / fromIntegral w) - 0.5)) |+|
(cy |*| ((((fromIntegral sy + 0.5 + dy) / 2 + fromIntegral y) / fromIntegral h) - 0.5)) |+| camdir
ray = Ray (campos |+| (d |*| 140.0)) (norm d)
radiance scene ray 0
data Work a = RenderLine a Int
deriving (Data, Typeable)
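-- | Build one 'RenderLine' work item per image row (from row @h-1@ down to
-- @0@), all sharing the same camera and scene 'Context'.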
makeWork :: Floating a => Int -> Int -> Int -> [Sphere a] -> [Work (Context a)]
makeWork w h samp scene = map (RenderLine context . (h -)) [1..h]
where context = Context { ctxw = w, ctxh = h, ctxsamp = samp, ctxcx = cx, ctxcy = cy, ctxcampos = Vec 50 52 295.6, ctxcamdir = camdir, ctxscene = scene }
camdir = norm (Vec 0 (-0.042612) (-1))
cx = Vec (0.5135 * fromIntegral w / fromIntegral h) 0 0
cy = norm (cx `cross` camdir) |*| 0.5135
| timrobinson/smallpt-haskell | Tim/Smallpt/Render.hs | bsd-3-clause | 10,331 | 0 | 26 | 5,544 | 3,678 | 1,928 | 1,750 | 164 | 6 |
{- |
Module : Database.HDBC.PostgreSQL
Copyright : Copyright (C) 2005-2011 John Goerzen
License : BSD3
Maintainer : John Goerzen <[email protected]>
Stability : provisional
Portability: portable
HDBC driver interface for PostgreSQL 8.x
Written by John Goerzen, jgoerzen\@complete.org
/NOTE ON DATES AND TIMES/
The recommended correspondence between PostgreSQL date and time types and HDBC SqlValue
types is:
* SqlLocalDate: DATE
* SqlLocalTimeOfDay: TIME WITHOUT TIME ZONE
* SqlZonedLocalTimeOfDay: TIME WITH TIME ZONE
* SqlLocalTime: TIMESTAMP WITHOUT TIME ZONE
* SqlZonedTime: TIMESTAMP WITH TIME ZONE
* SqlUTCTime: TIMESTAMP WITH TIME ZONE
* SqlDiffTime: INTERVAL
* SqlPOSIXTime: NUMERIC
* SqlEpochTime: INTEGER
* SqlTimeDiff: INTERVAL
Other combinations are possible, and may even be converted automatically.
The above simply represents the types that seem the most logical correspondence,
and thus are tested by the HDBC-PostgreSQL test suite.
-}
module Database.HDBC.PostgreSQL
(
-- * Connecting to Databases
connectPostgreSQL, withPostgreSQL,
connectPostgreSQL', withPostgreSQL',
Connection,
-- * Transactions
begin,
-- * PostgreSQL Error Codes
--
-- |When an @SqlError@ is thrown, the field @seState@ is set to one of the following
-- error codes.
module Database.HDBC.PostgreSQL.ErrorCodes,
-- * Threading
-- $threading
)
where
import Database.HDBC.PostgreSQL.Connection(connectPostgreSQL, withPostgreSQL,
connectPostgreSQL', withPostgreSQL',
begin, Connection())
import Database.HDBC.PostgreSQL.ErrorCodes
{- $threading
Provided the local libpq library is thread-safe, multiple 'Connection's may be used
to perform concurrent database queries. Concurrent queries issued on a single
'Connection' will be performed serially.
When the local libpq library is not thread-safe (ie. it has not been compiled with
--enable-thread-safety), only a single database function will be performed at a time.
-}
| cabrera/hdbc-postgresql | Database/HDBC/PostgreSQL.hs | bsd-3-clause | 2,143 | 0 | 6 | 487 | 83 | 60 | 23 | 11 | 0 |
-----------------------------------------------------------------------------
-- |
-- Module : RefacSlicing
-- Copyright : (c) Christopher Brown 2005
--
-- Maintainer : [email protected]
-- Stability : provisional
-- Portability : portable
--
-- This module contains a transformation for HaRe.
-- Symoblic Evaluation on tuples.
-- creates functions which evaluate tha expressions
-- within the return value of a function.
-- e.g.
--
-- @ f x y = (x, y) @
--
-- @ f1 x = x @
--
-- @ f2 y = y @
--
-----------------------------------------------------------------------------
module RefacSlicing where
import AbstractIO
import Data.Maybe
import Data.List
import RefacUtils
import RefacRedunDec
import SlicingUtils
data Patt = Match HsMatchP | MyPat HsDeclP | Def [Char]
refacSlicing args
= do let
fileName = args!!0
beginRow = read (args!!1)::Int
beginCol = read (args!!2)::Int
endRow = read (args!!3)::Int
endCol = read (args!!4)::Int
AbstractIO.putStrLn "refacSlicing"
-- Parse the input file.
modInfo@(inscps, exps, mod, tokList) <- parseSourceFile fileName
-- Find the function that's been highlighted as the refactree
let (loc, pnt, pats, exp, wh, p)
= findDefNameAndExp tokList
(beginRow, beginCol)
(endRow, endCol)
mod
let newExp = locToExp (beginRow, beginCol) (endRow, endCol) tokList mod
let transExp = rewriteExpression exp newExp
if newExp == defaultExp
then do
error "Program slicing can only be performed on an expression."
else do
(wh', newExp') <- doRefac wh transExp
-- ((_,_), (tokList', mod')) <- applyRefac (checkCase exp newExp wh') (Just inscps, exps, mod, tokList) fileName
-- AbstractIO.putStrLn $ show (newExp, wh'')
(_,refWh) <- checkCase exp newExp wh'
-- (mod',((tokList',modified),_))<-doRemovingWhere fileName mod tokList exp newExp' wh'
((_,_), (tokList', mod')) <- applyRefac (doRemovingWhere exp newExp' refWh) (Just (inscps, exps, mod, tokList)) fileName
((_,m), (tokList'', mod'')) <- applyRefac (doRemoving1 exp newExp' wh) (Just (inscps, exps, mod', tokList')) fileName
-- ((_,_), (newTokList, newMod)) <- applyRefac (doTranslation exp transExp) (Just (inscps, exps, mod'', tokList'')) fileName
-- AbstractIO.putStrLn $ show tokList''
writeRefactoredFiles False [((fileName, True), (tokList'', mod''))]
AbstractIO.putStrLn "Completed.\n"
doTranslation e nT (_,_,mod)
= do
newMod <- update e nT mod
return newMod
sliceSubExp p old exp wh loc pnt pats (_,_, mod) = do
(decls, newExp) <- removeRedun wh exp
mod' <- updating p mod loc pnt pats newExp decls
return mod'
changeName newName (PNT (PN (UnQual _) (G modName _ optSrc)) Value s)
= PNT (PN (UnQual newName) (G modName newName optSrc)) Value s
updating (match@(Match x)) mod loc pnt pats rhs ds = do
mod' <- update x (newMatch loc pnt pats rhs ds) mod
return mod'
updating (pat@(MyPat x)) mod loc pnt pats rhs ds = do
mod' <- update x (newDecl loc pnt pats rhs ds) mod
return mod'
newMatch loc pnt pats rhs ds = HsMatch loc pnt pats (HsBody rhs) ds
newDecl loc pnt pats rhs ds = Dec (HsFunBind loc [HsMatch loc pnt pats (HsBody rhs) ds] )
checkFreeInWhere :: [PName] -> [HsDeclP] -> [HsDeclP]
checkFreeInWhere [] _ = []
checkFreeInWhere _ [] = []
checkFreeInWhere (p:ps) list = (checkSinInWhere p list) ++ (checkFreeInWhere ps list)
where
checkSinInWhere :: PName -> [HsDeclP] -> [HsDeclP]
checkSinInWhere p [] = []
checkSinInWhere p (x:xs)
| defines p x = [x]
| otherwise = checkSinInWhere p xs
rewriteExpression :: HsExpP -> HsExpP -> HsExpP
rewriteExpression e@(Exp (HsInfixApp e1 o e2)) newExp
| findEntity newExp e1 = (rewriteExpression e1 newExp)
| findEntity newExp e2 = (rewriteExpression e2 newExp)
| otherwise = e
rewriteExpression (Exp (HsLet ds e)) newExp = (Exp (HsLet ds (rewriteExpression e newExp)))
rewriteExpression (Exp (HsLambda ds e)) newExp = (Exp (HsLambda ds newExp))
rewriteExpression (Exp (HsParen e1)) newExp = rewriteExpression e1 newExp
rewriteExpression e1 e2 = e2
{-|
Takes the position of the highlighted code and returns
the function name, the list of arguments, the expression that has been
highlighted by the user, and any where\/let clauses associated with the
function.
-}
{-findDefNameAndExp :: Term t => [PosToken] -- ^ The token stream for the
-- file to be
-- refactored.
-> (Int, Int) -- ^ The beginning position of the highlighting.
-> (Int, Int) -- ^ The end position of the highlighting.
-> t -- ^ The abstract syntax tree.
-> (SrcLoc, PNT, FunctionPats, HsExpP, WhereDecls) -- ^ A tuple of,
-- (the function name, the list of arguments,
-- the expression highlighted, any where\/let clauses
-- associated with the function).
-}
findDefNameAndExp toks beginPos endPos t
= fromMaybe ([], defaultPNT, [], defaultExp, [], Def [])
(applyTU (once_tdTU (failTU `adhocTU` inMatch `adhocTU` inPat)) t)
where
--The selected sub-expression is in the rhs of a match
inMatch (match@(HsMatch loc1 pnt pats (rhs@(HsBody e)) ds)::HsMatchP)
| locToExp beginPos endPos toks rhs /= defaultExp
= Just ([loc1], pnt, pats, e, ds, (Match match))
inMatch _ = Nothing
--The selected sub-expression is in the rhs of a pattern-binding
inPat (pat@(Dec (HsPatBind loc1 ps (rhs@(HsBody e)) ds))::HsDeclP)
| locToExp beginPos endPos toks rhs /= defaultExp
= if isSimplePatBind pat
then Just ([loc1], patToPNT ps, [], e, ds, (MyPat pat))
else error "A complex pattern binding can not be generalised!"
inPat _ = Nothing
| kmate/HaRe | old/refactorer/RefacSlicing.hs | bsd-3-clause | 6,790 | 0 | 18 | 2,285 | 1,605 | 848 | 757 | -1 | -1 |
module Poly4 () where
import Language.Haskell.Liquid.Prelude
x = choose 0
baz y = y
prop = liquidAssertB (baz True)
| abakst/liquidhaskell | tests/pos/poly4.hs | bsd-3-clause | 125 | 0 | 7 | 28 | 44 | 25 | 19 | 5 | 1 |
module Test10 where
f x = x + y where y = 37
g = 1 + 37
| SAdams601/HaRe | old/testing/refacFunDef/Test10_AstOut.hs | bsd-3-clause | 59 | 0 | 6 | 21 | 32 | 18 | 14 | 3 | 1 |
{-# LANGUAGE TemplateHaskell, FlexibleInstances, MultiParamTypeClasses #-}
module Graphics.UI.Bottle.Animation
( R, Size, Layer
, PositionedImage(..), piImage, piRect
, Frame(..), fSubImages, onImages
, draw, nextFrame, mapIdentities
, unitSquare, backgroundColor
, translate, scale, onDepth
, unitIntoRect
, simpleFrame, simpleFrameDownscale
, joinId, subId
, weaker, stronger
, module Graphics.UI.Bottle.Animation.Id
) where
import Control.Applicative(Applicative(..), liftA2)
import Control.Lens.Operators
import Control.Monad(void)
import Data.List(isPrefixOf)
import Data.List.Utils(groupOn, sortOn)
import Data.Map(Map, (!))
import Data.Maybe(isJust)
import Data.Monoid(Monoid(..))
import Data.Vector.Vector2 (Vector2(..))
import Graphics.DrawingCombinators(R, (%%))
import Graphics.UI.Bottle.Animation.Id (AnimId)
import Graphics.UI.Bottle.Rect(Rect(Rect))
import qualified Control.Lens as Lens
import qualified Data.List as List
import qualified Data.Map as Map
import qualified Data.Vector.Vector2 as Vector2
import qualified Graphics.DrawingCombinators as Draw
import qualified Graphics.DrawingCombinators.Utils as DrawUtils
import qualified Graphics.UI.Bottle.Rect as Rect
type Layer = Int
type Size = Vector2 R
data PositionedImage = PositionedImage {
  _piImage :: Draw.Image (), -- Image always occupies (0,0)..(1,1); the translation/scaling occurs when drawing
_piRect :: Rect
}
Lens.makeLenses ''PositionedImage
newtype Frame = Frame {
_fSubImages :: Map AnimId [(Layer, PositionedImage)]
}
Lens.makeLenses ''Frame
joinId :: AnimId -> AnimId -> AnimId
joinId = (++)
subId :: AnimId -> AnimId -> Maybe AnimId
subId folder path
| folder `isPrefixOf` path = Just $ drop (length folder) path
| otherwise = Nothing
simpleFrame :: AnimId -> Draw.Image () -> Frame
simpleFrame animId image =
Frame $ Map.singleton animId [(0, PositionedImage image (Rect 0 1))]
simpleFrameDownscale :: AnimId -> Size -> Draw.Image () -> Frame
simpleFrameDownscale animId size@(Vector2 w h) =
scale size .
simpleFrame animId .
(Draw.scale (1 / w) (1 / h) %%)
inFrame2
:: (Map AnimId [(Layer, PositionedImage)]
-> Map AnimId [(Layer, PositionedImage)]
-> Map AnimId [(Layer, PositionedImage)])
-> Frame -> Frame -> Frame
inFrame2 f (Frame x) (Frame y) = Frame (f x y)
stronger :: Frame -> Frame -> Frame
stronger = inFrame2 Map.union
weaker :: Frame -> Frame -> Frame
weaker = flip stronger
instance Monoid Frame where
mempty = Frame mempty
mappend = inFrame2 $ Map.unionWith (++)
unitX :: Draw.Image ()
unitX = void $ mconcat
[ Draw.line (0, 0) (1, 1)
, Draw.line (1, 0) (0, 1)
]
red :: Draw.Color
red = Draw.Color 1 0 0 1
draw :: Frame -> Draw.Image ()
draw =
mconcat . map (posImages . map snd) .
sortOn (fst . head) . Map.elems .
(^. fSubImages)
where
putXOn (PositionedImage img r) = PositionedImage (mappend (Draw.tint red unitX) img) r
posImages [x] = posImage x
posImages xs = mconcat $ map (posImage . putXOn) xs
posImage
(PositionedImage img
(Rect
{ Rect._topLeft = Vector2 t l
, Rect._size = Vector2 w h
})) =
Draw.translate (t, l) %% Draw.scale w h %% img
prefixRects :: Map AnimId (Layer, PositionedImage) -> Map AnimId Rect
prefixRects src =
Map.fromList . filter (not . null . fst) . map perGroup $ groupOn fst $ sortOn fst prefixItems
where
perGroup xs =
(fst (head xs), List.foldl1' joinRects (map snd xs))
prefixItems = do
(key, (_, PositionedImage _ rect)) <- Map.toList src
prefix <- List.inits key
return (prefix, rect)
joinRects a b =
Rect {
Rect._topLeft = tl,
Rect._size = br - tl
}
where
tl =
liftA2 min (a ^. Rect.topLeft) (b ^. Rect.topLeft)
br =
liftA2 max (a ^. Rect.bottomRight) (b ^. Rect.bottomRight)
findPrefix :: Ord a => [a] -> Map [a] b -> Maybe [a]
findPrefix key dict =
List.find (`Map.member` dict) . reverse $ List.inits key
relocateSubRect :: Rect -> Rect -> Rect -> Rect
relocateSubRect srcSubRect srcSuperRect dstSuperRect =
Rect {
Rect._topLeft =
dstSuperRect ^. Rect.topLeft +
sizeRatio *
(srcSubRect ^. Rect.topLeft -
srcSuperRect ^. Rect.topLeft),
Rect._size = sizeRatio * srcSubRect ^. Rect.size
}
where
sizeRatio =
dstSuperRect ^. Rect.size /
fmap (max 1) (srcSuperRect ^. Rect.size)
isVirtuallySame :: Frame -> Frame -> Bool
isVirtuallySame (Frame a) (Frame b) =
Map.keysSet a == Map.keysSet b &&
diffRects < equalityThreshold
where
equalityThreshold = 0.2
diffRects =
maximum . Map.elems $
Map.intersectionWith subtractRect
(rectMap a) (rectMap b)
subtractRect ra rb =
Vector2.uncurry max $
liftA2 max
(fmap abs (ra ^. Rect.topLeft - rb ^. Rect.topLeft))
(fmap abs (ra ^. Rect.bottomRight - rb ^. Rect.bottomRight))
rectMap = Map.map (^?! Lens.traversed . Lens._2 . piRect)
mapIdentities :: (AnimId -> AnimId) -> Frame -> Frame
mapIdentities f = fSubImages %~ Map.mapKeys f
nextFrame :: R -> Frame -> Frame -> Maybe Frame
nextFrame movement dest cur
| isVirtuallySame dest cur = Nothing
| otherwise = Just $ makeNextFrame movement dest cur
makeNextFrame :: R -> Frame -> Frame -> Frame
makeNextFrame movement (Frame dests) (Frame curs) =
Frame . Map.map (:[]) . Map.mapMaybe id $
mconcat [
Map.mapWithKey add $ Map.difference dest cur,
Map.mapWithKey del $ Map.difference cur dest,
Map.intersectionWith modify dest cur
]
where
dest = Map.map head dests
cur = Map.map head curs
animSpeed = pure movement
curPrefixMap = prefixRects cur
destPrefixMap = prefixRects dest
add key (layer, PositionedImage img r) =
Just (layer, PositionedImage img rect)
where
rect =
maybe (Rect (r ^. Rect.center) 0) genRect $
findPrefix key curPrefixMap
genRect prefix = relocateSubRect r (destPrefixMap ! prefix) (curPrefixMap ! prefix)
del key (layer, PositionedImage img (Rect pos size))
| isJust (findPrefix key destPrefixMap)
|| Vector2.sqrNorm size < 1 = Nothing
| otherwise = Just (layer, PositionedImage img (Rect (pos + size/2 * animSpeed) (size * (1 - animSpeed))))
modify
(layer, PositionedImage destImg (Rect destTopLeft destSize))
(_, PositionedImage _ (Rect curTopLeft curSize)) =
Just (
layer,
PositionedImage destImg
(Rect
(animSpeed * destTopLeft + (1 - animSpeed) * curTopLeft)
(animSpeed * destSize + (1 - animSpeed) * curSize)))
unitSquare :: AnimId -> Frame
unitSquare animId = simpleFrame animId DrawUtils.square
backgroundColor :: AnimId -> Layer -> Draw.Color -> Vector2 R -> Frame -> Frame
backgroundColor animId layer color size =
flip mappend . onDepth (+layer) . scale size .
onImages (Draw.tint color) $ unitSquare animId
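-- Illustrative sketch, not part of the original module: putting a black
-- background one layer deeper behind an existing frame. The layer offset,
-- colour and size below are assumptions chosen only for this example.
exampleWithBackground :: AnimId -> Frame -> Frame
exampleWithBackground animId =
  backgroundColor animId 10 (Draw.Color 0 0 0 1) (Vector2 800 600)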
eachFrame :: Lens.Traversal' Frame (Layer, PositionedImage)
eachFrame = fSubImages . Lens.traversed . Lens.traversed
images :: Lens.Traversal' Frame PositionedImage
images = eachFrame . Lens._2
translate :: Vector2 R -> Frame -> Frame
translate pos = images %~ moveImage
where
moveImage (PositionedImage img (Rect tl size)) =
PositionedImage img (Rect (tl + pos) size)
scale :: Vector2 R -> Frame -> Frame
scale factor = images %~ scaleImage
where
scaleImage (PositionedImage img (Rect tl size)) =
PositionedImage img (Rect (tl * factor) (size * factor))
-- Scale/translate a Unit-sized frame into a given rect
unitIntoRect :: Rect -> Frame -> Frame
unitIntoRect r =
translate (r ^. Rect.topLeft) .
scale (r ^. Rect.size)
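-- Illustrative sketch, not part of the original module: placing the unit
-- square for a given animation id into a concrete rectangle. 'someRect' is a
-- hypothetical target; any Rect value would work the same way.
exampleInRect :: AnimId -> Frame
exampleInRect animId = unitIntoRect someRect (unitSquare animId)
  where
    someRect = Rect (Vector2 10 10) (Vector2 100 50)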
onDepth :: (Int -> Int) -> Frame -> Frame
onDepth = (eachFrame . Lens._1 %~)
-- TODO: Export a lens?
onImages :: (Draw.Image () -> Draw.Image ()) -> Frame -> Frame
onImages = (images . piImage %~)
| aleksj/lamdu | bottlelib/Graphics/UI/Bottle/Animation.hs | gpl-3.0 | 7,916 | 0 | 16 | 1,724 | 2,864 | 1,526 | 1,338 | 199 | 2 |
{-# LANGUAGE TemplateHaskell #-}
-- test the representation of guards in declaration quotes
module Main
where
$(
[d|
foo :: Int -> Int
foo x
| x == 5 = 6
foo x = 7
|]
)
$(
[d|
bar :: Maybe Int -> Int
bar x
| Just y <- x = y
bar _ = 9
|]
)
main :: IO ()
main = do putStrLn $ show $ foo 5
putStrLn $ show $ foo 8
putStrLn $ show $ bar (Just 2)
putStrLn $ show $ bar Nothing
| danse/ghcjs | test/ghc/th/tH_repGuardOutput.hs | mit | 484 | 0 | 10 | 214 | 105 | 54 | 51 | 19 | 1 |
-- There was a lot of discussion about various ways of computing
-- Bernouilli numbers (whatever they are) on haskell-cafe in March 2003
-- Here's one of the programs.
-- It's not a very good test, I suspect, because it manipulates big integers,
-- and so probably spends most of its time in GMP.
import Data.Ratio
import System.Environment
-- powers = [[r^n | r<-[2..]] | n<-[1..]]
-- type signature required for compilers lacking the monomorphism restriction
powers :: [[Integer]]
powers = [2..] : map (zipWith (*) (head powers)) powers
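-- Illustrative sketch, not part of the original program: the first few entries
-- of the first rows, to make the shape of 'powers' concrete.
-- firstRows == [[2,3,4,5],[4,9,16,25],[8,27,64,125]]
firstRows :: [[Integer]]
firstRows = map (take 4) (take 3 powers)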
-- neg_powers = [[(-1)^r * r^n | r<-[2..]] | n<-[1..]]
-- type signature required for compilers lacking the monomorphism restriction
neg_powers :: [[Integer]]
neg_powers =
map (zipWith (\n x -> if n then x else -x) (iterate not True)) powers
pascal:: [[Integer]]
pascal = [1,2,1] : map (\line -> zipWith (+) (line++[0]) (0:line)) pascal
bernoulli 0 = 1
bernoulli 1 = -(1%2)
bernoulli n | odd n = 0
bernoulli n =
(-1)%2
+ sum [ fromIntegral ((sum $ zipWith (*) powers (tail $ tail combs)) -
fromIntegral k) %
fromIntegral (k+1)
| (k,combs)<- zip [2..n] pascal]
where powers = (neg_powers!!(n-1))
main = do
[arg] <- getArgs
let n = (read arg)::Int
putStr $ "Bernoulli of " ++ (show n) ++ " is "
print (bernoulli n)
| beni55/ghcjs | test/nofib/imaginary/bernouilli/Main.hs | mit | 1,320 | 6 | 16 | 292 | 446 | 233 | 213 | 24 | 2 |
{-# LANGUAGE RankNTypes #-}
module T9196 where
f :: (forall a. Eq a) => a -> a
f x = x
g :: (Eq a => Ord a) => a -> a
g x = x
| forked-upstream-packages-for-ghcjs/ghc | testsuite/tests/typecheck/should_fail/T9196.hs | bsd-3-clause | 128 | 0 | 7 | 38 | 69 | 38 | 31 | -1 | -1 |
module Channel where
import qualified Data.ByteString as Bs
import qualified Data.Set as S
import qualified Control.Concurrent as C (ThreadId)
import qualified Network.Socket as So hiding (send, sendTo, recv, recvFrom)
-- | Holds the configuration of a channel.
data ChannelConfig = ChannelConfig {
socket :: So.Socket,
-- ^ The UDP Socket from Network.Socket that the channel will use to send and receive
-- messages.
resendTimeout :: Integer,
    -- ^ Picoseconds after which a packet is re-sent if no ACK for it has been received.
maxResends :: Int,
    -- ^ Number of times the same packet can be re-sent without an ACK before it is
    -- considered lost.
allowed :: So.SockAddr -> IO(Bool),
    -- ^ Function used to decide whether to accept incoming packets from the given
    -- address.
maxPacketSize :: Int,
    -- ^ Maximum number of bytes that a packet sent on this channel may carry; larger
    -- packets will throw an exception.
recvRetention :: Integer
    -- ^ Time that a received and delivered packet remains in memory in order to avoid
    -- duplicated receptions.
    -- Packets are retained for @recvRetention * resendTimeout * maxResends@ picoseconds after
    -- reception and, after that, are freed on the next getReceived call.
}
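-- Illustrative sketch, not part of the original module: one way such a
-- configuration might be filled in. All concrete values here are assumptions
-- chosen only for the example; 'sock' stands for an already-created UDP socket.
exampleConfig :: So.Socket -> ChannelConfig
exampleConfig sock = ChannelConfig
    { socket        = sock
    , resendTimeout = 2 * 10 ^ (11 :: Int)  -- 0.2 s, expressed in picoseconds
    , maxResends    = 5                     -- give up after five unacknowledged resends
    , allowed       = \_ -> return True     -- accept packets from any address
    , maxPacketSize = 512                   -- payload limit in bytes
    , recvRetention = 10                    -- multiplier for the duplicate-filter window
    }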
data ChannelStatus = ChannelStatus {
nextId :: !Int,
sentMsgs :: !(S.Set Message),
unsentMsgs :: !(S.Set Message),
recvMsgs :: !(S.Set Message),
deliveredMsgs :: !(S.Set Message),
receivingThread :: !C.ThreadId,
sendingThread :: !C.ThreadId,
closed :: !Bool
} deriving (Show)
data Message = Message {
msgId :: !Int,
address :: !So.SockAddr,
string :: !Bs.ByteString,
    lastSend :: !Integer, -- or reception time in case of incoming messages.
resends :: !Int
} deriving (Show)
instance Eq Message where
(==) m1 m2 = msgId m1 == msgId m2 && address m1 == address m2
instance Ord Message where
compare m1 m2 = compare (msgId m1, address m1) (msgId m2, address m2)
emptyChannel :: C.ThreadId -> C.ThreadId -> ChannelStatus
emptyChannel rtid stid = ChannelStatus 0 S.empty S.empty S.empty S.empty rtid stid False
receiveMsg :: Message -> ChannelStatus -> ChannelStatus
-- ^ Queues a message that has been received.
receiveMsg msg chst =
if S.notMember msg (deliveredMsgs chst) then
chst {recvMsgs = S.insert msg (recvMsgs chst)}
else chst
registerACK :: So.SockAddr -> Int -> ChannelStatus -> ChannelStatus
-- ^ Informs the ChannelStatus that it no longer needs to store the packet from the address with
-- the given id, since it was ACKed by the remote host.
registerACK addr mId chst = chst {
sentMsgs = S.delete (Message mId addr Bs.empty 0 0) (sentMsgs chst)
}
queueMsg :: (So.SockAddr,Bs.ByteString) -> ChannelStatus -> ChannelStatus
-- ^ Puts a new message to be sent on the ChannelStatus.
queueMsg (addr,str) chst = chst {
nextId = max 0 $ (nextId chst) + 1,
sentMsgs = S.insert (Message (nextId chst) addr str 0 0) (sentMsgs chst)
}
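-- Illustrative sketch, not part of the original module: queueing one message on
-- a freshly created status. The thread id and address are parameters here because
-- they are assumptions; in the real code they come from the channel's worker
-- threads and the remote peer.
exampleQueue :: C.ThreadId -> So.SockAddr -> Bs.ByteString -> ChannelStatus
exampleQueue tid addr str = queueMsg (addr, str) (emptyChannel tid tid)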
nextForSending :: ChannelConfig -> Integer -> ChannelStatus -> (S.Set Message, ChannelStatus)
-- ^ Receives the current CPUTime and a ChannelStatus, returns the messages to be sent and updates
-- the ChannelStatus, assuming that they will be sent.
nextForSending chcfg time chst = let
touted = S.filter (\m -> time >= (lastSend m) + (resendTimeout chcfg)) (sentMsgs chst)
touted' = S.map (\m -> m {lastSend = time, resends = resends m + 1}) touted
(ready,failed) = S.partition (\m -> resends m <= maxResends chcfg) touted'
updatedMsgs = S.union ready $ S.difference (sentMsgs chst) failed
chst' = chst {sentMsgs = updatedMsgs, unsentMsgs = S.union failed (unsentMsgs chst)}
in seq touted' $ (ready,chst')
nextForDeliver :: ChannelConfig -> Integer -> ChannelStatus -> (S.Set Message, ChannelStatus)
-- ^ Receives the current CPUTime and a ChannelStatus, returns the messages that can be delivered
-- and cleans the old ones that were retained.
nextForDeliver chcfg time chst = let
retenTime = recvRetention chcfg * resendTimeout chcfg * fromIntegral (maxResends chcfg)
survives m = time <= lastSend m + retenTime
newDelivered = S.difference (S.filter survives (deliveredMsgs chst)) (recvMsgs chst)
in (recvMsgs chst, chst {deliveredMsgs = newDelivered, recvMsgs = S.empty})
| Autopawn/haskell-secureUDP | Channel.hs | mit | 4,347 | 0 | 15 | 886 | 1,074 | 587 | 487 | 87 | 2 |
module GroupCreator.Evaluation
( Condition(..)
, fitness
) where
import GroupCreator.Groupings
import GroupCreator.People
import Data.List
import Data.Ord
import Data.Map
import Data.Hash (hash, asWord64)
data Condition = SizeRestriction { groupSize :: Int }
--make groups of this many people
| Friends { person :: Int, friend :: Int }
--these two want to be in the same group
| Enemies { person :: Int, enemy :: Int }
--these two don't want to be in the same group
| Peers { attributeIndex :: Int }
--if attribute is "sex", groups should try to be either all-males or all-females (only for ENUM attributes)
| Mixed { attributeIndex :: Int }
--if attribute is "sex", groups should try to be mixed (3 M 2 F is better than 4 M 1 F)
deriving (Show)
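-- Illustrative sketch, not part of the original module: a hypothetical list of
-- conditions asking for groups of four, keeping persons 1 and 2 together,
-- keeping 3 and 4 apart, and mixing attribute 0 within every group.
exampleConditions :: [Condition]
exampleConditions =
  [ SizeRestriction 4
  , Friends 1 2
  , Enemies 3 4
  , Mixed 0
  ]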
fitness :: [Condition] -> People -> Grouping -> Double
fitness conditions people grouping = run 0 conditions people grouping
where
run conditionIndex [] people (Grouping grouping) = fromIntegral (asWord64 $ hash grouping) / 1e25
run conditionIndex (cond:conds) people (Grouping grouping) = (1 + conditionIndex) * (eval cond people grouping) + (run (conditionIndex + 1) conds people (Grouping grouping))
-- The higher the number, the worse the result of this evaluation
eval :: Condition -> People -> [[Int]] -> Double
eval (SizeRestriction groupSize) people [] = 0
eval (SizeRestriction groupSize) people (group:groups) = 0.5 * fromIntegral (abs (length group - groupSize)) + (eval (SizeRestriction groupSize) people groups)
eval (Friends person friend) people [] = 0
eval (Friends person friend) people (group:groups)
| intersection == 2 = 0 --person and friend are in the same group
| intersection == 1 = 5 --either person or friend are alone in the group
| otherwise = eval (Friends person friend) people groups
where
intersection = length $ intersect group [person, friend]
eval (Enemies person enemy) people grouping = 5 - eval (Friends person enemy) people grouping
eval (Peers attributeIndex) people [] = 0
eval (Peers attributeIndex) people (firstGroup:groups) = (theRest / majorityCount) + (eval (Peers attributeIndex) people groups)
where
groupAttributeValues = Data.List.map (enumValue . (! attributeIndex)) $ Data.List.map (people !) firstGroup
majorityCount = fromIntegral $ length $ head . sortBy (flip $ comparing length) . group . sort $ groupAttributeValues
theRest = fromIntegral (length firstGroup) - majorityCount
| cambraca/group-creator | GroupCreator/Evaluation.hs | mit | 2,580 | 0 | 14 | 573 | 741 | 397 | 344 | 34 | 2 |
{-# htermination index :: Ix a => (a,a) -> a -> Int #-}
| ComputationWithBoundedResources/ara-inference | doc/tpdb_trs/Haskell/full_haskell/Prelude_index_1.hs | mit | 56 | 0 | 2 | 13 | 3 | 2 | 1 | 1 | 0 |