| code | repo_name | path | license | size | n_ast_errors | ast_max_depth | n_whitespaces | n_ast_nodes | n_ast_terminals | n_ast_nonterminals | loc | cycloplexity |
|---|---|---|---|---|---|---|---|---|---|---|---|---|
| stringlengths 5..1.03M | stringlengths 5..90 | stringlengths 4..158 | stringclasses 15 values | int64 5..1.03M | int64 0..53.9k | int64 2..4.17k | int64 0..365k | int64 3..317k | int64 1..171k | int64 1..146k | int64 -1..37.3k | int64 -1..1.31k |
module Main where
import Safe.Length (safeLength)
import Data.Proxy (Proxy(..))
main :: IO ()
main = print $ safeLength (Proxy :: Proxy (Char, Char)) ('a', 'b')
| stepcut/safe-length | example/GoodTuple.hs | bsd-3-clause | 164 | 0 | 9 | 28 | 73 | 43 | 30 | 5 | 1 |
{-# LANGUAGE CPP #-}
{-# LANGUAGE ExistentialQuantification #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE StandaloneDeriving #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE ScopedTypeVariables #-}
module Ivory.ModelCheck.CVC4 where
import Prelude ()
import Prelude.Compat hiding (exp)
#if !MIN_VERSION_base(4,11,0)
import Data.Monoid.Compat ((<>))
#endif
import qualified Data.ByteString.Char8 as B
import Data.Int
import Data.List (intersperse)
import Data.Word
import Ivory.Language.Syntax.Concrete.Location
import Ivory.Language.Syntax.Concrete.Pretty
--------------------------------------------------------------------------------
type Var = String
type Func = String
--------------------------------------------------------------------------------
-- Concrete syntax
class Concrete a where
concrete :: a -> B.ByteString
instance Concrete B.ByteString where
concrete = id
instance Concrete String where
concrete = B.pack
instance Concrete SrcLoc where
concrete = concrete . prettyPrint . pretty
data ConcreteList = forall a. Concrete a => CL a
-- Specialization
clBS :: B.ByteString -> ConcreteList
clBS = CL
--------------------------------------------------------------------------------
-- Statements
data Statement = TypeDecl String [(Var, Type)]
| VarDecl Var Type
| Assert (Located Expr)
| Query (Located Expr)
-- Arbitrary statement constructed by-hand.
| forall a . Concrete a => Statement a
instance Concrete Statement where
concrete (TypeDecl ty [])
= statement [CL ty, clBS ":", clBS "TYPE"]
concrete (TypeDecl ty fs)
= statement [CL ty, clBS ":", clBS "TYPE", clBS "= [#", fieldList fs, clBS "#]"]
concrete (VarDecl v ty) = statement [CL v, clBS ":", CL ty]
concrete (Assert (Located loc exp))
= statement [clBS "ASSERT", CL exp, clBS ";\t %", CL loc]
concrete (Query (Located loc exp))
= statement [clBS "QUERY", CL exp, clBS ";\t %", CL loc]
concrete (Statement a) = statement [CL a]
statement :: [ConcreteList] -> B.ByteString
statement as =
let unList (CL a) = concrete a in
let toks = B.unwords (map unList as) in
B.snoc toks ';'
fieldList :: [(Var,Type)] -> ConcreteList
fieldList fs = clBS $ B.intercalate ", "
[concrete v <> " : " <> concrete t | (v,t) <- fs]
typeDecl :: String -> [(Var,Type)] -> Statement
typeDecl = TypeDecl
varDecl :: Var -> Type -> Statement
varDecl = VarDecl
assert :: Located Expr -> Statement
assert = Assert
query :: Located Expr -> Statement
query = Query
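-- A small usage sketch (illustrative, not part of the original module):
-- rendering a statement to CVC4 concrete syntax with the smart
-- constructors above.
--
-- >>> B.putStrLn (concrete (varDecl "x" Integer))
-- x : INT;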
--------------------------------------------------------------------------------
-- Expressions and literals
instance Concrete Float where
concrete = concrete . show
instance Concrete Double where
concrete = concrete . show
instance Concrete Integer where
concrete = concrete . show
instance Concrete Int where
concrete = concrete . show
data Type = Void
| Integer
| Real
| Char
| Bool
| Struct String
| Array Type
| Opaque
deriving (Show, Read, Eq)
instance Concrete Type where
concrete Bool = "BOOLEAN"
concrete Real = "REAL"
concrete Integer = "INT"
concrete (Array t) = "ARRAY INT OF " <> concrete t
concrete (Struct name) = B.pack name
concrete _ = "INT" -- error $ "unexpected type: " ++ show t
data Expr = Var Var
-- Boolean expressions
| T
| F
| Not Expr
| And Expr Expr
| Or Expr Expr
| Impl Expr Expr
| Equiv Expr Expr
| Eq Expr Expr
| Le Expr Expr
| Leq Expr Expr
| Ge Expr Expr
| Geq Expr Expr
-- Numeric expressions
| forall a . (Show a, Concrete a, Num a) => NumLit a
| Add Expr Expr
| Sub Expr Expr
| Mod Expr Integer -- CVC4 can handle mod-by-constant
| Call Func [Expr]
| Store Expr Expr
| StoreMany Expr [(Expr,Expr)]
| Field Expr Expr
| Index Expr Expr
-- Store (Index 4 (Field "bFoo" "var0")) 5
-- var0 WITH .bFoo[4] := 5
-- Index 5 (Index 1) "var0")
-- var0[1][5]
deriving instance Show Expr
substExpr :: [(Var, Expr)] -> Expr -> Expr
substExpr su = go
where
go (Var v) = case lookup v su of
Nothing -> Var v
Just e -> e
go (Not e) = Not (go e)
go (And x y) = And (go x) (go y)
go (Or x y) = Or (go x) (go y)
go (Impl x y) = Impl (go x) (go y)
go (Equiv x y) = Equiv (go x) (go y)
go (Eq x y) = Eq (go x) (go y)
go (Le x y) = Le (go x) (go y)
go (Leq x y) = Leq (go x) (go y)
go (Ge x y) = Ge (go x) (go y)
go (Geq x y) = Geq (go x) (go y)
go (Add x y) = Add (go x) (go y)
go (Sub x y) = Sub (go x) (go y)
go (Mod x y) = Mod (go x) y
go (Call f es) = Call f (map go es)
go (Store s e) = Store (go s) (go e)
go (StoreMany a ies) = StoreMany (go a) (map (\(i,e) -> (go i, go e)) ies)
go (Field f e) = Field (go f) (go e)
go (Index i e) = Index (go i) (go e)
go e = e
leaf :: Expr -> Bool
leaf exp =
case exp of
(Var _) -> True
T -> True
F -> True
(NumLit _) -> True
_ -> False
parens :: Expr -> B.ByteString
parens exp =
if leaf exp
then concrete exp
else '(' `B.cons` (concrete exp `B.snoc` ')')
instance Concrete Expr where
concrete (Var v) = concrete v
concrete T = "TRUE"
concrete F = "FALSE"
concrete (Not e) = B.unwords ["NOT", parens e]
concrete (And e0 e1) = B.unwords [parens e0, "AND", parens e1]
concrete (Or e0 e1) = B.unwords [parens e0, "OR" , parens e1]
concrete (Impl e0 e1) = B.unwords [parens e0, "=>" , parens e1]
concrete (Equiv e0 e1) = B.unwords [parens e0, "<=>", parens e1]
concrete (Eq e0 e1) = B.unwords [parens e0, "=" , parens e1]
concrete (Le e0 e1) = B.unwords [parens e0, "<" , parens e1]
concrete (Leq e0 e1) = B.unwords [parens e0, "<=" , parens e1]
concrete (Ge e0 e1) = B.unwords [parens e0, ">" , parens e1]
concrete (Geq e0 e1) = B.unwords [parens e0, ">=" , parens e1]
concrete (NumLit n) = concrete n
concrete (Add e0 e1) = B.unwords [parens e0, "+", parens e1]
concrete (Sub e0 e1) = B.unwords [parens e0, "-", parens e1]
concrete (Mod e x) = B.unwords [parens e, "MOD", concrete x]
concrete (Call f args) = concrete f
`B.append` ('(' `B.cons` (args' `B.snoc` ')'))
where
args' = B.unwords $ intersperse "," (map concrete args)
concrete (Store s e) = v <> " WITH " <> f <> " := " <> concrete e
where
(v,f) = B.break (`elem` (".[" :: String)) (concrete s)
-- concrete (Store a i e) = concrete a <> " WITH "
-- <> B.concat (map concrete i)
-- <> " := " <> concrete e
concrete (StoreMany a ies)
= concrete a <> " WITH " <>
B.intercalate ", " [ f <> " := " <> concrete e
| (i,e) <- ies
, let f = B.dropWhile (not . (`elem` (".[" :: String)))
(concrete i)
]
concrete (Field f e) = concrete e <> "." <> concrete f
concrete (Index i e) = concrete e <> "[" <> concrete i <> "]"
-- concrete (Select e ss) = concrete e <> B.concat (map concrete ss)
-- concrete (Load a i) = concrete a <> "[" <> concrete i <> "]"
-- instance Concrete Selector where
-- concrete (Field f) = "." <> concrete f
-- concrete (Index i) = "[" <> concrete i <> "]"
var :: Var -> Expr
var = Var
true :: Expr
true = T
false :: Expr
false = F
not' :: Expr -> Expr
not' = Not
(.&&) :: Expr -> Expr -> Expr
(.&&) = And
(.||) :: Expr -> Expr -> Expr
(.||) = Or
(.=>) :: Expr -> Expr -> Expr
(.=>) T e = e
(.=>) x y = Impl x y
(.<=>) :: Expr -> Expr -> Expr
(.<=>) = Equiv
(.==) :: Expr -> Expr -> Expr
(.==) = Eq
(.<) :: Expr -> Expr -> Expr
(.<) = Le
(.<=) :: Expr -> Expr -> Expr
(.<=) = Leq
(.>) :: Expr -> Expr -> Expr
(.>) = Ge
(.>=) :: Expr -> Expr -> Expr
(.>=) = Geq
(.+) :: Expr -> Expr -> Expr
(.+) = Add
(.-) :: Expr -> Expr -> Expr
(.-) = Sub
(.%) :: Expr -> Integer -> Expr
(.%) = Mod
lit :: (Show a, Concrete a, Num a) => a -> Expr
lit = NumLit
intLit :: Integer -> Expr
intLit = lit
realLit :: Double -> Expr
realLit = lit
call :: Func -> [Expr] -> Expr
call = Call
store :: Expr -> Expr -> Expr
store = Store
storeMany :: Expr -> [(Expr,Expr)] -> Expr
storeMany = StoreMany
field :: Expr -> Expr -> Expr
field = Field
index :: Expr -> Expr -> Expr
index = Index
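-- A small usage sketch (illustrative, not part of the original module):
-- the expression smart constructors compose directly, and 'concrete'
-- renders the result, parenthesising non-leaf sub-expressions.
--
-- >>> B.putStrLn (concrete ((var "x" .+ intLit 1) .<= var "y"))
-- (x + 1) <= y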
--------------------------------------------------------------------------------
-- CVC4 Lib
----------------------------------------
-- Bounded int types
boundedFunc :: forall a . (Integral a, Bounded a)
=> Func -> a -> Statement
boundedFunc f _sz = Statement $ B.unwords
[ B.pack f, ":", "INT", "->", "BOOLEAN"
, "=", "LAMBDA", "(x:INT)", ":"
, exp (toInt minBound) (toInt maxBound)
]
where
toInt a = fromIntegral (a :: a)
x = var "x"
exp l h = concrete $ (intLit l .<= x) .&& (x .<= intLit h)
word8, word16, word32, word64, int8, int16, int32, int64 :: Func
word8 = "word8"
word16 = "word16"
word32 = "word32"
word64 = "word64"
int8 = "int8"
int16 = "int16"
int32 = "int32"
int64 = "int64"
word8Bound :: Statement
word8Bound = boundedFunc word8 (0 :: Word8)
word16Bound :: Statement
word16Bound = boundedFunc word16 (0 :: Word16)
word32Bound :: Statement
word32Bound = boundedFunc word32 (0 :: Word32)
word64Bound :: Statement
word64Bound = boundedFunc word64 (0 :: Word64)
int8Bound :: Statement
int8Bound = boundedFunc int8 (0 :: Int8)
int16Bound :: Statement
int16Bound = boundedFunc int16 (0 :: Int16)
int32Bound :: Statement
int32Bound = boundedFunc int32 (0 :: Int32)
int64Bound :: Statement
int64Bound = boundedFunc int64 (0 :: Int64)
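-- For illustration (not part of the original module), the statement built
-- by 'boundedFunc' for 'word8' renders to the following CVC4 text:
--
-- >>> B.putStrLn (concrete word8Bound)
-- word8 : INT -> BOOLEAN = LAMBDA (x:INT) : (0 <= x) AND (x <= 255);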
----------------------------------------
-- Mod
modAbs :: Func
modAbs = "mod"
-- | Abstraction: a % b (C semantics) implies
--
-- ( ((a >= 0) && (a % b >= 0) && (a % b < b) && (a % b <= a))
-- || ((a < 0) && (a % b <= 0) && (a % b > b) && (a % b >= a)))
--
-- a % b is abstracted with a fresh var v.
modFunc :: Statement
modFunc = Statement $ B.unwords
[ B.pack modAbs, ":", "(INT, INT)", "->", "INT" ]
modExp :: Expr -> Expr -> Expr -> Expr
modExp v a b
= ((a .>= z) .&& (v .>= z) .&& (v .< b) .&& (v .<= a))
.|| ((a .< z) .&& (v .<= z) .&& (v .> b) .&& (v .>= a))
where
z = intLit 0
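-- Usage sketch (illustrative, not part of the original module): "v0" stands
-- for the fresh variable that the caller is expected to generate for @a % b@;
-- the abstraction documented above is then emitted as an assertion.
modConstraintExample :: Statement
modConstraintExample = assert . noLoc $ modExp (var "v0") (var "a") (var "b")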
----------------------------------------
-- Mul
mulAbs :: Func
mulAbs = "mul"
mulFunc :: Statement
mulFunc = Statement $ B.unwords
[ B.pack mulAbs, ":", "(INT, INT)", "->", "INT" ]
mulExp :: Expr -> Expr -> Expr -> Expr
mulExp v a b
= (((a .== z) .|| (b .== z)) .=> (v .== z))
.&& ((a .== o) .=> (v .== b))
.&& ((b .== o) .=> (v .== a))
.&& (((a .> o) .&& (b .> o)) .=> ((v .> a) .&& (v .> b)))
where
z = intLit 0
o = intLit 1
----------------------------------------
-- Div
divAbs :: Func
divAbs = "div"
divFunc :: Statement
divFunc = Statement $ B.unwords
[ B.pack divAbs, ":", "(INT, INT)", "->", "INT" ]
divExp :: Expr -> Expr -> Expr -> Expr
divExp v a b
= ((b .== o) .=> (v .== a))
.&& ((a .== z) .=> (v .== z))
.&& (((a .>= o) .&& (b .> o)) .=> ((v .>= z) .&& (v .< a)))
where
z = intLit 0
o = intLit 1
cvc4Lib :: [Statement]
cvc4Lib =
[ word8Bound, word16Bound, word32Bound, word64Bound
, int8Bound, int16Bound, int32Bound, int64Bound
, modFunc, mulFunc, divFunc
]
--------------------------------------------------------------------------------
-- Testing
foo :: Statement
foo = assert . noLoc $ (intLit 3 .== var "x") .&& (var "x" .< intLit 4)
| GaloisInc/ivory | ivory-model-check/src/Ivory/ModelCheck/CVC4.hs | bsd-3-clause | 11,972 | 0 | 17 | 3,375 | 4,419 | 2,402 | 2,017 | 301 | 21 |
{-# LANGUAGE Safe #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE Arrows #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE TypeSynonymInstances #-}
{-# LANGUAGE MultiParamTypeClasses #-}
module
Control.Arrow.Machine.Misc.Discrete
(
-- * Discrete type
-- $type
T(),
updates,
value,
arr,
arr2,
arr3,
arr4,
arr5,
constant,
unsafeConstant,
hold,
accum,
fromEq,
edge,
asUpdater,
kSwitch,
dkSwitch,
-- * Discrete algebra
-- $alg
Alg(Alg),
eval,
refer
)
where
import Prelude hiding (id, (.))
import Control.Category
import Control.Arrow hiding (arr)
import Control.Applicative
import qualified Control.Arrow as Arr
import qualified Control.Arrow.Machine as P
import Data.Monoid (mconcat, mappend)
{-$type
This module should be imported manually. Qualified import is recommended.
This module provides an abstraction for continuous values that have a
finite number of change points.
>>> import qualified Control.Arrow.Machine.Misc.Discrete as D
>>> P.run (D.hold "apple" >>> D.arr reverse >>> D.edge) ["orange", "grape"]
["elppa","egnaro","eparg"]
In the above example, the input to "reverse" is continuous,
but the "D.edge" transducer extracts the change points without performing any string comparison.
This is possible because the intermediate type `T` carries the change information
together with the value itself.
-}
-- |The discrete signal type.
data T a = T {
updates :: (P.Event ()),
value :: a
}
makeT ::
Monad m =>
P.ProcessT m (P.Event (), b) (T b)
makeT = Arr.arr $ uncurry T
stimulate ::
Monad m =>
P.ProcessT m b (T c) ->
P.ProcessT m b (T c)
stimulate sf = P.dgSwitch (id &&& id) sf body $ \sf' _ -> sf'
where
body = proc (dy, _) ->
do
n <- P.now -< ()
disc <- makeT -< (updates dy `mappend` n, value dy)
returnA -< (disc, updates disc)
arr ::
Monad m =>
(b->c) ->
P.ProcessT m (T b) (T c)
arr f =
Arr.arr $ \(T ev x) ->
T ev (f x)
arr2 ::
Monad m =>
(b1->b2->c) ->
P.ProcessT m (T b1, T b2) (T c)
arr2 f =
Arr.arr $ \(T ev1 x1, T ev2 x2) ->
T (mconcat [ev1, ev2]) (f x1 x2)
arr3 ::
Monad m =>
(b1->b2->b3->c) ->
P.ProcessT m (T b1, T b2, T b3) (T c)
arr3 f =
Arr.arr $ \(T ev1 x1, T ev2 x2, T ev3 x3) ->
T (mconcat [ev1, ev2, ev3]) (f x1 x2 x3)
arr4 ::
Monad m =>
(b1->b2->b3->b4->c) ->
P.ProcessT m (T b1, T b2, T b3, T b4) (T c)
arr4 f =
Arr.arr $ \(T ev1 x1, T ev2 x2, T ev3 x3, T ev4 x4) ->
T (mconcat [ev1, ev2, ev3, ev4]) (f x1 x2 x3 x4)
arr5 ::
Monad m =>
(b1->b2->b3->b4->b5->c) ->
P.ProcessT m (T b1, T b2, T b3, T b4, T b5) (T c)
arr5 f =
Arr.arr $ \(T ev1 x1, T ev2 x2, T ev3 x3, T ev4 x4, T ev5 x5) ->
T (mconcat [ev1, ev2, ev3, ev4, ev5]) (f x1 x2 x3 x4 x5)
constant::
Monad m =>
c ->
P.ProcessT m b (T c)
constant x =
(P.now &&& Arr.arr (const x)) >>> makeT
-- |Constant without initial notifications.
-- Users must manage initialization manually.
unsafeConstant::
Monad m =>
c ->
P.ProcessT m b (T c)
unsafeConstant x =
(pure P.noEvent &&& Arr.arr (const x)) >>> makeT
onUpdate ::
Monad m =>
P.ProcessT m (P.Event b) (P.Event ())
onUpdate = proc ev ->
do
n <- P.now -< ()
returnA -< n `mappend` P.collapse ev
hold ::
Monad m =>
b ->
P.ProcessT m (P.Event b) (T b)
hold i =
(onUpdate &&& P.hold i) >>> makeT
accum ::
Monad m =>
b ->
P.ProcessT m (P.Event (b->b)) (T b)
accum i =
(onUpdate &&& P.accum i) >>> makeT
fromEq ::
(Monad m, Eq b) =>
P.ProcessT m b (T b)
fromEq = proc x ->
do
ev <- P.edge -< x
returnA -< T (P.collapse ev) x
edge ::
Monad m =>
P.ProcessT m (T b) (P.Event b)
edge = Arr.arr $ \(T ev x) -> x <$ ev
asUpdater ::
Monad m =>
(b -> m c) ->
P.ProcessT m (T b) (P.Event c)
asUpdater fmx = edge >>> P.fire fmx
kSwitch ::
Monad m =>
P.ProcessT m b (T c) ->
P.ProcessT m (b, T c) (P.Event t) ->
(P.ProcessT m b (T c) -> t -> P.ProcessT m b (T c)) ->
P.ProcessT m b (T c)
kSwitch sf test k = P.kSwitch sf test (\sf' x -> stimulate (k sf' x))
dkSwitch ::
Monad m =>
P.ProcessT m b (T c) ->
P.ProcessT m (b, T c) (P.Event t) ->
(P.ProcessT m b (T c) -> t -> P.ProcessT m b (T c)) ->
P.ProcessT m b (T c)
dkSwitch sf test k = P.dkSwitch sf test (\sf' x -> stimulate (k sf' x))
{-$alg
Calculations between discrete types.
An example is below.
@
holdAdd ::
(Monad m, Num b) =>
ProcessT m (Event b, Event b) (Discrete b)
holdAdd = proc (evx, evy) ->
do
x <- D.hold 0 -< evx
y <- D.hold 0 -< evy
D.eval (refer fst + refer snd) -< (x, y)
@
The last line is equivalent to "arr2 (+) -< (x, y)".
Using Alg, you can construct more complex calculations
between discrete signals.
-}
-- |Discrete algebra type.
newtype Alg m i o =
Alg { eval :: P.ProcessT m i (T o) }
refer ::
Monad m =>
(e -> T b) -> Alg m e b
refer = Alg . Arr.arr
instance
Monad m => Functor (Alg m i)
where
fmap f alg = Alg $ eval alg >>> arr f
instance
Monad m => Applicative (Alg m i)
where
pure = Alg . constant
af <*> aa = Alg $ (eval af &&& eval aa) >>> arr2 ($)
instance
(Monad m, Num o) =>
Num (Alg m i o)
where
abs = fmap abs
signum = fmap signum
fromInteger = pure . fromInteger
(+) = liftA2 (+)
(-) = liftA2 (-)
(*) = liftA2 (*)
| as-capabl/machinecell | src/Control/Arrow/Machine/Misc/Discrete.hs | bsd-3-clause | 5,610 | 3 | 14 | 1,667 | 2,248 | 1,169 | 1,079 | 176 | 1 |
module Generics.GPAH.Date.Base where
import Control.DeepSeq
import Data.Monoid
import qualified Data.Map as M
data Analysis = Analysis {
yearUpdatesToSyb :: M.Map Int Int
, yearUpdatesToUpl :: M.Map Int Int
, yearNewToSyb :: M.Map Int Int
, yearNewToUpl :: M.Map Int Int
, hackageTime :: String
, yearUpdatesToDerive :: M.Map Int Int
, yearNewToDerive :: M.Map Int Int
}
deriving (Show)
instance Monoid Analysis where
mempty = Analysis mempty mempty mempty mempty mempty mempty mempty
(Analysis x1 x2 x3 x4 x5 x6 x7) `mappend` (Analysis y1 y2 y3 y4 y5 y6 y7) = Analysis
(M.unionWith (+) x1 y1)
(M.unionWith (+) x2 y2)
(M.unionWith (+) x3 y3)
(M.unionWith (+) x4 y4)
(x5 ++ y5)
(M.unionWith (+) x6 y6)
(M.unionWith (+) x7 y7)
instance NFData Analysis where
rnf (Analysis a1 a2 a3 a4 a5 a6 a7) = a1
`deepseq` a2
`deepseq` a3
`deepseq` a4
`deepseq` a5
`deepseq` a6
`deepseq` a7
`deepseq` ()
| bezirg/gpah | src/Generics/GPAH/Date/Base.hs | bsd-3-clause | 1,716 | 0 | 12 | 976 | 387 | 217 | 170 | 32 | 0 |
module Input
( AppInput
, parseWinInput
, mousePos
, lbp
, lbpPos
, lbDown
, rbp
, rbpPos
, rbDown
, quitEvent
) where
import Data.Maybe
import FRP.Yampa
import Linear (V2(..))
import Linear.Affine (Point(..))
import qualified SDL
import Types
-- <| Signal Functions |> --
-- | Current mouse position
mousePos :: SF AppInput (Double,Double)
mousePos = arr inpMousePos
-- | Events that indicate left button click
lbp :: SF AppInput (Event ())
lbp = lbpPos >>^ tagWith ()
-- | Events that indicate left button click and are tagged with mouse position
lbpPos :: SF AppInput (Event (Double,Double))
lbpPos = inpMouseLeft ^>> edgeJust
-- | Is left button down
lbDown :: SF AppInput Bool
lbDown = arr (isJust . inpMouseLeft)
-- | Events that indicate right button click
rbp :: SF AppInput (Event ())
rbp = rbpPos >>^ tagWith ()
-- | Events that indicate right button click and are tagged with mouse position
rbpPos :: SF AppInput (Event (Double,Double))
rbpPos = inpMouseRight ^>> edgeJust
-- | Is right button down
rbDown :: SF AppInput Bool
rbDown = arr (isJust . inpMouseRight)
quitEvent :: SF AppInput (Event ())
quitEvent = arr inpQuit >>> edge
-- | Exported as abstract type. Fields are accessed with signal functions.
data AppInput = AppInput
{ inpMousePos :: (Double, Double) -- ^ Current mouse position
, inpMouseLeft :: Maybe (Double, Double) -- ^ Left button currently down
, inpMouseRight :: Maybe (Double, Double) -- ^ Right button currently down
, inpQuit :: Bool -- ^ SDL's QuitEvent
}
initAppInput :: AppInput
initAppInput = AppInput { inpMousePos = (0, 0)
, inpMouseLeft = Nothing
, inpMouseRight = Nothing
, inpQuit = False
}
-- | Filter and transform SDL events into events which are relevant to our
-- application
parseWinInput :: SF WinInput AppInput
parseWinInput = accumHoldBy nextAppInput initAppInput
-- | Compute next input
-- FIXME: I am reinventing lenses once again
nextAppInput :: AppInput -> SDL.EventPayload -> AppInput
nextAppInput inp SDL.QuitEvent = inp { inpQuit = True }
nextAppInput inp (SDL.MouseMotionEvent { SDL.mouseMotionEventPos = P (V2 x y) }) =
inp { inpMousePos = (fromIntegral x, fromIntegral y) }
nextAppInput inp ev@(SDL.MouseButtonEvent{}) = inp { inpMouseLeft = lmb
, inpMouseRight = rmb }
where motion = SDL.mouseButtonEventMotion ev
button = SDL.mouseButtonEventButton ev
pos = inpMousePos inp
inpMod = case (motion,button) of
(SDL.MouseButtonUp, SDL.ButtonLeft) -> first (const Nothing)
(SDL.MouseButtonDown, SDL.ButtonLeft) -> first (const (Just pos))
(SDL.MouseButtonUp, SDL.ButtonRight) -> second (const Nothing)
(SDL.MouseButtonDown, SDL.ButtonRight) -> second (const (Just pos))
_ -> id
(lmb,rmb) = inpMod $ (inpMouseLeft &&& inpMouseRight) inp
nextAppInput inp _ = inp
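-- A minimal usage sketch (not part of the original module): counting left
-- clicks by composing the accessor signal functions above. 'clickCount' is
-- illustrative only.
clickCount :: SF AppInput Int
clickCount = lbp >>> accumHoldBy (\n _ -> n + 1) 0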
| pyrtsa/yampa-demos-template | src/Input.hs | bsd-3-clause | 3,210 | 0 | 14 | 915 | 772 | 435 | 337 | 62 | 5 |
{-# LANGUAGE OverloadedStrings #-}
module Yahoofinance
( getHistoricalData
, HistoricalQuote (..)
, QuoteMap
, buildHistoricalDataQuery
, QuoteSymbol
) where
import Control.Applicative
import Data.Aeson
import qualified Data.ByteString.Lazy as B
import qualified Data.ByteString.Lazy.Char8 as BS
import qualified Data.Function as Func (on)
import Data.List (groupBy, intercalate, sort)
import Data.List.Split
import qualified Data.Map as Map
import Data.Time
import Data.Time.Calendar (Day (..), fromGregorian)
import Network.HTTP.Conduit (simpleHttp)
import qualified Network.URI.Encode as Enc
import System.Locale (defaultTimeLocale)
testFrom :: Day
testFrom = fromGregorian 2015 10 15
testTo :: Day
testTo = fromGregorian 2015 10 16
testQuotes :: [QuoteSymbol]
testQuotes = ["AIR.DE"]
today = fmap utctDay getCurrentTime
yesterday = addDays (-1) <$> today
type QuoteSymbol = String
type QuoteMap = Map.Map QuoteSymbol [HistoricalQuote]
newtype QuoteList = QuoteList [HistoricalQuote] deriving (Show)
data HistoricalQuote = HistoricalQuote {
symbol :: QuoteSymbol,
date :: String,
open :: Float,
high :: Float,
low :: Float,
close :: Float
-- adjclose :: QuoteCurrency,
-- volume :: Int
} deriving (Show)
instance Eq HistoricalQuote where
x == y = symbol x == symbol y && date x == date y
instance Ord HistoricalQuote where
HistoricalQuote {date = d} <= HistoricalQuote {date = d2} = d <= d2
instance FromJSON QuoteList where
parseJSON (Object v) =
QuoteList <$> (res >>= (.: "quote"))
where res = (v .: "query") >>= (.: "results")
toFloat :: String -> Float
toFloat = read
instance FromJSON HistoricalQuote where
parseJSON (Object v) = HistoricalQuote <$>
(v .: "Symbol") <*>
(v .: "Date") <*>
fmap toFloat (v .: "Open") <*>
fmap toFloat (v .: "High") <*>
fmap toFloat (v .: "Low") <*>
fmap toFloat (v .: "Close")
-- where res = (v .: "query") >>= (.: "results") >>= (.: "quote")
-- parseJSON _ = mzero
baseUrl :: String
baseUrl = "http://query.yahooapis.com/v1/public/yql"
getHistoricalData :: [QuoteSymbol] -> Day -> Day -> IO QuoteMap
getHistoricalData symbols from to
| len > 500 = Map.unions <$> mapM (\s -> getHistoricalData s from to) (chunk 500 symbols)
| otherwise = do
content <- getJSON $ buildHistoricalDataQuery from to symbols
let parsed = decode content :: Maybe QuoteList
return $ transformHistoricalData parsed
where len = length symbols
transformHistoricalData :: Maybe QuoteList -> QuoteMap
transformHistoricalData (Just (QuoteList q)) =
Map.fromList $ map (\a -> (symbol (head a), sort a)) groupedBySymbol
where
groupedBySymbol = groupBy (Func.on (==) symbol) q
transformHistoricalData Nothing = Map.empty
buildHistoricalDataQuery :: Day -> Day -> [QuoteSymbol] -> String
buildHistoricalDataQuery from to symbols =
let
dbFormat = formatTime defaultTimeLocale "%F"
symbolsFormatted = "'" ++ intercalate "','" symbols ++ "'"
query = "select * from yahoo.finance.historicaldata \
\where symbol in (" ++ symbolsFormatted ++ ") and \
\startDate = '" ++ dbFormat from ++ "' and \
\endDate = '" ++ dbFormat to ++ "'"
env = "&env=store%3A%2F%2Fdatatables.org%2Falltableswithkeys"
in
baseUrl ++ "?q=" ++ Enc.encode query ++ "&format=json" ++ env
getJSON :: String -> IO B.ByteString
getJSON = simpleHttp
testJson = BS.pack "{\"query\":{\"results\":{\"quote\":[{\"Symbol\":\"YHOO\",\"Date\":\"2015-04-24\",\"Open\":\"43.73\",\"High\":\"44.71\",\"Low\":\"43.69\",\"Close\":\"44.52\",\"Volume\":\"11267500\",\"Adj_Close\":\"44.52\"}]}}}"
testmain :: IO ()
testmain = do
content <- getJSON $ buildHistoricalDataQuery testFrom testTo testQuotes
let historicalData = eitherDecode content :: Either String [HistoricalQuote]
case historicalData of
Left err -> putStrLn err
Right ps -> print ps
parseJson :: IO ()
parseJson = do
let historicalData = eitherDecode testJson :: Either String QuoteList
print historicalData
| vimster/stocker | src/Yahoofinance.hs | bsd-3-clause | 4,259 | 0 | 14 | 946 | 1,135 | 609 | 526 | 98 | 2 |
{-# LANGUAGE TupleSections #-}
module GenoEquiv (uniformEquivMat,allEquivMats,uniformList,linearCombMats) where
import Common
import Prelude hiding (replicate, map, foldl1, zipWith, map, concat, reverse, length, head, map, concatMap, (++), zipWith)
import Data.List.Stream as DL
import Numeric.Container as NC
import Data.Function (on)
import Control.Monad (ap)
uniformList :: Int -> [Double]
uniformList n = replicate n (1 / fromIntegral n)
uniformEquivMat n = linearCombMats (allEquivMats n) (uniformList n)
allEquivMats n = let states = allStates n
in map (permMat states) (allBoolLists (div n 2))
linearCombMats :: [Matrix Double] -> [Double] -> Matrix Double
linearCombMats mats coefs = foldl1 add $ zipWith scale coefs mats
permMat states bs = fromRows $ map (destIndicator states bs) states
standardBasisVec :: Int -> Int -> Vector Double
standardBasisVec n i = let coord = [(i,1.0)]
in assoc n 0 coord
--destIndicator :: [IbdState] -> [Bool] -> IbdState -> NC.Vector Double
destIndicator states bs xs = let flipped = reCode ( concat ( flipPairs bs (pairOff xs) reverse))
coord = (,1.0)
n = length states
in head $ map (standardBasisVec n) $ DL.elemIndices flipped states
allBoolLists 0 = [[]]
allBoolLists n = concatMap extend (allBoolLists (n-1))
where extend xs = map((xs++).(:[])) [True,False]
flipPairs :: [Bool] -> [a] -> (a -> a) -> [a]
flipPairs bs xs f = if (length bs `mod` 2) == 0
then zipWith g bs xs
else error "Not an even list"
where g b x = if b then f x else x
| cglazner/ibd_stitch | src/GenoEquiv.hs | bsd-3-clause | 1,735 | 1 | 15 | 487 | 609 | 328 | 281 | 31 | 3 |
{-# language FlexibleInstances, ScopedTypeVariables #-}
module LibSpec where
import qualified Data.Vector as V
import Test.Hspec
import Test.Hspec.QuickCheck
import Test.QuickCheck
import Lib
main :: IO ()
main = hspec spec
spec :: Spec
spec = do
describe "Numerical functions" $ do
-- it "works" $
-- True `shouldBe` True
it "Inner product of two orthogonal vectors (R^3) is ~ 0" $
nearZero (v2a <.> v2b) `shouldBe` True
it "Inner product of two orthogonal vectors (R^67, after centering) is ~ 0" $
nearZero (centerData v3a <.> centerData v3b) `shouldBe` True
it "Mean of an odd-symmetry sequence (R^23) is ~ 0" $
nearZero (meanV v1) `shouldBe` True
prop "Pearson R of a random vector (R^100) with itself is ~ 1" $
\(x :: V.Vector Double) -> nearOne $ pearsonR x x
describe "Constants" $ do
it "Number of days == 183" $ nDaysTot `shouldBe` 183
it " '', first period == 91" $ obsLen `shouldBe` 91
it " '', second period == 92" $ obsLen2 `shouldBe` 92
-- | Arbitrary instances for QuickCheck
instance Arbitrary (V.Vector Double) where
arbitrary = (V.fromList <$> vector 100) `suchThat` (nonZero . V.sum)
-- | Test data
v1, v2a, v2b, v3a, v3b :: V.Vector Double
v1 = V.fromList [-11 .. 11]
v2a = V.fromList [1, 0, 0]
v2b = V.fromList [0, 1, 0]
v3a = V.fromList [0..66]
v3b = V.fromList [66..0]
| ocramz/test-ru-stream-regression | test/LibSpec.hs | bsd-3-clause | 1,380 | 0 | 15 | 311 | 403 | 217 | 186 | 32 | 1 |
{-# LANGUAGE ConstraintKinds, CPP, DeriveDataTypeable, FlexibleContexts, OverloadedStrings #-}
module Facebook.Graph
( getObject
, getObjectRec
, postObject
, postForm
, postFormVideo
, deleteForm
, deleteObject
, searchObjects
, (#=)
, SimpleType(..)
, Place(..)
, Location(..)
, GeoCoordinates(..)
, Tag(..)
) where
import Control.Monad.IO.Class
import Control.Applicative
import qualified Control.Exception.Lifted as E
import Control.Monad (mzero)
import Control.Monad.Trans.Control (MonadBaseControl)
import Data.ByteString.Char8 (ByteString)
import Data.Int (Int8, Int16, Int32, Int64)
import Data.List (intersperse)
import Data.Text (Text)
import Data.Typeable (Typeable)
import Data.Word (Word, Word8, Word16, Word32, Word64)
#if MIN_VERSION_time(1,5,0)
import Data.Time (defaultTimeLocale)
#else
import System.Locale (defaultTimeLocale)
#endif
import qualified Control.Monad.Trans.Resource as R
import qualified Data.Aeson as A
import qualified Data.Aeson.Types as A
import qualified Data.Aeson.Encode as AE (fromValue)
import qualified Data.ByteString.Char8 as B
import qualified Data.Text.Encoding as TE
import qualified Data.Text.Lazy as TL
import qualified Data.Text.Lazy.Builder as TLB
import qualified Data.Time as TI
import qualified Network.HTTP.Conduit as H
import qualified Network.HTTP.Types as HT
import Network.HTTP.Client.MultipartFormData
import Facebook.Auth
import Facebook.Base
import Facebook.Monad
import Facebook.Types
import Facebook.Pager
import Facebook.Records
-- import Debug.Trace
trace a b = b
-- | Make a raw @GET@ request to Facebook's Graph API.
getObjectRec :: (R.MonadResource m, MonadBaseControl IO m, A.FromJSON rec, ToBS rec) =>
Text -- ^ Path (should begin with a slash @\/@)
-> [(ByteString, rec)] -- ^ Arguments to be passed to Facebook
-> Maybe (AccessToken anyKind) -- ^ Optional access token
-> FacebookT anyAuth m rec
getObjectRec path query mtoken =
let query' = map (\(bs, rec) -> (bs, toBS rec)) query
in trace (concat $ map show query') $ getObject path query' mtoken
-- | Make a raw @GET@ request to Facebook's Graph API.
getObject :: (R.MonadResource m, MonadBaseControl IO m, A.FromJSON a) =>
Text -- ^ Path (should begin with a slash @\/@)
-> [Argument] -- ^ Arguments to be passed to Facebook
-> Maybe (AccessToken anyKind) -- ^ Optional access token
-> FacebookT anyAuth m a
getObject path query mtoken =
runResourceInFb $
asJson =<< fbhttp =<< ((trace $ show path) $ fbreq path mtoken query)
-- | Make a raw @POST@ request to Facebook's Graph API.
postForm :: (R.MonadResource m, MonadBaseControl IO m, A.FromJSON a) =>
Text -- ^ Path (should begin with a slash @\/@)
-> [Part] -- ^ Arguments to be passed to Facebook
-> AccessToken anyKind -- ^ Access token
-> FacebookT Auth m (Either FacebookException a)
postForm a b c = do
#if DEBUG
liftIO $ print "postForm"
liftIO $ print (a,b)
#endif
methodForm HT.methodPost a b c
postFormVideo :: (R.MonadResource m, MonadBaseControl IO m, A.FromJSON a) =>
Text -- ^ Path (should begin with a slash @\/@)
-> [Part] -- ^ Arguments to be passed to Facebook
-> AccessToken anyKind -- ^ Access token
-> FacebookT Auth m (Either FacebookException a)
postFormVideo = methodFormVideo HT.methodPost
-- | Make a raw @DELETE@ request to Facebook's Graph API.
deleteForm :: (R.MonadResource m, MonadBaseControl IO m, A.FromJSON a) =>
Text -- ^ Path (should begin with a slash @\/@)
-> [Part] -- ^ Arguments to be passed to Facebook
-> AccessToken anyKind -- ^ Access token
-> FacebookT Auth m (Either FacebookException a)
deleteForm = methodForm HT.methodDelete
instance A.FromJSON a => A.FromJSON (Either FacebookException a) where
parseJSON json@(A.Object v) = do
val <- v A..:? "error" :: A.Parser (Maybe A.Value)
case val of
Nothing -> do
rec <- A.parseJSON json
pure $ Right rec
Just _ -> do
rec <- A.parseJSON json
pure $ Left rec
-- | Make a raw request with the given method to Facebook's Graph API,
-- using a multipart form body. Used by 'postForm' and 'deleteForm'.
methodForm :: (R.MonadResource m, MonadBaseControl IO m, A.FromJSON a) =>
HT.Method
-> Text -- ^ Path (should begin with a slash @\/@)
-> [Part] -- ^ Arguments to be passed to Facebook
-> AccessToken anyKind -- ^ Access token
-> FacebookT Auth m (Either FacebookException a)
methodForm method path parts token = runResourceInFb $ do
req <- fbreq path (Just token) []
req' <- formDataBody parts req
asJson =<< fbhttp req' { H.method = method}
--req'' <- fbhttp req' { H.method = method}
--val <- E.try $ asJson req''
--case val :: Either E.SomeException a of
-- Right json -> return $ Right json
-- Left _ -> do
-- val' <- E.try $ asJson req''
-- case val' :: Either E.SomeException FacebookException of
-- Right fbe -> return $ Left fbe
-- Left err -> error $ show err -- FIXME
methodFormVideo :: (R.MonadResource m, MonadBaseControl IO m, A.FromJSON a) =>
HT.Method
-> Text -- ^ Path (should begin with a slash @\/@)
-> [Part] -- ^ Arguments to be passed to Facebook
-> AccessToken anyKind -- ^ Access token
-> FacebookT Auth m (Either FacebookException a)
methodFormVideo method path parts token = runResourceInFb $ do
req <- fbreqVideo path (Just token) []
req' <- formDataBody parts req
asJson =<< fbhttp req' { H.method = method}
--req'' <- fbhttp req' { H.method = method}
--val <- E.try $ asJson req''
--case val :: Either E.SomeException a of
-- Right json -> return $ Right json
-- Left _ -> do
-- val' <- E.try $ asJson req''
-- case val' :: Either E.SomeException FacebookException of
-- Right fbe -> return $ Left fbe
-- Left err -> error $ show err -- FIXME
-- | Make a raw @POST@ request to Facebook's Graph API.
postObject :: (R.MonadResource m, MonadBaseControl IO m, A.FromJSON a) =>
Text -- ^ Path (should begin with a slash @\/@)
-> [Argument] -- ^ Arguments to be passed to Facebook
-> AccessToken anyKind -- ^ Access token
-> FacebookT Auth m a
postObject = methodObject HT.methodPost
-- | Make a raw @DELETE@ request to Facebook's Graph API.
deleteObject :: (R.MonadResource m, MonadBaseControl IO m, A.FromJSON a) =>
Text -- ^ Path (should begin with a slash @\/@)
-> [Argument] -- ^ Arguments to be passed to Facebook
-> AccessToken anyKind -- ^ Access token
-> FacebookT Auth m a
deleteObject = methodObject HT.methodDelete
-- | Helper function used by 'postObject' and 'deleteObject'.
methodObject :: (R.MonadResource m, MonadBaseControl IO m, A.FromJSON a) =>
HT.Method
-> Text -- ^ Path (should begin with a slash @\/@)
-> [Argument] -- ^ Arguments to be passed to Facebook
-> AccessToken anyKind -- ^ Access token
-> FacebookT Auth m a
methodObject method path query token =
runResourceInFb $ do
req <- fbreq path (Just token) query
asJson =<< fbhttp req { H.method = method }
-- | Make a raw @GET@ request to the /search endpoint of Facebook's
-- Graph API. Returns a 'Pager' of decoded results.
searchObjects :: (R.MonadResource m, MonadBaseControl IO m, A.FromJSON a)
=> Text -- ^ A Facebook object type to search for
-> Text -- ^ The keyword to search for
-> [Argument] -- ^ Additional arguments to pass
-> Maybe UserAccessToken -- ^ Optional access token
-> FacebookT anyAuth m (Pager a)
searchObjects objectType keyword query = getObject "/search" query'
where query' = ("q" #= keyword) : ("type" #= objectType) : query
----------------------------------------------------------------------
-- | Create an 'Argument' with a 'SimpleType'. See the docs on
-- 'createAction' for an example.
(#=) :: SimpleType a => ByteString -> a -> Argument
p #= v = (p, encodeFbParam v)
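-- For illustration (not part of the original module): with OverloadedStrings,
-- @"limit" #= (25 :: Int)@ builds the argument pair @("limit", "25")@, ready
-- to be passed to e.g. 'getObject' or 'postObject'.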
-- | Class for data types that may be represented as a Facebook
-- simple type. (see
-- <https://developers.facebook.com/docs/opengraph/simpletypes/>).
class SimpleType a where
encodeFbParam :: a -> B.ByteString
-- | Facebook's simple type @Boolean@.
instance SimpleType Bool where
encodeFbParam b = if b then "1" else "0"
-- | Facebook's simple type @DateTime@ with only the date.
instance SimpleType TI.Day where
encodeFbParam = B.pack . TI.formatTime defaultTimeLocale "%Y-%m-%d"
-- | Facebook's simple type @DateTime@.
instance SimpleType TI.UTCTime where
encodeFbParam = B.pack . TI.formatTime defaultTimeLocale "%Y%m%dT%H%MZ"
-- | Facebook's simple type @DateTime@.
instance SimpleType TI.ZonedTime where
encodeFbParam = encodeFbParam . TI.zonedTimeToUTC
-- @Enum@ doesn't make sense to support as a Haskell data type.
-- | Facebook's simple type @Float@ with less precision than supported.
instance SimpleType Float where
encodeFbParam = showBS
-- | Facebook's simple type @Float@.
instance SimpleType Double where
encodeFbParam = showBS
-- | Facebook's simple type @Integer@.
instance SimpleType Int where
encodeFbParam = showBS
-- | Facebook's simple type @Integer@.
instance SimpleType Word where
encodeFbParam = showBS
-- | Facebook's simple type @Integer@.
instance SimpleType Int8 where
encodeFbParam = showBS
-- | Facebook's simple type @Integer@.
instance SimpleType Word8 where
encodeFbParam = showBS
-- | Facebook's simple type @Integer@.
instance SimpleType Int16 where
encodeFbParam = showBS
-- | Facebook's simple type @Integer@.
instance SimpleType Word16 where
encodeFbParam = showBS
-- | Facebook's simple type @Integer@.
instance SimpleType Int32 where
encodeFbParam = showBS
-- | Facebook's simple type @Integer@.
instance SimpleType Word32 where
encodeFbParam = showBS
-- | Facebook's simple type @Integer@.
instance SimpleType Int64 where
encodeFbParam = showBS
-- | Facebook's simple type @Integer@.
instance SimpleType Word64 where
encodeFbParam = showBS
-- | Facebook's simple type @String@.
instance SimpleType Text where
encodeFbParam = TE.encodeUtf8
-- | Facebook's simple type @String@.
instance SimpleType ByteString where
encodeFbParam = id
-- | An object's 'Id' code.
instance SimpleType Id where
encodeFbParam = TE.encodeUtf8 . idCode
-- | 'Permission' is a @newtype@ of 'Text'
instance SimpleType Permission where
encodeFbParam = encodeFbParam . unPermission
-- | A comma-separated list of simple types. This definition
-- doesn't work everywhere, just for a few combinations that
-- Facebook uses (e.g. @[Int]@). Also, encoding a list of lists
-- is the same as encoding the concatenation of all lists. In
-- other words, this instance is here more for your convenience
-- than to make sure your code is correct.
instance SimpleType a => SimpleType [a] where
encodeFbParam = B.concat . intersperse "," . map encodeFbParam
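-- For illustration (not part of the original module):
--
-- >>> encodeFbParam [1, 2, 3 :: Int]
-- "1,2,3"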
showBS :: Show a => a -> B.ByteString
showBS = B.pack . show
----------------------------------------------------------------------
-- | Information about a place. This is not a Graph Object,
-- instead it's just a field of a Object. (Not to be confused
-- with the @Page@ object.)
data Place =
Place { placeId :: Id -- ^ @Page@ ID.
, placeName :: Maybe Text -- ^ @Page@ name.
, placeLocation :: Maybe Location
}
deriving (Eq, Ord, Show, Read, Typeable)
instance A.FromJSON Place where
parseJSON (A.Object v) =
Place <$> v A..: "id"
<*> v A..:? "name"
<*> v A..:? "location"
parseJSON _ = mzero
-- | A geographical location.
data Location =
Location { locationStreet :: Maybe Text
, locationCity :: Maybe Text
, locationState :: Maybe Text
, locationCountry :: Maybe Text
, locationZip :: Maybe Text
, locationCoords :: Maybe GeoCoordinates
}
deriving (Eq, Ord, Show, Read, Typeable)
instance A.FromJSON Location where
parseJSON obj@(A.Object v) =
Location <$> v A..:? "street"
<*> v A..:? "city"
<*> v A..:? "state"
<*> v A..:? "country"
<*> v A..:? "zip"
<*> A.parseJSON obj
parseJSON _ = mzero
-- | Geographical coordinates.
data GeoCoordinates =
GeoCoordinates { latitude :: !Double
, longitude :: !Double
}
deriving (Eq, Ord, Show, Read, Typeable)
instance A.FromJSON GeoCoordinates where
parseJSON (A.Object v) =
GeoCoordinates <$> v A..: "latitude"
<*> v A..: "longitude"
parseJSON _ = mzero
instance SimpleType GeoCoordinates where
encodeFbParam c =
let obj = A.object [ "latitude" A..= latitude c
, "longitude" A..= longitude c]
toBS = TE.encodeUtf8 . TL.toStrict . TLB.toLazyText . AE.fromValue
in toBS obj
-- | A tag (i.e. \"I'll /tag/ you on my post\").
data Tag =
Tag { tagId :: Id -- ^ Who is tagged.
, tagName :: Text -- ^ Name of the tagged person.
}
deriving (Eq, Ord, Show, Read, Typeable)
instance A.FromJSON Tag where
parseJSON (A.Object v) =
Tag <$> v A..: "id"
<*> v A..: "name"
parseJSON _ = mzero
| BeautifulDestinations/fb | src/Facebook/Graph.hs | bsd-3-clause | 13,935 | 2 | 16 | 3,676 | 2,833 | 1,554 | 1,279 | 246 | 1 |
module Utils where
import System.Directory (doesFileExist)
import System.Environment (getEnv)
import System.FilePath ((</>))
import System.IO.Error (tryIOError)
import qualified Paths_elm_server as This
-- |The absolute path to a data file
getDataFile :: FilePath -> IO FilePath
getDataFile name = do
path <- This.getDataFileName name
exists <- doesFileExist path
if exists
then return path
else do
environment <- tryIOError (getEnv "ELM_HOME")
case environment of
Right env -> return (env </> name)
Left _ ->
fail $ unlines
[ "Unable to find the ELM_HOME environment variable when searching"
, "for the " ++ name ++ " file."
, ""
, "If you installed Elm Platform with the Mac or Windows installer, it looks like"
, "ELM_HOME was not set automatically. Look up how to set environment variables"
, "on your platform and set ELM_HOME to the directory that contains Elm's static"
, "files:"
, ""
, " * On Mac it is /usr/local/share/elm"
, " * On Windows it is one of the following:"
, " C:/Program Files/Elm Platform/0.13/share"
, " C:/Program Files (x86)/Elm Platform/0.13/share"
, ""
, "If it seems like a more complex issue, please report it here:"
, " <https://github.com/elm-lang/elm-platform/issues>"
]
| alisheikh/elm-server | server/Utils.hs | bsd-3-clause | 1,467 | 0 | 17 | 451 | 221 | 122 | 99 | 33 | 3 |
module Idris.Unlit(unlit) where
import Idris.Core.TT
import Data.Char
unlit :: FilePath -> String -> TC String
unlit f s = do let s' = map ulLine (lines s)
check f 1 s'
return $ unlines (map snd s')
data LineType = Prog | Blank | Comm
ulLine :: String -> (LineType, String)
ulLine ('>':' ':xs) = (Prog, xs)
ulLine ('>':xs) = (Prog, xs)
ulLine xs | all isSpace xs = (Blank, "")
-- make sure it's not a doc comment
| otherwise = (Comm, '-':'-':' ':'>':xs)
check :: FilePath -> Int -> [(LineType, String)] -> TC ()
check f l (a:b:cs) = do chkAdj f l (fst a) (fst b)
check f (l+1) (b:cs)
check f l [x] = return ()
check f l [] = return ()
chkAdj :: FilePath -> Int -> LineType -> LineType -> TC ()
chkAdj f l Prog Comm = tfail $ At (FC f l 0) ProgramLineComment --TODO: Include column?
chkAdj f l Comm Prog = tfail $ At (FC f l 0) ProgramLineComment --TODO: Include column?
chkAdj f l _ _ = return ()
| ctford/Idris-Elba-dev | src/Idris/Unlit.hs | bsd-3-clause | 998 | 0 | 12 | 282 | 481 | 247 | 234 | 22 | 1 |
module Main where
import Suck
import Spew
import System.Environment
-- maximally naive approach to command line input
main :: IO ()
main = do
[input] <- getArgs
if input == "load-model"
then suckDefaultsToFile
else spew (read input) >>= putStrLn
| rudyardrichter/sokal | main.hs | mit | 272 | 0 | 11 | 65 | 69 | 38 | 31 | 10 | 2 |
module CO4.Prefixfree
(numeric, invNumeric, discriminates)
where
import qualified Control.Exception as Exception
import Satchmo.Core.Primitive (Primitive,isConstant)
import CO4.Util (bitWidth)
numeric :: Integral i => i -> [Bool] -> i
numeric n' xs' = go 0 n' xs'
where
go sum 1 _ = sum
go sum n xs = case xs of
(False : ys) -> go sum c ys
(True : ys) -> go (sum + c) f ys
[] -> error $ "Prefixfree.numeric: not enough flags"
where
c = ceiling $ fromIntegral n / 2
f = floor $ fromIntegral n / 2
invNumeric :: Integral i => i -> i -> [Bool]
invNumeric n i = Exception.assert (i < n)
$ go [] n i
where
go flags 1 0 = flags
go flags n i = if i < c
then go (flags ++ [False]) c i
else go (flags ++ [True]) f $ i - c
where
c = ceiling $ fromIntegral n / 2
f = floor $ fromIntegral n / 2
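-- Round-trip sketch (illustrative, not part of the original module):
-- encoding value 3 out of 5 possible states and decoding it again.
--
-- >>> invNumeric 5 3
-- [True,False]
-- >>> numeric 5 (invNumeric 5 3)
-- 3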
-- |@discriminates n xs@ checks whether @xs@ can
-- discriminate @n@ different states
discriminates :: (Primitive p, Integral i) => i -> [p] -> Bool
discriminates n xs = case all isConstant xs of
False -> length xs >= w
True -> length xs >= w - 1
where
w = bitWidth n
| apunktbau/co4 | src/CO4/Prefixfree.hs | gpl-3.0 | 1,251 | 0 | 13 | 419 | 474 | 248 | 226 | 28 | 4 |
module Ex3 where
m f (h:t) = f h : f t
| roberth/uu-helium | test/typeerrors/Edinburgh/Ex3.hs | gpl-3.0 | 40 | 0 | 7 | 13 | 31 | 16 | 15 | 2 | 1 |
-- |
-- Lenses are immensely useful for working with state, but I don't want
-- to pull in the full Kmettoverse for a small set of combinators.
--
-- Here we redefine all necessary combinators. Full compatibility with
-- lens is maintained.
{-# LANGUAGE RankNTypes #-}
module DNA.Lens where
import Control.Applicative
import Control.Monad.State
import Data.Functor.Identity
import Data.Map.Strict (Map)
import qualified Data.Map.Strict as Map
type Lens' s a = forall f. Functor f => (a -> f a) -> (s -> f s)
lens :: (s -> a) -> (a -> s -> s) -> Lens' s a
lens getf putf = \f s -> flip putf s <$> f (getf s)
-- Get value from object
(^.) :: s -> Lens' s a -> a
s ^. l = getConst $ l Const s
-- Put value into object
set :: Lens' s a -> a -> s -> s
set l a s = runIdentity $ l (\_ -> Identity a) s
over :: Lens' s a -> (a -> a) -> s -> s
over l f s = runIdentity $ l (Identity . f) s
(.=) :: MonadState s m => Lens' s a -> a -> m ()
l .= b = modify' $ set l b
(%=) :: MonadState s m => Lens' s a -> (a -> a) -> m ()
l %= b = modify' $ over l b
infix 4 .=, %=
use :: MonadState s m => Lens' s a -> m a
use l = do
s <- get
return $ s ^. l
at :: (Ord k) => k -> Lens' (Map k v) (Maybe v)
at k f m = f mv <&> \r -> case r of
Nothing -> maybe m (const (Map.delete k m)) mv
Just v' -> Map.insert k v' m
where mv = Map.lookup k m
{-# INLINE at #-}
(<&>) :: Functor f => f a -> (a -> b) -> f b
(<&>) = flip (<$>)
failure :: MonadIO m => String -> m a
failure msg = do
liftIO $ putStrLn $ "FAILED: " ++ msg
error msg
zoom :: Monad m => Lens' s a -> StateT a m b -> StateT s m b
zoom l action = do
s <- get
(b,a') <- lift $ runStateT action (s ^. l)
put $ set l a' s
return b
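-- A minimal usage sketch (the record, lens and action below are illustrative
-- and not part of this module's API):
data Counter = Counter { _count :: Int }

countL :: Lens' Counter Int
countL = lens _count (\n c -> c { _count = n })

bump :: MonadState Counter m => m ()
bump = countL %= (+1)
-- Running it: @execState bump (Counter 0)@ yields a counter with @_count == 1@.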
| SKA-ScienceDataProcessor/RC | MS3/lib/DNA/Lens.hs | apache-2.0 | 1,731 | 0 | 15 | 475 | 820 | 419 | 401 | 43 | 2 |
-- | Re-export the common parts of the server framework.
--
module Distribution.Server.Framework (
module Happstack.Server.Routing,
module Happstack.Server.Response,
module Happstack.Server.RqData,
module Happstack.Server.FileServe,
module Happstack.Server.Error,
module Happstack.Server.Monads,
module Happstack.Server.Types,
module Data.Acid,
module Distribution.Server.Framework.MemState,
module Distribution.Server.Framework.Cache,
module Distribution.Server.Framework.MemSize,
module Distribution.Server.Framework.Auth,
module Distribution.Server.Framework.Feature,
module Distribution.Server.Framework.ServerEnv,
module Distribution.Server.Framework.Resource,
module Distribution.Server.Framework.RequestContentTypes,
module Distribution.Server.Framework.ResponseContentTypes,
module Distribution.Server.Framework.CacheControl,
module Distribution.Server.Framework.Hook,
module Distribution.Server.Framework.Error,
module Distribution.Server.Framework.Logging,
module Distribution.Server.Framework.HappstackUtils,
module Data.Monoid,
module Control.Applicative,
module Control.Monad,
module Control.Monad.Trans,
module System.FilePath,
) where
import Happstack.Server.Routing
import Happstack.Server.Response
import Happstack.Server.RqData
import Happstack.Server.FileServe
import Happstack.Server.Error
import Happstack.Server.Monads
import Happstack.Server.Types
import Data.Acid
import Distribution.Server.Framework.MemState
import Distribution.Server.Framework.Cache
import Distribution.Server.Framework.MemSize
import Distribution.Server.Framework.Auth (PrivilegeCondition(..))
import Distribution.Server.Framework.Feature
import Distribution.Server.Framework.ServerEnv
import Distribution.Server.Framework.Resource
import Distribution.Server.Framework.RequestContentTypes
import Distribution.Server.Framework.ResponseContentTypes
import Distribution.Server.Framework.CacheControl
import Distribution.Server.Framework.Hook
import Distribution.Server.Framework.Error
import Distribution.Server.Framework.Logging
import Distribution.Server.Framework.HappstackUtils
import Data.Monoid (Monoid(..))
import Control.Applicative (Applicative(..), (<$>))
import Control.Monad
import Control.Monad.Trans (MonadIO, liftIO)
import System.FilePath ((</>), (<.>))
| haskell-infra/hackage-server | Distribution/Server/Framework.hs | bsd-3-clause | 2,380 | 0 | 6 | 250 | 433 | 306 | 127 | 55 | 0 |
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
import Prelude ()
import Prelude.Compat
import Data.Text (Text, pack)
import Data.Text.IO as T (putStrLn)
import qualified GitHub.Endpoints.Users.Followers as GitHub
main :: IO ()
main = do
possibleUsers <- GitHub.usersFollowing "mike-burns"
T.putStrLn $ either (("Error: " <>) . pack . show)
(foldMap ((<> "\n") . formatUser))
possibleUsers
formatUser :: GitHub.SimpleUser -> Text
formatUser = GitHub.untagName . GitHub.simpleUserLogin
| jwiegley/github | samples/Users/Followers/Example.hs | bsd-3-clause | 576 | 1 | 13 | 133 | 152 | 86 | 66 | 15 | 1 |
module CodeGen.Header(generateHeader) where
import Control.Arrow ((&&&))
import Data.List (partition)
import CodeGen.Typeclasses
import CodeGen.CCodeNames
import CodeGen.Function
import CodeGen.Type ()
import CCode.Main
import CCode.PrettyCCode ()
import qualified AST.AST as A
import qualified Identifiers as ID
import qualified Types as Ty
-- | Generates the C header file for the translated program.
-- | This function generates all the common code; generateHeaderRecurser generates the class-specific code.
generateHeader :: A.Program -> CCode FIN
generateHeader p =
Program $
IfNDefine "HEADER_H" $
Concat $
HashDefine "HEADER_H" :
HashDefine "_XOPEN_SOURCE 800" :
(Includes [
"pthread.h", -- Needed because of the use of locks in future code, remove if we choose to remove lock-based futures
"pony.h",
"pool.h",
"stdlib.h",
"closure.h",
"stream.h",
"array.h",
"tuple.h",
"range.h",
"future.h",
"task.h",
"option.h",
"party.h",
"string.h",
"stdio.h",
"stdarg.h",
"dtrace_enabled.h",
"dtrace_encore.h"
]) :
HashDefine "UNIT ((void*) -1)" :
[commentSection "Shared messages"] ++
sharedMessages ++
[commentSection "Embedded code"] ++
map Embed embedded ++
[commentSection "Class type decls"] ++
classTypeDecls ++
[commentSection "Trait type decls"] ++
traitTypeDecls ++
[commentSection "Passive class types"] ++
passiveTypes ++
[commentSection "Runtime types"] ++
runtimeTypeDecls ++
[commentSection "Message IDs"] ++
[messageEnums] ++
[commentSection "Message types"] ++
ponyMsgTTypedefs ++
ponyMsgTImpls ++
[commentSection "Global functions"] ++
globalFunctions ++
[commentSection "Class IDs"] ++
[classEnums] ++
[commentSection "Trace functions"] ++
traceFnDecls ++
[commentSection "Runtime type init functions"] ++
runtimeTypeFnDecls ++
[commentSection "Methods"] ++
concatMap methodFwds classes ++
concatMap wrapperMethods classes ++
[commentSection "Constructors"] ++
concatMap constructors classes ++
[commentSection "Main actor rtti"] ++
[externMainRtti] ++
[commentSection "Trait types"] ++
[traitMethodEnums] ++
traitTypes
where
externMainRtti = DeclTL (Typ "extern pony_type_t", Var "_enc__active_Main_type")
sharedMessages =
[DeclTL (ponyMsgT, Var "m_MSG_alloc"),
DeclTL (ponyMsgT, Var "m_resume_get"),
DeclTL (ponyMsgT, Var "m_resume_suspend"),
DeclTL (ponyMsgT, Var "m_resume_await"),
DeclTL (ponyMsgT, Var "m_run_closure")
]
traits = A.traits p
classes = A.classes p
functions = A.functions p
embedded = A.allEmbedded p
ponyMsgTTypedefs :: [CCode Toplevel]
ponyMsgTTypedefs = map ponyMsgTTypedefClass classes
where
ponyMsgTTypedefClass A.Class{A.cname, A.cmethods} =
Concat $ concatMap ponyMsgTTypedef cmethods
where
ponyMsgTTypedef mdecl =
[Typedef (Struct $ futMsgTypeName cname (A.methodName mdecl)) (futMsgTypeName cname (A.methodName mdecl)),
Typedef (Struct $ oneWayMsgTypeName cname (A.methodName mdecl)) (oneWayMsgTypeName cname (A.methodName mdecl))]
ponyMsgTImpls :: [CCode Toplevel]
ponyMsgTImpls = map ponyMsgTImplsClass classes
where
ponyMsgTImplsClass A.Class{A.cname, A.cmethods} =
Concat $ map ponyMsgTImpl cmethods
where
ponyMsgTImpl :: A.MethodDecl -> CCode Toplevel
ponyMsgTImpl mdecl =
let argrttys = map (translate . A.getType) (A.methodParams mdecl)
argspecs = zip argrttys (argnamesWComments mdecl):: [CVarSpec]
argspecsWithTypeParams = argspecs ++ argMethodTypeParamsSpecs mdecl
encoreMsgTSpec = (encMsgT, Var "")
encoreMsgTSpecOneway = (encOnewayMsgT, Var "msg")
in Concat
[StructDecl (AsType $ futMsgTypeName cname (A.methodName mdecl))
(encoreMsgTSpec :
argspecsWithTypeParams)
,StructDecl (AsType $ oneWayMsgTypeName cname (A.methodName mdecl))
(encoreMsgTSpecOneway :
argspecsWithTypeParams)]
argnamesWComments mdecl =
zipWith (\n name -> (Annotated (show name) (Var ("f"++show n))))
([1..]:: [Int])
(map A.pname $ A.methodParams mdecl)
argMethodTypeParamsSpecs mdecl =
zip (argMethodTypeParamsTypes mdecl)
(argMethodTypeParamsWComments mdecl)
argMethodTypeParamsTypes mdecl =
let l = length (A.methodTypeParams mdecl)
in replicate l (Ptr ponyTypeT)
argMethodTypeParamsWComments mdecl =
map (\name -> (Annotated (show name) (Var (show name))))
$ map typeVarRefName (A.methodTypeParams mdecl)
globalFunctions =
[globalFunctionDecl f | f <- functions] ++
[functionWrapperDecl f | f <- functions] ++
[globalFunctionClosureDecl f | f <- functions]
messageEnums =
let
meta = concatMap (\cdecl -> zip (repeat $ A.cname cdecl) (map A.methodName (A.cmethods cdecl))) classes
methodMsgNames = map (show . (uncurry futMsgId)) meta
oneWayMsgNames = map (show . (uncurry oneWayMsgId)) meta
in
Enum $ (Nam "_MSG_DUMMY__ = 1024") : map Nam (methodMsgNames ++ oneWayMsgNames)
classEnums =
let
classIds = map (refTypeId . A.getType) classes
traitIds = map (refTypeId . A.getType) traits
in
Enum $ (Nam "__ID_DUMMY__ = 1024") : classIds ++ traitIds
traceFnDecls = map traceFnDecl classes
where
traceFnDecl A.Class{A.cname} =
FunctionDecl void (classTraceFnName cname) [Ptr encoreCtxT,Ptr void]
runtimeTypeFnDecls = map runtimeTypeFnDecl classes
where
runtimeTypeFnDecl A.Class{A.cname} =
FunctionDecl void (runtimeTypeInitFnName cname) [Ptr . AsType $ classTypeName cname, Embed "..."]
classTypeDecls = map classTypeDecl classes
where
classTypeDecl A.Class{A.cname} =
Typedef (Struct $ classTypeName cname) (classTypeName cname)
passiveTypes = map passiveType $ filter (A.isPassive) classes
where
passiveType A.Class{A.cname, A.cfields} =
let typeParams = Ty.getTypeParameters cname in
StructDecl (AsType $ classTypeName cname)
((Ptr ponyTypeT, AsLval $ selfTypeField) :
map (\ty -> (Ptr ponyTypeT, AsLval $ typeVarRefName ty)) typeParams ++
zip
(map (translate . A.ftype) cfields)
(map (AsLval . fieldName . A.fname) cfields))
traitMethodEnums =
let
dicts = map (A.getType &&& A.traitInterface) traits
pairs = concatMap (\(t, hs) -> zip (repeat t) (map A.hname hs)) dicts
syncs = map (show . uncurry msgId) pairs
futs = map (show . uncurry futMsgId) pairs
oneways = map (show . uncurry oneWayMsgId) pairs
in Enum $ Nam "__TRAIT_METHOD_DUMMY__ = 1024" :
map Nam syncs ++
map Nam futs ++
map Nam oneways
traitTypeDecls = map traitTypeDecl traits
where
traitTypeDecl A.Trait{A.tname} =
let ty = refTypeName tname in Typedef (Struct $ ty) ty
traitTypes = map traitType traits
where
traitType A.Trait{A.tname} =
let
formal = Ty.getTypeParameters tname
self = (Ptr ponyTypeT, AsLval $ selfTypeField)
in
StructDecl (AsType $ refTypeName tname) [self]
runtimeTypeDecls = map typeDecl classes ++ map typeDecl traits
where
typeDecl ref =
let
ty = A.getType ref
runtimeTy = runtimeTypeName ty
in
DeclTL (Extern ponyTypeT, AsLval runtimeTy)
encoreRuntimeTypeParam = Ptr (Ptr ponyTypeT)
methodFwds cdecl@(A.Class{A.cname, A.cmethods}) = map methodFwd cmethods
where
methodFwd m
| A.isStreamMethod m =
let params = (Ptr (Ptr encoreCtxT)) :
(Ptr . AsType $ classTypeName cname) :
encoreRuntimeTypeParam : stream :
map (translate . A.ptype) mparams
in
FunctionDecl void (methodImplName cname mname) params
| otherwise =
let params = if A.isMainClass cdecl && mname == ID.Name "main"
then [Ptr . AsType $ classTypeName cname,
encoreRuntimeTypeParam, array]
else (Ptr . AsType $ classTypeName cname) :
encoreRuntimeTypeParam :
map (translate . A.ptype) mparams
in
FunctionDecl (translate mtype) (methodImplName cname mname)
(Ptr (Ptr encoreCtxT):params)
where
mname = A.methodName m
mparams = A.methodParams m
mtype = A.methodType m
wrapperMethods [email protected]{A.cname, A.cmethods} =
if A.isPassive c then
[]
else
map (genericMethod callMethodFutureName future) nonStreamMethods ++
map (genericMethod methodImplOneWayName void) nonStreamMethods ++
map (genericMethod methodImplStreamName stream) streamMethods ++
map forwardingMethod nonStreamMethods
where
genericMethod genMethodName retType m =
let
thisType = Ptr . AsType $ classTypeName cname
rest = map (translate . A.ptype) (A.methodParams m)
args = Ptr (Ptr encoreCtxT) : thisType : encoreRuntimeTypeParam : rest
f = genMethodName cname (A.methodName m)
in
FunctionDecl retType f args
forwardingMethod m =
let
thisType = Ptr . AsType $ classTypeName cname
rest = map (translate . A.ptype) (A.methodParams m)
args = Ptr (Ptr encoreCtxT) : thisType : encoreRuntimeTypeParam :
rest ++ [future]
f = methodImplForwardName cname (A.methodName m)
in
FunctionDecl future f args
(streamMethods, nonStreamMethods) =
partition A.isStreamMethod cmethods
constructors A.Class{A.cname} = [ctr]
where
ctr =
let
retType = Ptr. AsType $ classTypeName cname
f = constructorImplName cname
in
FunctionDecl retType f []
commentSection :: String -> CCode Toplevel
commentSection s = Embed $ replicate (5 + length s) '/' ++ "\n// " ++ s
| Paow/encore | src/back/CodeGen/Header.hs | bsd-3-clause | 11,851 | 0 | 46 | 4,385 | 2,991 | 1,519 | 1,472 | -1 | -1 |
{-# LANGUAGE BangPatterns #-}
{-# LANGUAGE PatternGuards #-}
-- |
-- Module : Data.Array.Accelerate.CUDA.FullList
-- Copyright : [2008..2014] Manuel M T Chakravarty, Gabriele Keller
-- [2009..2014] Trevor L. McDonell
-- License : BSD3
--
-- Maintainer : Trevor L. McDonell <[email protected]>
-- Stability : experimental
-- Portability : non-portable (GHC extensions)
--
-- Non-empty lists of key/value pairs. The lists are strict in the key and lazy
-- in the values. We assume that keys only occur once.
--
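-- A small usage sketch (illustrative only):
--
-- > lookupDelete "b" (cons "a" 1 (singleton "b" 2))
-- >   == (Just 2, Just (FL "a" 1 Nil))
--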
module Data.Array.Accelerate.CUDA.FullList (
FullList(..),
List(..),
singleton,
cons,
size,
mapM_,
lookup,
lookupDelete,
) where
import Prelude hiding ( lookup, mapM_ )
data FullList k v = FL !k v !(List k v)
data List k v = Nil | Cons !k v !(List k v)
infixr 5 `Cons`
instance (Eq k, Eq v) => Eq (FullList k v) where
(FL k1 v1 xs) == (FL k2 v2 ys) = k1 == k2 && v1 == v2 && xs == ys
(FL k1 v1 xs) /= (FL k2 v2 ys) = k1 /= k2 || v1 /= v2 || xs /= ys
instance (Eq k, Eq v) => Eq (List k v) where
(Cons k1 v1 xs) == (Cons k2 v2 ys) = k1 == k2 && v1 == v2 && xs == ys
Nil == Nil = True
_ == _ = False
(Cons k1 v1 xs) /= (Cons k2 v2 ys) = k1 /= k2 || v1 /= v2 || xs /= ys
Nil /= Nil = False
_ /= _ = True
-- List-like operations
--
infixr 5 `cons`
cons :: k -> v -> FullList k v -> FullList k v
cons k v (FL k' v' xs) = FL k v (Cons k' v' xs)
singleton :: k -> v -> FullList k v
singleton k v = FL k v Nil
size :: FullList k v -> Int
size (FL _ _ xs) = 1 + sizeL xs
sizeL :: List k v -> Int
sizeL Nil = 0
sizeL (Cons _ _ xs) = 1 + sizeL xs
lookup :: Eq k => k -> FullList k v -> Maybe v
lookup key (FL k v xs)
| key == k = Just v
| otherwise = lookupL key xs
{-# INLINABLE lookup #-}
{-# SPECIALISE lookup :: () -> FullList () v -> Maybe v #-}
lookupL :: Eq k => k -> List k v -> Maybe v
lookupL !key = go
where
go Nil = Nothing
go (Cons k v xs)
| key == k = Just v
| otherwise = go xs
{-# INLINABLE lookupL #-}
{-# SPECIALISE lookupL :: () -> List () v -> Maybe v #-}
lookupDelete :: Eq k => k -> FullList k v -> (Maybe v, Maybe (FullList k v))
lookupDelete key (FL k v xs)
| key == k
= case xs of
Nil -> (Just v, Nothing)
Cons k' v' xs' -> (Just v, Just $ FL k' v' xs')
  | (r, xs') <- lookupDeleteL key xs
= (r, Just $ FL k v xs')
{-# INLINABLE lookupDelete #-}
{-# SPECIALISE lookupDelete :: () -> FullList () v -> (Maybe v, Maybe (FullList () v)) #-}
lookupDeleteL :: Eq k => k -> List k v -> (Maybe v, List k v)
lookupDeleteL !key = go
where
go Nil = (Nothing, Nil)
go (Cons k v xs)
| key == k = (Just v, xs)
| (r, xs') <- go xs = (r, Cons k v xs')
{-# INLINABLE lookupDeleteL #-}
{-# SPECIALISE lookupDeleteL :: () -> List () v -> (Maybe v, List () v) #-}
mapM_ :: Monad m => (k -> v -> m a) -> FullList k v -> m ()
mapM_ !f (FL k v xs) = f k v >> mapML_ f xs
{-# INLINABLE mapM_ #-}
mapML_ :: Monad m => (k -> v -> m a) -> List k v -> m ()
mapML_ !f = go
where
go Nil = return ()
go (Cons k v xs) = f k v >> go xs
{-# INLINABLE mapML_ #-}
| kumasento/accelerate-cuda | Data/Array/Accelerate/CUDA/FullList.hs | bsd-3-clause | 3,283 | 0 | 11 | 1,010 | 1,292 | 656 | 636 | 77 | 2 |
{-# LANGUAGE RebindableSyntax,NoImplicitPrelude,OverloadedLists #-}
import SubHask
import SubHask.Algebra.HMatrix
import HLearn.History
import HLearn.History.DisplayMethods
import HLearn.Optimization.Common
import HLearn.Optimization.GradientDescent
-- import HLearn.Optimization.StochasticGradientDescent
import HLearn.Optimization.LineMinimization.Univariate
import HLearn.Optimization.LineMinimization.Multivariate
import qualified Data.Vector.Storable as VS
import qualified Data.Vector.Generic as VG
import System.IO
sphere :: (Ring a, VG.Vector v a) => v a -> a
sphere v = VG.foldl' (+) zero $ VG.map (\x -> x*x) v
sphere' :: (Module (v a), VG.Vector v a) => v a -> v a
sphere' v = 2*.v
rosenbrock :: (Ring a, VG.Vector v a) => v a -> a
rosenbrock v = go 0 0
where
go i tot = if i==VG.length v-1
then tot
        else go (i+1) $ tot + 100*x*x + (v VG.! i - 1)*(v VG.! i - 1)
where
x =(v VG.! (i+1) - (v VG.! i)*(v VG.! i))
rosenbrock' :: (Ring a, VG.Vector v a) => v a -> v a
rosenbrock' v = VG.imap go v
where
go i x = if i==VG.length v-1
then pt2
else if i== 0
then pt1
else pt1+pt2
where
            pt1 = 400*x*x*x - 400*xp1*x + 2*x - 2
            pt2 = 200*x - 200*xm1*xm1
            xp1 = v VG.! (i+1)
            xm1 = v VG.! (i-1)
main = do
let { f=sphere; f'=sphere' }
-- let { f=rosenbrock; f'=rosenbrock' }
runDynamicHistory
( summaryStatistics
=== linearTrace
=== mkOptimizationPlot
(undefined::ConjugateGradientDescent (Vector Double))
f
"optplot.dat"
)
-- ( linearTrace )
( conjugateGradientDescent_
( lineSearchBrent ( brentTollerance 1e-12 || maxIterations 2 ) )
-- ( backtracking ( maxIterations 5) )
-- ( backtracking ( amijo (1e-4::Double) ) )
polakRibiere
f
f'
( [1..10] :: VS.Vector Double )
( maxIterations 5
-- || multiplicativeTollerance 1e-12
)
)
putStrLn "done."
| ehlemur/HLearn | examples/optimization/optimization.hs | bsd-3-clause | 2,194 | 1 | 15 | 734 | 703 | 376 | 327 | 50 | 3 |
{-# LANGUAGE TemplateHaskell, TypeSynonymInstances, FlexibleInstances,
MultiParamTypeClasses, TypeOperators, FlexibleContexts , RankNTypes,
GADTs, ScopedTypeVariables, EmptyDataDecls, ConstraintKinds #-}
module Data.Comp.Multi.Variables_Test where
import Data.Comp.Multi.Variables
import Data.Comp.Multi.Derive
import Data.Comp.Multi.Sum
import Data.Comp.Multi.Term
import Data.Comp.Multi.HFunctor
import Data.Comp.Multi.Show ()
import qualified Data.Map as Map
import qualified Data.Set as Set
import Test.Framework
import Test.Framework.Providers.HUnit
import Test.HUnit
--------------------------------------------------------------------------------
-- Definitions
--------------------------------------------------------------------------------
data Var = X | Y | Z deriving (Eq,Ord,Show)
data Ex
type Value f = forall i . Term f i
type Expression f = Term f Ex
data Val e i where
Abs :: Var -> e Ex -> Val e i
Var :: Var -> Val e i
Int :: Int -> Val e i
data Op e i where
App :: e Ex -> e Ex -> Op e Ex
Plus :: e Ex -> e Ex -> Op e Ex
data Let e i where
Let :: Var -> e Ex -> e Ex -> Let e Ex
data LetRec e i where
LetRec :: Var -> e Ex -> e Ex -> LetRec e Ex
type Sig = Op :+: Val
type SigLet = Let :+: Sig
type SigRec = LetRec :+: Sig
$(derive [makeHFunctor, makeHTraversable, makeHFoldable,
makeEqHF, makeShowHF, smartConstructors]
[''Op, ''Val, ''Let, ''LetRec])
instance HasVars Val Var where
isVar (Var v) = Just v
isVar _ = Nothing
bindsVars (Abs v a) = a |-> Set.singleton v
bindsVars _ = empty
instance HasVars Op a where
instance HasVars Let Var where
bindsVars (Let v _ a) = a |-> Set.singleton v
instance HasVars LetRec Var where
bindsVars (LetRec v a b) = a |-> vs & b |-> vs
where vs = Set.singleton v
-- let x = x + 1 in (\y. y + x) z
letExp, letExp' :: Expression SigLet
letExp = iLet X (iVar X `iPlus` iInt 1) (iAbs Y (iVar Y `iPlus` iVar X) `iApp` iVar Z)
letExp' = iLet X (iInt 1 `iPlus` iInt 1) (iAbs Y (iVar Y `iPlus` iVar X) `iApp` iInt 3)
-- letrec x = x + 1 in (\y. y + x) z
recExp, recExp' :: Expression SigRec
recExp = iLetRec X (iVar X `iPlus` iInt 1) (iAbs Y (iVar Y `iPlus` iVar X) `iApp` iVar Z)
recExp' = iLetRec X (iVar X `iPlus` iInt 1) (iAbs Y (iVar Y `iPlus` iVar X) `iApp` iInt 3)
subst :: (Val :<: f) => Subst f Var
subst = Map.fromList [(X, A $ iInt 1), (Y, A $ iInt 2), (Z, A $ iInt 3)]
--------------------------------------------------------------------------------
-- Properties
--------------------------------------------------------------------------------
case_letFree = variables letExp @=? Set.fromList [Z,X]
case_recFree = variables recExp @=? Set.fromList [Z]
case_letSubst = appSubst s letExp @=? letExp'
where s = subst :: Subst SigLet Var
case_recSubst = appSubst s recExp @=? recExp'
where s = subst :: Subst SigRec Var
--------------------------------------------------------------------------------
-- Test Suits
--------------------------------------------------------------------------------
main = defaultMain [tests]
tests = testGroup "Variables" [
testCase "case_letFree" case_letFree
,testCase "case_recFree" case_recFree
,testCase "case_letSubst" case_letSubst
,testCase "case_recSubst" case_recSubst
]
| spacekitteh/compdata | testsuite/tests/Data/Comp/Multi/Variables_Test.hs | bsd-3-clause | 3,379 | 0 | 11 | 693 | 1,071 | 587 | 484 | -1 | -1 |
{-# LANGUAGE RankNTypes #-}
module Util.UnionFindST(
Element,
T,
UF(),
find,
fromElement,
getElements,
getUnique,
getW,
liftST,
new,
new_,
putW,
runUF,
union,
union_,
updateW
) where
import Control.Monad.Reader
import Control.Monad.ST
import Data.STRef
newtype UF s a = UF (ReaderT (STRef s Int) (ST s) a)
deriving(Monad,Functor)
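-- A small usage sketch (illustrative only): create two weighted elements,
-- union them combining the weights with (+), then read the merged weight
-- from either element.
--
-- > example :: Int
-- > example = runUF
-- >     (do a <- new 1 "x"
-- >         b <- new 2 "y"
-- >         union (+) a b
-- >         getW a)
-- >
-- > -- example == 3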
runUF :: forall a . (forall s . UF s a) -> a
runUF st = runST $ do
ref <- newSTRef 0
let rn = unUF st
unUF (UF x) = x
runReaderT rn ref
data Element s w a = Element a !Int {-# UNPACK #-} !(STRef s (Link s w a))
data Link s w a = Weight {-# UNPACK #-} !Int w [Element s w a] | Next (Element s w a)
type T = Element
newUnique :: UF s Int
newUnique = UF $ do
ref <- ask
u <- lift $ readSTRef ref
let nu = u + 1
lift $ writeSTRef ref nu
return nu
new :: w -> a -> UF s (Element s w a)
new w x = do
r <- liftST $ newSTRef (Weight 1 w [])
n <- newUnique
let ne = Element x n r
liftST $ writeSTRef r (Weight 1 w [ne])
return ne
new_ :: a -> UF s (Element s () a)
new_ x = new () x
liftST :: ST s a -> UF s a
liftST = UF . lift
find :: Element s w a -> UF s (Element s w a)
find x@(Element a _ r) = do
e <- liftST $ readSTRef r
case e of
Weight {} -> return x
Next next -> do
last <- Util.UnionFindST.find next
when (next /= last) $ liftST $ writeSTRef r (Next last)
return last
getW :: Element s w a -> UF s w
getW x = do
Element _ _ r <- find x
Weight _ w _ <- UF $ lift $ readSTRef r
return w
-- retrieve list of unified elements
getElements :: Element s w a -> UF s [Element s w a]
getElements x = do
Element _ _ r <- find x
Weight _ _ es <- liftST $ readSTRef r
return es
getUnique :: Element s w a -> UF s Int
getUnique x = do
Element _ u _ <- find x
return u
-- update w returning the old value
updateW :: (w -> w) -> Element s w a -> UF s w
updateW f x = do
Element _ _ r <- find x
Weight _ w _ <- liftST $ readSTRef r
liftST $ modifySTRef r (\ (Weight s w es) -> Weight s (f w) es)
return w
-- puts a new w, returning old value
putW :: Element s w a -> w -> UF s w
putW e w = updateW (const w) e
union :: (w -> w -> w) -> Element s w a -> Element s w a -> UF s ()
union comb e1 e2 = do
e1'@(Element _ _ r1) <- find e1
e2'@(Element _ _ r2) <- find e2
when (r1 /= r2) $ liftST $ do
Weight w1 x1 es1 <- readSTRef r1
Weight w2 x2 es2 <- readSTRef r2
if w1 <= w2 then do
writeSTRef r1 (Next e2')
writeSTRef r2 $! (Weight (w1 + w2) (comb x1 x2) (es1 ++ es2))
else do
writeSTRef r1 $! (Weight (w1 + w2) (comb x1 x2) (es1 ++ es2))
writeSTRef r2 (Next e1')
union_ :: Element s () a -> Element s () a -> UF s ()
union_ x y = union (\_ _ -> ()) x y
fromElement :: Element s w a -> a
fromElement (Element a _ _) = a
instance Eq (Element s w a) where
Element _ x _ == Element _ y _ = x == y
Element _ x _ /= Element _ y _ = x /= y
instance Ord (Element s w a) where
Element _ x _ `compare` Element _ y _ = x `compare` y
Element _ x _ <= Element _ y _ = x <= y
Element _ x _ >= Element _ y _ = x >= y
instance Show a => Show (Element s w a) where
showsPrec n (Element x _ _) = showsPrec n x
| hvr/jhc | src/Util/UnionFindST.hs | mit | 3,389 | 0 | 17 | 1,116 | 1,691 | 822 | 869 | 111 | 2 |
<?xml version="1.0" encoding="UTF-8"?><!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<helpset version="2.0" xml:lang="it-IT">
<title>AMF Support</title>
<maps>
<homeID>amf</homeID>
<mapref location="map.jhm"/>
</maps>
<view>
<name>TOC</name>
<label>Contents</label>
<type>org.zaproxy.zap.extension.help.ZapTocView</type>
<data>toc.xml</data>
</view>
<view>
<name>Index</name>
<label>Index</label>
<type>javax.help.IndexView</type>
<data>index.xml</data>
</view>
<view>
<name>Search</name>
<label>Search</label>
<type>javax.help.SearchView</type>
<data engine="com.sun.java.help.search.DefaultSearchEngine">
JavaHelpSearch
</data>
</view>
<view>
<name>Favorites</name>
<label>Favorites</label>
<type>javax.help.FavoritesView</type>
</view>
</helpset> | kingthorin/zap-extensions | addOns/amf/src/main/javahelp/help_it_IT/helpset_it_IT.hs | apache-2.0 | 956 | 82 | 52 | 156 | 390 | 206 | 184 | -1 | -1 |
module Where1 where
f3 x
= (ls, rs)
where
ls = x + 1
rs = x - 1
f1 :: Int -> Int
f1 x = ls where ls = x + 1
f2 :: Int -> Int
f2 x = rs where rs = x - 1
| kmate/HaRe | old/testing/merging/Where1AST.hs | bsd-3-clause | 179 | 0 | 7 | 76 | 94 | 52 | 42 | 9 | 1 |
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<helpset version="2.0" xml:lang="si-LK">
  <title>Active Scan Rules - Alpha | ZAP Extension</title>
<maps>
<homeID>top</homeID>
<mapref location="map.jhm"/>
</maps>
<view>
<name>TOC</name>
<label>Contents</label>
<type>org.zaproxy.zap.extension.help.ZapTocView</type>
<data>toc.xml</data>
</view>
<view>
<name>Index</name>
<label>Index</label>
<type>javax.help.IndexView</type>
<data>index.xml</data>
</view>
<view>
<name>Search</name>
<label>Search</label>
<type>javax.help.SearchView</type>
<data engine="com.sun.java.help.search.DefaultSearchEngine">
JavaHelpSearch
</data>
</view>
<view>
<name>Favorites</name>
<label>Favorites</label>
<type>javax.help.FavoritesView</type>
</view>
</helpset> | 0xkasun/security-tools | src/org/zaproxy/zap/extension/wavsepRpt/resources/help_si_LK/helpset_si_LK.hs | apache-2.0 | 987 | 85 | 53 | 163 | 405 | 213 | 192 | -1 | -1 |
{-# LANGUAGE NoImplicitPrelude, PatternSynonyms #-}
{-# OPTIONS_GHC -Wall #-}
module Test where
x :: ()
x = ()
pattern Point2 :: () -> () -> ((), ())
pattern Point2 x y = (x, y)
pattern Point :: () -> () -> ((), ())
pattern Point{x1, y1} = (x1, y1)
| snoyberg/ghc | testsuite/tests/patsyn/should_compile/T12615.hs | bsd-3-clause | 252 | 0 | 9 | 52 | 120 | 68 | 52 | 9 | 1 |
{-# LANGUAGE MultiParamTypeClasses, PolyKinds #-}
{-# OPTIONS_GHC -fprint-explicit-kinds #-}
module TidyClassKinds where
import Data.Proxy
class Poly a b
type ProxySyn = Proxy
instance Poly ProxySyn ProxySyn
-- output should really talk about k1 and k2, not about k and k!
| olsner/ghc | testsuite/tests/polykinds/TidyClassKinds.hs | bsd-3-clause | 280 | 0 | 5 | 47 | 36 | 21 | 15 | -1 | -1 |
module Bio.VCF.Parser.Helpers
( tabOrSpace
, isTab
, isSpace
, notTabOrSpace
, isNumber
, isFloatNumber
, isBase
, isBaseOrDeletion
, endOfLine
) where
import Data.Word (Word8)
tabOrSpace :: Word8 -> Bool
tabOrSpace c = isTab c || isSpace c
isTab :: Word8 -> Bool
isTab c = c == 9
isSpace :: Word8 -> Bool
isSpace c = c == 32
notTabOrSpace :: Word8 -> Bool
notTabOrSpace = not . tabOrSpace
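-- A usage sketch (assumes attoparsec's "Data.Attoparsec.ByteString",
-- imported qualified as A, which this helper module itself does not import):
--
-- > field = A.takeWhile notTabOrSpace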
isNumber :: Word8 -> Bool
isNumber c = c >= 48 && c <= 57
isFloatNumber :: Word8 -> Bool
isFloatNumber c = isNumber c || c == 46 -- '.'
isBase :: Word8 -> Bool
isBase c = c == 65 || c == 97 || -- A or a
c == 67 || c == 99 || -- C or c
c == 71 || c == 103 || -- G or g
c == 84 || c == 116 || -- T or t
c == 78 || c == 110 -- N or n
isBaseOrDeletion :: Word8 -> Bool
isBaseOrDeletion c = isBase c || c == 42 || c == 44 || -- or '*' and ','
c == 60 || c == 62 -- '<' and '>'
endOfLine :: Word8 -> Bool
endOfLine c = c == 13 || c == 10
| juanpaucar/vcf | src/Bio/VCF/Parser/Helpers.hs | mit | 1,017 | 0 | 23 | 313 | 373 | 201 | 172 | 34 | 1 |
{-# LANGUAGE PatternSynonyms, ForeignFunctionInterface, JavaScriptFFI #-}
module GHCJS.DOM.JSFFI.Generated.SVGLineElement
(js_getX1, getX1, js_getY1, getY1, js_getX2, getX2, js_getY2,
getY2, SVGLineElement, castToSVGLineElement, gTypeSVGLineElement)
where
import Prelude ((.), (==), (>>=), return, IO, Int, Float, Double, Bool(..), Maybe, maybe, fromIntegral, round, fmap, Show, Read, Eq, Ord)
import Data.Typeable (Typeable)
import GHCJS.Types (JSRef(..), JSString, castRef)
import GHCJS.Foreign (jsNull)
import GHCJS.Foreign.Callback (syncCallback, asyncCallback, syncCallback1, asyncCallback1, syncCallback2, asyncCallback2, OnBlocked(..))
import GHCJS.Marshal (ToJSRef(..), FromJSRef(..))
import GHCJS.Marshal.Pure (PToJSRef(..), PFromJSRef(..))
import Control.Monad.IO.Class (MonadIO(..))
import Data.Int (Int64)
import Data.Word (Word, Word64)
import GHCJS.DOM.Types
import Control.Applicative ((<$>))
import GHCJS.DOM.EventTargetClosures (EventName, unsafeEventName)
import GHCJS.DOM.Enums
foreign import javascript unsafe "$1[\"x1\"]" js_getX1 ::
JSRef SVGLineElement -> IO (JSRef SVGAnimatedLength)
-- | <https://developer.mozilla.org/en-US/docs/Web/API/SVGLineElement.x1 Mozilla SVGLineElement.x1 documentation>
getX1 ::
(MonadIO m) => SVGLineElement -> m (Maybe SVGAnimatedLength)
getX1 self
= liftIO ((js_getX1 (unSVGLineElement self)) >>= fromJSRef)
foreign import javascript unsafe "$1[\"y1\"]" js_getY1 ::
JSRef SVGLineElement -> IO (JSRef SVGAnimatedLength)
-- | <https://developer.mozilla.org/en-US/docs/Web/API/SVGLineElement.y1 Mozilla SVGLineElement.y1 documentation>
getY1 ::
(MonadIO m) => SVGLineElement -> m (Maybe SVGAnimatedLength)
getY1 self
= liftIO ((js_getY1 (unSVGLineElement self)) >>= fromJSRef)
foreign import javascript unsafe "$1[\"x2\"]" js_getX2 ::
JSRef SVGLineElement -> IO (JSRef SVGAnimatedLength)
-- | <https://developer.mozilla.org/en-US/docs/Web/API/SVGLineElement.x2 Mozilla SVGLineElement.x2 documentation>
getX2 ::
(MonadIO m) => SVGLineElement -> m (Maybe SVGAnimatedLength)
getX2 self
= liftIO ((js_getX2 (unSVGLineElement self)) >>= fromJSRef)
foreign import javascript unsafe "$1[\"y2\"]" js_getY2 ::
JSRef SVGLineElement -> IO (JSRef SVGAnimatedLength)
-- | <https://developer.mozilla.org/en-US/docs/Web/API/SVGLineElement.y2 Mozilla SVGLineElement.y2 documentation>
getY2 ::
(MonadIO m) => SVGLineElement -> m (Maybe SVGAnimatedLength)
getY2 self
= liftIO ((js_getY2 (unSVGLineElement self)) >>= fromJSRef) | plow-technologies/ghcjs-dom | src/GHCJS/DOM/JSFFI/Generated/SVGLineElement.hs | mit | 2,562 | 24 | 11 | 342 | 659 | 385 | 274 | 42 | 1 |
-----------------------------------------------------------------------------
-- |
-- Module : Writer.Data
-- License : MIT (see the LICENSE file)
-- Maintainer : Felix Klein ([email protected])
--
-- Common data used by the writer module.
--
-----------------------------------------------------------------------------
{-# LANGUAGE
LambdaCase
, MultiParamTypeClasses
, TypeSynonymInstances
, FlexibleInstances
#-}
-----------------------------------------------------------------------------
module Writer.Data
( WriteMode(..)
, QuoteMode(..)
, OperatorConfig(..)
, UnaryOperator(..)
, BinaryOperator(..)
, Unsupported(..)
, Assoc(..)
) where
-----------------------------------------------------------------------------
import Data.Convertible
( Convertible(..)
, ConvertError(..)
)
-----------------------------------------------------------------------------
-- | There are two writing modes currently supported:
data WriteMode =
Pretty
    -- ^ pretty printing, producing a well-readable, minimal output
| Fully
    -- ^ fully parenthesized printing, producing fully parenthesized
-- expressions
deriving (Eq, Ord)
-----------------------------------------------------------------------------
-- | There are two quoting modes currently supported:
data QuoteMode =
NoQuotes
-- ^ do not quote identifiers
| DoubleQuotes
-- ^ quote identifiers using "
deriving (Eq, Ord)
-----------------------------------------------------------------------------
instance Convertible WriteMode String where
safeConvert = return . \case
Pretty -> "pretty"
Fully -> "fully"
-----------------------------------------------------------------------------
instance Convertible String WriteMode where
safeConvert = \case
"pretty" -> return Pretty
"fully" -> return Fully
str -> Left ConvertError
{ convSourceValue = str
, convSourceType = "String"
, convDestType = "WriteMode"
, convErrorMessage = "Unknown mode"
}
-----------------------------------------------------------------------------
instance Convertible QuoteMode String where
safeConvert = return . \case
NoQuotes -> "none"
DoubleQuotes -> "double"
-----------------------------------------------------------------------------
instance Convertible String QuoteMode where
safeConvert = \case
"none" -> return NoQuotes
"double" -> return DoubleQuotes
str -> Left ConvertError
{ convSourceValue = str
, convSourceType = "String"
, convDestType = "QuoteMode"
, convErrorMessage = "Unknown quote mode"
}
-----------------------------------------------------------------------------
-- | Associativity type to distinguish left associative operators from
-- right associative operators
data Assoc =
AssocLeft
| AssocRight
deriving (Eq)
-----------------------------------------------------------------------------
-- | A unary operator can be set up by providing a name and its precedence.
data UnaryOperator =
UnaryOp
{ uopName :: String
, uopPrecedence :: Int
}
| UnaryOpUnsupported
deriving (Eq)
-----------------------------------------------------------------------------
-- | A binary operator can be set up by a name, its precedence and its
-- associativity.
data BinaryOperator =
BinaryOp
{ bopName :: String
, bopPrecedence :: Int
, bopAssoc :: Assoc
}
| BinaryOpUnsupported
deriving (Eq)
-----------------------------------------------------------------------------
-- | A simple expression printer can be set up using the function
-- 'printFormula' from 'Writer.Pretty'. To bundle the specific
-- operator names, their precedence and their associativity, the data
-- structure @OperatorConfig@ is used.
--
-- Thereby, constants such as True and False are given by Strings and unary
-- operators are given by their name and their precedence. For binary
-- operators, the associativity additionally has to be defined.
--
-- The precedence is given by an Integer, where a lower value means
-- higher precedence. If the same value is used for multiple
-- operators, their precedence is treated equally.
--
-- The associativity is either 'AssocLeft' or 'AssocRight'.
--
-- Unsupported Operators can be disabled using 'UnaryOpUnsupported' or
-- 'BinaryOpUnsupported', respectively.
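--
-- An illustrative instantiation (the operator names and precedences below
-- are hypothetical and chosen only for this sketch; actual writer backends
-- supply their own):
--
-- @
-- exampleOps :: OperatorConfig
-- exampleOps = OperatorConfig
--   { tTrue          = "true"
--   , fFalse         = "false"
--   , opNot          = UnaryOp "!" 1
--   , opAnd          = BinaryOp "&&" 3 AssocLeft
--   , opOr           = BinaryOp "||" 4 AssocLeft
--   , opImplies      = BinaryOp "->" 5 AssocRight
--   , opEquiv        = BinaryOp "<->" 5 AssocRight
--   , opNext         = UnaryOp "X" 2
--   , opPrevious     = UnaryOp "Y" 2
--   , opFinally      = UnaryOp "F" 2
--   , opGlobally     = UnaryOp "G" 2
--   , opHistorically = UnaryOp "H" 2
--   , opOnce         = UnaryOp "O" 2
--   , opUntil        = BinaryOp "U" 6 AssocRight
--   , opRelease      = BinaryOp "R" 6 AssocLeft
--   , opWeak         = BinaryOpUnsupported
--   , opSince        = BinaryOp "S" 6 AssocRight
--   , opTriggered    = BinaryOp "T" 6 AssocRight
--   }
-- @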
data OperatorConfig =
OperatorConfig
{ tTrue :: String
, fFalse :: String
, opNot :: UnaryOperator
, opAnd :: BinaryOperator
, opOr :: BinaryOperator
, opImplies :: BinaryOperator
, opEquiv :: BinaryOperator
, opNext :: UnaryOperator
, opPrevious :: UnaryOperator
, opFinally :: UnaryOperator
, opGlobally :: UnaryOperator
, opHistorically :: UnaryOperator
, opOnce :: UnaryOperator
, opUntil :: BinaryOperator
, opRelease :: BinaryOperator
, opWeak :: BinaryOperator
, opSince :: BinaryOperator
, opTriggered :: BinaryOperator
} deriving (Eq)
-----------------------------------------------------------------------------
-- | Unification class to check whether an operator is unsupported or not.
class Unsupported a where
unsupported :: a -> Bool
instance Unsupported UnaryOperator where
unsupported = (== UnaryOpUnsupported)
instance Unsupported BinaryOperator where
unsupported = (== BinaryOpUnsupported)
-----------------------------------------------------------------------------
| reactive-systems/syfco | src/lib/Writer/Data.hs | mit | 5,441 | 0 | 11 | 933 | 641 | 404 | 237 | 94 | 0 |
{-# LANGUAGE OverloadedStrings #-}
module Config.EnvSpec
( main
, spec
) where
import Test.Hspec
import Test.QuickCheck.Instances ()
import System.Environment
import qualified Data.Map as M
import Servant.Client
import Config
-- ----------------------------------------------
main :: IO ()
main = hspec spec
spec :: Spec
spec =
resetEnvRule $
describe "Config.loadConfig: Env" $
it "loads full configuration" $ do
mapM_ (uncurry setEnv)
[ ("PORT", "12345")
, ("LOG_LEVEL", "DEBUG")
, ("MATTERMOST_URL", "https://mattermost.invalid")
, ("MATTERMOST_API_KEY", "xyz")
, ("MATTERMOST_CHANNEL", "test-channel")
, ("GITHUB_SECRET", "abc")
]
let expected = Config
{ cfgPort = 12345
, cfgPriority = DEBUG
, cfgGithubSecret = Just "abc"
, cfgMattermostUrl = BaseUrl Https "mattermost.invalid" 443 ""
, cfgMattermostApiKey = "xyz"
, cfgRepositories = M.fromList
[ ("_default" , RepositoryConfig {rcgChannel = Just "test-channel", rcgBot = Nothing})]
}
loadConfig' `shouldReturn` Right expected
-- ----------------------------------------------
loadConfig' :: IO (Either String Config)
loadConfig' = loadConfig Nothing
resetEnvRule :: SpecWith a -> SpecWith a
resetEnvRule = around_ $ \test -> resetEnv >> test >> resetEnv
where
resetEnv = mapM_ unsetEnv usedEnvVars
usedEnvVars = [ "PORT", "LOG_LEVEL", "GITHUB_SECRET", "MATTERMOST_URL", "MATTERMOST_API_KEY", "MATTERMOST_CHANNEL"]
| UlfS/ghmm | test/Config/EnvSpec.hs | mit | 1,730 | 0 | 18 | 529 | 364 | 210 | 154 | 39 | 1 |
{-# htermination scanl :: (a -> b -> a) -> a -> [b] -> [a] #-}
| ComputationWithBoundedResources/ara-inference | doc/tpdb_trs/Haskell/full_haskell/Prelude_scanl_1.hs | mit | 63 | 0 | 2 | 16 | 3 | 2 | 1 | 1 | 0 |
module Universe.Objects.Shape (
Shape(..)
) where
import Measures
data Shape d = Spherical (MeasuredVal d Distance)
| ShapeTODO -- TODO
| fehu/hgt | core-universe/src/Universe/Objects/Shape.hs | mit | 156 | 0 | 8 | 40 | 41 | 26 | 15 | 5 | 0 |
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE DeriveGeneric #-}
module Bot.NetHack.ScreenPattern
( ScreenPattern()
, Match(..)
, Detailed(..)
, match
, regex
, limitRows
, debugPattern )
where
import Data.Data
import Data.Foldable
import qualified Data.Set as S
import Data.String
import qualified Data.Text as T
import GHC.Generics
import Prelude hiding ( getLine )
import Terminal.Screen
import Text.Regex.TDFA hiding ( match )
data ScreenPattern =
Plain !T.Text
| RegexPattern !Regex
| RowLimited !(S.Set Int) !ScreenPattern
deriving ( Typeable, Generic )
instance IsString ScreenPattern where
fromString str = Plain $ T.pack str
limitRows :: Foldable f => f Int -> ScreenPattern -> ScreenPattern
limitRows ff pattern = RowLimited (S.fromList $ toList ff) pattern
{-# INLINE limitRows #-}
regex :: T.Text -> ScreenPattern
regex txt = RegexPattern $ makeRegex $ T.unpack txt
{-# INLINE regex #-}
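-- A small usage sketch combining 'regex', 'limitRows' and 'match' (the
-- message text and row are illustrative only):
--
-- > pausedAtMore :: ScreenState -> Bool
-- > pausedAtMore ss = match ss (limitRows [0] (regex (T.pack "--More--")))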
data Detailed = Detailed
{ x :: !Int
, y :: !Int
, w :: !Int
, h :: !Int
, matches :: [T.Text] }
deriving ( Eq, Ord, Show, Read, Typeable, Data, Generic )
class Match match where
fromDetailed :: Maybe Detailed -> match
instance Match (Maybe Detailed) where
fromDetailed = id
instance Match Bool where
fromDetailed (Just{}) = True
fromDetailed Nothing = False
instance Match [T.Text] where
fromDetailed Nothing = []
fromDetailed (Just d) = matches d
instance Match [String] where
fromDetailed Nothing = []
fromDetailed (Just d) = fmap T.unpack $ matches d
eligibleRows :: ScreenState -> ScreenPattern -> S.Set Int
eligibleRows ss pattern = case pattern of
RowLimited rows payload ->
S.intersection (S.intersection rows screenrows) (eligibleRows ss payload)
_ -> screenrows
where
(_, sh) = screenSize ss
screenrows = S.fromList [0..sh-1]
match :: Match match => ScreenState -> ScreenPattern -> match
match ss pattern = fromDetailed $ goOverLines eligible_rows
where
eligible_rows = eligibleRows ss pattern
goOverLines rowset | Just (row, rest) <- S.minView rowset =
let (txt, indexer) = getLine row ss
in case patternTest pattern txt row indexer of
Nothing -> goOverLines rest
r@(Just{}) -> r
goOverLines _ = Nothing
patternTest :: ScreenPattern -> T.Text -> Int -> (Int -> Int) -> Maybe Detailed
patternTest (Plain patterntxt) subject row indexer = case T.breakOn patterntxt subject of
(leftside, rightside) | not (T.null rightside) ->
Just $ Detailed { x = indexer (T.length leftside)
, y = row
, w = indexer (T.length leftside + T.length patterntxt) -
indexer (T.length leftside)
, h = 1
, matches = [T.take (T.length patterntxt) rightside] }
_ -> Nothing
patternTest (RegexPattern regex) subject row indexer =
case matchOnce regex (T.unpack subject) :: Maybe MatchArray of
Nothing -> Nothing
Just arr ->
let lowest_offset = minimum $ fmap (\(offset, _) -> if offset /= -1 then offset else 1000000000) arr
highest_offset = maximum $ fmap (\(offset, _) -> offset) arr
in Just $ Detailed
{ x = indexer lowest_offset
, y = row
, w = indexer highest_offset - indexer lowest_offset
, h = 1
, matches = flip fmap (toList arr) $ \(offset, len) ->
T.take len $ T.drop offset subject }
patternTest (RowLimited _ inner) subject row indexer = patternTest inner subject row indexer
-- | Convenience function to test regexes in the ghci REPL.
--
-- @
-- debugPattern subject regex
-- @
debugPattern :: String -> String -> [MatchText String]
debugPattern subject reg =
let r = makeRegex reg :: Regex
in matchAllText r subject
| Noeda/adeonbot | bot/src/Bot/NetHack/ScreenPattern.hs | mit | 3,804 | 0 | 18 | 910 | 1,269 | 666 | 603 | 116 | 4 |
-- | "GHC.Generics"-based 'Test.QuickCheck.arbitrary' generators.
--
-- = Basic usage
--
-- @
-- {-\# LANGUAGE DeriveGeneric \#-}
--
-- data Foo = A | B | C -- some generic data type
-- deriving 'GHC.Generics.Generic'
-- @
--
-- Derive instances of 'Test.QuickCheck.Arbitrary'.
--
-- @
-- instance Arbitrary Foo where
-- arbitrary = 'genericArbitrary' 'uniform' -- Give a distribution of constructors.
-- shrink = 'Test.QuickCheck.genericShrink' -- Generic shrinking is provided by the QuickCheck library.
-- @
--
-- Or derive standalone generators (the fields must still be instances of
-- 'Test.QuickCheck.Arbitrary', or use custom generators).
--
-- @
-- genFoo :: Gen Foo
-- genFoo = 'genericArbitrary' 'uniform'
-- @
--
-- === Using @DerivingVia@
--
-- @
-- {-\# LANGUAGE DerivingVia, TypeOperators \#-}
--
-- data Foo = A | B | C
-- deriving 'GHC.Generics.Generic'
-- deriving Arbitrary via ('GenericArbitraryU' `'AndShrinking'` Foo)
-- @
--
-- For more information:
--
-- - "Generic.Random.Tutorial"
-- - http://blog.poisson.chat/posts/2018-01-05-generic-random-tour.html
{-# LANGUAGE ExplicitNamespaces #-}
module Generic.Random
(
-- * Arbitrary implementations
-- | The suffixes for the variants have the following meanings:
--
-- - @U@: pick constructors with uniform distribution (equivalent to
-- passing 'uniform' to the non-@U@ variant).
-- - @Single@: restricted to types with a single constructor.
-- - @G@: with custom generators.
-- - @Rec@: decrease the size at every recursive call (ensuring termination
-- for (most) recursive types).
-- - @'@: automatic discovery of "base cases" when size reaches 0.
genericArbitrary
, genericArbitraryU
, genericArbitrarySingle
, genericArbitraryRec
, genericArbitrary'
, genericArbitraryU'
-- ** With custom generators
-- |
-- === Note about incoherence
--
-- The custom generator feature relies on incoherent instances, which can
-- lead to surprising behaviors for parameterized types.
--
-- ==== __Example__
--
-- For example, here is a pair type and a custom generator of @Int@ (always
-- generating 0).
--
-- @
-- data Pair a b = Pair a b
-- deriving (Generic, Show)
--
-- customGen :: Gen Int
-- customGen = pure 0
-- @
--
-- The following two ways of defining a generator of @Pair Int Int@ are
-- __not__ equivalent.
--
-- The first way is to use 'genericArbitrarySingleG' to define a
-- @Gen (Pair a b)@ parameterized by types @a@ and @b@, and then
-- specialize it to @Gen (Pair Int Int)@.
--
-- In this case, the @customGen@ will be ignored.
--
-- @
-- genPair :: (Arbitrary a, Arbitrary b) => Gen (Pair a b)
-- genPair = 'genericArbitrarySingleG' customGen
--
-- genPair' :: Gen (Pair Int Int)
-- genPair' = genPair
-- -- Will generate nonzero pairs
-- @
--
-- The second way is to define @Gen (Pair Int Int)@ directly using
-- 'genericArbitrarySingleG' (as if we inlined @genPair@ in @genPair'@
-- above.
--
-- Then the @customGen@ will actually be used.
--
-- @
-- genPair2 :: Gen (Pair Int Int)
-- genPair2 = 'genericArbitrarySingleG' customGen
-- -- Will only generate (Pair 0 0)
-- @
--
-- In other words, the decision of whether to use a custom generator
-- is done by comparing the type of the custom generator with the type of
-- the field only in the context where 'genericArbitrarySingleG' is being
-- used (or any other variant with a @G@ suffix).
--
-- In the first case above, those fields have types @a@ and @b@, which are
-- not equal to @Int@ (or rather, there is no available evidence that they
-- are equal to @Int@, even if they could be instantiated as @Int@ later).
-- In the second case, they both actually have type @Int@.
, genericArbitraryG
, genericArbitraryUG
, genericArbitrarySingleG
, genericArbitraryRecG
-- * Specifying finite distributions
, Weights
, W
, (%)
, uniform
-- * Custom generators
-- | Custom generators can be specified in a list constructed with @(':+')@,
-- and passed to functions such as 'genericArbitraryG' to override how certain
-- fields are generated.
--
-- Example:
--
-- @
-- customGens :: Gen String ':+' Gen Int
-- customGens =
-- (filter (/= '\NUL') '<$>' arbitrary) ':+'
-- (getNonNegative '<$>' arbitrary)
-- @
--
-- There are also different types of generators, other than 'Test.QuickCheck.Gen', providing
-- more ways to select the fields the generator than by simply comparing types:
--
-- - @'Test.QuickCheck.Gen' a@: override fields of type @a@;
-- - @'Gen1' f@: override fields of type @f x@ for some @x@, requiring a generator for @x@;
-- - @'Gen1_' f@: override fields of type @f x@ for some @x@, __not__ requiring a generator for @x@;
-- - @'FieldGen' s a@: override record fields named @s@, which must have type @a@;
-- - @'ConstrGen' c i a@: override the field at index @i@ of constructor @c@,
-- which must have type @a@ (0-indexed);
--
-- Multiple generators may match a given field: the first, leftmost
-- generator in the list will be chosen.
, (:+) (..)
, FieldGen (..)
, fieldGen
, ConstrGen (..)
, constrGen
, Gen1 (..)
, Gen1_ (..)
-- * Helpful combinators
, listOf'
, listOf1'
, vectorOf'
-- * Base cases for recursive types
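  -- | A sketch of how to guarantee termination for a recursive type
  -- (the @Tree@ type and its constructors here are hypothetical):
  --
  -- @
  -- data Tree = Leaf | Node Tree Tree
  --   deriving 'GHC.Generics.Generic'
  --
  -- instance Arbitrary Tree where
  --   arbitrary = 'genericArbitraryRec' 'uniform' `'withBaseCase'` return Leaf
  -- @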
, withBaseCase
, BaseCase (..)
-- * Full options
, Options ()
, genericArbitraryWith
-- ** Setters
, SetOptions
, type (<+)
, setOpts
-- ** Size modifiers
, Sizing (..)
, SetSized
, SetUnsized
, setSized
, setUnsized
-- ** Custom generators
, SetGens
, setGenerators
-- ** Coherence options
, Coherence (..)
, Incoherent (..)
-- ** Common options
, SizedOpts
, sizedOpts
, SizedOptsDef
, sizedOptsDef
, UnsizedOpts
, unsizedOpts
-- *** Advanced options
-- | See 'Coherence'
, CohUnsizedOpts
, cohUnsizedOpts
, CohSizedOpts
, cohSizedOpts
-- * Generic classes
, GArbitrary
, GUniformWeight
-- * Newtypes for DerivingVia
-- | These newtypes correspond to the variants of 'genericArbitrary' above.
, GenericArbitrary (..)
, GenericArbitraryU (..)
, GenericArbitrarySingle (..)
, GenericArbitraryRec (..)
, GenericArbitraryG (..)
, GenericArbitraryUG (..)
, GenericArbitrarySingleG (..)
, GenericArbitraryRecG (..)
, GenericArbitraryWith (..)
, AndShrinking (..)
-- ** Helpers typeclasses
, TypeLevelGenList (..)
, TypeLevelOpts (..)
) where
import Generic.Random.Internal.BaseCase
import Generic.Random.Internal.Generic
import Generic.Random.DerivingVia
| Lysxia/generic-random | src/Generic/Random.hs | mit | 6,844 | 0 | 5 | 1,634 | 467 | 371 | 96 | 73 | 0 |
{-# OPTIONS_GHC -fno-warn-name-shadowing #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE DataKinds #-}
import Test.Hspec
import Yage.Prelude hiding (group)
import Yage.Math
import Yage.Lens hiding (elements)
import qualified Data.Vector as V
import Data.Vinyl.Instances ()
import Yage.Formats.Obj
import Yage.Formats.Obj.Parser
import Data.Attoparsec.ByteString (Parser, parseOnly, endOfInput)
main :: IO ()
main = hspec $ do
fileElementSpec
describe "parse OBJ files" $ do
it "parses a simple square" $ do
parsedObj <- parseOBJFile $ "test" </> "res" </> "square.obj"
parsedObj `shouldBe` squareOBJ
it "parses a cube with groups for each side" $ do
parsedObj <- parseOBJFile $ "test" </> "res" </> "cube_groups.obj"
parsedObj `shouldBe` cubeWithGroupsOBJ
{--
it "parses a simple square into position Geometry" $ do
(pos, _tex) <- geometryFromOBJFile $ "test" </> "res" </> "square.obj"
pos `shouldBe` squarePos
it "parses a simple square into texture Geometry" $ do
(_pos, tex) <- geometryFromOBJFile $ "test" </> "res" </> "square.obj"
tex `shouldBe` squareTex
--}
fileElementSpec :: Spec
fileElementSpec = do
objFileItems
vertexDataParsing
faceParsing
lineParsing
groupParsing
smoothingParsing
vertexDataParsing :: Spec
vertexDataParsing = describe "vertex data parsing" $ do
it "parses a geometric vertex" $
testParser geovertex "v 1.2 2.3 4.5" `shouldBe` Right (GeoVertex $ V3 1.2 2.3 4.5)
it "parses a geometric vertex line with pending trash" $
testParser geovertexLine "v 1.2 2.3 4.5 this is a 3d vertex\n" `shouldBe` Right (GeoVertex $ V3 1.2 2.3 4.5)
it "parses a normal vertex" $
testParser normalvertex "vn 1.2 2.3 4.5" `shouldBe` Right (NormalVertex $ V3 1.2 2.3 4.5)
it "parses a normal vertex line with pendig trash" $
testParser normalvertexLine "vn 1.2 2.3 4.5 this is a normal\n" `shouldBe` Right (NormalVertex $ V3 1.2 2.3 4.5)
it "parses a texture vertex" $
testParser texvertex "vt 1.2 2.3" `shouldBe` Right (TextureVertex $ V2 1.2 2.3)
it "parses a texture vertex line with pendig trash" $
testParser texvertexLine "vt 1.2 2.3 this is a texture coord\n" `shouldBe` Right (TextureVertex $ V2 1.2 2.3)
faceParsing :: Spec
faceParsing = describe "face parsing" $ do
it "parses a face with only geo indices" $
testParser faceLine "f 1 2 3\n" `shouldBe` Right (Face $ References . singleton <$> [OBJVertexIndex 1, OBJVertexIndex 2, OBJVertexIndex 3])
it "parses a face with geo, tex & normal indices" $
testParser faceLine "f 1/2/3 2/3/4 3/4/5\n" `shouldBe` Right (Face [ References [ OBJVertexIndex 1, OBJTextureIndex 2, OBJNormalIndex 3 ]
, References [ OBJVertexIndex 2, OBJTextureIndex 3, OBJNormalIndex 4 ]
, References [ OBJVertexIndex 3, OBJTextureIndex 4, OBJNormalIndex 5 ]
])
it "parses a face with geo, tex & normal indices with pending space" $
testParser faceLine "f 1/2/3 2/3/4 3/4/5 \n" `shouldBe` Right (Face [ References [ OBJVertexIndex 1, OBJTextureIndex 2, OBJNormalIndex 3 ]
, References [ OBJVertexIndex 2, OBJTextureIndex 3, OBJNormalIndex 4 ]
, References [ OBJVertexIndex 3, OBJTextureIndex 4, OBJNormalIndex 5 ]
])
lineParsing :: Spec
lineParsing = describe "line parsing" $ do
it "parses a line with only geo indices" $
testParser lineLine "l 1 2\n" `shouldBe` Right (Line $ References . singleton <$> [OBJVertexIndex 1, OBJVertexIndex 2])
it "parses a line with geo & tex indices" $
testParser lineLine "l 1/2 2/3\n" `shouldBe` Right (Line [ References [ OBJVertexIndex 1, OBJTextureIndex 2 ]
, References [ OBJVertexIndex 2, OBJTextureIndex 3 ]
])
groupParsing :: Spec
groupParsing = describe "group parsing" $ do
it "parses a single group name" $ do
testParser groupLine "g GroupName\n" `shouldBe` Right ([Group "GroupName"])
it "parses a multiple group names" $ do
testParser groupLine "g GroupName01 GroupName_01 \n" `shouldBe` Right ([Group "GroupName01", Group "GroupName_01"])
smoothingParsing :: Spec
smoothingParsing = describe "smoothing group parsing" $ do
it "parses a numeric smoothing group" $
testParser smoothingGroupLine "s 10\n" `shouldBe` (Right (SmoothingGroup 10))
it "parses the 'off' smoothing group token" $
testParser smoothingGroupLine "s off\n" `shouldBe` (Right (SmoothingGroup 0))
objFileItems :: Spec
objFileItems = do
describe "vector parsing" $ do
it "parses a V3" $
testParser v3 "1.2 2.3 4.5" `shouldBe` Right (V3 1.2 2.3 4.5)
it "parses a V2" $
testParser v2 "1.2 2.3" `shouldBe` Right (V2 1.2 2.3)
describe "comment parsing" $ do
it "parses a comment line" $
testParser commentLine "# a comment\n" `shouldBe` Right "a comment"
it "parses a comment line without leading space" $
testParser commentLine "#x\n" `shouldBe` Right "x"
it "parses an comment line with leading tab" $
testParser commentLine "#\tx\n" `shouldBe` Right "x"
it "parses an empty comment line without spaces" $
testParser commentLine "#\n" `shouldBe` Right ""
it "parses an empty comment line" $
testParser commentLine "# \n" `shouldBe` Right ""
testParser :: Parser a -> ByteString -> Either String a
testParser parser = parseOnly (parser <* endOfInput)
-- fixtures
squareOBJ :: OBJ
squareOBJ = mempty & vertexData.geometricVertices .~ V.fromList ( GeoVertex <$> [ V3 0 2 0, V3 0 0 0, V3 2 0 0, V3 2 2 0 ] )
& vertexData.vertexNormals .~ V.fromList ( NormalVertex <$> [ V3 0 0 1 ] )
& vertexData.textureVertices .~ V.fromList ( TextureVertex <$> [ V2 0 0 , V2 0 1 , V2 1 0 , V2 1 1 ] )
& groups.at "default" ?~ (SmoothingGroups $ mempty & at 0 ?~ elems)
& comments .~ [ "File exported by ZBrush version 3.5"
, "www.zbrush.com"
, "Vertex Count 8844"
, "UV Vertex Count 35368"
, "Face Count 8842"
, "Auto scale x=4.472662 y=4.472662 z=4.472662"
, "Auto offset x=-0.022567 y=0.418160 z=0.309246"
]
where elems = (mempty & faces .~ V.singleton (Face [ References [ OBJVertexIndex 1, OBJTextureIndex 1, OBJNormalIndex 1 ]
, References [ OBJVertexIndex 2, OBJTextureIndex 2, OBJNormalIndex 1 ]
, References [ OBJVertexIndex 3, OBJTextureIndex 4, OBJNormalIndex 1 ]
, References [ OBJVertexIndex 4, OBJTextureIndex 3, OBJNormalIndex 1 ]
]))
cubeWithGroupsOBJ :: OBJ
cubeWithGroupsOBJ =
mempty & vertexData.geometricVertices .~ V.fromList ( GeoVertex <$> [ V3 0 2 2, V3 0 0 2, V3 2 0 2, V3 2 2 2, V3 0 2 0, V3 0 0 0, V3 2 0 0, V3 2 2 0 ])
& groups.at "cube" ?~ ( SmoothingGroups $ mempty & at 1 ?~ cube )
& groups.at "front" ?~ ( SmoothingGroups $ mempty & at 1 ?~ front )
& groups.at "back" ?~ ( SmoothingGroups $ mempty & at 1 ?~ back )
& groups.at "right" ?~ ( SmoothingGroups $ mempty & at 1 ?~ right )
& groups.at "top" ?~ ( SmoothingGroups $ mempty & at 1 ?~ top )
& groups.at "left" ?~ ( SmoothingGroups $ mempty & at 1 ?~ left )
& groups.at "bottom" ?~ ( SmoothingGroups $ mempty & at 1 ?~ bottom )
& comments .~ ["from obj spec", "8 vertices", "6 elements"]
where
cube = mconcat [ front, back, right, top, left, bottom ]
front = mempty & faces .~ V.singleton (Face [ References [ OBJVertexIndex 1 ], References [ OBJVertexIndex 2 ], References [ OBJVertexIndex 3 ], References [ OBJVertexIndex 4 ] ])
back = mempty & faces .~ V.singleton (Face [ References [ OBJVertexIndex 8 ], References [ OBJVertexIndex 7 ], References [ OBJVertexIndex 6 ], References [ OBJVertexIndex 5 ] ])
right = mempty & faces .~ V.singleton (Face [ References [ OBJVertexIndex 4 ], References [ OBJVertexIndex 3 ], References [ OBJVertexIndex 7 ], References [ OBJVertexIndex 8 ] ])
top = mempty & faces .~ V.singleton (Face [ References [ OBJVertexIndex 5 ], References [ OBJVertexIndex 1 ], References [ OBJVertexIndex 4 ], References [ OBJVertexIndex 8 ] ])
left = mempty & faces .~ V.singleton (Face [ References [ OBJVertexIndex 5 ], References [ OBJVertexIndex 6 ], References [ OBJVertexIndex 2 ], References [ OBJVertexIndex 1 ] ])
bottom = mempty & faces .~ V.singleton (Face [ References [ OBJVertexIndex 2 ], References [ OBJVertexIndex 6 ], References [ OBJVertexIndex 7 ], References [ OBJVertexIndex 3 ] ])
| MaxDaten/yage | test/ObjParsingTest.hs | mit | 9,674 | 0 | 26 | 3,093 | 2,456 | 1,225 | 1,231 | -1 | -1 |
module Question where
-- https://mail.haskell.org/pipermail/beginners/2017-November/017921.html
notThe :: String -> Maybe String
notThe word
| word == "the" = Nothing
| otherwise = Just word
replaceThe :: String -> String
replaceThe word = unwords (go (words word))
  where
    go [] = []
    go (x:xs) =
      case notThe x of
        Just w  -> w : go xs
        Nothing -> "a" : go xs
-- case (words word) of
-- |
-- go (words word)
-- where go (x:xs)
-- | Just x = word ++ go xs
-- | Nothing = " a " ++ go xs
-- | notThe x == Just [] = []
-- | notThe x == Just word = word ++ go xs
-- | notThe word == Nothing = " a " ++ go xs
| brodyberg/Notes | csv/csv1/src/Question.hs | mit | 721 | 0 | 11 | 254 | 150 | 78 | 72 | 12 | 3 |
{-# LANGUAGE OverloadedStrings #-}
module ZoomHub.Web.Types.OpenSeadragonTileSource
( OpenSeadragonTileSource,
fromDeepZoomImage,
)
where
import Data.Aeson (ToJSON, object, toJSON, (.=))
import qualified Data.Text as T
import System.FilePath (dropExtension)
import ZoomHub.API.Types.DeepZoomImage
( DeepZoomImage,
dziHeight,
dziTileFormat,
dziTileOverlap,
dziTileSize,
dziUrl,
dziWidth,
)
newtype OpenSeadragonTileSource = OpenSeadragonTileSource
{unOpenSeadragonTileSource :: DeepZoomImage}
deriving (Eq, Show)
fromDeepZoomImage :: DeepZoomImage -> OpenSeadragonTileSource
fromDeepZoomImage = OpenSeadragonTileSource
-- JSON
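-- The encoded JSON has roughly this shape (all values illustrative):
--
-- > { "Image": { "xmlns": "http://schemas.microsoft.com/deepzoom/2008"
-- >            , "Url": "https://example.invalid/image_files/"
-- >            , "Format": "jpg", "Overlap": 1, "TileSize": 254
-- >            , "Size": { "Width": 4096, "Height": 3072 } } }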
instance ToJSON OpenSeadragonTileSource where
toJSON o =
object
[ "Image"
.= object
[ "xmlns" .= ("http://schemas.microsoft.com/deepzoom/2008" :: T.Text),
"Url" .= (dropExtension (show $ dziUrl dzi) ++ "_files/"),
"Format" .= dziTileFormat dzi,
"Overlap" .= dziTileOverlap dzi,
"TileSize" .= dziTileSize dzi,
"Size"
.= object
[ "Width" .= dziWidth dzi,
"Height" .= dziHeight dzi
]
]
]
where
dzi = unOpenSeadragonTileSource o
| zoomhub/zoomhub | src/ZoomHub/Web/Types/OpenSeadragonTileSource.hs | mit | 1,300 | 0 | 17 | 364 | 268 | 156 | 112 | 35 | 1 |
{-# LANGUAGE OverloadedStrings, ExtendedDefaultRules #-}
import Test.Hspec
import Test.QuickCheck
import Test.QuickCheck.Monadic (assert, monadicIO)
import Data.Functor
import Control.Monad
import Database.Bicod
import Database.MongoDB
--newtype Alpha = Alpha { runAplha :: [Char] } deriving (Eq, Show)
--instance Arbitrary ObjectId where
--arbitrary = genObjectId -- liftM (Alpha . filter (`elem` ['0'..'z'])) $ arbitrary
-- --coarbitrary c = variant (ord c `rem` 4)
main = hspec $ do
describe "Bidoc" $ do
describe "Main" $ do
describe "getOpsFromURI" $ do
describe "MongoDB" $ do
it "parse host and port" $ do
(show <$> getOpsFromURI "mongo://localhost:555/test/posts/_id") `shouldBe` (Right $ show (Mongo ("localhost", 555) "test" "posts" "_id"))
it "use default port" $ do
(show <$> getOpsFromURI "mongo://localhost/test/posts/_id") `shouldBe` (Right $ show (Mongo ("localhost", 27017) "test" "posts" "_id"))
describe "ElasticSearch" $ do
it "parse host and port" $ do
(show <$> getOpsFromURI "es://localhost:555/posts/post/id") `shouldBe` (Right $ show (ElasticSearch ("localhost", 555) "posts" "post" "id"))
it "use default port" $ do
(show <$> getOpsFromURI "es://localhost/posts/post/id") `shouldBe` (Right $ show (ElasticSearch ("localhost", 9200) "posts" "post" "id"))
describe "Pivot" $ do
--it "Return the same id" $ monadicIO $ do
-- oid <- genObjectId
-- assert $ pivot oid oid == oid
it "should work for numbers" $ do
pivot "123456" "543210" `shouldBe` "333333"
it "should work for mongo oid" $ do
pivot a b `shouldBe` c
where
a = read "5169926decd2f29305538415" :: ObjectId
b = read "5169952687b1cd974758065a" :: ObjectId
c = read "516993ca3a4260152655c537" :: ObjectId
| teamon/bicod | test/Spec.hs | mit | 1,954 | 0 | 29 | 501 | 433 | 221 | 212 | 30 | 1 |
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE OverlappingInstances #-}
module System.Console.Questioner
(
Question(..)
, ChoiceEvent
, charToChoiceEvent
, listPrompt
, checkboxPrompt
, module System.Console.Questioner.ProgressIndicators
)
where
import Control.Applicative ((<$>))
import Control.Concurrent.STM
import Control.Monad (forM_, (>=>))
import Data.List (delete)
import Graphics.Vty (Event (..),
Key (..),
Modifier (..))
import qualified Graphics.Vty as Vty
import System.Console.ANSI (Color (..), ColorIntensity (..), ConsoleIntensity (..), ConsoleLayer (..),
SGR (..),
clearLine,
cursorUpLine,
setSGR)
import System.Console.Questioner.ProgressIndicators
import System.Console.Questioner.Util
import System.Exit
import System.IO (hFlush, stdin,
stdout)
-- Base `Question` class and `Question` instances
-------------------------------------------------------------------------------
class Question q a where
prompt :: q -> IO a
instance {-# OVERLAPPABLE #-} Read a => Question String a where
prompt = putStr . (++ " ") >=> const readLn
instance {-# OVERLAPPING #-} Question String String where
prompt = putStr . (++ " ") >=> const getLine
instance {-# OVERLAPPING #-} Question String (Maybe String) where
prompt = putStr . (++ " ") >=> const getLine >=> helper
where
helper [] = return Nothing
helper s = return $ Just s
instance {-# OVERLAPPING #-} Question (String, (String, String)) String where
prompt (s, (o1, o2)) = do
putStr s
putStr $ " (" ++ o1 ++ "/" ++ o2 ++ ") "
getLine
instance {-# OVERLAPPING #-} Question (String, [String]) String where
prompt = uncurry listPrompt
instance {-# OVERLAPPING #-} Question (String, [String]) [String] where
prompt = uncurry checkboxPrompt
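-- A usage sketch (the question strings below are illustrative only):
--
-- > -- inside some IO do-block:
-- > name <- prompt "What is your name?" :: IO String
-- > lang <- prompt ("Pick a language:", ["Haskell", "Idris", "Agda"]) :: IO String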
-- Multiple choice prompts
-------------------------------------------------------------------------------
data ChoiceEvent = MoveUp | MoveDown | MakeChoice | ToggleSelection | Exit
deriving(Eq, Ord, Show)
charToChoiceEvent :: Char -> Maybe ChoiceEvent
charToChoiceEvent 'j' = Just MoveDown
charToChoiceEvent 'k' = Just MoveUp
charToChoiceEvent '\n' = Just MakeChoice
charToChoiceEvent ' ' = Just ToggleSelection
charToChoiceEvent _ = Nothing
-- simpleListPrompt options choices = setup $ do
-- inp <- Vty.inputForConfig =<< Vty.standardIOConfig
-- selection <- waitForSelection (Vty._eventChannel inp) 0
-- setSGR []
-- clearScreen
-- setCursorPosition 0 0
-- Vty.shutdownInput inp
-- return selection
-- where
-- setup = withNoBuffering stdin NoBuffering . withNoCursor . withNoEcho
-- numChoices = length choices
-- waitForSelection ichan currentIdx = do
-- clearScreen
-- renderListOptions options def choices currentIdx
-- e <- atomically $ readTChan ichan
-- case e of
-- EvKey KEnter _ -> return $ Just (choices !! currentIdx)
-- EvKey (KChar 'n') [MCtrl] -> onDown
-- EvKey (KChar 'j') _ -> onDown
-- EvKey KDown _ -> onDown
-- EvKey (KChar 'p') [MCtrl] -> onUp
-- EvKey (KChar 'k') _ -> onUp
-- EvKey KUp _ -> onUp
-- EvKey (KChar 'q') _ -> return Nothing
-- EvKey KEsc _ -> return Nothing
-- _ -> waitForSelection ichan currentIdx
-- where
-- onDown = waitForSelection ichan ((currentIdx + 1) `rem` numChoices)
-- onUp = let currentIdx' = if currentIdx == 0
-- then length choices - 1
-- else currentIdx - 1
-- in waitForSelection ichan currentIdx'
listPrompt :: String -> [String] -> IO String
listPrompt question options = setup $ do
putStrLn question
-- selection has structure: (selected item's index, indexed options)
let selection = (0, zip options ([0..] :: [Int]))
mi <- listenForSelection selection
case mi of
Just i -> return (options !! i)
Nothing -> exitSuccess
where
setup = hWithNoBuffering stdin . withNoEcho
listenForSelection selection = do
inp <- Vty.inputForConfig =<< Vty.standardIOConfig
go (Vty._eventChannel inp) selection
where
go c os = do
render os
hFlush stdout
e <- atomically (readTChan c)
case e of
EvKey KEnter _ -> do
-- makeChoice
return (Just (fst os))
EvKey (KChar 'n') [MCtrl] -> do
clearFromCursorTo $ length $ snd os
go c (updateSelection MoveDown os)
EvKey (KChar 'j') _ -> do
clearFromCursorTo $ length $ snd os
go c (updateSelection MoveDown os)
EvKey KDown _ -> do
clearFromCursorTo $ length $ snd os
go c (updateSelection MoveDown os)
EvKey (KChar 'p') [MCtrl] -> do
clearFromCursorTo $ length $ snd os
go c (updateSelection MoveUp os)
EvKey (KChar 'k') _ -> do
clearFromCursorTo $ length $ snd os
go c (updateSelection MoveUp os)
EvKey KUp _ -> do
clearFromCursorTo $ length $ snd os
go c (updateSelection MoveUp os)
EvKey (KChar 'q') _ ->
return Nothing
EvKey (KChar 'c') [MCtrl] ->
return Nothing
EvKey KEsc _ ->
return Nothing
_ -> go c os
makeChoice = forM_ (replicate (length (snd selection)) ())
(const (clearLine >> cursorUpLine 1))
updateSelection MoveUp (i, os) = ((i - 1) `mod` length os, os)
updateSelection MoveDown (i, os) = ((i + 1) `mod` length os, os)
updateSelection _ _ = error "Internal error, key not recognized"
render (s, optionsI) = forM_ optionsI $ \(o, i) ->
if i == s
then do
setSGR [ SetColor Foreground Vivid White
, SetConsoleIntensity BoldIntensity
]
putStr "> "
setSGR [ SetColor Foreground Vivid Cyan
, SetConsoleIntensity NormalIntensity
]
putStrLn $ o
setSGR []
else putStrLn $ " " ++ o
checkboxPrompt :: String -> [String] -> IO [String]
checkboxPrompt question options = setup $ do
putStrLn question
let selection = (0, [], zip options ([0..] :: [Int]))
render selection
is <- listenForSelection selection
return $ map (options !!) is
where
setup = hWithNoBuffering stdin . withNoEcho
listenForSelection :: (Int, [Int], [(String, Int)]) -> IO [Int]
listenForSelection selection@(_, _, s3) = do
inp <- Vty.inputForConfig =<< Vty.standardIOConfig
go (Vty._eventChannel inp) selection
where
go :: TChan Event -> (Int, [Int], [(String, Int)]) -> IO [Int]
go c os@(_, os2, os3) = do
render os
hFlush stdout
e <- atomically (readTChan c)
print e
case e of
EvKey KEnter _ -> do
makeChoice
return os2
EvKey (KChar 'n') [MCtrl] -> do
clearFromCursorTo $ length os3
go c (updateSelection MoveDown os)
EvKey (KChar 'j') _ -> do
clearFromCursorTo $ length os3
go c (updateSelection MoveDown os)
EvKey KDown _ -> do
clearFromCursorTo $ length os3
go c (updateSelection MoveDown os)
EvKey (KChar 'p') [MCtrl] -> do
clearFromCursorTo $ length os3
go c (updateSelection MoveUp os)
EvKey (KChar 'k') _ -> do
clearFromCursorTo $ length os3
go c (updateSelection MoveUp os)
EvKey KUp _ -> do
clearFromCursorTo $ length os3
go c (updateSelection MoveUp os)
EvKey (KChar 'q') _ ->
return []
EvKey (KChar 'c') [MCtrl] ->
return []
EvKey KEsc _ ->
return []
_ -> do
clearFromCursorTo $ length os3
go c os
makeChoice = do
let size = length (s3 :: [(String, Int)])
mlist = replicate size ()
forM_ mlist (const (clearLine >> cursorUpLine 1))
updateSelection MoveUp (i, is, os) = ((i - 1) `mod` length os, is, os)
updateSelection MoveDown (i, is, os) = ((i + 1) `mod` length os, is, os)
updateSelection ToggleSelection (i, is, os) = (i, is', os)
where
is' = if i `elem` is then delete i is else i:is
updateSelection _ _ = error "Internal error, key not recognized"
render (i, is, optionsI) = forM_ optionsI $ \(o, j) -> do
let checkbox = if j `elem` is then "◉ " else "◯ "
if i == j
then do
setSGR [ SetColor Foreground Vivid Cyan ]
putStrLn $ ">" ++ checkbox ++ o
setSGR []
else putStrLn $ " " ++ checkbox ++ o
| yamadapc/stack-run | unix/System/Console/Questioner.hs | mit | 10,300 | 0 | 20 | 4,128 | 2,509 | 1,303 | 1,206 | -1 | -1 |
module MachineLearning.SupervisedLearning.Regression.Macro where
import Types
import Functions.Application.Macro
-- | Concrete noise modeling random variable
nois_ :: Note
nois_ = epsilon
-- | Residual sum of squares
rss :: Note -- ^ Beta parameter
-> Note
rss = fn "RSS"
-- | Ridge cost
ridge :: Note -- ^ Beta parameter
-> Note -- ^ Lambda parameter
-> Note
ridge = fn2 "Ridge"
lasso :: Note -- ^ Beta parameter
-> Note -- ^ Lambda parameter
-> Note
lasso = fn2 "Lasso"
| NorfairKing/the-notes | src/MachineLearning/SupervisedLearning/Regression/Macro.hs | gpl-2.0 | 526 | 0 | 6 | 135 | 94 | 57 | 37 | 16 | 1 |
{-# OPTIONS_HADDOCK show-extensions #-}
-- |
-- Module : Yi.Search.Internal
-- License : GPL-2
-- Maintainer : [email protected]
-- Stability : experimental
-- Portability : portable
--
-- Internal use for Yi.Search.
module Yi.Search.Internal where
import Control.Lens (assign, use)
import Yi.Editor (EditorM, currentRegexA)
import Yi.Regex (SearchExp)
-- ---------------------------------------------------------------------
-- Searching and substitutions with regular expressions
--
-- The most recent regex is held by the editor. You can get at it with
-- getRegexE. This is useful to determine if there was a previous
-- pattern.
--
-- | Put regex into regex 'register'
setRegexE :: SearchExp -> EditorM ()
setRegexE re = assign currentRegexA (Just re)
-- | Clear the regex 'register'
resetRegexE :: EditorM ()
resetRegexE = assign currentRegexA Nothing
-- | Return contents of regex register
getRegexE :: EditorM (Maybe SearchExp)
getRegexE = use currentRegexA
| atsukotakahashi/wi | src/library/Yi/Search/Internal.hs | gpl-2.0 | 997 | 0 | 7 | 162 | 137 | 83 | 54 | 11 | 1 |
-- | Reverse a list.
invertir :: [a] -> [a]
invertir [] = []
invertir (x:xs) = invertir xs ++ [x]
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE EmptyDataDecls #-}
module JSAPI (
Foreign
, Event
, Element
, addEventListener
, addWindowEventListener
, getElementById
, getElementsById
, setClassName
, setInterval
, alert
, mapM
) where
import Prelude
import FFI
class Foreign a
data Event
instance Foreign Event
data Element
instance Foreign Element
-- | Add an event listener for the given event
addEventListener :: Element -> String -> (Event -> Fay Bool) -> Fay ()
addEventListener = ffi "%1['addEventListener'](%2,%3,false)"
-- | Add a window event listener for the given event
addWindowEventListener :: String -> (Event -> Fay Bool) -> Fay ()
addWindowEventListener = ffi "window['addEventListener'](%1,%2,false)"
-- | Get an element from the document
getElementById :: String -> Fay Element
getElementById = ffi "document['getElementById'](%1)"
-- | Get a list of elements from the document
getElementsById :: [String] -> Fay [Element]
getElementsById = mapM getElementById
-- | Set the class name property
setClassName :: String -> Element -> Fay ()
setClassName = ffi "%2['className']=%1"
-- | Set a timer
setInterval :: Fay () -> Double -> Fay ()
setInterval = ffi "window['setInterval'](%1,%2)"
-- | Popup an alert window with the given input as message
alert :: String -> Fay ()
alert = ffi "window['alert'](%1)"
-- | Implementation of mapM (should really be part of Fay-Base)
mapM :: (a -> Fay b) -> [a] -> Fay [b]
mapM _ [] = return []
mapM f (x:xs) = do
vx <- f x
vxs <- mapM f xs
return (vx:vxs) | SneakingCat/fay-ticker | src/JSAPI.hs | gpl-3.0 | 1,559 | 0 | 10 | 290 | 384 | 203 | 181 | -1 | -1 |
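-- A usage sketch (illustrative only; the element id "demo" is an assumption,
-- not something defined in this module):
--
-- > setupDemo :: Fay ()
-- > setupDemo = do
-- >   el <- getElementById "demo"
-- >   addEventListener el "click" (\_ -> alert "clicked" >> return False)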
{-# LANGUAGE NoImplicitPrelude #-}
module Bamboo.Theme.MiniHTML5.Widget.Navigation where
import Bamboo.Theme.MiniHTML5.Env hiding (header, current, link)
import qualified Bamboo.Type.Config as C
import qualified Bamboo.Type.State as State
import Bamboo.Helper (drop_known_extension)
navigation :: Widget
navigation s = nav [_class "site"] - do
ul' - s.config.C.navigation.mapM_ nav_item
where
nav_item x =
li [_class (home_tag ++ "page_item" ++ current)] link
where
_nav = s.State.nav_location
home_tag = if x == home_nav then "first " else ""
current = if x == _nav then " current_page_item" else ""
link = if x == home_nav then home_link else static_link
home_link = a [href - s.env.slashed_script_name] - str home_nav
static_link = a [href - s.env.slashed_script_name / "static" / x] -
str - x.drop_known_extension
| nfjinjing/bamboo-theme-mini-html5 | src/Bamboo/Theme/MiniHTML5/Widget/Navigation.hs | gpl-3.0 | 928 | 2 | 17 | 222 | 261 | 148 | 113 | 18 | 4 |
{-# LANGUAGE PackageImports #-}
import "cruzo" Application (getApplicationDev)
import Network.Wai.Handler.Warp
(runSettings, defaultSettings, settingsPort)
import Control.Concurrent (forkIO)
import System.Directory (doesFileExist, removeFile)
import System.Exit (exitSuccess)
import Control.Concurrent (threadDelay)
main :: IO ()
main = do
putStrLn "Starting devel application"
(port, app) <- getApplicationDev
forkIO $ runSettings defaultSettings
{ settingsPort = port
} app
loop
loop :: IO ()
loop = do
threadDelay 100000
e <- doesFileExist "dist/devel-terminate"
if e then terminateDevel else loop
terminateDevel :: IO ()
terminateDevel = exitSuccess
| ajdunlap/cruzo | devel.hs | gpl-3.0 | 699 | 0 | 10 | 123 | 186 | 101 | 85 | 23 | 2 |
{-# LANGUAGE NoImplicitPrelude #-}
module Lamdu.GUI.ExpressionEdit.EventMap
( make
, modifyEventMap
, jumpHolesEventMap
) where
import Prelude.Compat
import Control.Applicative (liftA2)
import qualified Control.Lens as Lens
import Control.Lens.Operators
import Control.MonadA (MonadA)
import qualified Data.Store.Transaction as Transaction
import qualified Graphics.UI.Bottle.EventMap as E
import Graphics.UI.Bottle.ModKey (ModKey(..))
import Graphics.UI.Bottle.Widget (EventHandlers)
import qualified Graphics.UI.Bottle.Widget as Widget
import Lamdu.CharClassification (operatorChars)
import Lamdu.Config (Config)
import qualified Lamdu.Config as Config
import Lamdu.GUI.ExpressionEdit.HoleEdit.State (HoleState(..))
import qualified Lamdu.GUI.ExpressionEdit.HoleEdit.State as HoleEditState
import qualified Lamdu.GUI.ExpressionEdit.HoleEdit.WidgetIds as HoleWidgetIds
import Lamdu.GUI.ExpressionGui.Monad (ExprGuiM, HolePickers)
import qualified Lamdu.GUI.ExpressionGui.Monad as ExprGuiM
import qualified Lamdu.GUI.ExpressionGui.Types as ExprGuiT
import qualified Graphics.UI.Bottle.WidgetsEnvT as WE
import qualified Lamdu.GUI.WidgetIds as WidgetIds
import Lamdu.Sugar.NearestHoles (NearestHoles)
import qualified Lamdu.Sugar.NearestHoles as NearestHoles
import qualified Lamdu.Sugar.Types as Sugar
type T = Transaction.Transaction
make ::
MonadA m => HolePickers m -> Sugar.Payload m ExprGuiT.Payload ->
ExprGuiM m (EventHandlers (T m))
make holePickers pl =
mconcat <$> sequenceA
[ maybe (return mempty)
(actionsEventMap holePickers)
(pl ^. Sugar.plActions)
, jumpHolesEventMapIfSelected pl
, replaceOrComeToParentEventMap pl
]
mkEventMap ::
Functor f =>
[ModKey] -> E.Doc ->
f Sugar.EntityId -> EventHandlers f
mkEventMap keys doc =
Widget.keysEventMapMovesCursor keys doc .
fmap WidgetIds.fromEntityId
mkEventMapWithPickers ::
(Functor f, MonadA m) =>
HolePickers m ->
[ModKey] -> E.Doc ->
(f Sugar.EntityId -> T m Widget.Id) ->
f Sugar.EntityId -> EventHandlers (T m)
mkEventMapWithPickers holePickers keys doc f =
E.keyPresses keys doc .
liftA2 mappend (ExprGuiM.holePickersAction holePickers) .
fmap Widget.eventResultFromCursor . f
isExprSelected :: Sugar.Payload f a -> Widget.Id -> Bool
isExprSelected pl cursor =
WidgetIds.fromExprPayload pl
& (`Widget.subId` cursor)
& Lens.has Lens._Just
jumpHolesEventMap ::
MonadA m => NearestHoles -> ExprGuiM m (EventHandlers (T m))
jumpHolesEventMap hg =
do
config <- ExprGuiM.readConfig <&> Config.hole
let jumpEventMap keys dirStr lens =
maybe mempty
(Widget.keysEventMapMovesCursor (keys config)
(E.Doc ["Navigation", jumpDoc dirStr]) . pure . WidgetIds.fromEntityId) $
hg ^. lens
mconcat
[ jumpEventMap Config.holeJumpToNextKeys "next" NearestHoles.next
, jumpEventMap Config.holeJumpToPrevKeys "previous" NearestHoles.prev
] & return
where
jumpDoc dirStr = "Jump to " ++ dirStr ++ " hole"
jumpHolesEventMapIfSelected ::
MonadA m =>
Sugar.Payload m ExprGuiT.Payload -> ExprGuiM m (EventHandlers (T m))
jumpHolesEventMapIfSelected pl =
do
cursor <- ExprGuiM.widgetEnv WE.readCursor
if isExprSelected pl cursor
then pl ^. Sugar.plData . ExprGuiT.plNearestHoles & jumpHolesEventMap
else pure mempty
extractEventMap :: Functor m => Config -> Sugar.Actions m -> EventHandlers (T m)
extractEventMap config actions =
actions ^. Sugar.extract
& maybe mempty (mkEventMap (Config.extractKeys config)
(E.Doc ["Edit", "Extract to where-item"]))
replaceOrComeToParentEventMap ::
MonadA m =>
Sugar.Payload m ExprGuiT.Payload ->
ExprGuiM m (EventHandlers (T m))
replaceOrComeToParentEventMap pl =
do
config <- ExprGuiM.readConfig
let delKeys = Config.replaceKeys config ++ Config.delKeys config
cursor <- ExprGuiM.widgetEnv WE.readCursor
return $
if isExprSelected pl cursor
then maybe mempty (replaceEventMap config) $ pl ^. Sugar.plActions
else
Widget.keysEventMapMovesCursor delKeys
(E.Doc ["Navigation", "Select parent"]) selectParent
where
selectParent =
WidgetIds.fromExprPayload pl
& WidgetIds.notDelegatingId
& return
actionsEventMap ::
MonadA m =>
HolePickers m ->
Sugar.Actions m ->
ExprGuiM m (EventHandlers (T m))
actionsEventMap holePickers actions =
do
config <- ExprGuiM.readConfig
return $ mconcat
[ wrapEventMap holePickers config
, applyOperatorEventMap holePickers
, extractEventMap config
] actions
applyOperatorEventMap ::
MonadA m => HolePickers m -> Sugar.Actions m -> EventHandlers (T m)
applyOperatorEventMap holePickers actions =
case actions ^. Sugar.wrap of
Sugar.WrapAction wrap -> action wrap
Sugar.WrapperAlready holeId -> action $ return holeId
Sugar.WrappedAlready holeId -> action $ return holeId
Sugar.WrapNotAllowed -> mempty
where
doc = E.Doc ["Edit", ExprGuiM.holePickersAddDocPrefix holePickers "Apply operator"]
action wrap =
E.charGroup "Operator" doc operatorChars $ \c ->
mappend
<$> ExprGuiM.holePickersAction holePickers
<*> do
(guid, entityId) <- wrap
cursor <- HoleEditState.setHoleStateAndJump guid (HoleState [c]) entityId
return $ Widget.eventResultFromCursor cursor
wrapEventMap ::
MonadA m =>
HolePickers m -> Config ->
Sugar.Actions m -> EventHandlers (T m)
wrapEventMap holePickers config actions =
case actions ^. Sugar.wrap of
Sugar.WrapAction wrap ->
mkEventMapWithPickers holePickers
(Config.wrapKeys config)
(E.Doc ["Edit", ExprGuiM.holePickersAddDocPrefix holePickers "Wrap"])
(fmap (HoleWidgetIds.hidOpen . HoleWidgetIds.make)) (snd <$> wrap)
Sugar.WrapperAlready _ -> mempty
Sugar.WrappedAlready _ -> mempty
Sugar.WrapNotAllowed -> mempty
replaceEventMap :: MonadA m => Config -> Sugar.Actions m -> EventHandlers (T m)
replaceEventMap config actions =
mconcat
[ actionEventMap (Sugar.setToInnerExpr . Sugar._SetToInnerExpr)
"Replace with inner expression" $ Config.delKeys config
, actionEventMap (Sugar.setToHole . Sugar._SetToHole . Lens.to (fmap snd))
"Replace expression" delKeys
]
where
actionEventMap l doc keys =
maybe mempty (mkEventMap keys (E.Doc ["Edit", doc])) $
actions ^? l
delKeys = Config.replaceKeys config ++ Config.delKeys config
modifyEventMap ::
MonadA m => HolePickers m -> Config ->
Sugar.Actions m -> EventHandlers (T m)
modifyEventMap holePickers config =
mconcat
[ wrapEventMap holePickers config
, applyOperatorEventMap holePickers
, replaceEventMap config
]
| rvion/lamdu | Lamdu/GUI/ExpressionEdit/EventMap.hs | gpl-3.0 | 7,233 | 0 | 20 | 1,770 | 1,938 | 1,000 | 938 | -1 | -1 |
-- | Import/Export JSON support
{-# LANGUAGE NoImplicitPrelude, TemplateHaskell, OverloadedStrings, FlexibleContexts, LambdaCase #-}
module Lamdu.Data.Export.JSON
( fileExportRepl, jsonExportRepl
, fileExportAll
, fileExportDef
, fileImportAll
) where
import qualified Control.Lens as Lens
import Control.Lens.Operators hiding ((.=))
import Control.Lens.Tuple
import Control.Monad (unless)
import Control.Monad.Trans.Class (MonadTrans(..))
import Control.Monad.Trans.State (StateT)
import qualified Control.Monad.Trans.State as State
import Control.Monad.Trans.Writer (WriterT(..))
import qualified Control.Monad.Trans.Writer as Writer
import qualified Data.Aeson as Aeson
import qualified Data.Aeson.Encode.Pretty as AesonPretty
import Data.Binary (Binary)
import qualified Data.ByteString.Lazy as LBS
import Data.Foldable (traverse_)
import Data.Set (Set)
import qualified Data.Set as Set
import Data.Store.IRef (IRef)
import qualified Data.Store.IRef as IRef
import qualified Data.Store.Property as Property
import Data.Store.Transaction (Transaction)
import qualified Data.Store.Transaction as Transaction
import Data.UUID.Types (UUID)
import qualified Lamdu.Calc.Type as T
import Lamdu.Calc.Type.Nominal (Nominal)
import qualified Lamdu.Calc.Val as V
import Lamdu.Calc.Val.Annotated (Val(..))
import qualified Lamdu.Data.Anchors as Anchors
import Lamdu.Data.DbLayout (ViewM)
import qualified Lamdu.Data.DbLayout as DbLayout
import Lamdu.Data.Definition (Definition(..))
import qualified Lamdu.Data.Definition as Definition
import qualified Lamdu.Data.Export.JSON.Codec as Codec
import Lamdu.Expr.IRef (ValI)
import qualified Lamdu.Expr.IRef as ExprIRef
import qualified Lamdu.Expr.Lens as ExprLens
import qualified Lamdu.Expr.Load as Load
import Lamdu.Expr.UniqueId (ToUUID)
import Prelude.Compat
type T = Transaction
data Visited = Visited
{ _visitedDefs :: Set V.Var
, _visitedTags :: Set T.Tag
, _visitedNominals :: Set T.NominalId
}
Lens.makeLenses ''Visited
type Export = WriterT [Codec.Entity] (StateT Visited (T ViewM))
runExport :: Export a -> T ViewM (a, Aeson.Value)
runExport act =
act
& runWriterT
<&> _2 %~ Aeson.toJSON
& (`State.evalStateT` Visited mempty mempty mempty)
trans :: T ViewM a -> Export a
trans = lift . lift
withVisited :: Ord a => Lens.ALens' Visited (Set a) -> a -> Export () -> Export ()
withVisited l x act =
do
alreadyVisited <- Lens.use (Lens.cloneLens l . Lens.contains x)
unless alreadyVisited $
do
Lens.assign (Lens.cloneLens l . Lens.contains x) True
act
readAssocName :: Monad m => ToUUID a => a -> T m (Maybe String)
readAssocName x =
do
name <- Anchors.assocNameRef x & Transaction.getP
return $
if null name
then Nothing
else Just name
tell :: Codec.Entity -> Export ()
tell = Writer.tell . (: [])
exportTag :: T.Tag -> Export ()
exportTag tag =
do
tagOrder <- Transaction.getP (Anchors.assocTagOrder tag) & trans
mName <- readAssocName tag & trans
Codec.EntityTag tagOrder mName tag & tell
& withVisited visitedTags tag
exportNominal :: T.NominalId -> Export ()
exportNominal nomId =
do
nominal <- trans (Load.nominal nomId)
mName <- readAssocName nomId & trans
Codec.EntityNominal mName nomId nominal & tell
& withVisited visitedNominals nomId
exportSubexpr :: Val (ValI ViewM) -> Export ()
exportSubexpr (Val lamI (V.BLam (V.Lam lamVar _))) =
do
mName <- readAssocName lamVar & trans
mParamList <- Transaction.getP (Anchors.assocFieldParamList lamI) & trans
Codec.EntityLamVar mParamList mName (valIToUUID lamI) lamVar & tell
exportSubexpr _ = return ()
exportVal :: Val (ValI ViewM) -> Export ()
exportVal val =
do
val ^.. ExprLens.valGlobals mempty & traverse_ (exportDef . ExprIRef.defI)
val ^.. ExprLens.valTags & traverse_ exportTag
val ^.. ExprLens.valNominals & traverse_ exportNominal
val ^.. ExprLens.subExprs & traverse_ exportSubexpr
valIToUUID :: ValI m -> UUID
valIToUUID = IRef.uuid . ExprIRef.unValI
exportDef :: ExprIRef.DefI ViewM -> Export ()
exportDef defI =
do
presentationMode <- Transaction.getP (Anchors.assocPresentationMode globalId) & trans
mName <- readAssocName globalId & trans
def <-
Load.def defI & trans
<&> Definition.defBody . Lens.mapped . Lens.mapped %~ Property.value
traverse_ exportVal (def ^. Definition.defBody)
let def' = def & Definition.defBody . Lens.mapped . Lens.mapped %~ valIToUUID
(presentationMode, mName, globalId) <$ def' & Codec.EntityDef & tell
& withVisited visitedDefs globalId
where
globalId = ExprIRef.globalId defI
exportRepl :: Export ()
exportRepl =
do
repl <-
DbLayout.repl DbLayout.codeIRefs & Transaction.readIRef
>>= ExprIRef.readVal & trans
exportVal repl
repl <&> valIToUUID & Codec.EntityRepl & tell
jsonExportRepl :: T ViewM Aeson.Value
jsonExportRepl = runExport exportRepl <&> snd
fileExportRepl :: FilePath -> T ViewM (IO ())
fileExportRepl = export "repl" exportRepl
fileExportDef :: ExprIRef.DefI ViewM -> FilePath -> T ViewM (IO ())
fileExportDef defI =
export ("def: " ++ show defI) (exportDef defI)
fileExportAll :: FilePath -> T ViewM (IO ())
fileExportAll =
do
exportSet DbLayout.globals exportDef
exportSet DbLayout.tags exportTag
exportSet DbLayout.tids exportNominal
exportRepl
& export "all"
where
exportSet indexIRef exportFunc =
indexIRef DbLayout.codeIRefs & Transaction.readIRef & trans
>>= traverse_ exportFunc
export :: String -> Export a -> FilePath -> T ViewM (IO ())
export msg act exportPath =
runExport act
<&> snd
<&> \json ->
do
putStrLn $ "Exporting " ++ msg ++ " to " ++ show exportPath
LBS.writeFile exportPath (AesonPretty.encodePretty json)
setName :: ToUUID a => a -> String -> T ViewM ()
setName x = Transaction.setP (Anchors.assocNameRef x)
writeValAt :: Monad m => Val (ValI m) -> T m (ValI m)
writeValAt (Val valI body) =
do
traverse writeValAt body >>= ExprIRef.writeValBody valI
return valI
writeValAtUUID :: Monad m => Val UUID -> T m (ValI m)
writeValAtUUID val = val <&> IRef.unsafeFromUUID <&> ExprIRef.ValI & writeValAt
insertTo ::
(Monad m, Ord a, Binary a) =>
a -> (DbLayout.Code (IRef ViewM) ViewM -> IRef m (Set a)) -> T m ()
insertTo item setIRef =
Transaction.readIRef iref
<&> Set.insert item
>>= Transaction.writeIRef iref
where
iref = setIRef DbLayout.codeIRefs
importDef :: Definition (Val UUID) (Anchors.PresentationMode, Maybe String, V.Var) -> T ViewM ()
importDef (Definition defBody (presentationMode, mName, globalId)) =
do
Transaction.setP (Anchors.assocPresentationMode globalId) presentationMode
traverse_ (setName globalId) mName
Lens.traverse writeValAtUUID defBody
>>= Transaction.writeIRef defI
defI `insertTo` DbLayout.globals
where
defI = ExprIRef.defI globalId
importRepl :: Val UUID -> T ViewM ()
importRepl val =
writeValAtUUID val >>= Transaction.writeIRef (DbLayout.repl DbLayout.codeIRefs)
importTag :: Codec.TagOrder -> Maybe String -> T.Tag -> T ViewM ()
importTag tagOrder mName tag =
do
Transaction.setP (Anchors.assocTagOrder tag) tagOrder
traverse_ (setName tag) mName
tag `insertTo` DbLayout.tags
importLamVar :: Maybe Anchors.ParamList -> Maybe String -> UUID -> V.Var -> T ViewM ()
importLamVar paramList mName lamUUID var =
do
Transaction.setP (Anchors.assocFieldParamList lamI) paramList
traverse_ (setName var) mName
where
lamI = IRef.unsafeFromUUID lamUUID & ExprIRef.ValI
importNominal :: Maybe String -> T.NominalId -> Nominal -> T ViewM ()
importNominal mName nomId nominal =
do
traverse_ (setName nomId) mName
Transaction.writeIRef (ExprIRef.nominalI nomId) nominal
nomId `insertTo` DbLayout.tids
importOne :: Codec.Entity -> T ViewM ()
importOne (Codec.EntityDef def) = importDef def
importOne (Codec.EntityRepl val) = importRepl val
importOne (Codec.EntityTag tagOrder mName tag) = importTag tagOrder mName tag
importOne (Codec.EntityNominal mName nomId nom) = importNominal mName nomId nom
importOne (Codec.EntityLamVar paramList mName lamUUID var) =
importLamVar paramList mName lamUUID var
fileImportAll :: FilePath -> IO (T ViewM ())
fileImportAll importPath =
do
putStrLn $ "importing from: " ++ show importPath
LBS.readFile importPath <&> Aeson.eitherDecode
>>= either fail return
<&> \json ->
case Aeson.fromJSON json of
Aeson.Error str -> fail str
Aeson.Success entities -> mapM_ importOne (entities :: [Codec.Entity])
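-- A usage sketch (the file path is illustrative; both export and import
-- produce actions that still have to be run against the Lamdu database):
--
-- > do importAct <- fileImportAll "export.json"
-- >    -- 'importAct :: T ViewM ()' is then run inside a DB transaction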
| da-x/lamdu | Lamdu/Data/Export/JSON.hs | gpl-3.0 | 9,236 | 0 | 15 | 2,179 | 2,882 | 1,475 | 1,407 | -1 | -1 |
{-|
Description : Working with lines of code
-}
module Language.Haskell.Formatter.Process.LineTool
(Shifter, Shift, countEmptyLines, createShifter, shiftCode) where
import qualified Data.Map.Strict as Map
import qualified Language.Haskell.Formatter.Location as Location
import qualified Language.Haskell.Formatter.Process.Code as Code
newtype Shifter = Shifter (Map.Map Location.Line Shift)
deriving (Eq, Ord, Show)
type Shift = Int
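-- | Number of empty lines strictly between two lines. For example, assuming
-- 'Location.minus' is plain subtraction, an end line of 3 and a start line of
-- 7 give 7 - 3 - 1 = 3 empty lines (lines 4, 5 and 6).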
countEmptyLines :: Location.Line -> Location.Line -> Int
countEmptyLines endLine startLine = pred lineDifference
where lineDifference = Location.minus startLine endLine
createShifter :: Map.Map Location.Line Shift -> Shifter
createShifter relativeShifter = Shifter absoluteShifter
where (_, absoluteShifter) = Map.mapAccum accumulate noShift relativeShifter
accumulate absoluteShift relativeShift
= (absoluteShift', absoluteShift')
where absoluteShift' = absoluteShift + relativeShift
noShift :: Shift
noShift = 0
shiftCode :: Shifter -> Code.LocatableCode -> Code.LocatableCode
shiftCode shifter = fmap $ shiftNestedPortion shifter
where shiftNestedPortion = Location.replaceNestedPortionLines . shiftLine
shiftLine :: Shifter -> Location.Line -> Location.Line
shiftLine shifter line = Location.plus shift line
where shift = lookupShift line shifter
lookupShift :: Location.Line -> Shifter -> Shift
lookupShift line (Shifter shifter)
= case Map.lookupLE line shifter of
Nothing -> noShift
Just (_, shift) -> shift
| evolutics/haskell-formatter | src/library/Language/Haskell/Formatter/Process/LineTool.hs | gpl-3.0 | 1,537 | 0 | 9 | 258 | 385 | 213 | 172 | 30 | 2 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.DFAReporting.UserProFiles.List
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Retrieves list of user profiles for a user.
--
-- /See:/ <https://developers.google.com/doubleclick-advertisers/ Campaign Manager 360 API Reference> for @dfareporting.userProfiles.list@.
module Network.Google.Resource.DFAReporting.UserProFiles.List
(
-- * REST Resource
UserProFilesListResource
-- * Creating a Request
, userProFilesList
, UserProFilesList
-- * Request Lenses
, upflXgafv
, upflUploadProtocol
, upflAccessToken
, upflUploadType
, upflCallback
) where
import Network.Google.DFAReporting.Types
import Network.Google.Prelude
-- | A resource alias for @dfareporting.userProfiles.list@ method which the
-- 'UserProFilesList' request conforms to.
type UserProFilesListResource =
"dfareporting" :>
"v3.5" :>
"userprofiles" :>
QueryParam "$.xgafv" Xgafv :>
QueryParam "upload_protocol" Text :>
QueryParam "access_token" Text :>
QueryParam "uploadType" Text :>
QueryParam "callback" Text :>
QueryParam "alt" AltJSON :>
Get '[JSON] UserProFileList
-- | Retrieves list of user profiles for a user.
--
-- /See:/ 'userProFilesList' smart constructor.
data UserProFilesList =
UserProFilesList'
{ _upflXgafv :: !(Maybe Xgafv)
, _upflUploadProtocol :: !(Maybe Text)
, _upflAccessToken :: !(Maybe Text)
, _upflUploadType :: !(Maybe Text)
, _upflCallback :: !(Maybe Text)
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'UserProFilesList' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'upflXgafv'
--
-- * 'upflUploadProtocol'
--
-- * 'upflAccessToken'
--
-- * 'upflUploadType'
--
-- * 'upflCallback'
userProFilesList
:: UserProFilesList
userProFilesList =
UserProFilesList'
{ _upflXgafv = Nothing
, _upflUploadProtocol = Nothing
, _upflAccessToken = Nothing
, _upflUploadType = Nothing
, _upflCallback = Nothing
}
-- | V1 error format.
upflXgafv :: Lens' UserProFilesList (Maybe Xgafv)
upflXgafv
= lens _upflXgafv (\ s a -> s{_upflXgafv = a})
-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
upflUploadProtocol :: Lens' UserProFilesList (Maybe Text)
upflUploadProtocol
= lens _upflUploadProtocol
(\ s a -> s{_upflUploadProtocol = a})
-- | OAuth access token.
upflAccessToken :: Lens' UserProFilesList (Maybe Text)
upflAccessToken
= lens _upflAccessToken
(\ s a -> s{_upflAccessToken = a})
-- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
upflUploadType :: Lens' UserProFilesList (Maybe Text)
upflUploadType
= lens _upflUploadType
(\ s a -> s{_upflUploadType = a})
-- | JSONP
upflCallback :: Lens' UserProFilesList (Maybe Text)
upflCallback
= lens _upflCallback (\ s a -> s{_upflCallback = a})
instance GoogleRequest UserProFilesList where
type Rs UserProFilesList = UserProFileList
type Scopes UserProFilesList =
'["https://www.googleapis.com/auth/ddmconversions",
"https://www.googleapis.com/auth/dfareporting",
"https://www.googleapis.com/auth/dfatrafficking"]
requestClient UserProFilesList'{..}
= go _upflXgafv _upflUploadProtocol _upflAccessToken
_upflUploadType
_upflCallback
(Just AltJSON)
dFAReportingService
where go
= buildClient
(Proxy :: Proxy UserProFilesListResource)
mempty
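-- A usage sketch (illustrative; assumes the usual lens operators re-exported
-- by "Network.Google.Prelude" are in scope):
--
-- > req :: UserProFilesList
-- > req = userProFilesList & upflCallback ?~ "myCallback"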
| brendanhay/gogol | gogol-dfareporting/gen/Network/Google/Resource/DFAReporting/UserProFiles/List.hs | mpl-2.0 | 4,411 | 0 | 16 | 1,031 | 632 | 370 | 262 | 96 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.BigtableAdmin.Projects.Instances.List
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Lists information about instances in a project.
--
-- /See:/ <https://cloud.google.com/bigtable/ Cloud Bigtable Admin API Reference> for @bigtableadmin.projects.instances.list@.
module Network.Google.Resource.BigtableAdmin.Projects.Instances.List
(
-- * REST Resource
ProjectsInstancesListResource
-- * Creating a Request
, projectsInstancesList
, ProjectsInstancesList
-- * Request Lenses
, pilParent
, pilXgafv
, pilUploadProtocol
, pilAccessToken
, pilUploadType
, pilPageToken
, pilCallback
) where
import Network.Google.BigtableAdmin.Types
import Network.Google.Prelude
-- | A resource alias for @bigtableadmin.projects.instances.list@ method which the
-- 'ProjectsInstancesList' request conforms to.
type ProjectsInstancesListResource =
"v2" :>
Capture "parent" Text :>
"instances" :>
QueryParam "$.xgafv" Xgafv :>
QueryParam "upload_protocol" Text :>
QueryParam "access_token" Text :>
QueryParam "uploadType" Text :>
QueryParam "pageToken" Text :>
QueryParam "callback" Text :>
QueryParam "alt" AltJSON :>
Get '[JSON] ListInstancesResponse
-- | Lists information about instances in a project.
--
-- /See:/ 'projectsInstancesList' smart constructor.
data ProjectsInstancesList =
ProjectsInstancesList'
{ _pilParent :: !Text
, _pilXgafv :: !(Maybe Xgafv)
, _pilUploadProtocol :: !(Maybe Text)
, _pilAccessToken :: !(Maybe Text)
, _pilUploadType :: !(Maybe Text)
, _pilPageToken :: !(Maybe Text)
, _pilCallback :: !(Maybe Text)
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'ProjectsInstancesList' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'pilParent'
--
-- * 'pilXgafv'
--
-- * 'pilUploadProtocol'
--
-- * 'pilAccessToken'
--
-- * 'pilUploadType'
--
-- * 'pilPageToken'
--
-- * 'pilCallback'
projectsInstancesList
:: Text -- ^ 'pilParent'
-> ProjectsInstancesList
projectsInstancesList pPilParent_ =
ProjectsInstancesList'
{ _pilParent = pPilParent_
, _pilXgafv = Nothing
, _pilUploadProtocol = Nothing
, _pilAccessToken = Nothing
, _pilUploadType = Nothing
, _pilPageToken = Nothing
, _pilCallback = Nothing
}
-- | Required. The unique name of the project for which a list of instances
-- is requested. Values are of the form \`projects\/{project}\`.
pilParent :: Lens' ProjectsInstancesList Text
pilParent
= lens _pilParent (\ s a -> s{_pilParent = a})
-- | V1 error format.
pilXgafv :: Lens' ProjectsInstancesList (Maybe Xgafv)
pilXgafv = lens _pilXgafv (\ s a -> s{_pilXgafv = a})
-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
pilUploadProtocol :: Lens' ProjectsInstancesList (Maybe Text)
pilUploadProtocol
= lens _pilUploadProtocol
(\ s a -> s{_pilUploadProtocol = a})
-- | OAuth access token.
pilAccessToken :: Lens' ProjectsInstancesList (Maybe Text)
pilAccessToken
= lens _pilAccessToken
(\ s a -> s{_pilAccessToken = a})
-- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
pilUploadType :: Lens' ProjectsInstancesList (Maybe Text)
pilUploadType
= lens _pilUploadType
(\ s a -> s{_pilUploadType = a})
-- | DEPRECATED: This field is unused and ignored.
pilPageToken :: Lens' ProjectsInstancesList (Maybe Text)
pilPageToken
= lens _pilPageToken (\ s a -> s{_pilPageToken = a})
-- | JSONP
pilCallback :: Lens' ProjectsInstancesList (Maybe Text)
pilCallback
= lens _pilCallback (\ s a -> s{_pilCallback = a})
instance GoogleRequest ProjectsInstancesList where
type Rs ProjectsInstancesList = ListInstancesResponse
type Scopes ProjectsInstancesList =
'["https://www.googleapis.com/auth/bigtable.admin",
"https://www.googleapis.com/auth/bigtable.admin.cluster",
"https://www.googleapis.com/auth/bigtable.admin.instance",
"https://www.googleapis.com/auth/cloud-bigtable.admin",
"https://www.googleapis.com/auth/cloud-bigtable.admin.cluster",
"https://www.googleapis.com/auth/cloud-platform",
"https://www.googleapis.com/auth/cloud-platform.read-only"]
requestClient ProjectsInstancesList'{..}
= go _pilParent _pilXgafv _pilUploadProtocol
_pilAccessToken
_pilUploadType
_pilPageToken
_pilCallback
(Just AltJSON)
bigtableAdminService
where go
= buildClient
(Proxy :: Proxy ProjectsInstancesListResource)
mempty
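-- A usage sketch (the project name is illustrative only):
--
-- > req :: ProjectsInstancesList
-- > req = projectsInstancesList "projects/my-project"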
| brendanhay/gogol | gogol-bigtableadmin/gen/Network/Google/Resource/BigtableAdmin/Projects/Instances/List.hs | mpl-2.0 | 5,596 | 0 | 17 | 1,287 | 798 | 466 | 332 | 119 | 1 |
module AlternateCase where
import Data.Char
alternateCase :: String -> String
alternateCase = ((\x -> if isUpper x then toLower x else toUpper x) <$>)
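-- Example (added for illustration): alternateCase "Hello World" == "hELLO wORLD"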
| ice1000/OI-codes | codewars/1-100/alternate-case.hs | agpl-3.0 | 153 | 0 | 9 | 26 | 52 | 30 | 22 | 4 | 2 |
data Customer = Customer {
customerID :: Int,
customerName :: String,
customerAddress :: [String]
} deriving (Show)
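-- A sample value (illustrative data only), handy for experimenting in GHCi:
sampleCustomer :: Customer
sampleCustomer = Customer
  { customerID      = 1
  , customerName    = "Ada Lovelace"
  , customerAddress = ["12 Example Street", "London"]
  }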
| aniketd/learn.haskell | RWH/4_and_before/bookStore.hs | unlicense | 130 | 0 | 9 | 31 | 38 | 23 | 15 | 5 | 0 |
{-# LANGUAGE ForeignFunctionInterface #-}
module Ffi where
import Control.Monad
import Data.Vector.Storable as V
import Foreign hiding (unsafePerformIO)
import Foreign.C
import System.IO.Unsafe
foreign import ccall unsafe "ffi.h" nativeC :: Ptr CDouble -> CInt -> IO (Ptr CDouble)
doubleVec :: Vector Double -> Vector Double
doubleVec xs = unsafePerformIO $ do
let (fpIn, off, len) = unsafeToForeignPtr xs
pOut <- liftM castPtr $ withForeignPtr fpIn $ \ptr ->
nativeC (castPtr ptr) (fromIntegral len)
fpOut <- newForeignPtr finalizerFree pOut
return $ unsafeFromForeignPtr0 fpOut len
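-- Usage sketch (assumes the C routine doubles each element, as the Haskell
-- wrapper's name suggests; the actual behaviour depends on ffi.h/ffi.c):
--
-- > doubleVec (V.fromList [1, 2, 3]) == V.fromList [2, 4, 6]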
| jstolarek/sandbox | haskell/ffi/demo/src/Ffi.hs | unlicense | 613 | 0 | 13 | 110 | 188 | 97 | 91 | 15 | 1 |
{-
Copyright 2014 [email protected]
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-}
module Resolver(
initResolverEnv,
importFile
) where
import qualified TypeInfer as TI
import qualified Type as T
import qualified AST as A
import qualified Module as M
import qualified Parser as P
import qualified Data.Map as Map
import Data.List
import Control.Monad.Error
import Control.Monad.State
import Text.Parsec.Pos (sourceName, sourceLine, sourceColumn)
import System.Directory
import Control.Exception as CE
import System.Exit
type Modules = Map.Map String M.Module
nullModules = Map.empty
type Resolver a = ErrorT String (StateT Modules IO) a
importModule ps m n pre fs pos poses = do
ms <- get
case Map.lookup n ms of
Nothing -> do
r <- liftIO $ importFile_ ps n fs pos poses
case r of
Right nm -> do
put $ Map.insert n nm ms
importModule ps m n pre fs pos poses
Left err -> throwError err
Just nm -> do
m <- Map.foldlWithKey
(\acc k (v, pos) -> do
m <- acc
case M.addType k v pos m of
Right m -> return m
Left err -> throwError $ err ++ "\nwhen try to import " ++ n
++ "\n @ " ++ (show $ sourceName pos) ++ ":("
++ (show $ sourceLine pos) ++ "," ++ (show $ sourceColumn pos) ++ ")\n"
)
(return m)
(M.types nm)
m <- Map.foldlWithKey
(\acc k v -> do
m <- acc
case M.addSource k v m of
Right m -> return m
Left err -> throwError $ err ++ "\nwhen try to import " ++ n
++ "\n @ " ++ (show $ sourceName (A.exprPos v)) ++ ":("
++ (show $ sourceLine (A.exprPos v)) ++ ","
++ (show $ sourceColumn (A.exprPos v)) ++ ")\n"
)
(return m)
(M.source nm)
m <- Map.foldlWithKey
(\acc k (v, pos) -> do
m <- acc
case M.addEnv k v pos m of
Right m -> return m
Left err -> throwError $ err ++ "\nwhen try to import " ++ n
++ "\n @ " ++ (show $ sourceName pos) ++ ":("
++ (show $ sourceLine pos) ++ "," ++ (show $ sourceColumn pos) ++ ")\n"
)
(return m)
(M.env nm)
return m
importModules ps m fs poses =
Map.foldlWithKey
(\acc n (pre, pos) -> do
m <- acc
importModule ps m n pre fs pos poses
)
(return m)
(M.imports m)
initResolverEnv m = TI.typeInferState 0
(TI.TypeEnv $ Map.map (\(s, _)->s) $ M.env m)
(M.source m)
TI.nullInferred
resolveModuleSource m =
Map.foldlWithKey
(\acc n e ->
do
m <- acc
res <- liftIO $ TI.runTI n $ initResolverEnv m
case res of
(Right s, _) -> do
return $ M.addEnv_ n s (A.exprPos e) m
(Left err, _) -> do
throwError err
)
(return m)
(M.source m)
resolveType ts (T.TCon a b) pos = do
resolveType ts a pos
resolveTypeList ts b pos
resolveType ts (T.TCN a) pos =
case Map.lookup a ts of
Nothing -> throwError $ "type `" ++ a ++ "\' cannot be found"
++ "\n @ " ++ (show $ sourceName pos)
++ ":(" ++ (show $ sourceLine pos)
++ "," ++ (show $ sourceColumn pos)
++ ")\n"
Just t -> return ()
resolveType ts (T.TFun a b) pos = do
resolveTypeList ts a pos
resolveType ts b pos
resolveType _ _ _ = return ()
resolveTypeList ts [] pos = return ()
resolveTypeList ts (x:ps) pos = do
resolveType ts x pos
resolveTypeList ts ps pos
resolveModuleTypes m = do
foldM
(\m ((T.Scheme _ s), pos) -> do
resolveType (M.types m) s pos
return m
)
m
(Map.elems (M.env m))
importFile_ ps file fs pos poses = do
fn <- foldM
(\acc p ->
foldM
(\a f -> do
if a == ""
then do e <- doesFileExist f
if e then return f else return a
else return a
)
acc
[file, file ++ ".hw", p ++ "/" ++ file, p ++ "/" ++ file ++ ".hw"]
)
""
ps
contents <- CE.catch (readFile (if fn == "" then file else fn))
(\e ->
let err = show (e :: IOException) ++ "\n" ++
"import " ++ file ++ " failed\n" ++
foldl' (\acc (f, pos) -> acc ++ " from "
++ f ++ ":(" ++ (show $ sourceLine pos)
++ "," ++ (show $ sourceColumn pos) ++")\n")
""
(zip fs poses)
in do putStrLn err
exitFailure)
case P.iParse fn contents of
Left err -> return $ Left $ show err
Right m -> do
(r, _) <- runStateT (runErrorT (
if elem fn fs
then throwError $ "circle importing `" ++ file ++ "\'\n" ++
foldl' (\acc (f, pos) -> acc ++ " from "
++ f ++ ":(" ++ (show $ sourceLine pos)
++ "," ++ (show $ sourceColumn pos) ++")\n")
""
(zip fs poses)
else do
m <- if file == "Prelude"
then return (M.addInitEnv m)
else importModules ps (M.addInitEnv m) (fn:fs) (pos:poses)
m <- resolveModuleTypes m
resolveModuleSource m)) nullModules
case r of
Right m -> return $ Right m
Left err -> return $ Left err
importFile ps file = importFile_ ps file [] M.sysSourcePos []
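-- A usage sketch (search path and module name are illustrative only):
--
-- > r <- importFile ["lib"] "MyModule"
-- > case r of
-- >   Left err -> putStrLn err
-- >   Right m  -> {- use the resolved module -} return ()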
| wehu/hw | src/Resolver.hs | apache-2.0 | 6,752 | 0 | 32 | 2,913 | 2,082 | 1,037 | 1,045 | 157 | 8 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
module Kubernetes.V1.Lifecycle where
import GHC.Generics
import Kubernetes.V1.Handler
import qualified Data.Aeson
-- | Lifecycle describes actions that the management system should take in response to container lifecycle events. For the PostStart and PreStop lifecycle handlers, management of the container blocks until the action is complete, unless the container process fails, in which case the handler is aborted.
data Lifecycle = Lifecycle
{ postStart :: Maybe Handler -- ^ PostStart is called immediately after a container is created. If the handler fails, the container is terminated and restarted according to its restart policy. Other management of the container blocks until the hook completes. More info: http://releases.k8s.io/HEAD/docs/user-guide/container-environment.md#hook-details
, preStop :: Maybe Handler -- ^ PreStop is called immediately before a container is terminated. The container is terminated after the handler completes. The reason for termination is passed to the handler. Regardless of the outcome of the handler, the container is eventually terminated. Other management of the container blocks until the hook completes. More info: http://releases.k8s.io/HEAD/docs/user-guide/container-environment.md#hook-details
} deriving (Show, Eq, Generic)
instance Data.Aeson.FromJSON Lifecycle
instance Data.Aeson.ToJSON Lifecycle
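-- A minimal construction sketch (no hooks set; a real 'Handler' value would
-- come from Kubernetes.V1.Handler):
--
-- > noHooks :: Lifecycle
-- > noHooks = Lifecycle { postStart = Nothing, preStop = Nothing }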
| minhdoboi/deprecated-openshift-haskell-api | kubernetes/lib/Kubernetes/V1/Lifecycle.hs | apache-2.0 | 1,539 | 0 | 9 | 211 | 94 | 57 | 37 | 15 | 0 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
module Openshift.V1.GitBuildSource where
import GHC.Generics
import Data.Text
import qualified Data.Aeson
-- |
data GitBuildSource = GitBuildSource
{ uri :: Text -- ^ points to the source that will be built, structure of the source will depend on the type of build to run
, ref :: Maybe Text -- ^ identifies the branch/tag/ref to build
, httpProxy :: Maybe Text -- ^ specifies a http proxy to be used during git clone operations
, httpsProxy :: Maybe Text -- ^ specifies a https proxy to be used during git clone operations
} deriving (Show, Eq, Generic)
instance Data.Aeson.FromJSON GitBuildSource
instance Data.Aeson.ToJSON GitBuildSource
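-- A small construction sketch (repository URL and ref are illustrative only):
--
-- > src :: GitBuildSource
-- > src = GitBuildSource
-- >   { uri = "https://example.com/repo.git"
-- >   , ref = Just "master"
-- >   , httpProxy = Nothing
-- >   , httpsProxy = Nothing
-- >   }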
| minhdoboi/deprecated-openshift-haskell-api | openshift/lib/Openshift/V1/GitBuildSource.hs | apache-2.0 | 838 | 0 | 9 | 151 | 110 | 67 | 43 | 17 | 0 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE DeriveDataTypeable #-}
-- | DAG-based model for morphosyntactic tagging.
module NLP.Concraft.Polish.DAGSeg
(
-- * Types
Tag
-- ** Simplification
, simplify4gsr
, complexify4gsr
, simplify4dmb
-- ** Model
, C.Concraft
, C.saveModel
, C.loadModel
-- * Tagging
, guess
-- , disamb
-- , disamb'
-- , tag
-- , tag'
-- ** High level
, AnnoSent (..)
, AnnoConf (..)
, annoAll
-- * Training
, TrainConf (..)
-- , DisambTiersCfg (..)
, train
-- * Pruning
-- , C.prune
) where
import Prelude hiding (Word, pred)
import Control.Applicative ((<$>))
-- import Control.Arrow (first)
import Control.Monad (guard)
-- import Data.Maybe (listToMaybe)
import qualified Data.Text as T
-- import qualified Data.Text.Lazy as L
import qualified Data.Set as S
import qualified Data.Map.Strict as M
-- import Data.Data (Data)
-- import Data.Typeable (Typeable)
import qualified Data.Tagset.Positional as P
import qualified Numeric.SGD.Momentum as SGD
import qualified Data.DAG as DAG
import qualified Data.CRF.Chain1.Constrained.DAG as CRF
import qualified NLP.Concraft.DAG.Morphosyntax as X
import qualified NLP.Concraft.DAG.Morphosyntax.Ambiguous as XA
import qualified NLP.Concraft.DAG.Schema as S
import NLP.Concraft.DAG.Schema (SchemaConf(..), entry, entryWith)
import qualified NLP.Concraft.DAG.Guess as G
import qualified NLP.Concraft.DAG.DisambSeg as D
import qualified NLP.Concraft.DAG.Segmentation as Seg
import qualified NLP.Concraft.DAGSeg as C
import NLP.Concraft.Polish.DAG.Morphosyntax hiding (tag, Tag)
import qualified NLP.Concraft.Polish.DAG.Morphosyntax as PolX
import qualified NLP.Concraft.Polish.DAG.Config as Cfg
import qualified NLP.Concraft.Polish.DAG.Config.Disamb as Cfg
-- import Debug.Trace (trace)
-------------------------------------------------
-- Default configuration
-------------------------------------------------
-- | Default configuration for the guessing observation schema.
guessSchemaDefault :: SchemaConf
guessSchemaDefault = S.nullConf
{ lowPrefixesC = entryWith [1, 2] [0]
, lowSuffixesC = entryWith [1, 2] [0]
, knownC = entry [0]
, begPackedC = entry [0]
}
-- | Default configuration for the segmentation observation schema.
segmentSchemaDefault :: SchemaConf
segmentSchemaDefault = S.nullConf
{ lowPrefixesC = entryWith [1, 2] [-1, 0, 1]
, begPackedC = entry [-1, 0, 1] }
-- NOTE: The current configuration works quite well for segmentation.
-- Adding orthographic forms was not a good idea, at least not on a small
-- training dataset.
-- -- | Default configuration for the guessing observation schema.
-- disambSchemaDefault :: SchemaConf
-- disambSchemaDefault = S.nullConf
-- { lowPrefixesC = entryWith [1, 2] [-1, 0, 1]
-- , begPackedC = entry [-1, 0, 1] }
-- -- | Default configuration for the guessing observation schema.
-- disambSchemaDefault :: SchemaConf
-- disambSchemaDefault = S.nullConf
-- { orthC = entry [-2, -1, 0, 1, 2]
-- -- { lowOrthC = entry [-2, -1, 0, 1, 2]
-- , lowPrefixesC = entryWith [1, 2, 3] [-2, -1, 0, 1, 2]
-- , lowSuffixesC = entryWith [1, 2, 3] [-2, -1, 0, 1, 2]
-- , begPackedC = entry [-2, -1, 0, 1, 2] }
-- | Default configuration for the disambiguation observation schema.
disambSchemaDefault :: SchemaConf
disambSchemaDefault = S.nullConf
{ lowOrthC = entry [-2, -1, 0, 1]
, lowPrefixesC = oov $ entryWith [1, 2, 3] [0]
, lowSuffixesC = oov $ entryWith [1, 2, 3] [0]
, begPackedC = oov $ entry [0] }
where
oov (Just body) = Just $ body { S.oovOnly = True }
oov Nothing = Nothing
-- | Default tiered tagging configuration for the segmentation model.
tiersSegment :: [D.Tier]
tiersSegment =
[tier]
where
tier = D.Tier
{ D.withPos = True
, D.withEos = True
, D.withAtts = S.fromList []
}
-------------------------------------------------
-- Tagging Tiers
-------------------------------------------------
-- -- | Configuration of disambiguation tiers.
-- data DisambTiersCfg
-- = TiersDefault
-- | TiersGndCasSeparately
-- deriving (Data, Typeable, Show, Eq, Ord)
--
--
-- -- | Tiered tagging configuration for the disambiguation model.
-- tiersDisamb :: DisambTiersCfg -> [D.Tier]
-- tiersDisamb cfg = case cfg of
-- TiersDefault -> tiersDisambDefault
-- TiersGndCasSeparately -> tiersDisambGndCasSeparately
--
--
-- -- | Default tiered tagging configuration for the disambiguation model.
-- tiersDisambDefault :: [D.Tier]
-- tiersDisambDefault =
-- [tier1, tier2]
-- where
-- tier1 = D.Tier True False $ S.fromList
-- ["cas", "per"]
-- tier2 = D.Tier False False $ S.fromList
-- [ "nmb", "gnd", "deg", "asp" , "ngt", "acm"
-- , "acn", "ppr", "agg", "vlc", "dot"
-- , "sbg", "col"
-- ]
--
-- -- | Separate tier with gender and case values.
-- tiersDisambGndCasSeparately :: [D.Tier]
-- tiersDisambGndCasSeparately =
-- [tier1, tier2, tier3]
-- where
-- tier1 = D.Tier True False $ S.fromList
-- [ "per" ]
-- tier2 = D.Tier False False $ S.fromList
-- [ "nmb", "deg", "asp" , "ngt", "acm"
-- , "acn", "ppr", "agg", "vlc", "dot"
-- , "sbg", "col"
-- ]
-- tier3 = D.Tier False False $ S.fromList
-- [ "cas", "gnd"
-- ]
tiersDisamb :: Cfg.Config -> [D.Tier]
tiersDisamb Cfg.Config{..} = do
Cfg.TierCfg{..} <- Cfg.tiersCfg disambCfg
return $ D.Tier
{ D.withPos = withPos
, D.withEos = withEos
, D.withAtts = Cfg.unSet withAtts
}
-------------------------------------------------
-- Tagging
-------------------------------------------------
-- type Tag = PolX.Tag
type Tag = PolX.Interp PolX.Tag
-- | Tag the sentence with guessing marginal probabilities.
guess :: CRF.Config P.Tag -> C.Concraft Tag -> Sent Tag -> Sent Tag
guess cfg = tagWith (C.guessMarginals cfg . C.guesser)
-- | Tag with the help of a lower-level annotation function.
tagWith
-- :: (C.Concraft Tag -> X.Sent Word P.Tag -> C.Anno P.Tag Double)
:: (C.Concraft Tag -> X.Sent Word Tag -> C.Anno Tag Double)
-> C.Concraft Tag -> Sent Tag -> Sent Tag
tagWith annoFun concraft sent
= fmap select
$ DAG.zipE sent annoSent
where
select (edge, anno) = selectAnno anno edge
annoSent = annoWith annoFun concraft sent
-- | Annotate with the help of a lower-level annotation function.
annoWith
:: (C.Concraft Tag -> X.Sent Word Tag -> C.Anno Tag a)
-> C.Concraft Tag -> Sent Tag -> C.Anno Tag a
annoWith anno concraft =
anno concraft . packSent
-- | Transform each tag into two tags, one with EOS(end-of-sentence)=True, one
-- with EOS=False. The idea behind this transformation is that, at some point,
-- we may want to tag with an EOS-aware model, with no EOS-related information
-- coming from the previous tagging stages.
--
-- NOTE: this procedure does not apply to OOV words. The motivation: it seems
-- highly unlikely that any OOV word can mark a sentence end.
--
-- NOTE: this procedure does not apply to segmentation-ambiguous words either.
-- Allowing the tagger to cut on such words might cause losing DAG edges.
addEosMarkers
:: DAG.DAG a Bool
-> DAG.EdgeID
-> X.Seg Word Tag
-> X.Seg Word Tag
addEosMarkers ambiDag edgeID seg
| X.oov seg = seg
| DAG.edgeLabel edgeID ambiDag = seg
| otherwise = seg {X.tags = newTags (X.tags seg)}
where
newTags tagMap = X.mkWMap $ concat
[ multiply interp p
| (interp, p) <- M.toList (X.unWMap tagMap) ]
multiply interp p
      | eos interp =
[ (interp {eos=True}, p)
, (interp {eos=False}, 0) ]
| otherwise =
[ (interp {eos=True}, 0)
, (interp {eos=False}, p) ]
-- | Mark the word as EOS or not.
resolveEOS
:: X.Seg Word Tag
-> M.Map Tag Bool
-> X.Seg Word Tag
resolveEOS seg selMap
| isEos = seg {X.tags = markEos}
| otherwise = seg {X.tags = markNoEos}
where
isEos = not (null chosen) && all PolX.eos chosen
chosen = [interp | (interp, True) <- M.toList selMap]
-- Mark the segment as EOS or not
markEos = X.mkWMap
[ (interp {PolX.eos=True}, p)
| (interp, p) <- M.toList (X.unWMap tagMap) ]
markNoEos = X.mkWMap
[ (interp {PolX.eos=False}, p)
| (interp, p) <- M.toList (X.unWMap tagMap) ]
tagMap = X.tags seg
-- | Decide if the word should be marked as eos or not, based on marginal
-- probabilities.
--
-- TODO: we don't need this heavy machinery any more. This function is now
-- only used for training data preparation.
--
resolveEOS'
:: Double
    -- ^ 0.5 means that the tag is marked as EOS when its weight is at least
    -- as high as the weight of not being EOS. 1.0 means that EOS will be
    -- marked only if it is 100% probable.
-> X.Seg Word Tag
-> X.Seg Word Tag
resolveEOS' minProp seg
| isEos = seg {X.tags = markEos}
| otherwise = seg {X.tags = markNoEos}
where
tagMap = X.tags seg
-- Determine the weights of the most probable EOS-marked and non-marked tags
withEosW = maxWeightWith ((==True) . PolX.eos) tagMap
withNoEosW = maxWeightWith ((==False) . PolX.eos) tagMap
-- Should the segment be marked as EOS?
isEos = case (withEosW, withNoEosW) of
(Just eosW, Just noEosW) ->
eosW / (eosW + noEosW) >= minProp
(Just _, Nothing) -> True
_ -> False
-- Mark the segment as EOS or not
markEos = X.mkWMap
[ (interp {PolX.eos=True}, p)
| (interp, p) <- M.toList (X.unWMap tagMap) ]
markNoEos = X.mkWMap
[ (interp {PolX.eos=False}, p)
| (interp, p) <- M.toList (X.unWMap tagMap) ]
-- | Determine the weight of the most probable interpretation which satisfies
-- the given predicate.
maxWeightWith :: (Tag -> Bool) -> X.WMap Tag -> Maybe Double
maxWeightWith pred tagMap = mayMaximum
[ p
| (interp, p) <- M.toList (X.unWMap tagMap)
, pred interp ]
where
mayMaximum [] = Nothing
mayMaximum xs = Just $ maximum xs
-- | Try to segment the sentence based on the EOS markers.
-- segment :: X.Sent Word Tag -> [X.Sent Word Tag]
segment :: DAG.DAG a (X.Seg Word Tag) -> [DAG.DAG a (X.Seg Word Tag)]
segment sent =
-- (\xs -> trace ("splits: " ++ show (length xs)) xs) $
-- go (trace ("splitPoints: " ++ show splitPoints) splitPoints) sent
go splitPoints sent
where
splitPoints = (S.toList . S.fromList)
[ DAG.endsWith edgeID sent
| edgeID <- DAG.dagEdges sent
, interp <- M.keys . X.unWMap . X.tags . DAG.edgeLabel edgeID $ sent
, PolX.eos interp ]
go (splitPoint:rest) dag =
case split splitPoint dag of
Just (left, right) -> left : go rest right
Nothing -> go rest dag
go [] dag = [dag]
split point dag = do
(x, y) <- DAG.splitTmp point dag
let empty = null . DAG.dagEdges
guard . not . empty $ x
guard . not . empty $ y
return (x, y)
-------------------------------------------------
-- High-level Tagging
-------------------------------------------------
-- -- | Configuration related to frequency-based path picking.
-- data PickFreqConf = PickFreqConf
-- { pickFreqMap :: Maybe (M.Map T.Text (Int, Int))
-- -- ^ A map which assigns (chosen, not chosen) counts to the invidiaul
-- -- orthographic forms.
-- , smoothingParam :: Double
-- -- ^ A naive smoothing related parameter, which should be adddd to each
-- -- count in `pickFreqMap`.
-- }
-- | Annotation config.
data AnnoConf = AnnoConf
{ trimParam :: Int
-- ^ How many morphosyntactic tags should be kept for OOV words
, pickPath :: Maybe Seg.PathTyp
  -- ^ Which path picking method, if any, should be used.
, blackSet :: S.Set T.Text
-- ^ The set of blacklisted tags
}
-- | Annotated sentence.
data AnnoSent = AnnoSent
{ guessSent :: Sent Tag
-- ^ The sentence after guessing and segmentation
-- (TODO: and annotated with marginal probabilities?)
, disambs :: C.Anno Tag Bool
-- ^ Disambiguation markers
, marginals :: C.Anno Tag Double
-- ^ Marginal probabilities according to the disambiguation model
, maxProbs :: C.Anno Tag Double
-- ^ Maximal probabilities according to the disambiguation model
}
-- | Annotate all possibly interesting information.
annoAll
-- :: Int
-- -- ^ Trimming parameter
:: AnnoConf
-> C.Concraft Tag
-> Sent Tag
-> [AnnoSent]
-- annoAll k concraft sent0 =
annoAll AnnoConf{..} concraft sent00 =
map annoOne _guessSent1
where
-- See whether the shortest path should be computed first
sent0 =
case pickPath of
Just typ -> Seg.pickPath typ sent00
Nothing -> sent00
-- Parsed blacklisted tags and CRF config
blackSet' =
S.fromList . map (P.parseTag (C.tagset concraft)) . S.toList $ blackSet
    -- Make sure that the blackSet is evaluated (otherwise, some malformed
    -- tags may be silently ignored by the tool)
crfCfg = length (show blackSet') `seq` CRF.Config {blackSet = blackSet'}
-- We add EOS markers only after guessing, because the possible tags are not
-- yet determined for the OOV words.
ambiDag = XA.identifyAmbiguousSegments sent0
_guessSent0 = DAG.mapE (addEosMarkers ambiDag) $
tagWith (C.guess trimParam crfCfg . C.guesser) concraft sent0
-- Resolve EOS tags based on the segmentation model
-- _guessSent1 = segment . fmap (resolveEOS' 0.5) $
-- tagWith (C.disambProbs D.MaxProbs . C.segmenter) concraft _guessSent0
_guessSent1 = segment . fmap (uncurry resolveEOS) . DAG.zipE _guessSent0 $
annoWith (C.disamb . C.segmenter) concraft _guessSent0
-- -- TEMP
-- _guessSent1 = (:[]) $
-- tagWith (C.guess trimParam crfCfg . C.guesser) concraft sent0
-- -- tagWith (const clearIt) concraft sent0
-- -- clearIt = fmap (fmap (const 0.0) . X.unWMap . X.tags) . DAG.mapN (const ())
annoOne _guessSent = AnnoSent
{ guessSent = _guessSent
, disambs = _disambs
, marginals = _marginals
, maxProbs = _maxProbs
-- , disambs = clearDAG _guessSent
-- , marginals = clearDAG _guessSent
-- , maxProbs = clearDAG _guessSent
}
where
-- clearDAG = fmap (const M.empty) . DAG.mapN (const ())
_marginals =
annoWith (C.disambProbs D.Marginals . C.disamber) concraft _guessSent
_maxProbs =
annoWith (C.disambProbs D.MaxProbs . C.disamber) concraft _guessSent
_disambs =
annoWith (C.disamb . C.disamber) concraft _guessSent
-- _disambs =
-- C.disambPath (optimal _maxProbs) _maxProbs
-- where optimal = maybe [] id . listToMaybe . C.findOptimalPaths
-------------------------------------------------
-- Training
-------------------------------------------------
-- | Training configuration.
data TrainConf = TrainConf {
-- | Tagset.
tagset :: P.Tagset
-- | SGD parameters.
, sgdArgs :: SGD.SgdArgs
-- | Store SGD dataset on disk.
, onDisk :: Bool
-- | Numer of guessed tags for each word.
, guessNum :: Int
-- | `G.r0T` parameter.
, r0 :: G.R0T
-- | `G.zeroProbLabel` parameter
, zeroProbLabel :: Tag
-- | Extract only visible features for the guesser
, guessOnlyVisible :: Bool
-- | Global configuration
, globalConfig :: Cfg.Config
-- -- | Disambiguation tiers configuration
-- , disambTiersCfg :: DisambTiersCfg
}
-- | Train concraft model.
-- TODO: It should be possible to supply the two training procedures with
-- different SGD arguments.
train
:: TrainConf
-> IO [Sent Tag] -- ^ Training data
-> IO [Sent Tag] -- ^ Evaluation data
-> IO (C.Concraft Tag)
train TrainConf{..} train0 eval0 = do
let trainR'IO = map packSent <$> train0
evalR'IO = map packSent <$> eval0
putStr $ concat
[ "Batch size for the "
, "guessing and sentence segmentation models = "
]
-- average number of sentences per paragraph
averageParSize <- average . map (fromIntegral . length . segment) <$> trainR'IO
let parBatchSize = ceiling $ fromIntegral (SGD.batchSize sgdArgs) / averageParSize
print parBatchSize
putStrLn "\n===== Train guessing model ====="
guesser <- G.train (guessConf parBatchSize) trainR'IO evalR'IO
let crfCfg = CRF.Config {blackSet = S.empty}
doGuess = C.guessSent guessNum crfCfg guesser
prepSent dag =
let ambiDag = XA.identifyAmbiguousSegments dag
in DAG.mapE (addEosMarkers ambiDag) (doGuess dag)
trainG'IO = map prepSent <$> trainR'IO
evalG'IO = map prepSent <$> evalR'IO
putStrLn "\n===== Train sentence segmentation model ====="
segmenter <- D.train (segmentConf parBatchSize) trainG'IO evalG'IO
let prepSent' = segment . fmap (resolveEOS' 0.5)
trainS'IO = concatMap prepSent' <$> trainG'IO
evalS'IO = concatMap prepSent' <$> evalG'IO
putStrLn "\n===== Train disambiguation model ====="
disamb <- D.train disambConf trainS'IO evalS'IO
return $ C.Concraft tagset guessNum guesser segmenter disamb
where
guessConf batchSize = G.TrainConf
guessSchemaDefault
(sgdArgs {SGD.batchSize = batchSize})
onDisk r0 zeroProbLabel
(simplify4gsr tagset)
(complexify4gsr tagset)
strip4gsr
guessOnlyVisible
strip4gsr interp = PolX.voidInterp (PolX.tag interp)
segmentConf batchSize = D.TrainConf
tiersSegment segmentSchemaDefault
(sgdArgs {SGD.batchSize = batchSize})
onDisk
(simplify4dmb tagset)
disambConf = D.TrainConf
-- (tiersDisamb disambTiersCfg)
(tiersDisamb globalConfig)
disambSchemaDefault sgdArgs onDisk
(simplify4dmb tagset)
-- | Simplify the tag for the sake of the disambiguation model.
simplify4dmb :: P.Tagset -> PolX.Interp PolX.Tag -> D.Tag
simplify4dmb tagset PolX.Interp{..} = D.Tag
{ D.posiTag = P.parseTag tagset tag
, D.hasEos = eos }
-- | Simplify the tag for the sake of the guessing model.
-- TODO: it is also used in the evaluation script, which assumes that
-- `simplify4gsr` simplifies to a positional tag. The name of the function
-- should reflect this, perhaps, or there should be two separate functions: one
-- dedicated to guesser, one dedicated to evaluation (and other more generic
-- things).
simplify4gsr :: P.Tagset -> PolX.Interp PolX.Tag -> P.Tag
simplify4gsr tagset PolX.Interp{..} = P.parseTag tagset tag
complexify4gsr :: P.Tagset -> P.Tag -> PolX.Interp PolX.Tag
complexify4gsr tagset tag = PolX.voidInterp (P.showTag tagset tag)
-- -- | Train the `Concraft` model.
-- -- No reanalysis of the input data will be performed.
-- --
-- -- The `FromJSON` and `ToJSON` instances are used to store processed
-- -- input data in temporary files on a disk.
-- train
-- :: (X.Word w, Ord t)
-- => P.Tagset -- ^ A morphosyntactic tagset to which `P.Tag`s
-- -- of the training and evaluation input data
-- -- must correspond.
-- -> Int -- ^ How many tags is the guessing model supposed
-- -- to produce for a given OOV word? It will be
-- -- used (see `G.guessSent`) on both training and
-- -- evaluation input data prior to the training
-- -- of the disambiguation model.
-- -> G.TrainConf t P.Tag -- ^ Training configuration for the guessing model.
-- -> D.TrainConf t -- ^ Training configuration for the
-- -- disambiguation model.
-- -> IO [Sent w t] -- ^ Training dataset. This IO action will be
-- -- executed a couple of times, so consider using
-- -- lazy IO if your dataset is big.
-- -> IO [Sent w t] -- ^ Evaluation dataset IO action. Consider using
-- -- lazy IO if your dataset is big.
-- -> IO (Concraft t)
-- train tagset guessNum guessConf disambConf trainR'IO evalR'IO = do
-- Temp.withTempDirectory "." ".guessed" $ \tmpDir -> do
-- let temp = withTemp tagset tmpDir
--
-- putStrLn "\n===== Train guessing model ====="
-- guesser <- G.train guessConf trainR'IO evalR'IO
-- let guess = guessSent guessNum guesser
-- trainG <- map guess <$> trainR'IO
-- evalG <- map guess <$> evalR'IO
--
-- temp "train" trainG $ \trainG'IO -> do
-- temp "eval" evalG $ \evalG'IO -> do
--
-- putStrLn "\n===== Train disambiguation model ====="
-- disamb <- D.train disambConf trainG'IO evalG'IO
-- return $ Concraft tagset guessNum guesser disamb
-- | Compute an average of the list.
average :: [Double] -> Double
average xs = sum xs / fromIntegral (length xs)
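-- For example (illustration): average [1, 2, 3] == 2.0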
| kawu/concraft-pl | src/NLP/Concraft/Polish/DAGSeg.hs | bsd-2-clause | 21,133 | 0 | 15 | 5,238 | 3,802 | 2,146 | 1,656 | 277 | 3 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE QuasiQuotes #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE CPP #-}
module Devel
( devel
) where
import qualified Distribution.Simple.Utils as D
import qualified Distribution.Verbosity as D
import qualified Distribution.PackageDescription.Parse as D
import qualified Distribution.PackageDescription as D
import qualified Distribution.ModuleName as D
import Control.Concurrent (forkIO, threadDelay)
import qualified Control.Exception as Ex
import Control.Monad (forever, when)
import Data.Char (isUpper, isNumber)
import qualified Data.List as L
import qualified Data.Map as Map
import qualified Data.Set as Set
import System.Directory
import System.Exit (exitFailure, exitSuccess, ExitCode (..))
import System.FilePath (splitDirectories, dropExtension, takeExtension)
import System.Posix.Types (EpochTime)
import System.PosixCompat.Files (modificationTime, getFileStatus)
import System.Process (createProcess, proc, terminateProcess, readProcess,
waitForProcess, rawSystem)
import Build (recompDeps, getDeps,findHaskellFiles)
lockFile :: FilePath
lockFile = "dist/devel-terminate"
writeLock :: IO ()
writeLock = do
createDirectoryIfMissing True "dist"
writeFile lockFile ""
removeLock :: IO ()
removeLock = try_ (removeFile lockFile)
devel :: Bool -> [String] -> IO ()
devel isCabalDev passThroughArgs = do
checkDevelFile
writeLock
putStrLn "Yesod devel server. Press ENTER to quit"
_ <- forkIO $ do
cabal <- D.findPackageDesc "."
gpd <- D.readPackageDescription D.normal cabal
checkCabalFile gpd
    _ <- rawSystem cmd args
mainLoop
_ <- getLine
writeLock
exitSuccess
where
    cmd | isCabalDev = "cabal-dev"
| otherwise = "cabal"
    diffArgs | isCabalDev = [
"--cabal-install-arg=-fdevel" -- legacy
, "--cabal-install-arg=-flibrary-only"
]
| otherwise = [
"-fdevel" -- legacy
, "-flibrary-only"
]
args = "configure":diffArgs ++ ["--disable-library-profiling" ]
mainLoop :: IO ()
mainLoop = do
ghcVer <- ghcVersion
forever $ do
putStrLn "Rebuilding application..."
recompDeps
list <- getFileList
exit <- rawSystem cmd ["build"]
case exit of
ExitFailure _ -> putStrLn "Build failure, pausing..."
_ -> do
removeLock
let pkg = pkgConfigs isCabalDev ghcVer
let dev_args = ([pkg, "devel.hs"] ++ passThroughArgs)
putStrLn $ "Starting development server: runghc " ++ L.intercalate " " dev_args
(_,_,_,ph) <- createProcess $ proc "runghc" dev_args
watchTid <- forkIO . try_ $ do
watchForChanges list
putStrLn "Stopping development server..."
writeLock
threadDelay 1000000
putStrLn "Terminating development server..."
terminateProcess ph
ec <- waitForProcess ph
putStrLn $ "Exit code: " ++ show ec
Ex.throwTo watchTid (userError "process finished")
watchForChanges list
try_ :: forall a. IO a -> IO ()
try_ x = (Ex.try x :: IO (Either Ex.SomeException a)) >> return ()
pkgConfigs :: Bool -> String -> String
pkgConfigs isCabalDev ghcVer
| isCabalDev = unwords ["-package-confcabal-dev/packages-" ++ ghcVer ++ ".conf", inplacePkg]
| otherwise = inplacePkg
where
inplacePkg = "-package-confdist/package.conf.inplace"
type FileList = Map.Map FilePath EpochTime
getFileList :: IO FileList
getFileList = do
files <- findHaskellFiles "."
deps <- getDeps
let files' = files ++ map fst (Map.toList deps)
fmap Map.fromList $ flip mapM files' $ \f -> do
fs <- getFileStatus f
return (f, modificationTime fs)
watchForChanges :: FileList -> IO ()
watchForChanges list = do
newList <- getFileList
if list /= newList
then return ()
else threadDelay 1000000 >> watchForChanges list
checkDevelFile :: IO ()
checkDevelFile = do
e <- doesFileExist "devel.hs"
when (not e) $ failWith "file devel.hs not found"
checkCabalFile :: D.GenericPackageDescription -> IO ()
checkCabalFile gpd = case D.condLibrary gpd of
Nothing -> failWith "incorrect cabal file, no library"
Just ct ->
case lookupDevelLib ct of
Nothing ->
failWith "no development flag found in your configuration file. Expected a 'library-only' flag or the older 'devel' flag"
Just dLib -> do
case (D.hsSourceDirs . D.libBuildInfo) dLib of
[] -> return ()
["."] -> return ()
_ ->
putStrLn $ "WARNING: yesod devel may not work correctly with " ++
"custom hs-source-dirs"
fl <- getFileList
let unlisted = checkFileList fl dLib
when (not . null $ unlisted) $ do
putStrLn "WARNING: the following source files are not listed in exposed-modules or other-modules:"
mapM_ putStrLn unlisted
when (D.fromString "Application" `notElem` D.exposedModules dLib) $ do
putStrLn "WARNING: no exposed module Application"
failWith :: String -> IO a
failWith msg = do
putStrLn $ "ERROR: " ++ msg
exitFailure
checkFileList :: FileList -> D.Library -> [FilePath]
checkFileList fl lib = filter isUnlisted . filter isSrcFile $ sourceFiles
where
al = allModules lib
-- a file is only a possible 'module file' if all path pieces start with a capital letter
sourceFiles = filter isSrcFile . map fst . Map.toList $ fl
isSrcFile file = let dirs = filter (/=".") $ splitDirectories file
in all (isUpper . head) dirs && (takeExtension file `elem` [".hs", ".lhs"])
isUnlisted file = not (toModuleName file `Set.member` al)
toModuleName = L.intercalate "." . filter (/=".") . splitDirectories . dropExtension
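    -- e.g. toModuleName "Foo/Bar.hs" == "Foo.Bar" (illustrative example added)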
allModules :: D.Library -> Set.Set String
allModules lib = Set.fromList $ map toString $ D.exposedModules lib ++ (D.otherModules . D.libBuildInfo) lib
where
toString = L.intercalate "." . D.components
ghcVersion :: IO String
ghcVersion = fmap getNumber $ readProcess "runghc" ["--numeric-version", "0"] []
where
getNumber = filter (\x -> isNumber x || x == '.')
lookupDevelLib :: D.CondTree D.ConfVar c a -> Maybe a
lookupDevelLib ct | found = Just (D.condTreeData ct)
| otherwise = Nothing
where
found = not . null . map (\(_,x,_) -> D.condTreeData x) .
filter isDevelLib . D.condTreeComponents $ ct
isDevelLib ((D.Var (D.Flag (D.FlagName f))), _, _) = f `elem` ["library-only", "devel"]
isDevelLib _ = False
| chreekat/yesod | yesod/Devel.hs | bsd-2-clause | 7,176 | 0 | 22 | 2,104 | 1,884 | 949 | 935 | 157 | 5 |
{-# LANGUAGE DeriveDataTypeable #-}
module Data.Text.Punycode.Decode (PunycodeDecodeException (..), decode) where
import Control.Exception.Base
import qualified Data.ByteString as BS
import Data.Char
import Data.Serialize hiding (decode)
import qualified Data.Text as T
import Data.Typeable
import Data.Word
import Data.Text.Punycode.Shared
data PunycodeDecodeException
= GenericDecodeException
| InternalStringTooShort
| InputTooShort
| RightOfHyphenShouldBeAlphanumeric
| LeftOfHyphenShouldBeBasic
| CantStartWithDash
| InvalidCodePoint
deriving (Eq,Show,Typeable)
instance Exception PunycodeDecodeException
-- | Decode a string into its Unicode form
decode :: BS.ByteString -> Either PunycodeDecodeException T.Text
decode input
| input == BS.pack [45, 45] = Right $ T.pack "-"
| not (BS.null input) && BS.length (BS.filter (== 45) input) == 1 && BS.head input == 45 = Left CantStartWithDash
| T.any (not . isExtendedBasic) before = Left LeftOfHyphenShouldBeBasic
| otherwise = case runGet (inner2 initial_n 0 initial_bias before) after of
Right out -> out
Left _ -> Left InputTooShort
where (before, after)
| BS.any f input = (T.pack $ map (chr . fromIntegral) $ BS.unpack $ BS.init b1, a1)
| otherwise = (T.empty, input)
f = (== (fromIntegral $ ord '-'))
(b1, a1) = BS.breakEnd f input
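-- Illustrative usage (added; assumes the well-known RFC 3492 example and is
-- not verified against this implementation): decoding the ASCII bytes of
-- "mnchen-3ya" is expected to give Right (T.pack "münchen").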
inner2 :: Int -> Int -> Int -> T.Text -> Get (Either PunycodeDecodeException T.Text)
inner2 n oldi bias output = do
b <- isEmpty
helper b
where helper False = do
i <- inner base 1 oldi bias
helper' i
where helper' Nothing = return $ Left RightOfHyphenShouldBeAlphanumeric
helper' (Just i) = case output' of
Right output'' -> inner2 n' (i' + 1) bias' output''
Left err -> return $ Left err
where bias' = adapt (i - oldi) (T.length output + 1) (oldi == 0)
n' = n + i `div` (T.length output + 1)
i' = i `mod` (T.length output + 1)
output' = insertInto output n' i'
helper True = return $ Right output
inner :: Int -> Int -> Int -> Int -> Get (Maybe Int)
inner k w i bias = do
word8 <- getWord8
helper $ word8ToDigit word8
where helper Nothing = return Nothing
helper (Just digit)
| digit < t = return $ Just i'
| otherwise = inner (k + base) w' i' bias
where w' = w * (base - t)
i' = i + digit * w
t
| k <= bias + tmin = tmin
| k >= bias + tmax = tmax
| otherwise = k - bias
insertInto :: T.Text -> Int -> Int -> Either PunycodeDecodeException T.Text
insertInto input n i
| T.length input < i = Left InternalStringTooShort
| otherwise = case n' of
Just n'' -> Right $ T.concat [T.take i input, T.singleton n'', T.drop i input]
Nothing -> Left InvalidCodePoint
where n' = safeChr n
safeChr :: Int -> Maybe Char
safeChr x
| x >= 0 && x <= fromEnum (maxBound :: Char) = Just $ chr x
| otherwise = Nothing
word8ToDigit :: Word8 -> Maybe Int
word8ToDigit = helper . fromIntegral
where helper word8
| word8 >= ord 'a' && word8 <= ord 'z' = Just $ word8 - (ord 'a')
| word8 >= ord 'A' && word8 <= ord 'Z' = Just $ word8 - (ord 'A')
| word8 >= ord '0' && word8 <= ord '9' = Just $ 26 + word8 - (ord '0')
| otherwise = Nothing
isExtendedBasic :: Char -> Bool
isExtendedBasic x
| isBasic x = True
| ord x == 128 = True
| otherwise = False
| litherum/punycode | Data/Text/Punycode/Decode.hs | bsd-2-clause | 3,681 | 0 | 15 | 1,121 | 1,349 | 670 | 679 | 86 | 4 |
{-# LANGUAGE BangPatterns, ScopedTypeVariables #-}
{-# OPTIONS_HADDOCK hide #-}
-- |
-- Module : Numeric.SpecFunctions.Internal
-- Copyright : (c) 2009, 2011, 2012 Bryan O'Sullivan
-- License : BSD3
--
-- Maintainer : [email protected]
-- Stability : experimental
-- Portability : portable
--
-- Internal module with implementation of special functions.
module Numeric.SpecFunctions.Internal
( module Numeric.SpecFunctions.Internal
, Compat.log1p
, Compat.expm1
) where
import Control.Applicative
import Data.Bits ((.&.), (.|.), shiftR)
import Data.Int (Int64)
import Data.Word (Word)
import Data.Default.Class
import qualified Data.Vector.Unboxed as U
import Data.Vector.Unboxed ((!))
import Text.Printf
import Numeric.Polynomial.Chebyshev (chebyshevBroucke)
import Numeric.Polynomial (evaluatePolynomial, evaluatePolynomialL, evaluateEvenPolynomialL
,evaluateOddPolynomialL)
import Numeric.RootFinding (Root(..), newtonRaphson, NewtonParam(..), Tolerance(..))
import Numeric.Series
import Numeric.MathFunctions.Constants
import Numeric.SpecFunctions.Compat (log1p)
import qualified Numeric.SpecFunctions.Compat as Compat
----------------------------------------------------------------
-- Error function
----------------------------------------------------------------
-- | Error function.
--
-- \[
-- \operatorname{erf}(x) = \frac{2}{\sqrt{\pi}} \int_{0}^{x} \exp(-t^2) dt
-- \]
--
-- Function limits are:
--
-- \[
-- \begin{aligned}
-- &\operatorname{erf}(-\infty) &=& -1 \\
-- &\operatorname{erf}(0) &=& \phantom{-}\,0 \\
-- &\operatorname{erf}(+\infty) &=& \phantom{-}\,1 \\
-- \end{aligned}
-- \]
erf :: Double -> Double
erf = Compat.erf
{-# INLINE erf #-}
-- | Complementary error function.
--
-- \[
-- \operatorname{erfc}(x) = 1 - \operatorname{erf}(x)
-- \]
--
-- Function limits are:
--
-- \[
-- \begin{aligned}
-- &\operatorname{erf}(-\infty) &=&\, 2 \\
-- &\operatorname{erf}(0) &=&\, 1 \\
-- &\operatorname{erf}(+\infty) &=&\, 0 \\
-- \end{aligned}
-- \]
erfc :: Double -> Double
erfc = Compat.erfc
{-# INLINE erfc #-}
-- | Inverse of 'erf'.
invErf :: Double -- ^ /p/ ∈ [-1,1]
-> Double
invErf p
| p == 1 = m_pos_inf
| p == -1 = m_neg_inf
| p < 1 && p > -1 = if p > 0 then r else -r
| otherwise = error "invErf: p must in [-1,1] range"
where
    -- We solve the same equation as in invErfc. We're able to reuse the same
    -- Halley step by solving the equation:
-- > pp - erf x = 0
-- instead of
-- > erf x - pp = 0
pp = abs p
r = step $ step $ guessInvErfc $ 1 - pp
step x = invErfcHalleyStep (pp - erf x) x
-- | Inverse of 'erfc'.
invErfc :: Double -- ^ /p/ ∈ [0,2]
-> Double
invErfc p
| p == 2 = m_neg_inf
| p == 0 = m_pos_inf
| p >0 && p < 2 = if p <= 1 then r else -r
| otherwise = modErr $ "invErfc: p must be in [0,2] got " ++ show p
where
pp | p <= 1 = p
| otherwise = 2 - p
-- We perform 2 Halley steps in order to get to solution
r = step $ step $ guessInvErfc pp
step x = invErfcHalleyStep (erfc x - pp) x
-- Initial guess for invErfc & invErf
guessInvErfc :: Double -> Double
guessInvErfc p
= -0.70711 * ((2.30753 + t * 0.27061) / (1 + t * (0.99229 + t * 0.04481)) - t)
where
t = sqrt $ -2 * log( 0.5 * p)
-- Halley step for solving invErfc
invErfcHalleyStep :: Double -> Double -> Double
invErfcHalleyStep err x
= x + err / (1.12837916709551257 * exp(-x * x) - x * err)
----------------------------------------------------------------
-- Gamma function
----------------------------------------------------------------
data L = L {-# UNPACK #-} !Double {-# UNPACK #-} !Double
-- | Compute the logarithm of the gamma function, Γ(/x/).
--
-- \[
-- \Gamma(x) = \int_0^{\infty}t^{x-1}e^{-t}\,dt = (x - 1)!
-- \]
--
-- This implementation uses Lanczos approximation. It gives 14 or more
-- significant decimal digits, except around /x/ = 1 and /x/ = 2,
-- where the function goes to zero.
--
-- Returns ∞ if the input is outside of the range (0 < /x/
-- ≤ 1e305).
logGamma :: Double -> Double
logGamma z
| z <= 0 = m_pos_inf
-- For very small values z we can just use Laurent expansion
| z < m_sqrt_eps = log (1/z - m_eulerMascheroni)
  -- For z<1 we use the recurrence Γ(z+1) = z·Γ(z). Note that in order to
  -- avoid precision loss we have to compute the parameters to the
  -- approximations here:
--
-- > (z + 1) - 1 = z
-- > (z + 1) - 2 = z - 1
--
  -- Simply passing (z + 1) to the piecewise approximations and computing the
  -- difference leads to a bad loss of precision near 1.
  -- This is the reason lgamma1_15 & lgamma15_2 take two parameters.
| z < 0.5 = lgamma1_15 z (z - 1) - log z
| z < 1 = lgamma15_2 z (z - 1) - log z
-- Piecewise polynomial approximations
| z <= 1.5 = lgamma1_15 (z - 1) (z - 2)
| z < 2 = lgamma15_2 (z - 1) (z - 2)
| z < 15 = lgammaSmall z
-- Otherwise we switch to Lanczos approximation
| otherwise = lanczosApprox z
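-- Illustrative sanity checks (added): since Γ(n) = (n-1)! for positive
-- integers, logGamma 1 ≈ 0, logGamma 2 ≈ 0 and logGamma 5 ≈ log 24
-- (up to rounding of the approximations above).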
-- | Synonym for 'logGamma'. Retained for compatibility
logGammaL :: Double -> Double
logGammaL = logGamma
{-# DEPRECATED logGammaL "Use logGamma instead" #-}
-- Polynomial expansion used in interval (1,1.5]
--
-- > logΓ(z) = (z-1)(z-2)(Y + R(z-1))
lgamma1_15 :: Double -> Double -> Double
lgamma1_15 zm1 zm2
= r * y + r * ( evaluatePolynomial zm1 tableLogGamma_1_15P
/ evaluatePolynomial zm1 tableLogGamma_1_15Q
)
where
r = zm1 * zm2
y = 0.52815341949462890625
tableLogGamma_1_15P,tableLogGamma_1_15Q :: U.Vector Double
tableLogGamma_1_15P = U.fromList
[ 0.490622454069039543534e-1
, -0.969117530159521214579e-1
, -0.414983358359495381969e0
, -0.406567124211938417342e0
, -0.158413586390692192217e0
, -0.240149820648571559892e-1
, -0.100346687696279557415e-2
]
{-# NOINLINE tableLogGamma_1_15P #-}
tableLogGamma_1_15Q = U.fromList
[ 1
, 0.302349829846463038743e1
, 0.348739585360723852576e1
, 0.191415588274426679201e1
, 0.507137738614363510846e0
, 0.577039722690451849648e-1
, 0.195768102601107189171e-2
]
{-# NOINLINE tableLogGamma_1_15Q #-}
-- Polynomial expansion used in interval (1.5,2)
--
-- > logΓ(z) = (2-z)(1-z)(Y + R(2-z))
lgamma15_2 :: Double -> Double -> Double
lgamma15_2 zm1 zm2
= r * y + r * ( evaluatePolynomial (-zm2) tableLogGamma_15_2P
/ evaluatePolynomial (-zm2) tableLogGamma_15_2Q
)
where
r = zm1 * zm2
y = 0.452017307281494140625
tableLogGamma_15_2P,tableLogGamma_15_2Q :: U.Vector Double
tableLogGamma_15_2P = U.fromList
[ -0.292329721830270012337e-1
, 0.144216267757192309184e0
, -0.142440390738631274135e0
, 0.542809694055053558157e-1
, -0.850535976868336437746e-2
, 0.431171342679297331241e-3
]
{-# NOINLINE tableLogGamma_15_2P #-}
tableLogGamma_15_2Q = U.fromList
[ 1
, -0.150169356054485044494e1
, 0.846973248876495016101e0
, -0.220095151814995745555e0
, 0.25582797155975869989e-1
, -0.100666795539143372762e-2
, -0.827193521891290553639e-6
]
{-# NOINLINE tableLogGamma_15_2Q #-}
-- Polynomial expansion used in interval (2,3)
--
-- > logΓ(z) = (z - 2)(z + 1)(Y + R(z-2))
lgamma2_3 :: Double -> Double
lgamma2_3 z
= r * y + r * ( evaluatePolynomial zm2 tableLogGamma_2_3P
/ evaluatePolynomial zm2 tableLogGamma_2_3Q
)
where
r = zm2 * (z + 1)
zm2 = z - 2
y = 0.158963680267333984375e0
tableLogGamma_2_3P,tableLogGamma_2_3Q :: U.Vector Double
tableLogGamma_2_3P = U.fromList
[ -0.180355685678449379109e-1
, 0.25126649619989678683e-1
, 0.494103151567532234274e-1
, 0.172491608709613993966e-1
, -0.259453563205438108893e-3
, -0.541009869215204396339e-3
, -0.324588649825948492091e-4
]
{-# NOINLINE tableLogGamma_2_3P #-}
tableLogGamma_2_3Q = U.fromList
[ 1
, 0.196202987197795200688e1
, 0.148019669424231326694e1
, 0.541391432071720958364e0
, 0.988504251128010129477e-1
, 0.82130967464889339326e-2
, 0.224936291922115757597e-3
, -0.223352763208617092964e-6
]
{-# NOINLINE tableLogGamma_2_3Q #-}
-- For small z we can just use the Gamma function recurrence to reduce the
-- problem to the interval [2,3] and use the polynomial approximation
-- there. Surprisingly, it gives very good precision.
lgammaSmall :: Double -> Double
lgammaSmall = go 0
where
go acc z | z < 3 = acc + lgamma2_3 z
| otherwise = go (acc + log zm1) zm1
where
zm1 = z - 1
-- Lanczos approximation for gamma function.
--
-- > Γ(z) = sqrt(2π)(z + g - 0.5)^(z - 0.5)·exp{-(z + g - 0.5)}·A_g(z)
--
-- Coefficients are taken from Boost. Constants are absorbed into
-- the polynomial's coefficients.
lanczosApprox :: Double -> Double
lanczosApprox z
= (log (z + g - 0.5) - 1) * (z - 0.5)
+ log (evalRatio tableLanczos z)
where
g = 6.024680040776729583740234375
tableLanczos :: U.Vector (Double,Double)
{-# NOINLINE tableLanczos #-}
tableLanczos = U.fromList
[ (56906521.91347156388090791033559122686859 , 0)
, (103794043.1163445451906271053616070238554 , 39916800)
, (86363131.28813859145546927288977868422342 , 120543840)
, (43338889.32467613834773723740590533316085 , 150917976)
, (14605578.08768506808414169982791359218571 , 105258076)
, (3481712.15498064590882071018964774556468 , 45995730)
, (601859.6171681098786670226533699352302507 , 13339535)
, (75999.29304014542649875303443598909137092 , 2637558)
, (6955.999602515376140356310115515198987526 , 357423)
, (449.9445569063168119446858607650988409623 , 32670)
, (19.51992788247617482847860966235652136208 , 1925)
, (0.5098416655656676188125178644804694509993 , 66)
, (0.006061842346248906525783753964555936883222 , 1)
]
-- Evaluate a rational function. The polynomials in the numerator and
-- denominator must have the same order. The function seems too specific
-- so it's not exposed.
--
-- Special care is taken in order to avoid overflow for large values of x.
evalRatio :: U.Vector (Double,Double) -> Double -> Double
evalRatio coef x
| x > 1 = fini $ U.foldl' stepL (L 0 0) coef
| otherwise = fini $ U.foldr' stepR (L 0 0) coef
where
fini (L num den) = num / den
stepR (a,b) (L num den) = L (num * x + a) (den * x + b)
stepL (L num den) (a,b) = L (num * rx + a) (den * rx + b)
rx = recip x
-- |
-- Compute the log gamma correction factor for Stirling
-- approximation for @x@ ≥ 10. This correction factor is
-- suitable for an alternate (but less numerically accurate)
-- definition of 'logGamma':
--
-- \[
-- \log\Gamma(x) = \frac{1}{2}\log(2\pi) + (x-\frac{1}{2})\log x - x + \operatorname{logGammaCorrection}(x)
-- \]
logGammaCorrection :: Double -> Double
logGammaCorrection x
| x < 10 = m_NaN
| x < big = chebyshevBroucke (t * t * 2 - 1) coeffs / x
| otherwise = 1 / (x * 12)
where
big = 94906265.62425156
t = 10 / x
coeffs = U.fromList [
0.1666389480451863247205729650822e+0,
-0.1384948176067563840732986059135e-4,
0.9810825646924729426157171547487e-8,
-0.1809129475572494194263306266719e-10,
0.6221098041892605227126015543416e-13,
-0.3399615005417721944303330599666e-15,
0.2683181998482698748957538846666e-17
]
-- | Compute the normalized lower incomplete gamma function
-- γ(/z/,/x/). Normalization means that γ(/z/,∞)=1
--
-- \[
-- \gamma(z,x) = \frac{1}{\Gamma(z)}\int_0^{x}t^{z-1}e^{-t}\,dt
-- \]
--
-- Uses Algorithm AS 239 by Shea.
incompleteGamma :: Double -- ^ /z/ ∈ (0,∞)
-> Double -- ^ /x/ ∈ (0,∞)
-> Double
-- Notation used:
-- + P(a,x) - regularized lower incomplete gamma
-- + Q(a,x) - regularized upper incomplete gamma
incompleteGamma a x
| a <= 0 || x < 0 = error
$ "incompleteGamma: Domain error z=" ++ show a ++ " x=" ++ show x
| x == 0 = 0
| x == m_pos_inf = 1
-- For very small x we use following expansion for P:
--
-- See http://functions.wolfram.com/GammaBetaErf/GammaRegularized/06/01/05/01/01/
| x < sqrt m_epsilon && a > 1
= x**a / a / exp (logGamma a) * (1 - a*x / (a + 1))
| x < 0.5 = case () of
_| (-0.4)/log x < a -> taylorSeriesP
| otherwise -> taylorSeriesComplQ
| x < 1.1 = case () of
_| 0.75*x < a -> taylorSeriesP
| otherwise -> taylorSeriesComplQ
| a > 20 && useTemme = uniformExpansion
| x - (1 / (3 * x)) < a = taylorSeriesP
| otherwise = contFraction
where
mu = (x - a) / a
useTemme = (a > 200 && 20/a > mu*mu)
|| (abs mu < 0.4)
-- Gautschi's algorithm.
--
-- Evaluate series for P(a,x). See [Temme1994] Eq. 5.5
--
  -- FIXME: Term `exp (log x * a - x - logGamma (a+1))` doesn't give full precision
taylorSeriesP
= sumPowerSeries x (scanSequence (/) 1 $ enumSequenceFrom (a+1))
* exp (log x * a - x - logGamma (a+1))
-- Series for 1-Q(a,x). See [Temme1994] Eq. 5.5
taylorSeriesComplQ
= sumPowerSeries (-x) (scanSequence (/) 1 (enumSequenceFrom 1) / enumSequenceFrom a)
* x**a / exp(logGamma a)
-- Legendre continued fractions
contFraction = 1 - ( exp ( log x * a - x - logGamma a )
/ evalContFractionB frac
)
where
frac = (\k -> (k*(a-k), x - a + 2*k + 1)) <$> enumSequenceFrom 0
-- Evaluation based on uniform expansions. See [Temme1994] 5.2
uniformExpansion =
let -- Coefficients f_m in paper
fm :: U.Vector Double
fm = U.fromList [ 1.00000000000000000000e+00
,-3.33333333333333370341e-01
, 8.33333333333333287074e-02
,-1.48148148148148153802e-02
, 1.15740740740740734316e-03
, 3.52733686067019369930e-04
,-1.78755144032921825352e-04
, 3.91926317852243766954e-05
,-2.18544851067999240532e-06
,-1.85406221071515996597e-06
, 8.29671134095308545622e-07
,-1.76659527368260808474e-07
, 6.70785354340149841119e-09
, 1.02618097842403069078e-08
,-4.38203601845335376897e-09
, 9.14769958223679020897e-10
,-2.55141939949462514346e-11
,-5.83077213255042560744e-11
, 2.43619480206674150369e-11
,-5.02766928011417632057e-12
, 1.10043920319561347525e-13
, 3.37176326240098513631e-13
]
y = - log1pmx mu
eta = sqrt (2 * y) * signum mu
-- Evaluate S_α (Eq. 5.9)
loop !_ !_ u 0 = u
loop bm1 bm0 u i = let t = (fm ! i) + (fromIntegral i + 1)*bm1 / a
u' = eta * u + t
in loop bm0 t u' (i-1)
s_a = let n = U.length fm
in loop (fm ! (n-1)) (fm ! (n-2)) 0 (n-3)
/ exp (logGammaCorrection a)
in 1/2 * erfc(-eta*sqrt(a/2)) - exp(-(a*y)) / sqrt (2*pi*a) * s_a
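-- Illustrative sanity check (added): for a = 1 the definition reduces to
-- P(1,x) = 1 - exp(-x), so e.g. incompleteGamma 1 1 ≈ 0.6321 (up to the
-- accuracy of the approximations above).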
-- Adapted from Numerical Recipes §6.2.1
-- | Inverse incomplete gamma function. It's approximately inverse of
-- 'incompleteGamma' for the same /z/. So following equality
-- approximately holds:
--
-- > invIncompleteGamma z . incompleteGamma z ≈ id
invIncompleteGamma :: Double -- ^ /z/ ∈ (0,∞)
-> Double -- ^ /p/ ∈ [0,1]
-> Double
invIncompleteGamma a p
| a <= 0 =
modErr $ printf "invIncompleteGamma: a must be positive. a=%g p=%g" a p
| p < 0 || p > 1 =
modErr $ printf "invIncompleteGamma: p must be in [0,1] range. a=%g p=%g" a p
| p == 0 = 0
| p == 1 = 1 / 0
| otherwise = loop 0 guess
where
-- Solve equation γ(a,x) = p using Halley method
loop :: Int -> Double -> Double
loop i x
| i >= 12 = x'
      -- For small a the derivative becomes approximately 1/x*exp(-x) and
      -- skyrockets for small x. If that happens the correct answer is 0.
| isInfinite f' = 0
| abs dx < eps * x' = x'
| otherwise = loop (i + 1) x'
where
-- Value of γ(a,x) - p
f = incompleteGamma a x - p
-- dγ(a,x)/dx
f' | a > 1 = afac * exp( -(x - a1) + a1 * (log x - lna1))
| otherwise = exp( -x + a1 * log x - gln)
u = f / f'
-- Halley correction to Newton-Rapson step
corr = u * (a1 / x - 1)
dx = u / (1 - 0.5 * min 1.0 corr)
-- New approximation to x
x' | x < dx = 0.5 * x -- Do not go below 0
| otherwise = x - dx
    -- Calculate the initial guess for the root
guess
--
| a > 1 =
let t = sqrt $ -2 * log(if p < 0.5 then p else 1 - p)
x1 = (2.30753 + t * 0.27061) / (1 + t * (0.99229 + t * 0.04481)) - t
x2 = if p < 0.5 then -x1 else x1
in max 1e-3 (a * (1 - 1/(9*a) - x2 / (3 * sqrt a)) ** 3)
-- For a <= 1 use following approximations:
-- γ(a,1) ≈ 0.253a + 0.12a²
--
-- γ(a,x) ≈ γ(a,1)·x^a x < 1
-- γ(a,x) ≈ γ(a,1) + (1 - γ(a,1))(1 - exp(1 - x)) x >= 1
| otherwise =
let t = 1 - a * (0.253 + a*0.12)
in if p < t
then (p / t) ** (1 / a)
else 1 - log( 1 - (p-t) / (1-t))
-- Constants
a1 = a - 1
lna1 = log a1
afac = exp( a1 * (lna1 - 1) - gln )
gln = logGamma a
eps = 1e-8
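-- Illustrative property (added): invIncompleteGamma is an approximate
-- inverse, so for z > 0 and x > 0 we expect
-- invIncompleteGamma z (incompleteGamma z x) ≈ x, up to the tolerance
-- used above.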
----------------------------------------------------------------
-- Beta function
----------------------------------------------------------------
-- | Compute the natural logarithm of the beta function.
--
-- \[
-- B(a,b) = \int_0^1 t^{a-1}(1-t)^{b-1}\,dt = \frac{\Gamma(a)\Gamma(b)}{\Gamma(a+b)}
-- \]
logBeta
:: Double -- ^ /a/ > 0
-> Double -- ^ /b/ > 0
-> Double
logBeta a b
| p < 0 = m_NaN
| p == 0 = m_pos_inf
| p >= 10 = allStirling
| q >= 10 = twoStirling
-- This order of summands marginally improves precision
| otherwise = logGamma p + (logGamma q - logGamma pq)
where
p = min a b
q = max a b
ppq = p / pq
pq = p + q
  -- When both parameters are larger than 10 we can use the Stirling
  -- approximation with correction. It's more precise than the sum of
  -- logarithms of gamma functions.
allStirling
= log q * (-0.5)
+ m_ln_sqrt_2_pi
+ logGammaCorrection p
+ (logGammaCorrection q - logGammaCorrection pq)
+ (p - 0.5) * log ppq
+ q * log1p(-ppq)
-- Otherwise only two of three gamma functions use Stirling
-- approximation
twoStirling
= logGamma p
+ (logGammaCorrection q - logGammaCorrection pq)
+ p
- p * log pq
+ (q - 0.5) * log1p(-ppq)
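-- Illustrative sanity checks (added): B(1,1) = 1 and B(2,2) = 1/6, so
-- logBeta 1 1 ≈ 0 and logBeta 2 2 ≈ log (1/6) ≈ -1.7918.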
-- | Regularized incomplete beta function.
--
-- \[
-- I(x;a,b) = \frac{1}{B(a,b)} \int_0^x t^{a-1}(1-t)^{b-1}\,dt
-- \]
--
-- Uses algorithm AS63 by Majumder and Bhattacharjee and quadrature
-- approximation for large /p/ and /q/.
incompleteBeta :: Double -- ^ /a/ > 0
-> Double -- ^ /b/ > 0
-> Double -- ^ /x/, must lie in [0,1] range
-> Double
incompleteBeta p q = incompleteBeta_ (logBeta p q) p q
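-- Illustrative sanity check (added): for a = b = 1 the regularized
-- incomplete beta is the identity on [0,1], e.g. incompleteBeta 1 1 0.3 == 0.3.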
-- | Regularized incomplete beta function. Same as 'incompleteBeta'
-- but also takes logarithm of beta function as parameter.
incompleteBeta_ :: Double -- ^ logarithm of beta function for given /p/ and /q/
-> Double -- ^ /a/ > 0
-> Double -- ^ /b/ > 0
-> Double -- ^ /x/, must lie in [0,1] range
-> Double
incompleteBeta_ beta p q x
| p <= 0 || q <= 0 =
modErr $ printf "incompleteBeta_: p <= 0 || q <= 0. p=%g q=%g x=%g" p q x
| x < 0 || x > 1 || isNaN x =
modErr $ printf "incompleteBeta_: x out of [0,1] range. p=%g q=%g x=%g" p q x
| x == 0 || x == 1 = x
| p >= (p+q) * x = incompleteBetaWorker beta p q x
| otherwise = 1 - incompleteBetaWorker beta q p (1 - x)
-- Approximation of the incomplete beta by quadrature.
--
-- Note that x <= p/(p+q)
incompleteBetaApprox :: Double -> Double -> Double -> Double -> Double
incompleteBetaApprox beta p q x
| ans > 0 = 1 - ans
| otherwise = -ans
where
-- Constants
p1 = p - 1
q1 = q - 1
mu = p / (p + q)
lnmu = log mu
lnmuc = log1p (-mu)
-- Upper limit for integration
xu = max 0 $ min (mu - 10*t) (x - 5*t)
where
t = sqrt $ p*q / ( (p+q) * (p+q) * (p + q + 1) )
-- Calculate incomplete beta by quadrature
go y w = let t = x + (xu - x) * y
in w * exp( p1 * (log t - lnmu) + q1 * (log(1-t) - lnmuc) )
s = U.sum $ U.zipWith go coefY coefW
ans = s * (xu - x) * exp( p1 * lnmu + q1 * lnmuc - beta )
-- Worker for the incomplete beta function. It is a separate function to
-- avoid confusion with parameters during parameter swapping.
incompleteBetaWorker :: Double -> Double -> Double -> Double -> Double
incompleteBetaWorker beta p q x
-- For very large p and q this method becomes very slow so another
-- method is used.
| p > 3000 && q > 3000 = incompleteBetaApprox beta p q x
| otherwise = loop (p+q) (truncate $ q + cx * (p+q)) 1 1 1
where
-- Constants
eps = 1e-15
cx = 1 - x
  -- Common multipliers for the expansion. Accurate calculation is a bit
  -- tricky. Performing the calculation in the log-domain leads to a slight
  -- loss of precision for small x, while using ** is prone to
  -- underflows.
  --
  -- If either the beta function or x**p·(1-x)**(q-1) underflows we
  -- switch to the log domain. It could waste work but there's no easy
  -- switch criterion.
factor
| beta < m_min_log || prod < m_tiny = exp( p * log x + (q - 1) * log cx - beta)
| otherwise = prod / exp beta
where
prod = x**p * cx**(q - 1)
-- Soper's expansion of incomplete beta function
loop !psq (ns :: Int) ai term betain
| done = betain' * factor / p
| otherwise = loop psq' (ns - 1) (ai + 1) term' betain'
where
-- New values
term' = term * fact / (p + ai)
betain' = betain + term'
fact | ns > 0 = (q - ai) * x/cx
| ns == 0 = (q - ai) * x
| otherwise = psq * x
-- Iterations are complete
done = db <= eps && db <= eps*betain' where db = abs term'
psq' = if ns < 0 then psq + 1 else psq
-- | Compute the inverse of the regularized incomplete beta function. Uses
-- an initial approximation from AS109 and AS64, and the Halley method to
-- solve the equation.
invIncompleteBeta :: Double -- ^ /a/ > 0
-> Double -- ^ /b/ > 0
-> Double -- ^ /x/ ∈ [0,1]
-> Double
invIncompleteBeta p q a
| p <= 0 || q <= 0 =
modErr $ printf "invIncompleteBeta p <= 0 || q <= 0. p=%g q=%g a=%g" p q a
| a < 0 || a > 1 =
modErr $ printf "invIncompleteBeta x must be in [0,1]. p=%g q=%g a=%g" p q a
| a == 0 || a == 1 = a
| otherwise = invIncompleteBetaWorker (logBeta p q) p q a
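-- Illustrative property (added): invIncompleteBeta is the approximate
-- inverse of incompleteBeta, so for p, q > 0 and x in [0,1] we expect
-- invIncompleteBeta p q (incompleteBeta p q x) ≈ x.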
invIncompleteBetaWorker :: Double -> Double -> Double -> Double -> Double
invIncompleteBetaWorker beta a b p = loop (0::Int) (invIncBetaGuess beta a b p)
where
a1 = a - 1
b1 = b - 1
-- Solve equation using Halley method
loop !i !x
-- We cannot continue at this point so we simply return `x'
| x == 0 || x == 1 = x
    -- When the derivative becomes infinite we cannot continue the
    -- iterations. It can only happen in the vicinity of 0 or 1. It's
    -- hardly possible to get a good answer in such circumstances but
    -- `x' is already reasonable.
| isInfinite f' = x
    -- Iteration limit reached. Most of the time the solution will
    -- converge to the answer because of the discreteness of Double, but
    -- the solution already has good precision.
| i >= 10 = x
-- Solution converges
| abs dx <= 16 * m_epsilon * x = x'
| otherwise = loop (i+1) x'
where
-- Calculate Halley step.
f = incompleteBeta_ beta a b x - p
f' = exp $ a1 * log x + b1 * log1p (-x) - beta
u = f / f'
-- We bound Halley correction to Newton-Raphson to (-1,1) range
corr | d > 1 = 1
| d < -1 = -1
| isNaN d = 0
| otherwise = d
where
d = u * (a1 / x - b1 / (1 - x))
dx = u / (1 - 0.5 * corr)
-- Next approximation. If Halley step leads us out of [0,1]
-- range we revert to bisection.
x' | z < 0 = x / 2
| z > 1 = (x + 1) / 2
| otherwise = z
where z = x - dx
-- Calculate initial guess for inverse incomplete beta function.
invIncBetaGuess :: Double -> Double -> Double -> Double -> Double
-- Calculate the initial guess for solving the equation for the inverse
-- incomplete beta. It's really a hodgepodge of different approximations
-- accumulated over the years.
--
-- Equations are referred to by paper name and number, e.g. [AS64 2].
-- In the AS64 paper equations are not numbered, so they are referred to by
-- order of appearance, starting from the definition of the incomplete beta.
invIncBetaGuess beta a b p
-- If both a and b are less than 1 incomplete beta have inflection
-- point.
--
-- > x = (1 - a) / (2 - a - b)
--
  -- We approximate the incomplete beta by neglecting one of the factors under
  -- the integral and then rescaling the result of integration into the [0,1]
  -- range.
| a < 1 && b < 1 =
let x_infl = (1 - a) / (2 - a - b)
p_infl = incompleteBeta a b x_infl
x | p < p_infl = let xg = (a * p * exp beta) ** (1/a) in xg / (1+xg)
| otherwise = let xg = (b * (1-p) * exp beta) ** (1/b) in 1 - xg/(1+xg)
in x
  -- If both a and b are larger than or equal to 1, but not too big, we use the
  -- same approximation as above but calculate it a bit differently
| a+b <= 6 && a>1 && b>1 =
let x_infl = (a - 1) / (a + b - 2)
p_infl = incompleteBeta a b x_infl
x | p < p_infl = exp ((log(p * a) + beta) / a)
| otherwise = 1 - exp((log((1-p) * b) + beta) / b)
in x
-- For small a and not too big b we use approximation from boost.
| b < 5 && a <= 1 =
let x | p**(1/a) < 0.5 = (p * a * exp beta) ** (1/a)
| otherwise = 1 - (1 - p ** (b * exp beta))**(1/b)
in x
  -- When a>>b and both are large, the approximation from [Temme1992],
  -- section 4 "the incomplete gamma function case", is used. In this
  -- region it greatly improves over the other approximations (AS109, AS64,
  -- "Numerical Recipes").
  --
  -- FIXME: It could be used when b>>a too but it requires the inverse of the
  --        upper incomplete gamma to be precise enough. In the current
  --        implementation it loses precision in a horrible way (40
  --        orders of magnitude off for sufficiently small p)
| a+b > 5 && a/b > 4 =
let -- Calculate initial approximation to eta using eq 4.1
eta0 = invIncompleteGamma b (1-p) / a
mu = b / a -- Eq. 4.3
          -- A lot of helpers for the calculations below
w = sqrt(1 + mu) -- Eq. 4.9
w_2 = w * w
w_3 = w_2 * w
w_4 = w_2 * w_2
w_5 = w_3 * w_2
w_6 = w_3 * w_3
w_7 = w_4 * w_3
w_8 = w_4 * w_4
w_9 = w_5 * w_4
w_10 = w_5 * w_5
d = eta0 - mu
d_2 = d * d
d_3 = d_2 * d
d_4 = d_2 * d_2
w1 = w + 1
w1_2 = w1 * w1
w1_3 = w1 * w1_2
w1_4 = w1_2 * w1_2
-- Evaluation of eq 4.10
e1 = (w + 2) * (w - 1) / (3 * w)
+ (w_3 + 9 * w_2 + 21 * w + 5) * d
/ (36 * w_2 * w1)
- (w_4 - 13 * w_3 + 69 * w_2 + 167 * w + 46) * d_2
/ (1620 * w1_2 * w_3)
- (7 * w_5 + 21 * w_4 + 70 * w_3 + 26 * w_2 - 93 * w - 31) * d_3
/ (6480 * w1_3 * w_4)
- (75 * w_6 + 202 * w_5 + 188 * w_4 - 888 * w_3 - 1345 * w_2 + 118 * w + 138) * d_4
/ (272160 * w1_4 * w_5)
e2 = (28 * w_4 + 131 * w_3 + 402 * w_2 + 581 * w + 208) * (w - 1)
/ (1620 * w1 * w_3)
- (35 * w_6 - 154 * w_5 - 623 * w_4 - 1636 * w_3 - 3983 * w_2 - 3514 * w - 925) * d
/ (12960 * w1_2 * w_4)
- ( 2132 * w_7 + 7915 * w_6 + 16821 * w_5 + 35066 * w_4 + 87490 * w_3
+ 141183 * w_2 + 95993 * w + 21640
) * d_2
/ (816480 * w_5 * w1_3)
- ( 11053 * w_8 + 53308 * w_7 + 117010 * w_6 + 163924 * w_5 + 116188 * w_4
- 258428 * w_3 - 677042 * w_2 - 481940 * w - 105497
) * d_3
/ (14696640 * w1_4 * w_6)
e3 = -( (3592 * w_7 + 8375 * w_6 - 1323 * w_5 - 29198 * w_4 - 89578 * w_3
- 154413 * w_2 - 116063 * w - 29632
) * (w - 1)
)
/ (816480 * w_5 * w1_2)
- ( 442043 * w_9 + 2054169 * w_8 + 3803094 * w_7 + 3470754 * w_6 + 2141568 * w_5
- 2393568 * w_4 - 19904934 * w_3 - 34714674 * w_2 - 23128299 * w - 5253353
) * d
/ (146966400 * w_6 * w1_3)
- ( 116932 * w_10 + 819281 * w_9 + 2378172 * w_8 + 4341330 * w_7 + 6806004 * w_6
+ 10622748 * w_5 + 18739500 * w_4 + 30651894 * w_3 + 30869976 * w_2
+ 15431867 * w + 2919016
) * d_2
/ (146966400 * w1_4 * w_7)
eta = evaluatePolynomialL (1/a) [eta0, e1, e2, e3]
-- Now we solve eq 4.2 to recover x using Newton iterations
u = eta - mu * log eta + (1 + mu) * log(1 + mu) - mu
cross = 1 / (1 + mu);
lower = if eta < mu then cross else 0
upper = if eta < mu then 1 else cross
x_guess = (lower + upper) / 2
func x = ( u + log x + mu*log(1 - x)
, 1/x - mu/(1-x)
)
Root x0 = newtonRaphson def{newtonTol=RelTol 1e-8} (lower, x_guess, upper) func
in x0
  -- For large a and b we use the approximation from AS109 (Carter's
  -- approximation). It's reasonably good in this region.
| a > 1 && b > 1 =
let r = (y*y - 3) / 6
s = 1 / (2*a - 1)
t = 1 / (2*b - 1)
h = 2 / (s + t)
w = y * sqrt(h + r) / h - (t - s) * (r + 5/6 - 2 / (3 * h))
in a / (a + b * exp(2 * w))
-- Otherwise we revert to approximation from AS64 derived from
-- [AS64 2] when it's applicable.
--
-- It slightly reduces average number of iterations when `a' and
-- `b' have different magnitudes.
| chi2 > 0 && ratio > 1 = 1 - 2 / (ratio + 1)
-- If all else fails we use approximation from "Numerical
-- Recipes". It's very similar to approximations [AS64 4,5] but
-- it never goes out of [0,1] interval.
| otherwise = case () of
_| p < t / w -> (a * p * w) ** (1/a)
| otherwise -> 1 - (b * (1 - p) * w) ** (1/b)
where
lna = log $ a / (a+b)
lnb = log $ b / (a+b)
t = exp( a * lna ) / a
u = exp( b * lnb ) / b
w = t + u
where
-- Formula [AS64 2]
ratio = (4*a + 2*b - 2) / chi2
-- Quantile of chi-squared distribution. Formula [AS64 3].
chi2 = 2 * b * (1 - t + y * sqrt t) ** 3
where
t = 1 / (9 * b)
      -- `y' is Hastings' approximation of the p'th quantile of the standard
      -- normal distribution.
y = r - ( 2.30753 + 0.27061 * r )
/ ( 1.0 + ( 0.99229 + 0.04481 * r ) * r )
where
r = sqrt $ - 2 * log p
----------------------------------------------------------------
-- Sinc function
----------------------------------------------------------------
-- | Compute sinc function @sin(x)\/x@
sinc :: Double -> Double
sinc x
| ax < eps_0 = 1
| ax < eps_2 = 1 - x2/6
| ax < eps_4 = 1 - x2/6 + x2*x2/120
| otherwise = sin x / x
where
ax = abs x
x2 = x*x
-- For explanation of choice see `doc/sinc.hs'
eps_0 = 1.8250120749944284e-8 -- sqrt (6ε/4)
eps_2 = 1.4284346431400855e-4 -- (30ε)**(1/4) / 2
eps_4 = 4.043633626430947e-3 -- (1206ε)**(1/6) / 2
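-- Illustrative sanity checks (added): sinc 0 == 1 by the small-argument
-- branch above, and sinc pi ≈ 0 since sin pi vanishes (up to rounding).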
----------------------------------------------------------------
-- Logarithm
----------------------------------------------------------------
-- | Compute log(1+x)-x:
log1pmx :: Double -> Double
log1pmx x
| x < -1 = error "Domain error"
| x == -1 = m_neg_inf
| ax > 0.95 = log(1 + x) - x
| ax < m_epsilon = -(x * x) /2
| otherwise = - x * x * sumPowerSeries (-x) (recip <$> enumSequenceFrom 2)
where
ax = abs x
-- | /O(log n)/ Compute the logarithm in base 2 of the given value.
log2 :: Int -> Int
log2 v0
| v0 <= 0 = modErr $ "log2: nonpositive input, got " ++ show v0
| otherwise = go 5 0 v0
where
go !i !r !v | i == -1 = r
| v .&. b i /= 0 = let si = U.unsafeIndex sv i
in go (i-1) (r .|. si) (v `shiftR` si)
| otherwise = go (i-1) r v
b = U.unsafeIndex bv
!bv = U.fromList [ 0x02, 0x0c, 0xf0, 0xff00
, fromIntegral (0xffff0000 :: Word)
, fromIntegral (0xffffffff00000000 :: Word)]
!sv = U.fromList [1,2,4,8,16,32]
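-- Illustrative examples (added): log2 1 == 0, log2 8 == 3 and log2 9 == 3,
-- i.e. the result is the floor of the base-2 logarithm.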
----------------------------------------------------------------
-- Factorial
----------------------------------------------------------------
-- | Compute the factorial function /n/!. Returns +∞ if the input is
-- above 170 (above which the result cannot be represented by a
-- 64-bit 'Double').
factorial :: Int -> Double
factorial n
| n < 0 = error "Numeric.SpecFunctions.factorial: negative input"
| n > 170 = m_pos_inf
| otherwise = U.unsafeIndex factorialTable n
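-- Illustrative examples (added): factorial 5 == 120, and factorial 171 is
-- positive infinity, per the cutoff documented above.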
-- | Compute the natural logarithm of the factorial function. Gives
-- 16 decimal digits of precision.
logFactorial :: Integral a => a -> Double
logFactorial n
| n < 0 = error "Numeric.SpecFunctions.logFactorial: negative input"
  -- For smaller inputs we just look up the table
| n <= 170 = log $ U.unsafeIndex factorialTable (fromIntegral n)
  -- Otherwise we use the asymptotic Stirling series. The number of terms
  -- needed depends on the argument.
| n < 1500 = stirling + rx * ((1/12) - (1/360)*rx*rx)
| otherwise = stirling + (1/12)*rx
where
stirling = (x - 0.5) * log x - x + m_ln_sqrt_2_pi
x = fromIntegral n + 1
rx = 1 / x
{-# SPECIALIZE logFactorial :: Int -> Double #-}
-- | Calculate the error term of the Stirling approximation. This is
-- only defined for non-negative values.
--
-- \[
-- \operatorname{stirlingError}(n) = \log(n!) - \log(\sqrt{2\pi n}\frac{n}{e}^n)
-- \]
stirlingError :: Double -> Double
stirlingError n
| n <= 15.0 = case properFraction (n+n) of
(i,0) -> sfe `U.unsafeIndex` i
_ -> logGamma (n+1.0) - (n+0.5) * log n + n -
m_ln_sqrt_2_pi
| n > 500 = evaluateOddPolynomialL (1/n) [s0,-s1]
| n > 80 = evaluateOddPolynomialL (1/n) [s0,-s1,s2]
| n > 35 = evaluateOddPolynomialL (1/n) [s0,-s1,s2,-s3]
| otherwise = evaluateOddPolynomialL (1/n) [s0,-s1,s2,-s3,s4]
where
s0 = 0.083333333333333333333 -- 1/12
s1 = 0.00277777777777777777778 -- 1/360
s2 = 0.00079365079365079365079365 -- 1/1260
s3 = 0.000595238095238095238095238 -- 1/1680
s4 = 0.0008417508417508417508417508 -- 1/1188
sfe = U.fromList [ 0.0,
0.1534264097200273452913848, 0.0810614667953272582196702,
0.0548141210519176538961390, 0.0413406959554092940938221,
0.03316287351993628748511048, 0.02767792568499833914878929,
0.02374616365629749597132920, 0.02079067210376509311152277,
0.01848845053267318523077934, 0.01664469118982119216319487,
0.01513497322191737887351255, 0.01387612882307074799874573,
0.01281046524292022692424986, 0.01189670994589177009505572,
0.01110455975820691732662991, 0.010411265261972096497478567,
0.009799416126158803298389475, 0.009255462182712732917728637,
0.008768700134139385462952823, 0.008330563433362871256469318,
0.007934114564314020547248100, 0.007573675487951840794972024,
0.007244554301320383179543912, 0.006942840107209529865664152,
0.006665247032707682442354394, 0.006408994188004207068439631,
0.006171712263039457647532867, 0.005951370112758847735624416,
0.005746216513010115682023589, 0.005554733551962801371038690 ]
----------------------------------------------------------------
-- Combinatorics
----------------------------------------------------------------
-- |
-- Quickly compute the natural logarithm of /n/ @`choose`@ /k/, with
-- no checking.
--
-- Less numerically stable:
--
-- > exp $ lg (n+1) - lg (k+1) - lg (n-k+1)
-- > where lg = logGamma . fromIntegral
logChooseFast :: Double -> Double -> Double
logChooseFast n k = -log (n + 1) - logBeta (n - k + 1) (k + 1)
-- | Calculate binomial coefficient using exact formula
chooseExact :: Int -> Int -> Double
n `chooseExact` k
= U.foldl' go 1 $ U.enumFromTo 1 k
where
go a i = a * (nk + j) / j
where j = fromIntegral i :: Double
nk = fromIntegral (n - k)
-- | Compute logarithm of the binomial coefficient.
logChoose :: Int -> Int -> Double
n `logChoose` k
| k > n = (-1) / 0
  -- For very large N the exact algorithm overflows Double, so we
  -- switch to the beta-function-based one.
| k' < 50 && (n < 20000000) = log $ chooseExact n k'
| otherwise = logChooseFast (fromIntegral n) (fromIntegral k)
where
k' = min k (n-k)
-- | Compute the binomial coefficient /n/ @\``choose`\`@ /k/. For
-- values of /k/ > 50, this uses an approximation for performance
-- reasons. The approximation is accurate to 12 decimal places in the
-- worst case
--
-- Example:
--
-- > 7 `choose` 3 == 35
choose :: Int -> Int -> Double
n `choose` k
| k > n = 0
| k' < 50 = chooseExact n k'
| approx < max64 = fromIntegral . round64 $ approx
| otherwise = approx
where
k' = min k (n-k)
approx = exp $ logChooseFast (fromIntegral n) (fromIntegral k')
max64 = fromIntegral (maxBound :: Int64)
round64 x = round x :: Int64
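-- Illustrative sanity check (added): logChoose 7 3 ≈ log 35, matching the
-- exact example for `choose` above.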
-- | Compute ψ(/x/), the first logarithmic derivative of the gamma
-- function.
--
-- \[
-- \psi(x) = \frac{d}{dx} \ln \left(\Gamma(x)\right) = \frac{\Gamma'(x)}{\Gamma(x)}
-- \]
--
-- Uses Algorithm AS 103 by Bernardo, based on Minka's C implementation.
digamma :: Double -> Double
digamma x
| isNaN x || isInfinite x = m_NaN
  -- FIXME:
  --   This is ugly. We are testing here that the number is in fact an
  --   integer. It's a somewhat tricky question to answer. When ε for a
  --   given number becomes 1 or greater, every number represents
  --   an integer. We also must make sure that excess precision
  --   won't bite us.
| x <= 0 && fromIntegral (truncate x :: Int64) == x = m_neg_inf
-- Jeffery's reflection formula
| x < 0 = digamma (1 - x) + pi / tan (negate pi * x)
| x <= 1e-6 = - γ - 1/x + trigamma1 * x
| x' < c = r
-- De Moivre's expansion
| otherwise = let s = 1/x'
in evaluateEvenPolynomialL s
[ r + log x' - 0.5 * s
, - 1/12
, 1/120
, - 1/252
, 1/240
, - 1/132
, 391/32760
]
where
γ = m_eulerMascheroni
c = 12
-- Reduce to digamma (x + n) where (x + n) >= c
(r, x') = reduce 0 x
where
reduce !s y
| y < c = reduce (s - 1 / y) (y + 1)
| otherwise = (s, y)
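-- Illustrative sanity check (added): digamma 1 should be ≈ negate
-- m_eulerMascheroni ≈ -0.5772, since ψ(1) = -γ.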
----------------------------------------------------------------
-- Constants
----------------------------------------------------------------
-- Coefficients for 18-point Gauss-Legendre integration. They are
-- used in implementation of incomplete gamma and beta functions.
coefW,coefY :: U.Vector Double
coefW = U.fromList [ 0.0055657196642445571, 0.012915947284065419, 0.020181515297735382
, 0.027298621498568734, 0.034213810770299537, 0.040875750923643261
, 0.047235083490265582, 0.053244713977759692, 0.058860144245324798
, 0.064039797355015485, 0.068745323835736408, 0.072941885005653087
, 0.076598410645870640, 0.079687828912071670, 0.082187266704339706
, 0.084078218979661945, 0.085346685739338721, 0.085983275670394821
]
coefY = U.fromList [ 0.0021695375159141994, 0.011413521097787704, 0.027972308950302116
, 0.051727015600492421, 0.082502225484340941, 0.12007019910960293
, 0.16415283300752470, 0.21442376986779355, 0.27051082840644336
, 0.33199876341447887, 0.39843234186401943, 0.46931971407375483
, 0.54413605556657973, 0.62232745288031077, 0.70331500465597174
, 0.78649910768313447, 0.87126389619061517, 0.95698180152629142
]
{-# NOINLINE coefW #-}
{-# NOINLINE coefY #-}
trigamma1 :: Double
trigamma1 = 1.6449340668482264365 -- pi**2 / 6
modErr :: String -> a
modErr msg = error $ "Numeric.SpecFunctions." ++ msg
factorialTable :: U.Vector Double
{-# NOINLINE factorialTable #-}
factorialTable = U.fromListN 171
[ 1.0
, 1.0
, 2.0
, 6.0
, 24.0
, 120.0
, 720.0
, 5040.0
, 40320.0
, 362880.0
, 3628800.0
, 3.99168e7
, 4.790016e8
, 6.2270208e9
, 8.71782912e10
, 1.307674368e12
, 2.0922789888e13
, 3.55687428096e14
, 6.402373705728e15
, 1.21645100408832e17
, 2.43290200817664e18
, 5.109094217170944e19
, 1.1240007277776077e21
, 2.5852016738884974e22
, 6.204484017332394e23
, 1.5511210043330984e25
, 4.032914611266056e26
, 1.0888869450418352e28
, 3.0488834461171384e29
, 8.841761993739702e30
, 2.6525285981219103e32
, 8.222838654177922e33
, 2.631308369336935e35
, 8.683317618811886e36
, 2.9523279903960412e38
, 1.0333147966386144e40
, 3.719933267899012e41
, 1.3763753091226343e43
, 5.23022617466601e44
, 2.0397882081197442e46
, 8.159152832478977e47
, 3.3452526613163803e49
, 1.4050061177528798e51
, 6.041526306337383e52
, 2.6582715747884485e54
, 1.1962222086548019e56
, 5.5026221598120885e57
, 2.5862324151116818e59
, 1.2413915592536073e61
, 6.082818640342675e62
, 3.0414093201713376e64
, 1.5511187532873822e66
, 8.065817517094388e67
, 4.2748832840600255e69
, 2.308436973392414e71
, 1.2696403353658275e73
, 7.109985878048634e74
, 4.0526919504877214e76
, 2.3505613312828785e78
, 1.386831185456898e80
, 8.32098711274139e81
, 5.075802138772247e83
, 3.146997326038793e85
, 1.9826083154044399e87
, 1.2688693218588415e89
, 8.24765059208247e90
, 5.44344939077443e92
, 3.647111091818868e94
, 2.4800355424368305e96
, 1.711224524281413e98
, 1.197857166996989e100
, 8.504785885678623e101
, 6.1234458376886085e103
, 4.470115461512684e105
, 3.307885441519386e107
, 2.4809140811395396e109
, 1.88549470166605e111
, 1.4518309202828586e113
, 1.1324281178206297e115
, 8.946182130782974e116
, 7.15694570462638e118
, 5.797126020747368e120
, 4.753643337012841e122
, 3.9455239697206583e124
, 3.314240134565353e126
, 2.81710411438055e128
, 2.422709538367273e130
, 2.1077572983795275e132
, 1.8548264225739844e134
, 1.650795516090846e136
, 1.4857159644817613e138
, 1.352001527678403e140
, 1.2438414054641305e142
, 1.1567725070816416e144
, 1.087366156656743e146
, 1.0329978488239058e148
, 9.916779348709496e149
, 9.619275968248211e151
, 9.426890448883246e153
, 9.332621544394413e155
, 9.332621544394415e157
, 9.425947759838358e159
, 9.614466715035125e161
, 9.902900716486179e163
, 1.0299016745145626e166
, 1.0813967582402908e168
, 1.1462805637347082e170
, 1.2265202031961378e172
, 1.3246418194518288e174
, 1.4438595832024934e176
, 1.5882455415227428e178
, 1.7629525510902446e180
, 1.974506857221074e182
, 2.2311927486598134e184
, 2.543559733472187e186
, 2.9250936934930154e188
, 3.393108684451898e190
, 3.9699371608087206e192
, 4.68452584975429e194
, 5.574585761207606e196
, 6.689502913449126e198
, 8.094298525273443e200
, 9.875044200833601e202
, 1.214630436702533e205
, 1.5061417415111406e207
, 1.8826771768889257e209
, 2.372173242880047e211
, 3.0126600184576594e213
, 3.856204823625804e215
, 4.974504222477286e217
, 6.466855489220473e219
, 8.471580690878819e221
, 1.1182486511960041e224
, 1.4872707060906857e226
, 1.9929427461615188e228
, 2.6904727073180504e230
, 3.6590428819525483e232
, 5.012888748274991e234
, 6.917786472619488e236
, 9.615723196941088e238
, 1.3462012475717523e241
, 1.898143759076171e243
, 2.6953641378881624e245
, 3.8543707171800725e247
, 5.5502938327393044e249
, 8.047926057471992e251
, 1.1749972043909107e254
, 1.7272458904546386e256
, 2.5563239178728654e258
, 3.808922637630569e260
, 5.713383956445854e262
, 8.62720977423324e264
, 1.3113358856834524e267
, 2.0063439050956823e269
, 3.0897696138473508e271
, 4.789142901463393e273
, 7.471062926282894e275
, 1.1729568794264143e278
, 1.8532718694937346e280
, 2.946702272495038e282
, 4.714723635992061e284
, 7.590705053947218e286
, 1.2296942187394494e289
, 2.0044015765453023e291
, 3.287218585534296e293
, 5.423910666131589e295
, 9.003691705778436e297
, 1.5036165148649988e300
, 2.526075744973198e302
, 4.269068009004705e304
, 7.257415615307998e306
]
| bos/math-functions | Numeric/SpecFunctions/Internal.hs | bsd-2-clause | 46,384 | 2 | 34 | 13,631 | 11,161 | 6,035 | 5,126 | 839 | 4 |
{-# LANGUAGE TupleSections, OverloadedStrings, ScopedTypeVariables #-}
module Handler.Home where
import Import
import qualified Data.Conduit as C
import qualified Data.Conduit.List as CL
import qualified Data.Text as T
import Data.Time.Clock
import Database.Persist.GenericSql.Raw
import Database.Persist.Store
import Text.Shakespeare.Text
import Yesod.Default.Config
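-- | Keep at most @n@ elements of the list that satisfy the monadic
-- predicate, scanning left to right. Illustrative (added) example:
-- @takeM 2 [1..10] (return . even)@ yields @[2,4]@.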
takeM :: (Functor m, Monad m) => Int -> [a] -> (a -> m Bool) -> m [a]
takeM 0 _ _ = return []
takeM _ [] _ = return []
takeM n (x:xs) p = do
b <- p x
if b
then (x:) <$> takeM (n-1) xs p
else takeM n xs p
getHomeR :: Handler RepHtml
getHomeR = do
now <- liftIO getCurrentTime
root <- appRoot . settings <$> getYesod
let sanitize = T.map (\c -> if c == ':' then '-' else c)
let (sanitize -> host)
| "http://" `T.isPrefixOf` root = T.drop 7 root
| "https://" `T.isPrefixOf` root = T.drop 8 root
| otherwise = root
(map entityVal -> recent) <- runDB $ do
recent <- selectList [PackageDownloaded ==. True] [Desc PackageDate, LimitTo 100]
takeM 5 recent $ \(Entity _ val) -> do
cnt <- count [PackageName ==. packageName val, PackageDate <=. packageDate val]
return $ cnt == 1
(map entityVal -> updated) <- runDB $ do
recent <- selectList [PackageDownloaded ==. True] [Desc PackageDate, LimitTo 100]
takeM 5 recent $ \(Entity _ val) -> do
cnt <- count [PackageName ==. packageName val, PackageDate <=. packageDate val]
return $ cnt > 1
let cvt ls =
[ (a, b, fromIntegral c)
| [PersistText a, PersistText b, PersistInt64 c] <- ls
]
(cvt -> pop) <- runDB $ do
C.runResourceT $ withStmt [st|
SELECT "Package".name, "Package".version, count("Download")
FROM "Package", "Download"
WHERE "Package".id == "Download".package
AND "Download".date >= ?
GROUP BY name
ORDER BY count("Download") DESC
LIMIT 5
|] [PersistUTCTime $ (-24*60*60) `addUTCTime` now] C.$$ CL.consume
let showDate date
| sec < 1 = "just now" :: String
| sec < 60 = show (floor sec) ++ " seconds ago"
| mnu < 60 = show (floor mnu) ++ " minutes ago"
| hou < 24 = show (floor hou) ++ " hours ago"
| otherwise = show (floor day) ++ " days ago"
where
sec = realToFrac (now `diffUTCTime` date) :: Double
mnu = sec / 60
hou = mnu / 60
day = hou / 24
specName pkg =
packageName pkg <> "-" <> packageVersion pkg
defaultLayout $ do
setTitle "HackageDB mirror - Home"
$(widgetFile "homepage")
| tanakh/hackage-mirror | Handler/Home.hs | bsd-2-clause | 2,562 | 0 | 19 | 655 | 944 | 477 | 467 | -1 | -1 |
-- |
-- Module : Enterprise.FizzBuzz
-- Copyright : (c) 2013 Josh Hawkins & Mike Bethany
--
-- License : BSD-style
-- Maintainer : [email protected]
-- Stability : experimental
-- Portability : GHC
--
-- Putting some Fizz and Buzz in your numbers!
module Enterprise.FizzBuzz
(
functionToPad1,
functionToPad2,
functionToPad3,
functionToPad4,
functionToPad5,
functionToPad6,
functionToPad7,
functionToPad8,
functionToPad9,
functionToPad10,
functionToPad11,
-- * More info on fizzBuzz.
-- $fizzBuzz
fizzBuzz,
-- * How is this used in Main?
-- $fizzBuzzRange
fizzBuzzRange) where
functionToPad1 i = undefined
functionToPad2 i = undefined
functionToPad3 i = undefined
functionToPad4 i = undefined
functionToPad5 i = undefined
functionToPad6 i = undefined
functionToPad7 i = undefined
functionToPad8 i = undefined
functionToPad9 i = undefined
functionToPad10 i = undefined
functionToPad11 i = undefined
functionToPadHidden i = undefined
-- courtesy of Mike Bethany @ Haskell Quiz
-- http://www.haskell.org/haskellwiki/Haskell_Quiz/FizzBuzz/Solution_Mikbe
fizz :: Int -> String
fizz x = if x `mod` 3 == 0 then "Fizz" else ""
buzz :: Int -> String
buzz x = if x `mod` 5 == 0 then "Buzz" else ""
fizzBuzz :: Int -> String
fizzBuzz x = if fizz x ++ buzz x == ""
             then show x
             else fizz x ++ buzz x
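-- Illustrative examples (added, following the module's existing comment style):
--
-- > fizzBuzz 3  == "Fizz"
-- > fizzBuzz 5  == "Buzz"
-- > fizzBuzz 15 == "FizzBuzz"
-- > fizzBuzz 7  == "7"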
-- | This is a comment with a code example using fizzBuzzRange
--
-- > putStrLn $ show $ fizzBuzzRange $ (read n :: Int)
fizzBuzzRange :: Int -> String
fizzBuzzRange limit =
foldl fizzJoin "" [1..limit]
where fizzJoin acc i = acc ++ " " ++ fizzBuzz i
| JHawk/enterpriseFizzBuzz | src/Enterprise/FizzBuzz.hs | bsd-3-clause | 1,666 | 0 | 9 | 371 | 348 | 197 | 151 | 39 | 2 |
module Printer where
{-|
>>> :{
let callback name = do
putStrLn $ "Hello. Yes, this is " ++ name
>>> :}
>>> printer "Dog" callback
Dog says:
Hello. Yes, this is Dog
-}
printer :: String -> (String -> IO ()) -> IO ()
printer name callBack = putStrLn $ name ++ " says:"
| pfcuttle/twentyfour-2013 | src/doctest/Printer.hs | bsd-3-clause | 282 | 0 | 10 | 69 | 53 | 28 | 25 | 3 | 1 |
module Paths (tests) where
import Data.Monoid
import PathFinder.Core
import PathFinder.Types
import Control.Lens (preview, _Just)
import Test.Tasty
import Test.Tasty.Hspec
import qualified Data.Map as Map
tests :: TestTree
tests = testGroup "tests"
[ testCase "finding a path" spec
]
spec :: Spec
spec = describe "Path finding functionality" $
it "should be able to reconstruct the path, given a predecessor map" $ do
let zero = 0 :: Int
pmap = Map.fromList [ ((zero,zero), (Sum zero, Nothing))
, ((0,1), (Sum 0, Just (0,0)))
, ((0,2), (Sum 0, Just (0,1)))
]
preview (_Just . pathCoords) (reconstructPath (0,2) pmap) `shouldBe`
Just [(0,0), (0,1), (0,2)]
-- it "should not expand to disallowed coords" $ do
-- it "should add the cell to closed set after visiting it" $ do
-- context "analyzing neighbors" $ do
-- context "if the neighbor was unknown" $ do
-- it "should insert an unknown neighbor into the predecessor map" $ do
-- it "should add new neighbors to the open set" $ do
-- it "should update the predecessor map if the new path is shorter" $ do
-- it "should do nothing if the path to the predecessor is unknown" $ do
-- context "finding a path" $ do
-- it "should return an empty path if start == goal" $ do
-- it "should find a straight path" $ do
-- it "should find a path around an obstacle" $ do
-- it "should terminate if path is blocked" $ do
-- it "should find a path around a more complicated obstacle" $ do
| markus1189/pathfinder | tests/Paths.hs | bsd-3-clause | 1,665 | 0 | 16 | 494 | 294 | 178 | 116 | 20 | 1 |
{-# Language DeriveFunctor #-}
module Spell where
import Control.Monad.Free
-- | 10 is the magic number for everything
data CastType
= Beam
| BlastRadius
| Target
deriving (Show)
data Element
= Earth -- knocked down, no movement
| Wind -- slowed, half action points
| Water -- frozen, skip turn
| Fire -- burning, damage over time
deriving (Show)
data SpellF next
= CastType CastType next
| Element Element next
| Range Int next
| Power Int next
deriving (Show, Functor)
type Spell' a = Free SpellF a
type Spell = Spell' ()
beam :: Spell
beam = liftF $ CastType Beam ()
blastRadius :: Spell
blastRadius = liftF $ CastType BlastRadius ()
target :: Spell
target = liftF $ CastType Target ()
earth :: Spell
earth = liftF $ Element Earth ()
wind :: Spell
wind = liftF $ Element Wind ()
water :: Spell
water = liftF $ Element Water ()
fire :: Spell
fire = liftF $ Element Fire ()
range :: Int -> Spell
range r = liftF $ Range r ()
power :: Int -> Spell
power p = liftF $ Power p ()
fireBeam :: Spell
fireBeam = fire >> beam
cost :: Spell -> Int
cost (Free (CastType Beam next)) = 3 + cost next
cost (Free (CastType BlastRadius next)) = 6 + cost next
cost (Free (CastType Target next)) = 4 + cost next
cost (Free (Element Earth next)) = 6 + cost next
cost (Free (Element Wind next)) = 3 + cost next
cost (Free (Element Water next)) = 4 + cost next
cost (Free (Element Fire next)) = 5 + cost next
cost (Free (Range value next)) = value + cost next
cost (Free (Power value next)) = 2 * value + cost next
cost (Pure _) = 0
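-- Illustrative example (added; derived from the cost equations above):
--
-- > cost fireBeam == 8   -- fire (5) + beam (3)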
| aspidites/functional | src/Spell.hs | bsd-3-clause | 1,563 | 0 | 9 | 345 | 631 | 330 | 301 | 53 | 1 |
{-# LANGUAGE CPP #-}
{-# OPTIONS -fno-warn-missing-signatures #-}
-- | Sequential implementation of the segmented array API defined in
-- @dph-prim-interface@.
--
-- There is a parallel implementation in @dph-prim-par@,
-- so you probably want that instead.
-- The API is defined in @DPH_Header.h@ and @DPH_Interface.h@ to ensure that both
-- @dph-prim-par@ and @dph-prim-seq@ really do export the same symbols.
#include "DPH_Header.h"
import Data.Array.Parallel.Unlifted.Sequential.USel
import Data.Array.Parallel.Unlifted.Sequential
import qualified Data.Array.Parallel.Unlifted.Sequential.USegd as USegd
import qualified Data.Array.Parallel.Unlifted.Sequential.USSegd as USSegd
import qualified Data.Array.Parallel.Unlifted.Sequential.UVSegd as UVSegd
import qualified Data.Array.Parallel.Unlifted.Sequential.Vector as U
import qualified Data.Array.Parallel.Unlifted.Vectors as US
#include "DPH_Interface.h"
-- NOTE:
-- See DPH_Interface.h for documentation.
-- The defs should appear in the same order as they are listed in DPH_Interface.h
-- Basics ---------------------------------------------------------------------
class U.Unbox a => Elt a
-- | Arrays are stored as unboxed vectors.
-- They have bulk-strict semantics, so demanding one element demands them all.
type Array = U.Vector
-- Constructors ---------------------------------------------------------------
empty = U.empty
(+:+) = (U.++)
append_s _ xd xs yd ys = appendSU xd xs yd ys
append_vs _ xd xs yd ys = appendSU xd' xs' yd' ys'
where xd' = unsafeDemoteToSegdOfVSegd xd
yd' = unsafeDemoteToSegdOfVSegd yd
xs' = extractsFromVectorsUVSegd xd xs
ys' = extractsFromVectorsUVSegd yd ys
replicate = U.replicate
replicate_s = replicateSU
replicate_rs = replicateRSU
repeat n _ = U.repeat n
indexed = U.indexed
indices_s = indicesSU
enumFromTo = U.enumFromTo
enumFromThenTo = U.enumFromThenTo
enumFromStepLen = U.enumFromStepLen
enumFromStepLenEach = U.enumFromStepLenEach
-- Projections ----------------------------------------------------------------
length = U.length
index = U.index
indexs = indexsFromVector
indexs_avs = indexsFromVectorsUVSegd
extract = U.extract
extracts_nss = extractsFromNestedUSSegd
extracts_ass = extractsFromVectorsUSSegd
extracts_avs = extractsFromVectorsUVSegd
drop = U.drop
-- Update ---------------------------------------------------------------------
update = U.update
-- Permutation ----------------------------------------------------------------
permute = U.permute
bpermute = U.bpermute
mbpermute = U.mbpermute
bpermuteDft = U.bpermuteDft
-- Zipping and Unzipping ------------------------------------------------------
zip = U.zip
zip3 = U.zip3
unzip = U.unzip
unzip3 = U.unzip3
fsts = U.fsts
snds = U.snds
-- Map and ZipWith ------------------------------------------------------------
map = U.map
zipWith = U.zipWith
-- Scans and Folds ------------------------------------------------------------
scan = U.scan
fold = U.fold
fold_s = foldSU
fold_ss = foldSSU
fold_r = foldlRU
fold1 = U.fold1
fold1_s = fold1SU
fold1_ss = fold1SSU
sum = U.sum
sum_r = sumRU
and = U.and
#if defined(__GLASGOW_HASKELL_LLVM__)
mmap = U.mmap
mzipWith = U.mzipWith
mfold = U.mfold
#endif /* defined(__GLASGOW_HASKELL_LLVM__) */
-- Packing and Filter ---------------------------------------------------------
pack = U.pack
filter = U.filter
-- Combine and Interleave -----------------------------------------------------
combine = U.combine
combine2 tags _ = U.combine2ByTag tags
interleave = U.interleave
-- Selectors ------------------------------------------------------------------
type Sel2 = USel2
mkSel2 tags idxs n0 n1 _ = mkUSel2 tags idxs n0 n1
tagsSel2 = tagsUSel2
indicesSel2 = indicesUSel2
elementsSel2_0 = elementsUSel2_0
elementsSel2_1 = elementsUSel2_1
repSel2 _ = ()
type SelRep2 = ()
mkSelRep2 _ = ()
indicesSelRep2 tags _ = tagsToIndices2 tags
elementsSelRep2_0 tags _ = count tags 0
elementsSelRep2_1 tags _ = count tags 1
-- Segment Descriptors --------------------------------------------------------
type Segd = USegd.USegd
mkSegd = USegd.mkUSegd
validSegd = USegd.valid
emptySegd = USegd.empty
singletonSegd = USegd.singleton
lengthSegd = USegd.length
lengthsSegd = USegd.takeLengths
indicesSegd = USegd.takeIndices
elementsSegd = USegd.takeElements
-- Slice Segment Descriptors --------------------------------------------------
type SSegd = USSegd.USSegd
mkSSegd = USSegd.mkUSSegd
validSSegd = USSegd.valid
emptySSegd = USSegd.empty
singletonSSegd = USSegd.singleton
promoteSegdToSSegd = USSegd.fromUSegd
isContiguousSSegd = USSegd.isContiguous
lengthOfSSegd = USSegd.length
lengthsOfSSegd = USSegd.takeLengths
indicesOfSSegd = USSegd.takeIndices
startsOfSSegd = USSegd.takeStarts
sourcesOfSSegd = USSegd.takeSources
getSegOfSSegd = USSegd.getSeg
appendSSegd = USSegd.appendWith
-- Virtual Segment Descriptors ------------------------------------------------
type VSegd = UVSegd.UVSegd
mkVSegd = UVSegd.mkUVSegd
validVSegd = UVSegd.valid
emptyVSegd = UVSegd.empty
singletonVSegd = UVSegd.singleton
replicatedVSegd = UVSegd.replicated
promoteSegdToVSegd = UVSegd.fromUSegd
promoteSSegdToVSegd = UVSegd.fromUSSegd
isManifestVSegd = UVSegd.isManifest
isContiguousVSegd = UVSegd.isContiguous
lengthOfVSegd = UVSegd.length
takeVSegidsOfVSegd = UVSegd.takeVSegids
takeVSegidsRedundantOfVSegd = UVSegd.takeVSegids
takeSSegdOfVSegd = UVSegd.takeUSSegd
takeSSegdRedundantOfVSegd = UVSegd.takeUSSegd
takeLengthsOfVSegd = UVSegd.takeLengths
getSegOfVSegd = UVSegd.getSeg
unsafeDemoteToSSegdOfVSegd = UVSegd.unsafeDemoteToUSSegd
unsafeDemoteToSegdOfVSegd = UVSegd.unsafeDemoteToUSegd
updateVSegsOfVSegd = UVSegd.updateVSegs
updateVSegsReachableOfVSegd = UVSegd.updateVSegsReachable
appendVSegd = UVSegd.appendWith
combine2VSegd = UVSegd.combine2
-- Irregular 2D Arrays --------------------------------------------------------
class US.Unboxes a => Elts a
type Arrays = US.Vectors
emptys = US.empty
lengths = US.length
singletons = US.singleton
unsafeIndexs = US.unsafeIndex
unsafeIndex2s = US.unsafeIndex2
appends = US.append
fromVectors = US.fromVector
toVectors = US.toVector
-- Random Arrays --------------------------------------------------------------
randoms = U.random
randomRs = U.randomR
-- Array IO -------------------------------------------------------------------
class U.UIO a => IOElt a
hPut = U.hPut
hGet = U.hGet
toList = U.toList
fromList = U.fromList
| mainland/dph | dph-prim-seq/Data/Array/Parallel/Unlifted.hs | bsd-3-clause | 9,060 | 0 | 7 | 3,382 | 1,172 | 685 | 487 | -1 | -1 |
{-# LANGUAGE OverloadedStrings #-}
module Handler.Index where
import qualified Data.Text as T
import Snap.Snaplet
import Heist
import Snap.Snaplet.Heist
import Snap.Snaplet.Auth
import State.Site
import Splice.Site
import Application
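-- | Render the index page with the sites belonging to the currently
-- logged-in user (an empty list when no user is logged in).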
indexHandler :: AppHandler ()
indexHandler = do
mu <- with auth currentUser
sites <- case fmap (read . T.unpack . unUid) (mu >>= userId) of
Nothing -> return []
Just uid -> getUserSites uid
renderWithSplices "index" ("sites" ## sitesSplice sites)
| dbp/positionsites | src/Handler/Index.hs | bsd-3-clause | 518 | 0 | 13 | 103 | 152 | 81 | 71 | 17 | 2 |
{-# LANGUAGE BangPatterns #-}
{-# LANGUAGE DeriveFunctor #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE ViewPatterns #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE TupleSections #-}
module Stack.Setup
( setupEnv
, ensureCompiler
, ensureDockerStackExe
, SetupOpts (..)
, defaultStackSetupYaml
) where
import Control.Applicative
import Control.Exception.Enclosed (catchIO, tryAny)
import Control.Monad (liftM, when, join, void, unless)
import Control.Monad.Catch
import Control.Monad.IO.Class (MonadIO, liftIO)
import Control.Monad.Logger
import Control.Monad.Reader (MonadReader, ReaderT (..), asks)
import Control.Monad.State (get, put, modify)
import Control.Monad.Trans.Control
import Crypto.Hash (SHA1(SHA1))
import Data.Aeson.Extended
import qualified Data.ByteString as S
import qualified Data.ByteString.Char8 as S8
import qualified Data.ByteString.Lazy as LBS
import Data.Char (isSpace)
import Data.Conduit (Conduit, ($$), (=$), await, yield, awaitForever)
import qualified Data.Conduit.Binary as CB
import Data.Conduit.Lift (evalStateC)
import qualified Data.Conduit.List as CL
import Data.Either
import Data.Foldable hiding (concatMap, or, maximum)
import Data.IORef
import Data.IORef.RunOnce (runOnce)
import Data.List hiding (concat, elem, maximumBy, any)
import Data.Map (Map)
import qualified Data.Map as Map
import Data.Maybe
import Data.Monoid
import Data.Ord (comparing)
import Data.Set (Set)
import qualified Data.Set as Set
import Data.Text (Text)
import qualified Data.Text as T
import qualified Data.Text.Encoding as T
import qualified Data.Text.Encoding.Error as T
import Data.Time.Clock (NominalDiffTime, diffUTCTime, getCurrentTime)
import Data.Typeable (Typeable)
import qualified Data.Yaml as Yaml
import Distribution.System (OS, Arch (..), Platform (..))
import qualified Distribution.System as Cabal
import Distribution.Text (simpleParse)
import Language.Haskell.TH as TH
import Network.HTTP.Client.Conduit
import Network.HTTP.Download.Verified
import Path
import Path.Extra (toFilePathNoTrailingSep)
import Path.IO
import qualified Paths_stack as Meta
import Prelude hiding (concat, elem, any) -- Fix AMP warning
import Safe (readMay)
import Stack.Build (build)
import Stack.Config (resolvePackageEntry, loadConfig)
import Stack.Constants (distRelativeDir, stackProgName)
import Stack.Exec (defaultEnvSettings)
import Stack.Fetch
import Stack.GhcPkg (createDatabase, getCabalPkgVer, getGlobalDB, mkGhcPackagePath)
import Stack.Setup.Installed
import Stack.Types
import Stack.Types.Internal (HasTerminal, HasReExec, HasLogLevel)
import Stack.Types.StackT
import qualified System.Directory as D
import System.Environment (getExecutablePath)
import System.Exit (ExitCode (ExitSuccess))
import System.FilePath (searchPathSeparator)
import qualified System.FilePath as FP
import System.Process (rawSystem)
import System.Process.Read
import System.Process.Run (runIn)
import Text.Printf (printf)
-- | Default location of the stack-setup.yaml file
defaultStackSetupYaml :: String
defaultStackSetupYaml =
"https://raw.githubusercontent.com/fpco/stackage-content/master/stack/stack-setup-2.yaml"
data SetupOpts = SetupOpts
{ soptsInstallIfMissing :: !Bool
, soptsUseSystem :: !Bool
, soptsWantedCompiler :: !CompilerVersion
, soptsCompilerCheck :: !VersionCheck
, soptsStackYaml :: !(Maybe (Path Abs File))
-- ^ If we got the desired GHC version from that file
, soptsForceReinstall :: !Bool
, soptsSanityCheck :: !Bool
-- ^ Run a sanity check on the selected GHC
, soptsSkipGhcCheck :: !Bool
-- ^ Don't check for a compatible GHC version/architecture
, soptsSkipMsys :: !Bool
-- ^ Do not use a custom msys installation on Windows
, soptsUpgradeCabal :: !Bool
-- ^ Upgrade the global Cabal library in the database to the newest
-- version. Only works reliably with a stack-managed installation.
, soptsResolveMissingGHC :: !(Maybe Text)
-- ^ Message shown to user for how to resolve the missing GHC
, soptsStackSetupYaml :: !String
-- ^ Location of the main stack-setup.yaml file
, soptsGHCBindistURL :: !(Maybe String)
-- ^ Alternate GHC binary distribution (requires custom GHCVariant)
}
deriving Show
data SetupException = UnsupportedSetupCombo OS Arch
| MissingDependencies [String]
| UnknownCompilerVersion Text CompilerVersion [CompilerVersion]
| UnknownOSKey Text
| GHCSanityCheckCompileFailed ReadProcessException (Path Abs File)
| WantedMustBeGHC
| RequireCustomGHCVariant
| ProblemWhileDecompressing (Path Abs File)
| SetupInfoMissingSevenz
| GHCJSRequiresStandardVariant
| GHCJSNotBooted
| DockerStackExeNotFound Version Text
deriving Typeable
instance Exception SetupException
instance Show SetupException where
show (UnsupportedSetupCombo os arch) = concat
[ "I don't know how to install GHC for "
, show (os, arch)
, ", please install manually"
]
show (MissingDependencies tools) =
"The following executables are missing and must be installed: " ++
intercalate ", " tools
show (UnknownCompilerVersion oskey wanted known) = concat
[ "No information found for "
, compilerVersionString wanted
, ".\nSupported versions for OS key '" ++ T.unpack oskey ++ "': "
, intercalate ", " (map show known)
]
show (UnknownOSKey oskey) =
"Unable to find installation URLs for OS key: " ++
T.unpack oskey
show (GHCSanityCheckCompileFailed e ghc) = concat
[ "The GHC located at "
, toFilePath ghc
, " failed to compile a sanity check. Please see:\n\n"
, " https://github.com/commercialhaskell/stack/blob/release/doc/install_and_upgrade.md\n\n"
, "for more information. Exception was:\n"
, show e
]
show WantedMustBeGHC =
"The wanted compiler must be GHC"
show RequireCustomGHCVariant =
"A custom --ghc-variant must be specified to use --ghc-bindist"
show (ProblemWhileDecompressing archive) =
"Problem while decompressing " ++ toFilePath archive
show SetupInfoMissingSevenz =
"SetupInfo missing Sevenz EXE/DLL"
show GHCJSRequiresStandardVariant =
"stack does not yet support using --ghc-variant with GHCJS"
show GHCJSNotBooted =
"GHCJS does not yet have its boot packages installed. Use \"stack setup\" to attempt to run ghcjs-boot."
show (DockerStackExeNotFound stackVersion osKey) = concat
[ stackProgName
, "-"
, versionString stackVersion
, " executable not found for "
, T.unpack osKey
, "\nUse the '"
, T.unpack dockerStackExeArgName
, "' option to specify a location"]
-- | Modify the environment variables (like PATH) appropriately, possibly doing installation too
setupEnv :: (MonadIO m, MonadMask m, MonadLogger m, MonadReader env m, HasBuildConfig env, HasHttpManager env, HasTerminal env, HasReExec env, HasLogLevel env, HasGHCVariant env, MonadBaseControl IO m)
=> Maybe Text -- ^ Message to give user when necessary GHC is not available
-> m EnvConfig
setupEnv mResolveMissingGHC = do
bconfig <- asks getBuildConfig
let platform = getPlatform bconfig
wc = whichCompiler (bcWantedCompiler bconfig)
sopts = SetupOpts
{ soptsInstallIfMissing = configInstallGHC $ bcConfig bconfig
, soptsUseSystem = configSystemGHC $ bcConfig bconfig
, soptsWantedCompiler = bcWantedCompiler bconfig
, soptsCompilerCheck = configCompilerCheck $ bcConfig bconfig
, soptsStackYaml = Just $ bcStackYaml bconfig
, soptsForceReinstall = False
, soptsSanityCheck = False
, soptsSkipGhcCheck = configSkipGHCCheck $ bcConfig bconfig
, soptsSkipMsys = configSkipMsys $ bcConfig bconfig
, soptsUpgradeCabal = False
, soptsResolveMissingGHC = mResolveMissingGHC
, soptsStackSetupYaml = defaultStackSetupYaml
, soptsGHCBindistURL = Nothing
}
mghcBin <- ensureCompiler sopts
-- Modify the initial environment to include the GHC path, if a local GHC
-- is being used
menv0 <- getMinimalEnvOverride
let env = removeHaskellEnvVars
$ augmentPathMap (maybe [] edBins mghcBin)
$ unEnvOverride menv0
menv <- mkEnvOverride platform env
compilerVer <- getCompilerVersion menv wc
cabalVer <- getCabalPkgVer menv wc
packages <- mapM
(resolvePackageEntry menv (bcRoot bconfig))
(bcPackageEntries bconfig)
let envConfig0 = EnvConfig
{ envConfigBuildConfig = bconfig
, envConfigCabalVersion = cabalVer
, envConfigCompilerVersion = compilerVer
, envConfigPackages = Map.fromList $ concat packages
}
-- extra installation bin directories
mkDirs <- runReaderT extraBinDirs envConfig0
let mpath = Map.lookup "PATH" env
mkDirs' = map toFilePath . mkDirs
depsPath = augmentPath (mkDirs' False) mpath
localsPath = augmentPath (mkDirs' True) mpath
deps <- runReaderT packageDatabaseDeps envConfig0
createDatabase menv wc deps
localdb <- runReaderT packageDatabaseLocal envConfig0
createDatabase menv wc localdb
globaldb <- getGlobalDB menv wc
let mkGPP locals = mkGhcPackagePath locals localdb deps globaldb
distDir <- runReaderT distRelativeDir envConfig0
executablePath <- liftIO getExecutablePath
utf8EnvVars <- getUtf8LocaleVars menv
envRef <- liftIO $ newIORef Map.empty
let getEnvOverride' es = do
m <- readIORef envRef
case Map.lookup es m of
Just eo -> return eo
Nothing -> do
eo <- mkEnvOverride platform
$ Map.insert "PATH" (if esIncludeLocals es then localsPath else depsPath)
$ (if esIncludeGhcPackagePath es
then Map.insert
(case wc of { Ghc -> "GHC_PACKAGE_PATH"; Ghcjs -> "GHCJS_PACKAGE_PATH" })
(mkGPP (esIncludeLocals es))
else id)
$ (if esStackExe es
then Map.insert "STACK_EXE" (T.pack executablePath)
else id)
$ (if esLocaleUtf8 es
then Map.union utf8EnvVars
else id)
-- For reasoning and duplication, see: https://github.com/fpco/stack/issues/70
$ Map.insert "HASKELL_PACKAGE_SANDBOX" (T.pack $ toFilePathNoTrailingSep deps)
$ Map.insert "HASKELL_PACKAGE_SANDBOXES"
(T.pack $ if esIncludeLocals es
then intercalate [searchPathSeparator]
[ toFilePathNoTrailingSep localdb
, toFilePathNoTrailingSep deps
, ""
]
else intercalate [searchPathSeparator]
[ toFilePathNoTrailingSep deps
, ""
])
$ Map.insert "HASKELL_DIST_DIR" (T.pack $ toFilePathNoTrailingSep distDir)
$ env
!() <- atomicModifyIORef envRef $ \m' ->
(Map.insert es eo m', ())
return eo
return EnvConfig
{ envConfigBuildConfig = bconfig
{ bcConfig = maybe id addIncludeLib mghcBin
(bcConfig bconfig)
{ configEnvOverride = getEnvOverride' }
}
, envConfigCabalVersion = cabalVer
, envConfigCompilerVersion = compilerVer
, envConfigPackages = envConfigPackages envConfig0
}
-- | Add the include and lib paths to the given Config
addIncludeLib :: ExtraDirs -> Config -> Config
addIncludeLib (ExtraDirs _bins includes libs) config = config
{ configExtraIncludeDirs = Set.union
(configExtraIncludeDirs config)
(Set.fromList $ map T.pack includes)
, configExtraLibDirs = Set.union
(configExtraLibDirs config)
(Set.fromList $ map T.pack libs)
}
-- | Ensure compiler (ghc or ghcjs) is installed and provide the PATHs to add if necessary
ensureCompiler :: (MonadIO m, MonadMask m, MonadLogger m, MonadReader env m, HasConfig env, HasHttpManager env, HasTerminal env, HasReExec env, HasLogLevel env, HasGHCVariant env, MonadBaseControl IO m)
=> SetupOpts
-> m (Maybe ExtraDirs)
ensureCompiler sopts = do
let wc = whichCompiler (soptsWantedCompiler sopts)
when (getGhcVersion (soptsWantedCompiler sopts) < $(mkVersion "7.8")) $ do
$logWarn "stack will almost certainly fail with GHC below version 7.8"
$logWarn "Valiantly attempting to run anyway, but I know this is doomed"
$logWarn "For more information, see: https://github.com/commercialhaskell/stack/issues/648"
$logWarn ""
-- Check the available GHCs
menv0 <- getMinimalEnvOverride
msystem <-
if soptsUseSystem sopts
then getSystemCompiler menv0 wc
else return Nothing
Platform expectedArch _ <- asks getPlatform
let needLocal = case msystem of
Nothing -> True
Just _ | soptsSkipGhcCheck sopts -> False
Just (system, arch) ->
not (isWanted system) ||
arch /= expectedArch
isWanted = isWantedCompiler (soptsCompilerCheck sopts) (soptsWantedCompiler sopts)
-- If we need to install a GHC or MSYS, try to do so
-- Return the additional directory paths of GHC & MSYS.
mtools <- if needLocal
then do
getSetupInfo' <- runOnce (getSetupInfo (soptsStackSetupYaml sopts) =<< asks getHttpManager)
localPrograms <- asks $ configLocalPrograms . getConfig
installed <- listInstalled localPrograms
-- Install GHC
ghcVariant <- asks getGHCVariant
config <- asks getConfig
ghcPkgName <- parsePackageNameFromString ("ghc" ++ ghcVariantSuffix ghcVariant)
let installedCompiler =
case wc of
Ghc -> getInstalledTool installed ghcPkgName (isWanted . GhcVersion)
Ghcjs -> getInstalledGhcjs installed isWanted
compilerTool <- case installedCompiler of
Just tool -> return tool
Nothing
| soptsInstallIfMissing sopts -> do
si <- getSetupInfo'
downloadAndInstallCompiler
si
(soptsWantedCompiler sopts)
(soptsCompilerCheck sopts)
(soptsGHCBindistURL sopts)
| otherwise ->
throwM $ CompilerVersionMismatch
msystem
(soptsWantedCompiler sopts, expectedArch)
ghcVariant
(soptsCompilerCheck sopts)
(soptsStackYaml sopts)
(fromMaybe
("Try running \"stack setup\" to install the correct GHC into "
<> T.pack (toFilePath (configLocalPrograms config)))
$ soptsResolveMissingGHC sopts)
-- Install msys2 on windows, if necessary
platform <- asks getPlatform
mmsys2Tool <- case platform of
Platform _ Cabal.Windows | not (soptsSkipMsys sopts) ->
case getInstalledTool installed $(mkPackageName "msys2") (const True) of
Just tool -> return (Just tool)
Nothing
| soptsInstallIfMissing sopts -> do
si <- getSetupInfo'
osKey <- getOSKey platform
VersionedDownloadInfo version info <-
case Map.lookup osKey $ siMsys2 si of
Just x -> return x
Nothing -> error $ "MSYS2 not found for " ++ T.unpack osKey
let tool = Tool (PackageIdentifier $(mkPackageName "msys2") version)
Just <$> downloadAndInstallTool (configLocalPrograms config) si info tool (installMsys2Windows osKey)
| otherwise -> do
$logWarn "Continuing despite missing tool: msys2"
return Nothing
_ -> return Nothing
return $ Just (compilerTool, mmsys2Tool)
else return Nothing
mpaths <- case mtools of
Nothing -> return Nothing
Just (compilerTool, mmsys2Tool) -> do
-- Add GHC's and MSYS's paths to the config.
let idents = catMaybes [Just compilerTool, mmsys2Tool]
paths <- mapM extraDirs idents
return $ Just $ mconcat paths
menv <-
case mpaths of
Nothing -> return menv0
Just ed -> do
config <- asks getConfig
let m = augmentPathMap (edBins ed) (unEnvOverride menv0)
mkEnvOverride (configPlatform config) (removeHaskellEnvVars m)
when (soptsUpgradeCabal sopts) $ do
unless needLocal $ do
$logWarn "Trying to upgrade Cabal library on a GHC not installed by stack."
$logWarn "This may fail, caveat emptor!"
upgradeCabal menv wc
case mtools of
Just (ToolGhcjs cv, _) -> ensureGhcjsBooted menv cv (soptsInstallIfMissing sopts)
_ -> return ()
when (soptsSanityCheck sopts) $ sanityCheck menv wc
return mpaths
-- Ensure Docker container-compatible 'stack' executable is downloaded
ensureDockerStackExe
:: (MonadIO m, MonadMask m, MonadLogger m, MonadReader env m, HasConfig env, HasHttpManager env, MonadBaseControl IO m)
=> Platform -> m (Path Abs File)
ensureDockerStackExe containerPlatform = do
config <- asks getConfig
containerPlatformDir <- runReaderT platformOnlyRelDir containerPlatform
let programsPath = configLocalProgramsBase config </> containerPlatformDir
stackVersion = fromCabalVersion Meta.version
tool = Tool (PackageIdentifier $(mkPackageName "stack") stackVersion)
stackExePath <- (</> $(mkRelFile "stack")) <$> installDir programsPath tool
stackExeExists <- fileExists stackExePath
unless stackExeExists $
do
$logInfo $ mconcat ["Downloading Docker-compatible ", T.pack stackProgName, " executable"]
si <- getSetupInfo defaultStackSetupYaml =<< asks getHttpManager
osKey <- getOSKey containerPlatform
info <-
case Map.lookup osKey (siStack si) of
Just versions ->
case Map.lookup stackVersion versions of
Just x -> return x
Nothing -> throwM (DockerStackExeNotFound stackVersion osKey)
Nothing -> throwM (DockerStackExeNotFound stackVersion osKey)
_ <-
downloadAndInstallTool
programsPath
si
info
tool
installDockerStackExe
return ()
return stackExePath
-- | Install the newest version of Cabal globally
upgradeCabal :: (MonadIO m, MonadLogger m, MonadReader env m, HasHttpManager env, HasConfig env, MonadBaseControl IO m, MonadMask m)
=> EnvOverride
-> WhichCompiler
-> m ()
upgradeCabal menv wc = do
let name = $(mkPackageName "Cabal")
rmap <- resolvePackages menv Set.empty (Set.singleton name)
newest <-
case Map.keys rmap of
[] -> error "No Cabal library found in index, cannot upgrade"
[PackageIdentifier name' version]
| name == name' -> return version
x -> error $ "Unexpected results for resolvePackages: " ++ show x
installed <- getCabalPkgVer menv wc
if installed >= newest
then $logInfo $ T.concat
[ "Currently installed Cabal is "
, T.pack $ versionString installed
, ", newest is "
, T.pack $ versionString newest
, ". I'm not upgrading Cabal."
]
else withCanonicalizedSystemTempDirectory "stack-cabal-upgrade" $ \tmpdir -> do
$logInfo $ T.concat
[ "Installing Cabal-"
, T.pack $ versionString newest
, " to replace "
, T.pack $ versionString installed
]
let ident = PackageIdentifier name newest
m <- unpackPackageIdents menv tmpdir Nothing (Set.singleton ident)
compilerPath <- join $ findExecutable menv (compilerExeName wc)
newestDir <- parseRelDir $ versionString newest
let installRoot = toFilePath $ parent (parent compilerPath)
</> $(mkRelDir "new-cabal")
</> newestDir
dir <-
case Map.lookup ident m of
Nothing -> error "upgradeCabal: Invariant violated, dir missing"
Just dir -> return dir
runIn dir (compilerExeName wc) menv ["Setup.hs"] Nothing
platform <- asks getPlatform
let setupExe = toFilePath $ dir </>
(case platform of
Platform _ Cabal.Windows -> $(mkRelFile "Setup.exe")
_ -> $(mkRelFile "Setup"))
dirArgument name' = concat
[ "--"
, name'
, "dir="
, installRoot FP.</> name'
]
runIn dir setupExe menv
( "configure"
: map dirArgument (words "lib bin data doc")
)
Nothing
runIn dir setupExe menv ["build"] Nothing
runIn dir setupExe menv ["install"] Nothing
$logInfo "New Cabal library installed"
-- | Get the version of the system compiler, if available
getSystemCompiler :: (MonadIO m, MonadLogger m, MonadBaseControl IO m, MonadCatch m) => EnvOverride -> WhichCompiler -> m (Maybe (CompilerVersion, Arch))
getSystemCompiler menv wc = do
let exeName = case wc of
Ghc -> "ghc"
Ghcjs -> "ghcjs"
exists <- doesExecutableExist menv exeName
if exists
then do
eres <- tryProcessStdout Nothing menv exeName ["--info"]
let minfo = do
Right bs <- Just eres
pairs <- readMay $ S8.unpack bs :: Maybe [(String, String)]
version <- lookup "Project version" pairs >>= parseVersionFromString
arch <- lookup "Target platform" pairs >>= simpleParse . takeWhile (/= '-')
return (version, arch)
case (wc, minfo) of
(Ghc, Just (version, arch)) -> return (Just (GhcVersion version, arch))
(Ghcjs, Just (_, arch)) -> do
eversion <- tryAny $ getCompilerVersion menv Ghcjs
case eversion of
Left _ -> return Nothing
Right version -> return (Just (version, arch))
(_, Nothing) -> return Nothing
else return Nothing
-- | Download the most recent SetupInfo
getSetupInfo
:: (MonadIO m, MonadThrow m, MonadLogger m, MonadReader env m, HasConfig env)
=> String -> Manager -> m SetupInfo
getSetupInfo stackSetupYaml manager = do
config <- asks getConfig
setupInfos <-
mapM
loadSetupInfo
(SetupInfoFileOrURL stackSetupYaml :
configSetupInfoLocations config)
return (mconcat setupInfos)
where
loadSetupInfo (SetupInfoInline si) = return si
loadSetupInfo (SetupInfoFileOrURL urlOrFile) = do
bs <-
case parseUrl urlOrFile of
Just req -> do
bss <-
liftIO $
flip runReaderT manager $
withResponse req $
\res ->
responseBody res $$ CL.consume
return $ S8.concat bss
Nothing -> liftIO $ S.readFile urlOrFile
(si,warnings) <- either throwM return (Yaml.decodeEither' bs)
when (urlOrFile /= defaultStackSetupYaml) $
logJSONWarnings urlOrFile warnings
return si
getInstalledTool :: [Tool] -- ^ already installed
-> PackageName -- ^ package to find
-> (Version -> Bool) -- ^ which versions are acceptable
-> Maybe Tool
getInstalledTool installed name goodVersion =
if null available
then Nothing
else Just $ Tool $ maximumBy (comparing packageIdentifierVersion) available
where
available = mapMaybe goodPackage installed
goodPackage (Tool pi') =
if packageIdentifierName pi' == name &&
goodVersion (packageIdentifierVersion pi')
then Just pi'
else Nothing
goodPackage _ = Nothing
getInstalledGhcjs :: [Tool]
-> (CompilerVersion -> Bool)
-> Maybe Tool
getInstalledGhcjs installed goodVersion =
if null available
then Nothing
else Just $ ToolGhcjs $ maximum available
where
available = mapMaybe goodPackage installed
goodPackage (ToolGhcjs cv) = if goodVersion cv then Just cv else Nothing
goodPackage _ = Nothing
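-- | Download a tool archive and run the given installer into the tool's
-- directory under the programs dir, marking the tool installed afterwards
-- (and clearing any stale "installed" marker first).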
downloadAndInstallTool :: (MonadIO m, MonadMask m, MonadLogger m, MonadReader env m, HasConfig env, HasHttpManager env, MonadBaseControl IO m)
=> Path Abs Dir
-> SetupInfo
-> DownloadInfo
-> Tool
-> (SetupInfo -> Path Abs File -> ArchiveType -> Path Abs Dir -> m ())
-> m Tool
downloadAndInstallTool programsDir si downloadInfo tool installer = do
(file, at) <- downloadFromInfo programsDir downloadInfo tool
dir <- installDir programsDir tool
unmarkInstalled programsDir tool
installer si file at dir
markInstalled programsDir tool
return tool
downloadAndInstallCompiler :: (MonadIO m, MonadMask m, MonadLogger m, MonadReader env m, HasConfig env, HasGHCVariant env, HasHttpManager env, HasTerminal env, HasReExec env, HasLogLevel env, MonadBaseControl IO m)
=> SetupInfo
-> CompilerVersion
-> VersionCheck
-> Maybe String
-> m Tool
downloadAndInstallCompiler si wanted@(GhcVersion{}) versionCheck mbindistURL = do
ghcVariant <- asks getGHCVariant
(selectedVersion, downloadInfo) <- case mbindistURL of
Just bindistURL -> do
case ghcVariant of
GHCCustom _ -> return ()
_ -> throwM RequireCustomGHCVariant
case wanted of
GhcVersion version ->
return (version, DownloadInfo (T.pack bindistURL) Nothing Nothing)
_ ->
throwM WantedMustBeGHC
_ -> do
ghcKey <- getGhcKey
case Map.lookup ghcKey $ siGHCs si of
Nothing -> throwM $ UnknownOSKey ghcKey
Just pairs -> getWantedCompilerInfo ghcKey versionCheck wanted GhcVersion pairs
config <- asks getConfig
let installer =
case configPlatform config of
Platform _ Cabal.Windows -> installGHCWindows selectedVersion
_ -> installGHCPosix selectedVersion
$logInfo $
"Preparing to install GHC" <>
(case ghcVariant of
GHCStandard -> ""
v -> " (" <> T.pack (ghcVariantName v) <> ")") <>
" to an isolated location."
$logInfo "This will not interfere with any system-level installation."
ghcPkgName <- parsePackageNameFromString ("ghc" ++ ghcVariantSuffix ghcVariant)
let tool = Tool $ PackageIdentifier ghcPkgName selectedVersion
downloadAndInstallTool (configLocalPrograms config) si downloadInfo tool installer
downloadAndInstallCompiler si wanted versionCheck _mbindistUrl = do
config <- asks getConfig
ghcVariant <- asks getGHCVariant
case ghcVariant of
GHCStandard -> return ()
_ -> throwM GHCJSRequiresStandardVariant
(selectedVersion, downloadInfo) <- case Map.lookup "source" $ siGHCJSs si of
Nothing -> throwM $ UnknownOSKey "source"
Just pairs -> getWantedCompilerInfo "source" versionCheck wanted id pairs
$logInfo "Preparing to install GHCJS to an isolated location."
$logInfo "This will not interfere with any system-level installation."
let tool = ToolGhcjs selectedVersion
installer = installGHCJS $ case selectedVersion of
GhcjsVersion version _ -> version
_ -> error "Invariant violated: expected ghcjs version in downloadAndInstallCompiler."
downloadAndInstallTool (configLocalPrograms config) si downloadInfo tool installer
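-- | Pick the newest available entry whose version satisfies the wanted
-- compiler under the given version check, or throw 'UnknownCompilerVersion'.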
getWantedCompilerInfo :: (Ord k, MonadThrow m)
=> Text
-> VersionCheck
-> CompilerVersion
-> (k -> CompilerVersion)
-> Map k a
-> m (k, a)
getWantedCompilerInfo key versionCheck wanted toCV pairs =
case mpair of
Just pair -> return pair
Nothing -> throwM $ UnknownCompilerVersion key wanted (map toCV (Map.keys pairs))
where
mpair =
listToMaybe $
sortBy (flip (comparing fst)) $
filter (isWantedCompiler versionCheck wanted . toCV . fst) (Map.toList pairs)
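-- | Key used to look up GHC bindists in the setup info: the platform's OS key
-- plus the GHC variant suffix (if any).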
getGhcKey :: (MonadReader env m, MonadThrow m, HasPlatform env, HasGHCVariant env, MonadLogger m, MonadIO m, MonadCatch m, MonadBaseControl IO m)
=> m Text
getGhcKey = do
ghcVariant <- asks getGHCVariant
platform <- asks getPlatform
osKey <- getOSKey platform
return $ osKey <> T.pack (ghcVariantSuffix ghcVariant)
getOSKey :: (MonadReader env m, MonadThrow m, HasPlatform env, MonadLogger m, MonadIO m, MonadCatch m, MonadBaseControl IO m)
=> Platform -> m Text
getOSKey platform =
case platform of
Platform I386 Cabal.Linux -> return "linux32"
Platform X86_64 Cabal.Linux -> return "linux64"
Platform I386 Cabal.OSX -> return "macosx"
Platform X86_64 Cabal.OSX -> return "macosx"
Platform I386 Cabal.FreeBSD -> return "freebsd32"
Platform X86_64 Cabal.FreeBSD -> return "freebsd64"
Platform I386 Cabal.OpenBSD -> return "openbsd32"
Platform X86_64 Cabal.OpenBSD -> return "openbsd64"
Platform I386 Cabal.Windows -> return "windows32"
Platform X86_64 Cabal.Windows -> return "windows64"
Platform arch os -> throwM $ UnsupportedSetupCombo os arch
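-- | Download a tool's archive into the programs directory, deriving the
-- archive type from the URL's file extension.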
downloadFromInfo
:: (MonadIO m, MonadMask m, MonadLogger m, MonadReader env m, HasConfig env, HasHttpManager env, MonadBaseControl IO m)
=> Path Abs Dir -> DownloadInfo -> Tool -> m (Path Abs File, ArchiveType)
downloadFromInfo programsDir downloadInfo tool = do
at <-
case extension of
".tar.xz" -> return TarXz
".tar.bz2" -> return TarBz2
".tar.gz" -> return TarGz
".7z.exe" -> return SevenZ
_ -> error $ "Unknown extension for url: " ++ T.unpack url
relfile <- parseRelFile $ toolString tool ++ extension
let path = programsDir </> relfile
chattyDownload (T.pack (toolString tool)) downloadInfo path
return (path, at)
where
url = downloadInfoUrl downloadInfo
extension =
loop $ T.unpack url
where
loop fp
| ext `elem` [".tar", ".bz2", ".xz", ".exe", ".7z", ".gz"] = loop fp' ++ ext
| otherwise = ""
where
(fp', ext) = FP.splitExtension fp
data ArchiveType
= TarBz2
| TarXz
| TarGz
| SevenZ
installGHCPosix :: (MonadIO m, MonadMask m, MonadLogger m, MonadReader env m, HasConfig env, HasHttpManager env, MonadBaseControl IO m)
=> Version
-> SetupInfo
-> Path Abs File
-> ArchiveType
-> Path Abs Dir
-> m ()
installGHCPosix version _ archiveFile archiveType destDir = do
platform <- asks getPlatform
menv0 <- getMinimalEnvOverride
menv <- mkEnvOverride platform (removeHaskellEnvVars (unEnvOverride menv0))
$logDebug $ "menv = " <> T.pack (show (unEnvOverride menv))
zipTool' <-
case archiveType of
TarXz -> return "xz"
TarBz2 -> return "bzip2"
TarGz -> return "gzip"
SevenZ -> error "Don't know how to deal with .7z files on non-Windows"
(zipTool, makeTool, tarTool) <- checkDependencies $ (,,)
<$> checkDependency zipTool'
<*> (checkDependency "gmake" <|> checkDependency "make")
<*> checkDependency "tar"
$logDebug $ "ziptool: " <> T.pack zipTool
$logDebug $ "make: " <> T.pack makeTool
$logDebug $ "tar: " <> T.pack tarTool
withCanonicalizedSystemTempDirectory "stack-setup" $ \root -> do
dir <-
liftM (root Path.</>) $
parseRelDir $
"ghc-" ++ versionString version
$logSticky $ T.concat ["Unpacking GHC into ", (T.pack . toFilePath $ root), " ..."]
$logDebug $ "Unpacking " <> T.pack (toFilePath archiveFile)
readInNull root tarTool menv ["xf", toFilePath archiveFile] Nothing
$logSticky "Configuring GHC ..."
readInNull dir (toFilePath $ dir Path.</> $(mkRelFile "configure"))
menv ["--prefix=" ++ toFilePath destDir] Nothing
$logSticky "Installing GHC ..."
readInNull dir makeTool menv ["install"] Nothing
$logStickyDone $ "Installed GHC."
$logDebug $ "GHC installed to " <> T.pack (toFilePath destDir)
installGHCJS :: (MonadIO m, MonadMask m, MonadLogger m, MonadReader env m, HasConfig env, HasHttpManager env, HasTerminal env, HasReExec env, HasLogLevel env, MonadBaseControl IO m)
=> Version
-> SetupInfo
-> Path Abs File
-> ArchiveType
-> Path Abs Dir
-> m ()
installGHCJS version si archiveFile archiveType destDir = do
platform <- asks getPlatform
menv0 <- getMinimalEnvOverride
-- This ensures that locking is disabled for the invocations of
-- stack below.
let removeLockVar = Map.delete "STACK_LOCK"
menv <- mkEnvOverride platform (removeLockVar (removeHaskellEnvVars (unEnvOverride menv0)))
$logDebug $ "menv = " <> T.pack (show (unEnvOverride menv))
-- NOTE: this is a bit of a hack - instead of using a temp
-- directory, leave the unpacked source tarball in the destination
-- directory. This way, the absolute paths in the wrapper scripts
-- will point to executables that exist in
-- src/.stack-work/install/... - see
-- https://github.com/commercialhaskell/stack/issues/1016
--
-- This is also used by 'ensureGhcjsBooted', because it can use the
-- environment of the stack.yaml which came with ghcjs, in order to
-- install cabal-install. This lets us also fix the version of
-- cabal-install used.
let unpackDir = destDir Path.</> $(mkRelDir "src")
tarComponent <- parseRelDir ("ghcjs-" ++ versionString version)
runUnpack <- case platform of
Platform _ Cabal.Windows -> return $
withUnpackedTarball7z "GHCJS" si archiveFile archiveType tarComponent unpackDir
_ -> do
zipTool' <-
case archiveType of
TarXz -> return "xz"
TarBz2 -> return "bzip2"
TarGz -> return "gzip"
SevenZ -> error "Don't know how to deal with .7z files on non-Windows"
(zipTool, tarTool) <- checkDependencies $ (,)
<$> checkDependency zipTool'
<*> checkDependency "tar"
$logDebug $ "ziptool: " <> T.pack zipTool
$logDebug $ "tar: " <> T.pack tarTool
return $ do
removeTreeIfExists unpackDir
readInNull destDir tarTool menv ["xf", toFilePath archiveFile] Nothing
renameDir (destDir Path.</> tarComponent) unpackDir
$logSticky $ T.concat ["Unpacking GHCJS into ", (T.pack . toFilePath $ unpackDir), " ..."]
$logDebug $ "Unpacking " <> T.pack (toFilePath archiveFile)
runUnpack
$logSticky "Setting up GHCJS build environment"
let stackYaml = unpackDir </> $(mkRelFile "stack.yaml")
destBinDir = destDir Path.</> $(mkRelDir "bin")
createTree destBinDir
envConfig <- loadGhcjsEnvConfig stackYaml destBinDir
-- On windows we need to copy options files out of the install dir. Argh!
-- This is done before the build, so that if it fails, things fail
-- earlier.
mwindowsInstallDir <- case platform of
Platform _ Cabal.Windows ->
liftM Just $ runInnerStackT envConfig installationRootLocal
_ -> return Nothing
$logSticky "Installing GHCJS (this will take a long time) ..."
runInnerStackT envConfig $
build (\_ -> return ()) Nothing defaultBuildOpts { boptsInstallExes = True }
-- Copy over *.options files needed on windows.
forM_ mwindowsInstallDir $ \dir -> do
(_, files) <- listDirectory (dir </> $(mkRelDir "bin"))
forM_ (filter ((".options" `isSuffixOf`). toFilePath) files) $ \optionsFile -> do
let dest = destDir </> $(mkRelDir "bin") </> filename optionsFile
removeFileIfExists dest
copyFile optionsFile dest
$logStickyDone "Installed GHCJS."
-- Install the downloaded stack binary distribution
installDockerStackExe
:: (MonadIO m, MonadMask m, MonadLogger m, MonadReader env m, HasConfig env, HasHttpManager env, MonadBaseControl IO m)
=> SetupInfo
-> Path Abs File
-> ArchiveType
-> Path Abs Dir
-> m ()
installDockerStackExe _ archiveFile _ destDir = do
(_,tarTool) <-
checkDependencies $
(,) <$> checkDependency "gzip" <*> checkDependency "tar"
menv <- getMinimalEnvOverride
createTree destDir
readInNull
destDir
tarTool
menv
["xf", toFilePath archiveFile, "--strip-components", "1"]
Nothing
ensureGhcjsBooted :: (MonadIO m, MonadBaseControl IO m, MonadLogger m, MonadCatch m, HasConfig env, HasHttpManager env, HasTerminal env, HasReExec env, HasLogLevel env, MonadReader env m)
=> EnvOverride -> CompilerVersion -> Bool -> m ()
ensureGhcjsBooted menv cv shouldBoot = do
eres <- try $ sinkProcessStdout Nothing menv "ghcjs" [] (return ())
case eres of
Right () -> return ()
Left (ReadProcessException _ _ _ err) | "no input files" `S.isInfixOf` LBS.toStrict err ->
return ()
Left (ReadProcessException _ _ _ err) | "ghcjs_boot.completed" `S.isInfixOf` LBS.toStrict err ->
if not shouldBoot then throwM GHCJSNotBooted else do
config <- asks getConfig
destDir <- installDir (configLocalPrograms config) (ToolGhcjs cv)
let stackYaml = destDir </> $(mkRelFile "src/stack.yaml")
-- TODO: Remove 'actualStackYaml' and just use
-- 'stackYaml' for a version after 0.1.6. It's for
-- compatibility with the directories setup used for
-- most of the life of the development branch between
-- 0.1.5 and 0.1.6. See
-- https://github.com/commercialhaskell/stack/issues/749#issuecomment-147382783
-- This only affects the case where GHCJS has been
-- installed with an older version and not yet booted.
stackYamlExists <- fileExists stackYaml
actualStackYaml <- if stackYamlExists then return stackYaml
else case cv of
GhcjsVersion version _ ->
liftM ((destDir Path.</> $(mkRelDir "src")) Path.</>) $
parseRelFile $ "ghcjs-" ++ versionString version ++ "/stack.yaml"
_ -> fail "ensureGhcjsBooted invoked on non GhcjsVersion"
actualStackYamlExists <- fileExists actualStackYaml
unless actualStackYamlExists $
fail "Couldn't find GHCJS stack.yaml in old or new location."
bootGhcjs actualStackYaml destDir
Left err -> throwM err
bootGhcjs :: (MonadIO m, MonadBaseControl IO m, MonadLogger m, MonadCatch m, HasHttpManager env, HasTerminal env, HasReExec env, HasLogLevel env, MonadReader env m)
=> Path Abs File -> Path Abs Dir -> m ()
bootGhcjs stackYaml destDir = do
envConfig <- loadGhcjsEnvConfig stackYaml (destDir </> $(mkRelDir "bin"))
menv <- liftIO $ configEnvOverride (getConfig envConfig) defaultEnvSettings
-- Install cabal-install if missing, or if the installed one is old.
mcabal <- getCabalInstallVersion menv
shouldInstallCabal <- case mcabal of
Nothing -> do
$logInfo "No cabal-install binary found for use with GHCJS. Installing a local copy of cabal-install from source."
return True
Just v
| v < $(mkVersion "1.22.4") -> do
$logInfo $
"cabal-install found on PATH is too old to be used for booting GHCJS (version " <>
versionText v <>
"). Installing a local copy of cabal-install from source."
return True
| otherwise -> return False
when shouldInstallCabal $ do
$logSticky "Building cabal-install for use by ghcjs-boot ... "
runInnerStackT envConfig $
build (\_ -> return ())
Nothing
defaultBuildOpts { boptsTargets = ["cabal-install"] }
$logSticky "Booting GHCJS (this will take a long time) ..."
let envSettings = defaultEnvSettings { esIncludeGhcPackagePath = False }
menv' <- liftIO $ configEnvOverride (getConfig envConfig) envSettings
runAndLog Nothing "ghcjs-boot" menv' ["--clean"]
$logStickyDone "GHCJS booted."
-- TODO: something similar is done in Stack.Build.Execute. Create some utilities
-- for this?
runAndLog :: (MonadIO m, MonadBaseControl IO m, MonadLogger m)
=> Maybe (Path Abs Dir) -> String -> EnvOverride -> [String] -> m ()
runAndLog mdir name menv args = liftBaseWith $ \restore -> do
let logLines = CB.lines =$ CL.mapM_ (void . restore . monadLoggerLog $(TH.location >>= liftLoc) "" LevelInfo . toLogStr)
void $ restore $ sinkProcessStderrStdout mdir menv name args logLines logLines
loadGhcjsEnvConfig :: (MonadIO m, HasHttpManager r, MonadReader r m, HasTerminal r, HasReExec r, HasLogLevel r)
=> Path Abs File -> Path b t -> m EnvConfig
loadGhcjsEnvConfig stackYaml binPath = runInnerStackLoggingT $ do
lc <- loadConfig
(mempty
{ configMonoidInstallGHC = Just True
, configMonoidLocalBinPath = Just (toFilePath binPath)
})
(Just stackYaml)
bconfig <- lcLoadBuildConfig lc Nothing Nothing
runInnerStackT bconfig $ setupEnv Nothing
getCabalInstallVersion :: (MonadIO m, MonadBaseControl IO m, MonadLogger m, MonadCatch m)
=> EnvOverride -> m (Maybe Version)
getCabalInstallVersion menv = do
ebs <- tryProcessStdout Nothing menv "cabal" ["--numeric-version"]
case ebs of
Left _ -> return Nothing
Right bs -> Just <$> parseVersion (T.encodeUtf8 (T.dropWhileEnd isSpace (T.decodeUtf8 bs)))
-- | Check if given processes appear to be present, throwing an exception if
-- missing.
checkDependencies :: (MonadIO m, MonadThrow m, MonadReader env m, HasConfig env)
=> CheckDependency a -> m a
checkDependencies (CheckDependency f) = do
menv <- getMinimalEnvOverride
liftIO (f menv) >>= either (throwM . MissingDependencies) return
checkDependency :: String -> CheckDependency String
checkDependency tool = CheckDependency $ \menv -> do
exists <- doesExecutableExist menv tool
return $ if exists then Right tool else Left [tool]
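-- | A probe for an external executable. The 'Applicative' instance
-- accumulates the names of every missing dependency; the 'Alternative'
-- instance falls back to the second probe when the first fails.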
newtype CheckDependency a = CheckDependency (EnvOverride -> IO (Either [String] a))
deriving Functor
instance Applicative CheckDependency where
pure x = CheckDependency $ \_ -> return (Right x)
CheckDependency f <*> CheckDependency x = CheckDependency $ \menv -> do
f' <- f menv
x' <- x menv
return $
case (f', x') of
(Left e1, Left e2) -> Left $ e1 ++ e2
(Left e, Right _) -> Left e
(Right _, Left e) -> Left e
(Right f'', Right x'') -> Right $ f'' x''
instance Alternative CheckDependency where
empty = CheckDependency $ \_ -> return $ Left []
CheckDependency x <|> CheckDependency y = CheckDependency $ \menv -> do
res1 <- x menv
case res1 of
Left _ -> y menv
Right x' -> return $ Right x'
installGHCWindows :: (MonadIO m, MonadMask m, MonadLogger m, MonadReader env m, HasConfig env, HasHttpManager env, MonadBaseControl IO m)
=> Version
-> SetupInfo
-> Path Abs File
-> ArchiveType
-> Path Abs Dir
-> m ()
installGHCWindows version si archiveFile archiveType destDir = do
tarComponent <- parseRelDir $ "ghc-" ++ versionString version
withUnpackedTarball7z "GHC" si archiveFile archiveType tarComponent destDir
$logInfo $ "GHC installed to " <> T.pack (toFilePath destDir)
installMsys2Windows :: (MonadIO m, MonadMask m, MonadLogger m, MonadReader env m, HasConfig env, HasHttpManager env, MonadBaseControl IO m)
=> Text -- ^ OS Key
-> SetupInfo
-> Path Abs File
-> ArchiveType
-> Path Abs Dir
-> m ()
installMsys2Windows osKey si archiveFile archiveType destDir = do
exists <- liftIO $ D.doesDirectoryExist $ toFilePath destDir
when exists $ liftIO (D.removeDirectoryRecursive $ toFilePath destDir) `catchIO` \e -> do
$logError $ T.pack $
"Could not delete existing msys directory: " ++
toFilePath destDir
throwM e
msys <- parseRelDir $ "msys" ++ T.unpack (fromMaybe "32" $ T.stripPrefix "windows" osKey)
withUnpackedTarball7z "MSYS2" si archiveFile archiveType msys destDir
platform <- asks getPlatform
menv0 <- getMinimalEnvOverride
let oldEnv = unEnvOverride menv0
newEnv = augmentPathMap
[toFilePath $ destDir </> $(mkRelDir "usr") </> $(mkRelDir "bin")]
oldEnv
menv <- mkEnvOverride platform newEnv
-- I couldn't find this officially documented anywhere, but you need to run
-- the shell once in order to initialize some pacman stuff. Once that run
-- happens, you can just run commands as usual.
runIn destDir "sh" menv ["--login", "-c", "true"] Nothing
-- No longer installing git, it's unreliable
-- (https://github.com/commercialhaskell/stack/issues/1046) and the
-- MSYS2-installed version has bad CRLF defaults.
--
-- Install git. We could install other useful things in the future too.
-- runIn destDir "pacman" menv ["-Sy", "--noconfirm", "git"] Nothing
-- | Unpack a compressed tarball using 7zip. Expects a single directory in
-- the unpacked results, which is renamed to the destination directory.
withUnpackedTarball7z :: (MonadIO m, MonadMask m, MonadLogger m, MonadReader env m, HasConfig env, HasHttpManager env, MonadBaseControl IO m)
=> String -- ^ Name of tool, used in error messages
-> SetupInfo
-> Path Abs File -- ^ Path to archive file
-> ArchiveType
-> Path Rel Dir -- ^ Name of directory expected to be in archive.
-> Path Abs Dir -- ^ Destination directory.
-> m ()
withUnpackedTarball7z name si archiveFile archiveType srcDir destDir = do
suffix <-
case archiveType of
TarXz -> return ".xz"
TarBz2 -> return ".bz2"
TarGz -> return ".gz"
_ -> error $ name ++ " must be a tarball file"
tarFile <-
case T.stripSuffix suffix $ T.pack $ toFilePath archiveFile of
Nothing -> error $ "Invalid " ++ name ++ " filename: " ++ show archiveFile
Just x -> parseAbsFile $ T.unpack x
run7z <- setup7z si
let tmpName = (toFilePathNoTrailingSep $ dirname destDir) ++ "-tmp"
createTree (parent destDir)
withCanonicalizedTempDirectory (toFilePath $ parent destDir) tmpName $ \tmpDir -> do
let absSrcDir = tmpDir </> srcDir
removeTreeIfExists destDir
run7z (parent archiveFile) archiveFile
run7z tmpDir tarFile
removeFile tarFile `catchIO` \e ->
$logWarn (T.concat
[ "Exception when removing "
, T.pack $ toFilePath tarFile
, ": "
, T.pack $ show e
])
renameDir absSrcDir destDir
-- | Download 7z as necessary, and get a function for unpacking things.
--
-- Returned function takes an unpack directory and archive.
setup7z :: (MonadReader env m, HasHttpManager env, HasConfig env, MonadThrow m, MonadIO m, MonadIO n, MonadLogger m, MonadBaseControl IO m)
=> SetupInfo
-> m (Path Abs Dir -> Path Abs File -> n ())
setup7z si = do
dir <- asks $ configLocalPrograms . getConfig
let exe = dir </> $(mkRelFile "7z.exe")
dll = dir </> $(mkRelFile "7z.dll")
case (siSevenzDll si, siSevenzExe si) of
(Just sevenzDll, Just sevenzExe) -> do
chattyDownload "7z.dll" sevenzDll dll
chattyDownload "7z.exe" sevenzExe exe
return $ \outdir archive -> liftIO $ do
ec <- rawSystem (toFilePath exe)
[ "x"
, "-o" ++ toFilePath outdir
, "-y"
, toFilePath archive
]
when (ec /= ExitSuccess)
$ throwM (ProblemWhileDecompressing archive)
_ -> throwM SetupInfoMissingSevenz
chattyDownload :: (MonadReader env m, HasHttpManager env, MonadIO m, MonadLogger m, MonadThrow m, MonadBaseControl IO m)
=> Text -- ^ label
-> DownloadInfo -- ^ URL, content-length, and sha1
-> Path Abs File -- ^ destination
-> m ()
chattyDownload label downloadInfo path = do
let url = downloadInfoUrl downloadInfo
req <- parseUrl $ T.unpack url
$logSticky $ T.concat
[ "Preparing to download "
, label
, " ..."
]
$logDebug $ T.concat
[ "Downloading from "
, url
, " to "
, T.pack $ toFilePath path
, " ..."
]
hashChecks <- case downloadInfoSha1 downloadInfo of
Just sha1ByteString -> do
let sha1 = CheckHexDigestByteString sha1ByteString
$logDebug $ T.concat
[ "Will check against sha1 hash: "
, T.decodeUtf8With T.lenientDecode sha1ByteString
]
return [HashCheck SHA1 sha1]
Nothing -> do
$logWarn $ T.concat
[ "No sha1 found in metadata,"
, " download hash won't be checked."
]
return []
let dReq = DownloadRequest
{ drRequest = req
, drHashChecks = hashChecks
, drLengthCheck = mtotalSize
, drRetryPolicy = drRetryPolicyDefault
}
runInBase <- liftBaseWith $ \run -> return (void . run)
x <- verifiedDownload dReq path (chattyDownloadProgress runInBase)
if x
then $logStickyDone ("Downloaded " <> label <> ".")
else $logStickyDone "Already downloaded."
where
mtotalSize = downloadInfoContentLength downloadInfo
chattyDownloadProgress runInBase _ = do
_ <- liftIO $ runInBase $ $logSticky $
label <> ": download has begun"
CL.map (Sum . S.length)
=$ chunksOverTime 1
=$ go
where
go = evalStateC 0 $ awaitForever $ \(Sum size) -> do
modify (+ size)
totalSoFar <- get
liftIO $ runInBase $ $logSticky $ T.pack $
case mtotalSize of
Nothing -> chattyProgressNoTotal totalSoFar
Just 0 -> chattyProgressNoTotal totalSoFar
Just totalSize -> chattyProgressWithTotal totalSoFar totalSize
-- Example: ghc: 42.13 KiB downloaded...
chattyProgressNoTotal totalSoFar =
printf ("%s: " <> bytesfmt "%7.2f" totalSoFar <> " downloaded...")
(T.unpack label)
-- Example: ghc: 50.00 MiB / 100.00 MiB (50.00%) downloaded...
chattyProgressWithTotal totalSoFar total =
printf ("%s: " <>
bytesfmt "%7.2f" totalSoFar <> " / " <>
bytesfmt "%.2f" total <>
" (%6.2f%%) downloaded...")
(T.unpack label)
percentage
where percentage :: Double
percentage = fromIntegral totalSoFar / fromIntegral total * 100
-- | Given a printf format string for the decimal part and a number of
-- bytes, formats the bytes using an appropriate unit and returns the
-- formatted string.
--
-- >>> bytesfmt "%.2f" 512368
-- "500.36 KiB"
bytesfmt :: Integral a => String -> a -> String
bytesfmt formatter bs = printf (formatter <> " %s")
(fromIntegral (signum bs) * dec :: Double)
(bytesSuffixes !! i)
where
(dec,i) = getSuffix (abs bs)
getSuffix n = until p (\(x,y) -> (x / 1024, y+1)) (fromIntegral n,0)
where p (n',numDivs) = n' < 1024 || numDivs == (length bytesSuffixes - 1)
bytesSuffixes :: [String]
bytesSuffixes = ["B","KiB","MiB","GiB","TiB","PiB","EiB","ZiB","YiB"]
-- Await eagerly (collect with monoidal append),
-- but space out yields by at least the given amount of time.
-- The final yield may come sooner, and may be a superfluous mempty.
-- Note that Integer and Float literals can be turned into NominalDiffTime
-- (these literals are interpreted as "seconds")
chunksOverTime :: (Monoid a, MonadIO m) => NominalDiffTime -> Conduit a m a
chunksOverTime diff = do
currentTime <- liftIO getCurrentTime
evalStateC (currentTime, mempty) go
where
-- State is a tuple of:
-- * the last time a yield happened (or the beginning of the sink)
-- * the accumulated awaits since the last yield
go = await >>= \case
Nothing -> do
(_, acc) <- get
yield acc
Just a -> do
(lastTime, acc) <- get
let acc' = acc <> a
currentTime <- liftIO getCurrentTime
if diff < diffUTCTime currentTime lastTime
then put (currentTime, mempty) >> yield acc'
else put (lastTime, acc')
go
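-- A rough usage sketch (illustrative only; 'progressSink' is a placeholder
-- name): throttle byte-count updates to at most one yield per second.
--
-- > CL.map (Sum . S.length) =$ chunksOverTime 1 =$ progressSink
--
-- See 'chattyDownloadProgress' above for the real consumer in this module.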
-- | Perform a basic sanity check of GHC
sanityCheck :: (MonadIO m, MonadMask m, MonadLogger m, MonadBaseControl IO m)
=> EnvOverride
-> WhichCompiler
-> m ()
sanityCheck menv wc = withCanonicalizedSystemTempDirectory "stack-sanity-check" $ \dir -> do
let fp = toFilePath $ dir </> $(mkRelFile "Main.hs")
liftIO $ writeFile fp $ unlines
[ "import Distribution.Simple" -- ensure Cabal library is present
, "main = putStrLn \"Hello World\""
]
let exeName = compilerExeName wc
ghc <- join $ findExecutable menv exeName
$logDebug $ "Performing a sanity check on: " <> T.pack (toFilePath ghc)
eres <- tryProcessStdout (Just dir) menv exeName
[ fp
, "-no-user-package-db"
]
case eres of
Left e -> throwM $ GHCSanityCheckCompileFailed e ghc
Right _ -> return () -- TODO check that the output of running the command is correct
-- Remove potentially confusing environment variables
removeHaskellEnvVars :: Map Text Text -> Map Text Text
removeHaskellEnvVars =
Map.delete "GHCJS_PACKAGE_PATH" .
Map.delete "GHC_PACKAGE_PATH" .
Map.delete "HASKELL_PACKAGE_SANDBOX" .
Map.delete "HASKELL_PACKAGE_SANDBOXES" .
Map.delete "HASKELL_DIST_DIR"
-- | Get map of environment variables to set to change the locale's encoding to UTF-8
getUtf8LocaleVars
:: forall m env.
(MonadReader env m, HasPlatform env, MonadLogger m, MonadCatch m, MonadBaseControl IO m, MonadIO m)
=> EnvOverride -> m (Map Text Text)
getUtf8LocaleVars menv = do
Platform _ os <- asks getPlatform
if os == Cabal.Windows
then
-- On Windows, locale is controlled by the code page, so we don't set any environment
-- variables.
return
Map.empty
else do
let checkedVars = map checkVar (Map.toList $ eoTextMap menv)
-- List of environment variables that will need to be updated to set UTF-8 (because
-- they currently do not specify UTF-8).
needChangeVars = concatMap fst checkedVars
-- Set of locale-related environment variables that have already have a value.
existingVarNames = Set.unions (map snd checkedVars)
-- True if a locale is already specified by one of the "global" locale variables.
hasAnyExisting =
any (`Set.member` existingVarNames) ["LANG", "LANGUAGE", "LC_ALL"]
if null needChangeVars && hasAnyExisting
then
-- If no variables need changes and at least one "global" variable is set, no
-- changes to environment need to be made.
return
Map.empty
else do
-- Get a list of known locales by running @locale -a@.
elocales <- tryProcessStdout Nothing menv "locale" ["-a"]
let
-- Filter the list to only include locales with UTF-8 encoding.
utf8Locales =
case elocales of
Left _ -> []
Right locales ->
filter
isUtf8Locale
(T.lines $
T.decodeUtf8With
T.lenientDecode
locales)
mfallback = getFallbackLocale utf8Locales
when
(isNothing mfallback)
($logWarn
"Warning: unable to set locale to UTF-8 encoding; GHC may fail with 'invalid character'")
let
-- Get the new values of variables to adjust.
changes =
Map.unions $
map
(adjustedVarValue utf8Locales mfallback)
needChangeVars
-- Get the values of variables to add.
adds
| hasAnyExisting =
-- If we already have a "global" variable, then nothing needs
-- to be added.
Map.empty
| otherwise =
-- If we don't already have a "global" variable, then set LANG to the
-- fallback.
case mfallback of
Nothing -> Map.empty
Just fallback ->
Map.singleton "LANG" fallback
return (Map.union changes adds)
where
-- Determines whether an environment variable is locale-related and, if so, whether it needs to
-- be adjusted.
checkVar
:: (Text, Text) -> ([Text], Set Text)
checkVar (k,v) =
if k `elem` ["LANG", "LANGUAGE"] || "LC_" `T.isPrefixOf` k
then if isUtf8Locale v
then ([], Set.singleton k)
else ([k], Set.singleton k)
else ([], Set.empty)
-- Adjusted value of an existing locale variable. Looks for valid UTF-8 encodings with
-- same language /and/ territory, then with same language, and finally the first UTF-8 locale
-- returned by @locale -a@.
adjustedVarValue
:: [Text] -> Maybe Text -> Text -> Map Text Text
adjustedVarValue utf8Locales mfallback k =
case Map.lookup k (eoTextMap menv) of
Nothing -> Map.empty
Just v ->
case concatMap
(matchingLocales utf8Locales)
[ T.takeWhile (/= '.') v <> "."
, T.takeWhile (/= '_') v <> "_"] of
(v':_) -> Map.singleton k v'
[] ->
case mfallback of
Just fallback -> Map.singleton k fallback
Nothing -> Map.empty
-- Determine the fallback locale, by looking for any UTF-8 locale prefixed with the list in
-- @fallbackPrefixes@, and if not found, picking the first UTF-8 encoding returned by @locale
-- -a@.
getFallbackLocale
:: [Text] -> Maybe Text
getFallbackLocale utf8Locales =
case concatMap (matchingLocales utf8Locales) fallbackPrefixes of
(v:_) -> Just v
[] ->
case utf8Locales of
[] -> Nothing
(v:_) -> Just v
    -- Filter the list of locales for any with the given prefixes (case-insensitive).
matchingLocales
:: [Text] -> Text -> [Text]
matchingLocales utf8Locales prefix =
filter
(\v ->
(T.toLower prefix) `T.isPrefixOf` T.toLower v)
utf8Locales
-- Does the locale have one of the encodings in @utf8Suffixes@ (case-insensitive)?
isUtf8Locale locale =
any (\ v -> T.toLower v `T.isSuffixOf` T.toLower locale) utf8Suffixes
-- Prefixes of fallback locales (case-insensitive)
fallbackPrefixes = ["C.", "en_US.", "en_"]
-- Suffixes of UTF-8 locales (case-insensitive)
utf8Suffixes = [".UTF-8", ".utf8"]
| supermario/stack | src/Stack/Setup.hs | bsd-3-clause | 64,606 | 0 | 31 | 21,435 | 14,408 | 7,124 | 7,284 | 1,207 | 13 |
module ListUtils where
import Graphics.Vty.Widgets.All
import Control.Applicative ((<$>))
import Control.Monad (liftM, void)
import Data.Maybe (fromMaybe)
import Data.List (find)
-- | List helper
-- Replaces an item in a Vty Widget List at a given position. If the index is
-- `Nothing`, the item will be added at the end of the list. Otherwise it's
-- placed at the index.
replaceAtIndex :: Show b => Widget (List a b) -- ^ the list
-> a -- ^ the backing item in the list
-> Widget b -- ^ the widget representing the item in the list.
-> Maybe Int -- ^ the index or nothing, if adding to the end.
-> IO ()
replaceAtIndex l i w Nothing = addToList l i w
replaceAtIndex l i w (Just idx) = do
-- get the last selected index
lastIdx <- getSelected l
-- insert the new item
insertIntoList l i w idx
  -- remove the "old" item (now one position below)
removeFromList l (succ idx)
  -- reselect the last selected item if a selection was given
  case lastIdx of
    Just (p, _) -> setSelected l p
    Nothing     -> return ()
-- Given a List Widget, find the index of a given Item.
indexOfItem :: (a -> Bool) -- ^ compare function for the backing items.
-> Widget (List a b) -- ^ the list
-> IO (Maybe Int) -- ^ returns the index or nothing if not found.
indexOfItem cmp lst = do
-- get the listsize ...
len <- getListSize lst
-- ... to get all items.
itms <- mapM (getListItem lst) [0..len-1]
-- index all "just" items, and find the first item that matches with the `cmp`.
-- this is technically not correct, as we are potentially skipping "Nothing"
  -- and therefore compute the wrong index.
let mtch = liftM snd $ find (cmp . fst . fst) $ zip (catMaybes itms) [0..]
return mtch
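-- Usage sketch (illustrative, not part of the original module; assumes the
-- backing type has an 'Eq' instance and @lst@ is an existing list widget):
--
-- > midx <- indexOfItem (== someItem) lst
-- > replaceAtIndex lst someItem someWidget midx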
-- | Removes an item (idx, item) from a list
dropItem :: Widget (List a b) -- ^ the list
-> Maybe (Int, b1) -- ^ the (idx, item) pair.
-> IO ()
dropItem l e = fromMaybe (return ()) $ (void . removeFromList l . fst) <$> e
-- | Removes the selected item from a list
dropSelected :: Widget (List a b) -- ^ the list
-> IO ()
dropSelected l = getSelected l >>= dropItem l
| angerman/HInvoice | ListUtils.hs | bsd-3-clause | 2,214 | 0 | 15 | 586 | 512 | 265 | 247 | 33 | 1 |
-- | This module allows for the 'promote' function to be imported
-- correctly from different QuickCheck versions. QuickCheck 2.6 used
-- Test.QuickCheck.Gen.promote; QuickCheck 2.7 uses
-- Test.QuickCheck.Gen.Unsafe.promote. For use with old versions of
-- QuickCheck, Cabal will import a different version of this module
-- from the oldQuickCheck directory.
module Barecheck.Promote (promote) where
import Test.QuickCheck.Gen.Unsafe (promote)
| massysett/barecheck | lib/Barecheck/Promote.hs | bsd-3-clause | 449 | 0 | 5 | 61 | 29 | 21 | 8 | 2 | 0 |
import qualified Network.HTTP as HTTP
import Data.List.Split (splitOn)
import Data.List (isInfixOf, sortBy, reverse, foldl')
import Data.Char (toLower)
import qualified Data.Map as Map
import Options.Applicative
import Data.Semigroup ((<>))
data Args = Args
{ fname :: String
, addr :: String
, numwords :: Int } deriving (Eq, Show)
arg :: Parser Args
arg = Args
<$> strOption
( long "filter"
<> short 'f'
<> metavar "FILENAME"
<> help "File containing filter words" )
<*> strOption
( long "address"
<> short 'a'
<> metavar "ADDR"
<> showDefault
<> value "http://hckrnews.com"
<> help "Address of web-page to analyze" )
<*> option auto
( long "words"
<> help "Number of top words to show"
<> short 'w'
<> metavar "INT"
<> showDefault
<> value 100 )
type WordFrequency = [(String, Int)]
getMainPage :: String -> IO String
getMainPage p = do
resp <- HTTP.simpleHTTP (HTTP.getRequest p)
HTTP.getResponseBody resp
getHTMLBody :: String -> String
getHTMLBody resp = unwords . dropWhile (/= "<body>") $ words resp
splitOnBlocks :: String -> [String]
splitOnBlocks = getLinks . getBlocks
where getBlocks s = concatMap (splitOn "</a>") $ splitOn "<a " s
getLinks = filter (isInfixOf "href=")
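-- Illustrative example (added, not from the original source):
-- splitOnBlocks "<a href=\"x\">Foo</a><p>bar</p>" == ["href=\"x\">Foo"],
-- which 'getHeadlines' then reduces to ["Foo"].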
getHeadlines :: [String] -> [String]
getHeadlines = map (cleanHL . getHL)
where getHL = takeWhile (/='<') . tail . dropWhile (/='>')
cleanHL = filter (`notElem` "(){}[]<>,./:;%&\\!?")
getWords :: [String] -> [String]
getWords = map (map toLower) . concatMap words
histogram :: [String] -> Map.Map String Int
histogram = foldl' helper Map.empty
where helper acc w = if Map.member w acc
then Map.update (\ n -> Just (n + 1)) w acc
else Map.insert w 1 acc
-- compare arguments are flipped to sort in descending order
sortHist :: Map.Map String Int -> WordFrequency
sortHist = sortBy (\x1 x2 -> compare (snd x2) (snd x1)) . Map.toList
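-- Illustrative example (added, not from the original source):
-- sortHist (histogram ["a", "b", "a"]) == [("a",2),("b",1)]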
prettyPrint :: WordFrequency -> Int -> IO ()
prettyPrint wfl num = do
putStrLn $ "Most frequent words (top " ++ show num ++ "):"
mapM_ (\(e, n) -> putStrLn $ e ++ ": " ++ show n) $ take num wfl
filterWords :: [String] -> WordFrequency -> WordFrequency
filterWords fw = filter (\(w,_) -> w `notElem` fw)
main :: IO ()
main = do
let opts = info (arg <**> helper) ( fullDesc
<> progDesc "Get most frequent words from Hacker News"
<> header "Parse main page of HackerNews and show the most frequent words" )
args <- execParser opts
fd <- readFile (fname args)
mp <- getMainPage (addr args)
let fw = lines fd
h = filterWords fw . sortHist . histogram . getWords . getHeadlines . splitOnBlocks . getHTMLBody $ mp
prettyPrint h (numwords args)
| dgtony/hckrnewsparser | src/main.hs | bsd-3-clause | 2,878 | 0 | 17 | 769 | 957 | 494 | 463 | 72 | 2 |
module Main where
import System.Environment
import qualified Lib
main :: IO ()
main =
getArgs
>>= (\[x, y] -> Lib.buildTree 0 x y)
>>= print
| koulakis/FiletreeAnalyser | app/Main.hs | bsd-3-clause | 163 | 0 | 10 | 47 | 59 | 34 | 25 | 8 | 1 |
-- Utility for adding the Fibon directory for a new benchmark
module Main (main) where
import Control.Monad
import Data.Char as Char
import Data.List
import Distribution.Package
import Distribution.PackageDescription
import Distribution.PackageDescription.Configuration
import Distribution.PackageDescription.Parse
import Distribution.Verbosity
import System.Directory
import System.Exit
import System.FilePath
import System.IO
main :: IO ()
main = do
cf <- findCabalFile
pkg <- parsePackage cf
createDirectoryStructure
writeInstanceFile pkg
findCabalFile :: IO String
findCabalFile = do
cwd <- getCurrentDirectory
files <- getDirectoryContents cwd
let cabalfiles = filter (".cabal" `isSuffixOf`) files
case cabalfiles of
[f] -> return f
[] -> do
putStrLn "Error: no cabal files found in current directory"
exitFailure
_ -> do
putStrLn "Error: multiple cabal files found in current directory"
exitFailure
parsePackage :: FilePath -> IO PackageDescription
parsePackage cabalFile = do
gps <- (readPackageDescription silent) cabalFile
return $ flattenPackageDescription gps
getName :: PackageDescription -> String
getName pkg = upCase $ toS $ (packageName . package) pkg
where
toS (PackageName p) = p
upCase :: String -> String
upCase [] = []
upCase (x:xs) = Char.toUpper x : xs
createDirectoryStructure :: IO ()
createDirectoryStructure = do
safeCreateDir "Fibon"
safeCreateDir $ "Fibon" </> "data"
safeCreateDir $ "Fibon" </> "data" </> "test"
safeCreateDir $ "Fibon" </> "data" </> "test" </> "input"
safeCreateDir $ "Fibon" </> "data" </> "test" </> "output"
safeCreateDir $ "Fibon" </> "data" </> "ref"
safeCreateDir $ "Fibon" </> "data" </> "ref" </> "input"
safeCreateDir $ "Fibon" </> "data" </> "ref" </> "output"
safeCreateDir :: FilePath -> IO ()
safeCreateDir path = do
exists <- doesDirectoryExist path
unless exists (createDirectory path)
writeInstanceFile :: PackageDescription -> IO ()
writeInstanceFile pkg = do
exists <- doesFileExist outFile
when exists (putStrLn "Error: Instance.hs already exists" >> exitFailure)
cwd <- getCurrentDirectory
let bName = getName pkg
gName = getGrpName cwd
eName = getExeName pkg
h <- openFile outFile WriteMode
hPutStrLn h (template gName bName eName)
hClose h
putStrLn $ "Wrote instance file to "++outFile
where
outFile = "Fibon" </> "Instance.hs"
getGrpName :: FilePath -> String
getGrpName path =
case dirs of
(_:_:_) -> upCase $ (head . drop 1 . reverse) dirs
_ -> "Unknown"
where dirs = splitDirectories path
getExeName :: PackageDescription -> String
getExeName pkg =
case executables pkg of
(e:_) -> exeName e
_ -> "Unknown"
template :: String -> String -> String -> String
template grpName bmName exName = unlines [
"{-# OPTIONS_GHC -fno-warn-missing-signatures #-}",
"module "++modName++"(",
" mkInstance",
")",
"where",
"import Fibon.BenchmarkInstance",
"",
"sharedConfig = BenchmarkInstance {",
" flagConfig = FlagConfig {",
" configureFlags = []",
" , buildFlags = []",
" , runFlags = []",
" }",
" , stdinInput = Nothing",
" , output = [(Stdout, Diff "++(show expectedOut)++")]",
" , expectedExit = ExitSuccess",
" , exeName = "++(show exName),
" }",
"flgCfg = flagConfig sharedConfig",
"",
"mkInstance Test = sharedConfig {",
" flagConfig = flgCfg",
" }",
"mkInstance Train = sharedConfig {",
" flagConfig = flgCfg",
" }",
"mkInstance Ref = sharedConfig {",
" flagConfig = flgCfg",
" }"
]
where
modName = "Fibon.Benchmarks."++grpName++"."++bmName++".Fibon.Instance"
expectedOut = exName ++ ".stdout.expected"
| dmpots/fibon | tools/fibon-init/Main.hs | bsd-3-clause | 3,817 | 0 | 13 | 838 | 989 | 501 | 488 | 113 | 3 |
{-# LANGUAGE PatternGuards #-}
module Idris.Elab.Clause where
import Idris.AbsSyntax
import Idris.ASTUtils
import Idris.DSL
import Idris.Error
import Idris.Delaborate
import Idris.Imports
import Idris.ElabTerm
import Idris.Coverage
import Idris.DataOpts
import Idris.Providers
import Idris.Primitives
import Idris.Inliner
import Idris.PartialEval
import Idris.Transforms
import Idris.DeepSeq
import Idris.Output (iputStrLn, pshow, iWarn, iRenderResult)
import IRTS.Lang
import Idris.Elab.AsPat
import Idris.Elab.Type
import Idris.Elab.Transform
import Idris.Elab.Utils
import Idris.Core.TT
import Idris.Core.Elaborate hiding (Tactic(..))
import Idris.Core.Evaluate
import Idris.Core.Execute
import Idris.Core.Typecheck
import Idris.Core.CaseTree
import Idris.Docstrings hiding (Unchecked)
import Util.Pretty hiding ((<$>))
import Prelude hiding (id, (.))
import Control.Category
import Control.Applicative hiding (Const)
import Control.DeepSeq
import Control.Monad
import Control.Monad.State.Strict as State
import Data.List
import Data.Maybe
import Debug.Trace
import qualified Data.Map as Map
import qualified Data.Set as S
import qualified Data.Text as T
import Data.Char(isLetter, toLower)
import Data.List.Split (splitOn)
import Util.Pretty(pretty, text)
import Numeric
-- | Elaborate a collection of left-hand and right-hand pairs - that is, a
-- top-level definition.
elabClauses :: ElabInfo -> FC -> FnOpts -> Name -> [PClause] -> Idris ()
elabClauses info' fc opts n_in cs =
do let n = liftname info n_in
info = info' { elabFC = Just fc }
ctxt <- getContext
ist <- getIState
optimise <- getOptimise
let petrans = PETransform `elem` optimise
inacc <- map fst <$> fgetState (opt_inaccessible . ist_optimisation n)
-- Check n actually exists, with no definition yet
let tys = lookupTy n ctxt
let reflect = Reflection `elem` opts
checkUndefined n ctxt
unless (length tys > 1) $ do
fty <- case tys of
[] -> -- TODO: turn into a CAF if there's no arguments
-- question: CAFs in where blocks?
tclift $ tfail $ At fc (NoTypeDecl n)
[ty] -> return ty
let atys = map snd (getArgTys fty)
cs_elab <- mapM (elabClause info opts)
(zip [0..] cs)
-- pats_raw is the version we'll work with at compile time:
-- no simplification or PE
let (pats_in, cs_full) = unzip cs_elab
let pats_raw = map (simple_lhs (tt_ctxt ist)) pats_in
logLvl 3 $ "Elaborated patterns:\n" ++ show pats_raw
solveDeferred n
-- just ensure that the structure exists
fmodifyState (ist_optimisation n) id
addIBC (IBCOpt n)
ist <- getIState
-- Don't apply rules if this is a partial evaluation definition,
-- or we'll make something that just runs itself!
let tpats = case specNames opts of
Nothing -> transformPats ist pats_in
_ -> pats_in
-- If the definition is specialisable, this reduces the
-- RHS
pe_tm <- doPartialEval ist tpats
let pats_pe = if petrans
then map (simple_lhs (tt_ctxt ist)) pe_tm
else pats_raw
let tcase = opt_typecase (idris_options ist)
-- Look for 'static' names and generate new specialised
-- definitions for them, as well as generating rewrite rules
-- for partially evaluated definitions
newrules <- if petrans
then mapM (\ e -> case e of
Left _ -> return []
Right (l, r) -> elabPE info fc n r) pats_pe
else return []
-- Redo transforms with the newly generated transformations, so
-- that the specialised application we've just made gets
-- used in place of the general one
ist <- getIState
let pats_transformed = if petrans
then transformPats ist pats_pe
else pats_pe
-- Summary of what's about to happen: Definitions go:
--
-- pats_in -> pats -> pdef -> pdef'
-- addCaseDef builds case trees from <pdef> and <pdef'>
-- pdef is the compile-time pattern definition.
-- This will get further inlined to help with totality checking.
let pdef = map debind pats_raw
-- pdef_pe is the one which will get further optimised
-- for run-time, and, partially evaluated
let pdef_pe = map debind pats_transformed
logLvl 5 $ "Initial typechecked patterns:\n" ++ show pats_raw
logLvl 5 $ "Initial typechecked pattern def:\n" ++ show pdef
-- NOTE: Need to store original definition so that proofs which
-- rely on its structure aren't affected by any changes to the
-- inliner. Just use the inlined version to generate pdef' and to
-- help with later inlinings.
ist <- getIState
let pdef_inl = inlineDef ist pdef
numArgs <- tclift $ sameLength pdef
case specNames opts of
Just _ ->
do logLvl 3 $ "Partially evaluated:\n" ++ show pats_pe
_ -> return ()
logLvl 3 $ "Transformed:\n" ++ show pats_transformed
erInfo <- getErasureInfo <$> getIState
tree@(CaseDef scargs sc _) <- tclift $
simpleCase tcase False reflect CompileTime fc inacc atys pdef erInfo
cov <- coverage
pmissing <-
if cov && not (hasDefault cs)
then do missing <- genClauses fc n (map getLHS pdef) cs_full
-- missing <- genMissing n scargs sc
missing' <- filterM (checkPossible info fc True n) missing
let clhs = map getLHS pdef
logLvl 2 $ "Must be unreachable:\n" ++
showSep "\n" (map showTmImpls missing') ++
"\nAgainst: " ++
showSep "\n" (map (\t -> showTmImpls (delab ist t)) (map getLHS pdef))
-- filter out anything in missing' which is
-- matched by any of clhs. This might happen since
-- unification may force a variable to take a
-- particular form, rather than force a case
-- to be impossible.
return (filter (noMatch ist clhs) missing')
else return []
let pcover = null pmissing
-- pdef' is the version that gets compiled for run-time,
-- so we start from the partially evaluated version
pdef_in' <- applyOpts pdef_pe
let pdef' = map (simple_rt (tt_ctxt ist)) pdef_in'
logLvl 5 $ "After data structure transformations:\n" ++ show pdef'
ist <- getIState
-- let wf = wellFounded ist n sc
let tot = if pcover || AssertTotal `elem` opts
then Unchecked -- finish checking later
else Partial NotCovering -- already know it's not total
-- case lookupCtxt (namespace info) n (idris_flags ist) of
-- [fs] -> if TotalFn `elem` fs
-- then case tot of
-- Total _ -> return ()
-- t -> tclift $ tfail (At fc (Msg (show n ++ " is " ++ show t)))
-- _ -> return ()
case tree of
CaseDef _ _ [] -> return ()
CaseDef _ _ xs -> mapM_ (\x ->
iputStrLn $ show fc ++
":warning - Unreachable case: " ++
show (delab ist x)) xs
let knowncovering = (pcover && cov) || AssertTotal `elem` opts
tree' <- tclift $ simpleCase tcase knowncovering reflect
RunTime fc inacc atys pdef' erInfo
logLvl 3 $ "Unoptimised " ++ show n ++ ": " ++ show tree
logLvl 3 $ "Optimised: " ++ show tree'
ctxt <- getContext
ist <- getIState
let opt = idris_optimisation ist
putIState (ist { idris_patdefs = addDef n (force pdef_pe, force pmissing)
(idris_patdefs ist) })
let caseInfo = CaseInfo (inlinable opts) (dictionary opts)
case lookupTy n ctxt of
[ty] -> do updateContext (addCasedef n erInfo caseInfo
tcase knowncovering
reflect
(AssertTotal `elem` opts)
atys
inacc
pats_pe
pdef
pdef -- compile time
pdef_inl -- inlined
pdef' ty)
addIBC (IBCDef n)
setTotality n tot
when (not reflect) $ do totcheck (fc, n)
defer_totcheck (fc, n)
when (tot /= Unchecked) $ addIBC (IBCTotal n tot)
i <- getIState
case lookupDef n (tt_ctxt i) of
(CaseOp _ _ _ _ _ cd : _) ->
let (scargs, sc) = cases_compiletime cd
(scargs', sc') = cases_runtime cd in
do let calls = findCalls sc' scargs'
let used = findUsedArgs sc' scargs'
-- let scg = buildSCG i sc scargs
-- add SCG later, when checking totality
let cg = CGInfo scargs' calls [] used [] -- TODO: remove this, not needed anymore
logLvl 2 $ "Called names: " ++ show cg
addToCG n cg
addToCalledG n (nub (map fst calls)) -- plus names in type!
addIBC (IBCCG n)
_ -> return ()
return ()
-- addIBC (IBCTotal n tot)
[] -> return ()
-- Check it's covering, if 'covering' option is used. Chase
-- all called functions, and fail if any of them are also
-- 'Partial NotCovering'
when (CoveringFn `elem` opts) $ checkAllCovering fc [] n n
where
noMatch i cs tm = all (\x -> case matchClause i (delab' i x True True) tm of
Right _ -> False
Left miss -> True) cs
checkUndefined n ctxt = case lookupDef n ctxt of
[] -> return ()
[TyDecl _ _] -> return ()
_ -> tclift $ tfail (At fc (AlreadyDefined n))
debind (Right (x, y)) = let (vs, x') = depat [] x
(_, y') = depat [] y in
(vs, x', y')
debind (Left x) = let (vs, x') = depat [] x in
(vs, x', Impossible)
depat acc (Bind n (PVar t) sc) = depat (n : acc) (instantiate (P Bound n t) sc)
depat acc x = (acc, x)
hasDefault cs | (PClause _ _ last _ _ _ :_) <- reverse cs
, (PApp fn s args) <- last = all ((==Placeholder) . getTm) args
hasDefault _ = False
getLHS (_, l, _) = l
simple_lhs ctxt (Right (x, y)) = Right (normalise ctxt [] x, y)
simple_lhs ctxt t = t
simple_rt ctxt (p, x, y) = (p, x, force (uniqueBinders p
(rt_simplify ctxt [] y)))
specNames [] = Nothing
specNames (Specialise ns : _) = Just ns
specNames (_ : xs) = specNames xs
sameLength ((_, x, _) : xs)
= do l <- sameLength xs
let (f, as) = unApply x
if (null xs || l == length as) then return (length as)
else tfail (At fc (Msg "Clauses have differing numbers of arguments "))
sameLength [] = return 0
-- Partially evaluate, if the definition is marked as specialisable
doPartialEval ist pats =
case specNames opts of
Nothing -> return pats
Just ns -> case partial_eval (tt_ctxt ist) ns pats of
Just t -> return t
Nothing -> ierror (At fc (Msg "No specialisation achieved"))
-- | Find 'static' applications in a term and partially evaluate them.
-- Return any new transformation rules
elabPE :: ElabInfo -> FC -> Name -> Term -> Idris [(Term, Term)]
elabPE info fc caller r =
do ist <- getIState
let sa = filter (\ap -> fst ap /= caller) $ getSpecApps ist [] r
rules <- mapM mkSpecialised sa
return $ concat rules
where
-- Make a specialised version of the application, and
-- add a PTerm level transformation rule, which is basically the
-- new definition in reverse (before specialising it).
-- RHS => LHS where implicit arguments are left blank in the
-- transformation.
-- Transformation rules are applied after every PClause elaboration
mkSpecialised :: (Name, [(PEArgType, Term)]) -> Idris [(Term, Term)]
mkSpecialised specapp_in = do
ist <- getIState
let (specTy, specapp) = getSpecTy ist specapp_in
let (n, newnm, specdecl) = getSpecClause ist specapp
let lhs = pe_app specdecl
let rhs = pe_def specdecl
let undef = case lookupDefExact newnm (tt_ctxt ist) of
Nothing -> True
_ -> False
logLvl 5 $ show (newnm, undef, map (concreteArg ist) (snd specapp))
idrisCatch
(if (undef && all (concreteArg ist) (snd specapp)) then do
cgns <- getAllNames n
-- on the RHS of the new definition, we should reduce
-- everything that's not itself static (because we'll
                 -- want a PE version of those next)
let cgns' = filter (\x -> x /= n &&
notStatic ist x) cgns
-- set small reduction limit on partial/productive things
let maxred = case lookupTotal n (tt_ctxt ist) of
[Total _] -> 65536
[Productive] -> 16
_ -> 1
let opts = [Specialise ((if pe_simple specdecl
then map (\x -> (x, Nothing)) cgns'
else []) ++
(n, Just maxred) :
mapMaybe (specName (pe_simple specdecl))
(snd specapp))]
logLvl 3 $ "Specialising application: " ++ show specapp
++ " in " ++ show caller ++
" with " ++ show opts
logLvl 3 $ "New name: " ++ show newnm
logLvl 3 $ "PE definition type : " ++ (show specTy)
++ "\n" ++ show opts
logLvl 3 $ "PE definition " ++ show newnm ++ ":\n" ++
showSep "\n"
(map (\ (lhs, rhs) ->
(show lhs ++ " = " ++
showTmImpls rhs)) (pe_clauses specdecl))
logLvl 2 $ show n ++ " transformation rule: " ++
show rhs ++ " ==> " ++ show lhs
elabType info defaultSyntax emptyDocstring [] fc opts newnm specTy
let def = map (\(lhs, rhs) ->
PClause fc newnm lhs [] rhs [])
(pe_clauses specdecl)
trans <- elabTransform info fc False rhs lhs
elabClauses info fc opts newnm def
return [trans]
else return [])
-- if it doesn't work, just don't specialise. Could happen for lots
-- of valid reasons (e.g. local variables in scope which can't be
-- lifted out).
(\e -> do logLvl 3 $ "Couldn't specialise: " ++ (pshow ist e)
return [])
specName simpl (ImplicitS, tm)
| (P Ref n _, _) <- unApply tm = Just (n, Just (if simpl then 1 else 0))
specName simpl (ExplicitS, tm)
| (P Ref n _, _) <- unApply tm = Just (n, Just (if simpl then 1 else 0))
specName simpl _ = Nothing
notStatic ist n = case lookupCtxtExact n (idris_statics ist) of
Just s -> not (or s)
_ -> True
concreteArg ist (ImplicitS, tm) = concreteTm ist tm
concreteArg ist (ExplicitS, tm) = concreteTm ist tm
concreteArg ist _ = True
concreteTm ist tm | (P _ n _, _) <- unApply tm =
case lookupTy n (tt_ctxt ist) of
[] -> False
_ -> True
concreteTm ist (Constant _) = True
concreteTm ist (Bind n (Lam _) sc) = True
concreteTm ist (Bind n (Let _ _) sc) = concreteTm ist sc
concreteTm ist _ = False
-- get the type of a specialised application
getSpecTy ist (n, args)
= case lookupTy n (tt_ctxt ist) of
[ty] -> let (specty_in, args') = specType args (explicitNames ty)
specty = normalise (tt_ctxt ist) [] (finalise specty_in)
t = mkPE_TyDecl ist args' (explicitNames specty) in
(t, (n, args'))
-- (normalise (tt_ctxt ist) [] (specType args ty))
_ -> error "Can't happen (getSpecTy)"
-- get the clause of a specialised application
getSpecClause ist (n, args)
= let newnm = sUN ("PE_" ++ show (nsroot n) ++ "_" ++
qhash 5381 (showSep "_" (map showArg args))) in
-- UN (show n ++ show (map snd args)) in
(n, newnm, mkPE_TermDecl ist newnm n args)
where showArg (ExplicitS, n) = qshow n
showArg (ImplicitS, n) = qshow n
showArg _ = ""
qshow (Bind _ _ _) = "fn"
qshow (App f a) = qshow f ++ qshow a
qshow (P _ n _) = show n
qshow (Constant c) = show c
qshow _ = ""
-- Simple but effective string hashing...
-- Keep it to 32 bits for readability/debuggability
qhash :: Int -> String -> String
qhash hash [] = showHex (abs hash `mod` 0xffffffff) ""
qhash hash (x:xs) = qhash (hash * 33 + fromEnum x) xs
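        -- Illustrative note (added, not from the original source): e.g.
        -- @qhash 5381 "PE_f_x"@ yields a short hexadecimal string that is
        -- stable for a given argument spelling, so repeated requests for the
        -- same specialisation map to the same generated name.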
-- checks if the clause is a possible left hand side. Returns the term if
-- possible, otherwise Nothing.
checkPossible :: ElabInfo -> FC -> Bool -> Name -> PTerm -> Idris Bool
checkPossible info fc tcgen fname lhs_in
= do ctxt <- getContext
i <- getIState
let lhs = addImplPat i lhs_in
-- if the LHS type checks, it is possible
case elaborate ctxt (sMN 0 "patLHS") infP []
(erun fc (buildTC i info ELHS [] fname (infTerm lhs))) of
OK ((lhs', _, _), _) ->
do let lhs_tm = orderPats (getInferTerm lhs')
case recheck ctxt [] (forget lhs_tm) lhs_tm of
OK _ -> return True
err -> return False
-- if it's a recoverable error, the case may become possible
Error err -> if tcgen then return (recoverable ctxt err)
else return (validCase ctxt err ||
recoverable ctxt err)
where validCase ctxt (CantUnify _ topx topy e _ _)
= let topx' = normalise ctxt [] topx
topy' = normalise ctxt [] topy in
not (sameFam topx' topy' || not (validCase ctxt e))
validCase ctxt (CantConvert _ _ _) = False
validCase ctxt (At _ e) = validCase ctxt e
validCase ctxt (Elaborating _ _ e) = validCase ctxt e
validCase ctxt (ElaboratingArg _ _ _ e) = validCase ctxt e
validCase ctxt _ = True
recoverable ctxt (CantUnify r topx topy e _ _)
= let topx' = normalise ctxt [] topx
topy' = normalise ctxt [] topy in
checkRec topx' topy'
recoverable ctxt (At _ e) = recoverable ctxt e
recoverable ctxt (Elaborating _ _ e) = recoverable ctxt e
recoverable ctxt (ElaboratingArg _ _ _ e) = recoverable ctxt e
recoverable _ _ = False
sameFam topx topy
= case (unApply topx, unApply topy) of
((P _ x _, _), (P _ y _, _)) -> x == y
_ -> False
-- different notion of recoverable than in unification, since we
-- have no metavars -- just looking to see if a constructor is failing
-- to unify with a function that may be reduced later
checkRec (App f a) p@(P _ _ _) = checkRec f p
checkRec p@(P _ _ _) (App f a) = checkRec p f
checkRec fa@(App _ _) fa'@(App _ _)
| (f, as) <- unApply fa,
(f', as') <- unApply fa'
= if (length as /= length as')
then checkRec f f'
else checkRec f f' && and (zipWith checkRec as as')
checkRec (P xt x _) (P yt y _) = x == y || ntRec xt yt
checkRec _ _ = False
ntRec x y | Ref <- x = True
| Ref <- y = True
| otherwise = False -- name is different, unrecoverable
propagateParams :: IState -> [Name] -> Type -> PTerm -> PTerm
propagateParams i ps t tm@(PApp _ (PRef fc n) args)
= PApp fc (PRef fc n) (addP t args)
where addP (Bind n _ sc) (t : ts)
| Placeholder <- getTm t,
n `elem` ps,
not (n `elem` allNamesIn tm)
= t { getTm = PRef fc n } : addP sc ts
addP (Bind n _ sc) (t : ts) = t : addP sc ts
addP _ ts = ts
propagateParams i ps t (PRef fc n)
= case lookupCtxt n (idris_implicits i) of
[is] -> let ps' = filter (isImplicit is) ps in
PApp fc (PRef fc n) (map (\x -> pimp x (PRef fc x) True) ps')
_ -> PRef fc n
where isImplicit [] n = False
isImplicit (PImp _ _ _ x _ : is) n | x == n = True
isImplicit (_ : is) n = isImplicit is n
propagateParams i ps t x = x
findUnique :: Context -> Env -> Term -> [Name]
findUnique ctxt env (Bind n b sc)
= let rawTy = forgetEnv (map fst env) (binderTy b)
uniq = case check ctxt env rawTy of
OK (_, UType UniqueType) -> True
OK (_, UType NullType) -> True
OK (_, UType AllTypes) -> True
_ -> False in
if uniq then n : findUnique ctxt ((n, b) : env) sc
else findUnique ctxt ((n, b) : env) sc
findUnique _ _ _ = []
-- Return the elaborated LHS/RHS, and the original LHS with implicits added
elabClause :: ElabInfo -> FnOpts -> (Int, PClause) ->
Idris (Either Term (Term, Term), PTerm)
elabClause info opts (_, PClause fc fname lhs_in [] PImpossible [])
= do let tcgen = Dictionary `elem` opts
i <- get
let lhs = addImpl i lhs_in
b <- checkPossible info fc tcgen fname lhs_in
case b of
True -> tclift $ tfail (At fc
(Msg $ show lhs_in ++ " is a valid case"))
False -> do ptm <- mkPatTm lhs_in
return (Left ptm, lhs)
elabClause info opts (cnum, PClause fc fname lhs_in_as withs rhs_in_as whereblock)
= do let tcgen = Dictionary `elem` opts
ctxt <- getContext
let (lhs_in, rhs_in) = desugarAs lhs_in_as rhs_in_as
-- Build the LHS as an "Infer", and pull out its type and
-- pattern bindings
i <- getIState
inf <- isTyInferred fname
-- get the parameters first, to pass through to any where block
let fn_ty = case lookupTy fname (tt_ctxt i) of
[t] -> t
_ -> error "Can't happen (elabClause function type)"
let fn_is = case lookupCtxt fname (idris_implicits i) of
[t] -> t
_ -> []
let params = getParamsInType i [] fn_is fn_ty
let lhs = mkLHSapp $ stripUnmatchable i $
propagateParams i params fn_ty (addImplPat i (stripLinear i lhs_in))
logLvl 5 ("LHS: " ++ show fc ++ " " ++ showTmImpls lhs)
logLvl 4 ("Fixed parameters: " ++ show params ++ " from " ++ show lhs_in ++
"\n" ++ show (fn_ty, fn_is))
(((lhs', dlhs, []), probs, inj), _) <-
tclift $ elaborate ctxt (sMN 0 "patLHS") infP []
(do res <- errAt "left hand side of " fname
(erun fc (buildTC i info ELHS opts fname (infTerm lhs)))
probs <- get_probs
inj <- get_inj
return (res, probs, inj))
when inf $ addTyInfConstraints fc (map (\(x,y,_,_,_,_,_) -> (x,y)) probs)
let lhs_tm = orderPats (getInferTerm lhs')
let lhs_ty = getInferType lhs'
let static_names = getStaticNames i lhs_tm
logLvl 3 ("Elaborated: " ++ show lhs_tm)
logLvl 3 ("Elaborated type: " ++ show lhs_ty)
logLvl 5 ("Injective: " ++ show fname ++ " " ++ show inj)
-- If we're inferring metavariables in the type, don't recheck,
-- because we're only doing this to try to work out those metavariables
(clhs_c, clhsty) <- if not inf
then recheckC fc [] lhs_tm
else return (lhs_tm, lhs_ty)
let clhs = normalise ctxt [] clhs_c
let borrowed = borrowedNames [] clhs
-- These are the names we're not allowed to use on the RHS, because
-- they're UniqueTypes and borrowed from another function.
-- FIXME: There is surely a nicer way than this...
-- Issue #1615 on the Issue Tracker.
-- https://github.com/idris-lang/Idris-dev/issues/1615
when (not (null borrowed)) $
logLvl 5 ("Borrowed names on LHS: " ++ show borrowed)
logLvl 3 ("Normalised LHS: " ++ showTmImpls (delabMV i clhs))
rep <- useREPL
when rep $ do
addInternalApp (fc_fname fc) (fst . fc_start $ fc) (delabMV i clhs) -- TODO: Should use span instead of line and filename?
addIBC (IBCLineApp (fc_fname fc) (fst . fc_start $ fc) (delabMV i clhs))
logLvl 5 ("Checked " ++ show clhs ++ "\n" ++ show clhsty)
-- Elaborate where block
ist <- getIState
windex <- getName
let decls = nub (concatMap declared whereblock)
let defs = nub (decls ++ concatMap defined whereblock)
let newargs_all = pvars ist lhs_tm
-- Unique arguments must be passed to the where block explicitly
       -- (since we can't control "usage" easily otherwise). Remove them
-- from newargs here
let uniqargs = findUnique (tt_ctxt ist) [] lhs_tm
let newargs = filter (\(n,_) -> n `notElem` uniqargs) newargs_all
let winfo = (pinfo info newargs defs windex) { elabFC = Just fc }
let wb = map (mkStatic static_names) $
map (expandParamsD False ist decorate newargs defs) whereblock
-- Split the where block into declarations with a type, and those
-- without
-- Elaborate those with a type *before* RHS, those without *after*
let (wbefore, wafter) = sepBlocks wb
logLvl 2 $ "Where block:\n " ++ show wbefore ++ "\n" ++ show wafter
mapM_ (rec_elabDecl info EAll winfo) wbefore
-- Now build the RHS, using the type of the LHS as the goal.
i <- getIState -- new implicits from where block
logLvl 5 (showTmImpls (expandParams decorate newargs defs (defs \\ decls) rhs_in))
let rhs = addImplBoundInf i (map fst newargs) (defs \\ decls)
(expandParams decorate newargs defs (defs \\ decls) rhs_in)
logLvl 2 $ "RHS: " ++ showTmImpls rhs
ctxt <- getContext -- new context with where block added
logLvl 5 "STARTING CHECK"
((rhs', defer, is, probs), _) <-
tclift $ elaborate ctxt (sMN 0 "patRHS") clhsty []
(do pbinds ist lhs_tm
mapM_ setinj (nub (params ++ inj))
setNextName
(_, _, is) <- errAt "right hand side of " fname
(erun fc (build i winfo ERHS opts fname rhs))
errAt "right hand side of " fname
(erun fc $ psolve lhs_tm)
hs <- get_holes
aux <- getAux
mapM_ (elabCaseHole aux) hs
tt <- get_term
let (tm, ds) = runState (collectDeferred (Just fname) tt) []
probs <- get_probs
return (tm, ds, is, probs))
when inf $ addTyInfConstraints fc (map (\(x,y,_,_,_,_,_) -> (x,y)) probs)
logLvl 5 "DONE CHECK"
logLvl 2 $ "---> " ++ show rhs'
when (not (null defer)) $ iLOG $ "DEFERRED " ++
show (map (\ (n, (_,_,t)) -> (n, t)) defer)
def' <- checkDef fc defer
let def'' = map (\(n, (i, top, t)) -> (n, (i, top, t, False))) def'
addDeferred def''
mapM_ (\(n, _) -> addIBC (IBCDef n)) def''
when (not (null def')) $ do
mapM_ defer_totcheck (map (\x -> (fc, fst x)) def'')
-- Now the remaining deferred (i.e. no type declarations) clauses
-- from the where block
mapM_ (rec_elabDecl info EAll winfo) wafter
mapM_ (elabCaseBlock winfo opts) is
ctxt <- getContext
logLvl 5 $ "Rechecking"
logLvl 6 $ " ==> " ++ show (forget rhs')
(crhs, crhsty) <- if not inf
then recheckC_borrowing True borrowed fc [] rhs'
else return (rhs', clhsty)
logLvl 6 $ " ==> " ++ show crhsty ++ " against " ++ show clhsty
case converts ctxt [] clhsty crhsty of
OK _ -> return ()
Error e -> ierror (At fc (CantUnify False clhsty crhsty e [] 0))
i <- getIState
checkInferred fc (delab' i crhs True True) rhs
-- if the function is declared '%error_reverse', or its type,
-- then we'll try running it in reverse to improve error messages
let (ret_fam, _) = unApply (getRetTy crhsty)
rev <- case ret_fam of
P _ rfamn _ ->
case lookupCtxt rfamn (idris_datatypes i) of
[TI _ _ dopts _ _] ->
return (DataErrRev `elem` dopts)
_ -> return False
_ -> return False
when (rev || ErrorReverse `elem` opts) $ do
addIBC (IBCErrRev (crhs, clhs))
addErrRev (crhs, clhs)
return $ (Right (clhs, crhs), lhs)
where
pinfo :: ElabInfo -> [(Name, PTerm)] -> [Name] -> Int -> ElabInfo
pinfo info ns ds i
= let newps = params info ++ ns
dsParams = map (\n -> (n, map fst newps)) ds
newb = addAlist dsParams (inblock info)
l = liftname info in
info { params = newps,
inblock = newb,
liftname = id -- (\n -> case lookupCtxt n newb of
-- Nothing -> n
-- _ -> MN i (show n)) . l
}
-- Find the variable names which appear under a 'Ownership.Read' so that
-- we know they can't be used on the RHS
borrowedNames :: [Name] -> Term -> [Name]
borrowedNames env (App (App (P _ (NS (UN lend) [owner]) _) _) arg)
| owner == txt "Ownership" &&
(lend == txt "lend" || lend == txt "Read") = getVs arg
where
getVs (V i) = [env!!i]
getVs (App f a) = nub $ getVs f ++ getVs a
getVs _ = []
borrowedNames env (App f a) = nub $ borrowedNames env f ++ borrowedNames env a
borrowedNames env (Bind n b sc) = nub $ borrowedB b ++ borrowedNames (n:env) sc
where borrowedB (Let t v) = nub $ borrowedNames env t ++ borrowedNames env v
borrowedB b = borrowedNames env (binderTy b)
borrowedNames _ _ = []
mkLHSapp t@(PRef _ _) = PApp fc t []
mkLHSapp t = t
decorate (NS x ns)
= NS (SN (WhereN cnum fname x)) ns -- ++ [show cnum])
-- = NS (UN ('#':show x)) (ns ++ [show cnum, show fname])
decorate x
= SN (WhereN cnum fname x)
-- = NS (SN (WhereN cnum fname x)) [show cnum]
-- = NS (UN ('#':show x)) [show cnum, show fname]
sepBlocks bs = sepBlocks' [] bs where
sepBlocks' ns (d@(PTy _ _ _ _ _ n t) : bs)
= let (bf, af) = sepBlocks' (n : ns) bs in
(d : bf, af)
sepBlocks' ns (d@(PClauses _ _ n _) : bs)
| not (n `elem` ns) = let (bf, af) = sepBlocks' ns bs in
(bf, d : af)
sepBlocks' ns (b : bs) = let (bf, af) = sepBlocks' ns bs in
(b : bf, af)
sepBlocks' ns [] = ([], [])
-- if a hole is just an argument/result of a case block, treat it as
-- the unit type. Hack to help elaborate case in do blocks.
elabCaseHole aux h = do
focus h
g <- goal
case g of
TType _ -> when (any (isArg h) aux) $ do apply (Var unitTy) []; solve
_ -> return ()
-- Is the name a pattern argument in the declaration
isArg :: Name -> PDecl -> Bool
isArg n (PClauses _ _ _ cs) = any isArg' cs
where
isArg' (PClause _ _ (PApp _ _ args) _ _ _)
= any (\x -> case x of
PRef _ n' -> n == n'
_ -> False) (map getTm args)
isArg' _ = False
isArg _ _ = False
elabClause info opts (_, PWith fc fname lhs_in withs wval_in withblock)
= do let tcgen = Dictionary `elem` opts
ctxt <- getContext
-- Build the LHS as an "Infer", and pull out its type and
-- pattern bindings
i <- getIState
-- get the parameters first, to pass through to any where block
let fn_ty = case lookupTy fname (tt_ctxt i) of
[t] -> t
_ -> error "Can't happen (elabClause function type)"
let fn_is = case lookupCtxt fname (idris_implicits i) of
[t] -> t
_ -> []
let params = getParamsInType i [] fn_is fn_ty
let lhs = propagateParams i params fn_ty (addImplPat i (stripLinear i lhs_in))
logLvl 2 ("LHS: " ++ show lhs)
((lhs', dlhs, []), _) <-
tclift $ elaborate ctxt (sMN 0 "patLHS") infP []
(errAt "left hand side of with in " fname
(erun fc (buildTC i info ELHS opts fname (infTerm lhs))) )
let lhs_tm = orderPats (getInferTerm lhs')
let lhs_ty = getInferType lhs'
let ret_ty = getRetTy (explicitNames (normalise ctxt [] lhs_ty))
let static_names = getStaticNames i lhs_tm
logLvl 5 (show lhs_tm ++ "\n" ++ show static_names)
(clhs, clhsty) <- recheckC fc [] lhs_tm
logLvl 5 ("Checked " ++ show clhs)
let bargs = getPBtys (explicitNames (normalise ctxt [] lhs_tm))
let wval = addImplBound i (map fst bargs) wval_in
logLvl 5 ("Checking " ++ showTmImpls wval)
-- Elaborate wval in this context
((wval', defer, is), _) <-
tclift $ elaborate ctxt (sMN 0 "withRHS")
(bindTyArgs PVTy bargs infP) []
(do pbinds i lhs_tm
setNextName
                         -- TODO: may want where here - see winfo above
(_', d, is) <- errAt "with value in " fname
(erun fc (build i info ERHS opts fname (infTerm wval)))
erun fc $ psolve lhs_tm
tt <- get_term
return (tt, d, is))
def' <- checkDef fc defer
let def'' = map (\(n, (i, top, t)) -> (n, (i, top, t, False))) def'
addDeferred def''
mapM_ (elabCaseBlock info opts) is
logLvl 5 ("Checked wval " ++ show wval')
(cwval, cwvalty) <- recheckC fc [] (getInferTerm wval')
let cwvaltyN = explicitNames (normalise ctxt [] cwvalty)
let cwvalN = explicitNames (normalise ctxt [] cwval)
logLvl 3 ("With type " ++ show cwvalty ++ "\nRet type " ++ show ret_ty)
-- We're going to assume the with type is not a function shortly,
-- so report an error if it is (you can't match on a function anyway
-- so this doesn't lose anything)
case getArgTys cwvaltyN of
[] -> return ()
(_:_) -> ierror $ At fc (WithFnType cwvalty)
let pvars = map fst (getPBtys cwvalty)
-- we need the unelaborated term to get the names it depends on
-- rather than a de Bruijn index.
let pdeps = usedNamesIn pvars i (delab i cwvalty)
let (bargs_pre, bargs_post) = split pdeps bargs []
logLvl 10 ("With type " ++ show (getRetTy cwvaltyN) ++
" depends on " ++ show pdeps ++ " from " ++ show pvars)
logLvl 10 ("Pre " ++ show bargs_pre ++ "\nPost " ++ show bargs_post)
windex <- getName
-- build a type declaration for the new function:
-- (ps : Xs) -> (withval : cwvalty) -> (ps' : Xs') -> ret_ty
let wargval = getRetTy cwvalN
let wargtype = getRetTy cwvaltyN
logLvl 5 ("Abstract over " ++ show wargval ++ " in " ++ show wargtype)
let wtype = bindTyArgs (flip Pi (TType (UVar 0))) (bargs_pre ++
(sMN 0 "warg", wargtype) :
map (abstract (sMN 0 "warg") wargval wargtype) bargs_post)
(substTerm wargval (P Bound (sMN 0 "warg") wargtype) ret_ty)
logLvl 5 ("New function type " ++ show wtype)
let wname = SN (WithN windex fname)
let imps = getImps wtype -- add to implicits context
putIState (i { idris_implicits = addDef wname imps (idris_implicits i) })
let statics = getStatics static_names wtype
logLvl 5 ("Static positions " ++ show statics)
i <- getIState
putIState (i { idris_statics = addDef wname statics (idris_statics i) })
addIBC (IBCDef wname)
addIBC (IBCImp wname)
addIBC (IBCStatic wname)
def' <- checkDef fc [(wname, (-1, Nothing, wtype))]
let def'' = map (\(n, (i, top, t)) -> (n, (i, top, t, False))) def'
addDeferred def''
-- in the subdecls, lhs becomes:
-- fname pats | wpat [rest]
-- ==> fname' ps wpat [rest], match pats against toplevel for ps
wb <- mapM (mkAuxC wname lhs (map fst bargs_pre) (map fst bargs_post))
withblock
logLvl 3 ("with block " ++ show wb)
-- propagate totality assertion to the new definitions
when (AssertTotal `elem` opts) $ setFlags wname [AssertTotal]
mapM_ (rec_elabDecl info EAll info) wb
-- rhs becomes: fname' ps wval
let rhs = PApp fc (PRef fc wname)
(map (pexp . (PRef fc) . fst) bargs_pre ++
pexp wval :
(map (pexp . (PRef fc) . fst) bargs_post))
logLvl 5 ("New RHS " ++ showTmImpls rhs)
ctxt <- getContext -- New context with block added
i <- getIState
((rhs', defer, is), _) <-
tclift $ elaborate ctxt (sMN 0 "wpatRHS") clhsty []
(do pbinds i lhs_tm
setNextName
(_, d, is) <- erun fc (build i info ERHS opts fname rhs)
psolve lhs_tm
tt <- get_term
return (tt, d, is))
def' <- checkDef fc defer
let def'' = map (\(n, (i, top, t)) -> (n, (i, top, t, False))) def'
addDeferred def''
mapM_ (elabCaseBlock info opts) is
logLvl 5 ("Checked RHS " ++ show rhs')
(crhs, crhsty) <- recheckC fc [] rhs'
return $ (Right (clhs, crhs), lhs)
where
getImps (Bind n (Pi _ _) t) = pexp Placeholder : getImps t
getImps _ = []
mkAuxC wname lhs ns ns' (PClauses fc o n cs)
| True = do cs' <- mapM (mkAux wname lhs ns ns') cs
return $ PClauses fc o wname cs'
| otherwise = ifail $ show fc ++ "with clause uses wrong function name " ++ show n
mkAuxC wname lhs ns ns' d = return $ d
mkAux wname toplhs ns ns' (PClause fc n tm_in (w:ws) rhs wheres)
= do i <- getIState
let tm = addImplPat i tm_in
logLvl 2 ("Matching " ++ showTmImpls tm ++ " against " ++
showTmImpls toplhs)
case matchClause i toplhs tm of
Left (a,b) -> ifail $ show fc ++ ":with clause does not match top level"
Right mvars ->
do logLvl 3 ("Match vars : " ++ show mvars)
lhs <- updateLHS n wname mvars ns ns' (fullApp tm) w
return $ PClause fc wname lhs ws rhs wheres
mkAux wname toplhs ns ns' (PWith fc n tm_in (w:ws) wval withs)
= do i <- getIState
let tm = addImplPat i tm_in
logLvl 2 ("Matching " ++ showTmImpls tm ++ " against " ++
showTmImpls toplhs)
withs' <- mapM (mkAuxC wname toplhs ns ns') withs
case matchClause i toplhs tm of
Left (a,b) -> trace ("matchClause: " ++ show a ++ " =/= " ++ show b) (ifail $ show fc ++ "with clause does not match top level")
Right mvars ->
do lhs <- updateLHS n wname mvars ns ns' (fullApp tm) w
return $ PWith fc wname lhs ws wval withs'
mkAux wname toplhs ns ns' c
= ifail $ show fc ++ ":badly formed with clause"
addArg (PApp fc f args) w = PApp fc f (args ++ [pexp w])
addArg (PRef fc f) w = PApp fc (PRef fc f) [pexp w]
updateLHS n wname mvars ns_in ns_in' (PApp fc (PRef fc' n') args) w
= let ns = map (keepMvar (map fst mvars) fc') ns_in
ns' = map (keepMvar (map fst mvars) fc') ns_in' in
return $ substMatches mvars $
PApp fc (PRef fc' wname)
(map pexp ns ++ pexp w : (map pexp ns'))
updateLHS n wname mvars ns_in ns_in' tm w
= updateLHS n wname mvars ns_in ns_in' (PApp fc tm []) w
keepMvar mvs fc v | v `elem` mvs = PRef fc v
| otherwise = Placeholder
fullApp (PApp _ (PApp fc f args) xs) = fullApp (PApp fc f (args ++ xs))
fullApp x = x
split [] rest pre = (reverse pre, rest)
split deps ((n, ty) : rest) pre
| n `elem` deps = split (deps \\ [n]) rest ((n, ty) : pre)
| otherwise = split deps rest ((n, ty) : pre)
split deps [] pre = (reverse pre, [])
abstract wn wv wty (n, argty) = (n, substTerm wv (P Bound wn wty) argty)
| andyarvanitis/Idris-dev | src/Idris/Elab/Clause.hs | bsd-3-clause | 44,724 | 10 | 28 | 17,926 | 13,156 | 6,529 | 6,627 | 724 | 38 |
{-# LANGUAGE CPP, ScopedTypeVariables #-}
{-# LANGUAGE GADTs #-}
module Llvm.Pass.Mem2Reg (mem2reg) where
import Compiler.Hoopl
import Llvm.Hir.Data
import qualified Data.Map as Dm
import qualified Data.Set as Ds
import Control.Monad
import Llvm.Pass.Rewriter (rwNode,nodeToGraph)
import Prelude hiding(lookup)
#ifdef DEBUG
import Debug.Trace
#endif
-- | TODO: describe in detail what this pass is doing
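-- Rough summary (added sketch; the code below is the authority): this is a
-- forward dataflow pass in the Hoopl style. For each local name it tracks the
-- last value stored to a stack slot ('Mem') or bound to an SSA reference
-- ('Ref'); where that value is the same along all incoming paths it can be
-- substituted for a later use, in the spirit of LLVM's classic mem2reg
-- promotion. Conflicting values join to 'Top' and are left untouched.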
type Mem2RegFact g = Dm.Map LValue (WithTop (Value g))
data LValue = Mem Lname
| Ref Lname
deriving (Eq, Show, Ord)
mem2RegLattice :: Eq g => DataflowLattice (Mem2RegFact g)
mem2RegLattice = DataflowLattice { fact_name = "Mem 2 Reg"
, fact_bot = Dm.empty
, fact_join = joinMaps (extendJoinDomain factAdd)
}
where
factAdd _ (OldFact old) (NewFact new)
= if new == old then (NoChange, PElem new)
else (SomeChange, Top)
isReg :: Eq g => FwdTransfer (Node g a) (Mem2RegFact g)
isReg = mkFTransfer ft
ft :: Eq g => (Node g a) e x -> Mem2RegFact g -> Fact x (Mem2RegFact g)
ft (Lnode _) f = f
ft (Pnode _ _) f = f
ft (Cnode cinst _) f = cinstft cinst f
ft n@(Tnode tinst _) f = tinstft n tinst f
tinstft :: Eq g => (Node g a) O C -> Tinst g -> Mem2RegFact g -> Fact C (Mem2RegFact g)
tinstft n term f =
let targets = successors n -- targetOf term
in case targets of
[] -> mapEmpty
l -> mkFactBase mem2RegLattice
(map (\x -> (x, f)) l)
cinstft :: Cinst g -> Mem2RegFact g -> Fact O (Mem2RegFact g)
cinstft = undefined
{-
cinstft (ComputingInstWithDbg (ComputingInst lhs rhs) _) f = cinstft' lhs rhs f
cinstft' :: Maybe GlobalOrLname -> Rhs -> Mem2RegFact -> Fact O Mem2RegFact
cinstft' lhs (RmO m) f = memOp lhs m f
cinstft' lhs (Re (Ev tv)) f = maybe f (\a -> let TypedData _ v = tv in
case a of
GolG _ -> f
GolL s -> Dm.insert (Ref $ localIdToLstring s) (PElem v) f) lhs
cinstft' _ (Re _) f = f
cinstft' _ _ f = f
memOp :: Maybe GlobalOrLname -> MemOp -> Mem2RegFact -> Fact O Mem2RegFact
memOp (Just (GolL lhs)) (Allocate _ _ Nothing _) f = insert (Mem $ localIdToLstring lhs) Top f
memOp _ (Store _ (TypedData _ v1) (TypedData _ (Pointer (VgOl (GolL ptr)))) _ _) f =
let x = Mem $ localIdToLstring ptr
in if (x `Dm.member` f) then insert x (PElem v1) f
else f
memOp _ (StoreAtomic _ _ (TypedData _ v1) (TypedData _ (Pointer (VgOl (GolL ptr)))) _) f =
let x = Mem $ localIdToLstring ptr
in if (x `Dm.member` f) then insert x (PElem v1) f
else f
memOp _ _ f = f
-}
insert :: Ord k => k -> v -> Dm.Map k v -> Dm.Map k v
#ifdef DEBUG
insert x v1 f | trace ("insert " ++ (show x) ++ "->" ++ (show v1)) False = undefined
#endif
insert x v1 f = Dm.insert x v1 f
badAss :: Monad m => (Value g -> Maybe (Value g)) -> Node g a e x -> m (Maybe (Node g a e x))
badAss f node = return (rwNode f node)
mem2Reg :: forall g.forall a.forall m . FuelMonad m => FwdRewrite m (Node g a) (Mem2RegFact g)
mem2Reg = mkFRewrite cp
where
-- each node is rewritten to a one node graph.
cp :: FuelMonad m => Node g a e x -> Mem2RegFact g -> m (Maybe (Graph (Node g a) e x))
cp node f = do { x <- badAss (lookup f) node
; return $ liftM {-Maybe-} nodeToGraph x
}
lookup :: Mem2RegFact g -> Value g -> Maybe (Value g)
lookup f x = do { x' <- case x of
Val_ssa s -> Just $ Ref s
-- Deref (Pointer (VgOl (GolL s))) -> Just $ Mem $ localIdToLstring s
_ -> Nothing
; case Dm.lookup x' f of
Just (PElem v) -> Just v
_ -> Nothing
}
mem2RegPass :: forall g.forall a. forall m. (Eq g, FuelMonad m) => FwdPass m (Node g a) (Mem2RegFact g)
mem2RegPass = FwdPass { fp_lattice = mem2RegLattice
, fp_transfer = isReg
, fp_rewrite = mem2Reg
}
mem2reg :: (CheckpointMonad m, FuelMonad m, Eq g) => Ds.Set (Dtype, g) -> Label -> Graph (Node g a) C C -> m (Graph (Node g a) C C)
mem2reg _ entry graph =
do { (graph', _, _) <- analyzeAndRewriteFwd fwd (JustC [entry]) graph
(mapSingleton entry (Dm.empty)) -- initFact gs))
; return graph'
}
  where fwd = mem2RegPass -- debugFwdJoins trace (const True) mem2RegPass
| mlite/hLLVM | src/Llvm/Pass/Mem2Reg.hs | bsd-3-clause | 4,577 | 0 | 16 | 1,472 | 1,294 | 669 | 625 | 63 | 3 |
{-# LANGUAGE OverloadedStrings, RankNTypes, RecordWildCards,
ScopedTypeVariables #-}
{-# OPTIONS_GHC -fno-warn-incomplete-patterns -fno-warn-missing-signatures
-fno-warn-unused-binds #-}
module UnitTests (testWith) where
import Control.Applicative ((<$>))
import Control.Concurrent (forkIO, killThread)
import Control.Concurrent.MVar (newEmptyMVar, putMVar, takeMVar)
import Control.Exception (Exception, toException)
import Control.Lens ((^.), (^?), (.~), (?~), (&))
import Control.Monad (unless, void)
import Data.Aeson
import Data.Aeson.Lens (key)
import Data.ByteString (ByteString)
import Data.Char (toUpper)
import Data.Maybe (isJust)
import Data.Monoid ((<>))
import HttpBin.Server (serve)
import Network.HTTP.Client (HttpException(..))
import Network.HTTP.Types.Status (Status(Status), status200, status401)
import Network.HTTP.Types.Version (http11)
import Network.Wreq hiding
(get, post, head_, put, options, delete,
getWith, postWith, headWith, putWith, optionsWith, deleteWith)
import Network.Wreq.Lens
import Network.Wreq.Types (Postable, Deletable, Putable)
import Snap.Http.Server.Config
import System.IO (hClose, hPutStr)
import System.IO.Temp (withSystemTempFile)
import Test.Framework (Test, defaultMain, testGroup)
import Test.Framework.Providers.HUnit (testCase)
import Test.HUnit (assertBool, assertEqual, assertFailure)
import qualified Control.Exception as E
import qualified Data.Text as T
import qualified Network.Wreq.Session as Session
import qualified Data.ByteString.Lazy as L
import qualified Network.Wreq as Wreq
data Verb = Verb {
get :: String -> IO (Response L.ByteString)
, getWith :: Options -> String -> IO (Response L.ByteString)
, post :: Postable a => String -> a -> IO (Response L.ByteString)
, postWith :: Postable a => Options -> String -> a
-> IO (Response L.ByteString)
, head_ :: String -> IO (Response ())
, headWith :: Options -> String -> IO (Response ())
, put :: Putable a => String -> a -> IO (Response L.ByteString)
, putWith :: Putable a => Options -> String -> a -> IO (Response L.ByteString)
, options :: String -> IO (Response ())
, optionsWith :: Options -> String -> IO (Response ())
, delete :: Deletable a => String -> Maybe a -> IO (Response L.ByteString)
, deleteWith :: Deletable a => Options -> String -> Maybe a -> IO (Response L.ByteString)
}
basic :: Verb
basic = Verb { get = Wreq.get, getWith = Wreq.getWith, post = Wreq.post
, postWith = Wreq.postWith, head_ = Wreq.head_
, headWith = Wreq.headWith, put = Wreq.put
, putWith = Wreq.putWith, options = Wreq.options
, optionsWith = Wreq.optionsWith, delete = Wreq.delete
, deleteWith = Wreq.deleteWith }
session :: Session.Session -> Verb
session s = Verb { get = Session.get s
, getWith = flip Session.getWith s
, post = Session.post s
, postWith = flip Session.postWith s
, head_ = Session.head_ s
, headWith = flip Session.headWith s
, put = Session.put s
, putWith = flip Session.putWith s
, options = Session.options s
, optionsWith = flip Session.optionsWith s
, delete = Session.delete s
, deleteWith = flip Session.deleteWith s }
basicGet Verb{..} site = do
r <- get (site "/get")
assertBool "GET request has User-Agent header" $
isJust (r ^. responseBody ^? key "headers" . key "User-Agent")
-- test the various lenses
assertEqual "GET succeeds" status200 (r ^. responseStatus)
assertEqual "GET succeeds 200" 200 (r ^. responseStatus . statusCode)
assertEqual "GET succeeds OK" "OK" (r ^. responseStatus . statusMessage)
assertEqual "GET response has HTTP/1.1 version" http11 (r ^. responseVersion)
assertBool "GET response has Content-Type header" $
isJust (r ^? responseHeader "Content-Type")
assertBool "GET response has Date header" $
isJust (lookup "Date" <$> r ^? responseHeaders)
basicPost Verb{..} site = do
r <- post (site "/post") ("wibble" :: ByteString) >>= asValue
let body = r ^. responseBody
assertEqual "POST succeeds" status200 (r ^. responseStatus)
assertEqual "POST echoes input" (Just "wibble") (body ^? key "data")
assertEqual "POST is binary" (Just "application/octet-stream")
(body ^? key "headers" . key "Content-Type")
multipartPost Verb{..} site =
withSystemTempFile "foo.html" $ \name handle -> do
hPutStr handle "<!DOCTYPE html><html></html"
hClose handle
r <- post (site "/post") (partFile "html" name)
assertEqual "POST succeeds" status200 (r ^. responseStatus)
basicHead Verb{..} site = do
r <- head_ (site "/get")
assertEqual "HEAD succeeds" status200 (r ^. responseStatus)
basicPut Verb{..} site = do
r <- put (site "/put") ("wibble" :: ByteString)
assertEqual "PUT succeeds" status200 (r ^. responseStatus)
data SolrAdd = SolrAdd
{ doc :: String
, boost :: Float
, overwrite :: Bool
, commitWithin :: Integer
}
instance ToJSON SolrAdd where
toJSON (SolrAdd doc boost overwrite commitWithin) =
object
[
"add" .= object
[ "doc" .= toJSON doc
, "boost" .= boost
, "overwrite" .= overwrite
, "commitWithin" .= commitWithin
]
]
solrAdd :: SolrAdd
solrAdd = SolrAdd "wibble" 1.0 True 10000
jsonPut Verb{..} site = do
r <- put (site "/put") $ toJSON solrAdd
assertEqual "toJSON PUT request has correct Content-Type header"
(Just "application/json")
(r ^. responseBody ^? key "headers" . key "Content-Type")
byteStringPut Verb{..} site = do
let opts = defaults & header "Content-Type" .~ ["application/json"]
r <- putWith opts (site "/put") $ encode solrAdd
assertEqual "ByteString PUT request has correct Content-Type header"
(Just "application/json")
(r ^. responseBody ^? key "headers" . key "Content-Type")
basicDelete Verb{..} site = do
r <- delete (site "/delete") (Nothing :: Maybe ByteString)
assertEqual "DELETE succeeds" status200 (r ^. responseStatus)
throwsStatusCode Verb{..} site =
assertThrows "404 causes exception to be thrown" inspect $
head_ (site "/status/404")
where inspect e = case e of
StatusCodeException _ _ _ -> return ()
_ -> assertFailure "unexpected exception thrown"
getBasicAuth Verb{..} site = do
let opts = defaults & auth ?~ basicAuth "user" "passwd"
r <- getWith opts (site "/basic-auth/user/passwd")
assertEqual "basic auth GET succeeds" status200 (r ^. responseStatus)
let inspect e = case e of
StatusCodeException status _ _ ->
assertEqual "basic auth failed GET gives 401"
status401 status
assertThrows "basic auth GET fails if password is bad" inspect $
getWith opts (site "/basic-auth/user/asswd")
getOAuth2 Verb{..} kind ctor site = do
let opts = defaults & auth ?~ ctor "token1234"
r <- getWith opts (site $ "/oauth2/" <> kind <> "/token1234")
assertEqual ("oauth2 " <> kind <> " GET succeeds")
status200 (r ^. responseStatus)
let inspect e = case e of
StatusCodeException status _ _ ->
assertEqual ("oauth2 " <> kind <> " failed GET gives 401")
status401 status
assertThrows ("oauth2 " <> kind <> " GET fails if token is bad") inspect $
getWith opts (site $ "/oauth2/" <> kind <> "/token123")
getRedirect Verb{..} site = do
r <- get (site "/redirect/3")
let stripProto = T.dropWhile (/=':')
smap f (String s) = String (f s)
assertEqual "redirect goes to /get"
(Just . String . stripProto . T.pack . site $ "/get")
(smap stripProto <$> (r ^. responseBody ^? key "url"))
getParams Verb{..} site = do
let opts1 = defaults & param "foo" .~ ["bar"]
r1 <- getWith opts1 (site "/get")
assertEqual "params set correctly 1" (Just (object [("foo","bar")]))
(r1 ^. responseBody ^? key "args")
let opts2 = defaults & params .~ [("quux","baz")]
r2 <- getWith opts2 (site "/get")
assertEqual "params set correctly 2" (Just (object [("quux","baz")]))
(r2 ^. responseBody ^? key "args")
r3 <- getWith opts2 (site "/get?whee=wat")
assertEqual "correctly handle mix of params from URI and Options"
(Just (object [("quux","baz"),("whee","wat")]))
(r3 ^. responseBody ^? key "args")
getHeaders Verb{..} site = do
let opts = defaults & header "X-Wibble" .~ ["bar"]
r <- getWith opts (site "/get")
assertEqual "extra header set correctly"
(Just "bar")
(r ^. responseBody ^? key "headers" . key "X-Wibble")
getCheckStatus Verb {..} site = do
let opts = defaults & checkStatus .~ (Just customCs)
r <- getWith opts (site "/status/404")
assertThrows "Non 404 throws error" inspect $
getWith opts (site "/get")
assertEqual "Status 404"
404
(r ^. responseStatus . statusCode)
where
customCs (Status 404 _) _ _ = Nothing
customCs s h cj = Just . toException . StatusCodeException s h $ cj
inspect e = case e of
(StatusCodeException (Status sc _) _ _) ->
assertEqual "200 Status Error" sc 200
getGzip Verb{..} site = do
r <- get (site "/gzip")
assertEqual "gzip decoded for us" (Just (Bool True))
(r ^. responseBody ^? key "gzipped")
headRedirect Verb{..} site =
assertThrows "HEAD of redirect throws exception" inspect $
head_ (site "/redirect/3")
where inspect e = case e of
StatusCodeException status _ _ ->
let code = status ^. statusCode
in assertBool "code is redirect"
(code >= 300 && code < 400)
redirectOverflow Verb{..} site =
assertThrows "GET with too many redirects throws exception" inspect $
getWith (defaults & redirects .~ 3) (site "/redirect/5")
where inspect e = case e of TooManyRedirects _ -> return ()
invalidURL Verb{..} _site = do
let noProto (InvalidUrlException _ _) = return ()
assertThrows "exception if no protocol" noProto (get "wheeee")
let noHost (InvalidDestinationHost _) = return ()
assertThrows "exception if no host" noHost (get "http://")
funkyScheme Verb{..} site = do
-- schemes are case insensitive, per RFC 3986 section 3.1
let (scheme, rest) = break (==':') $ site "/get"
void . get $ map toUpper scheme <> rest
cookiesSet Verb{..} site = do
r <- get (site "/cookies/set?x=y")
assertEqual "cookies are set correctly" (Just "y")
(r ^? responseCookie "x" . cookieValue)
cookieSession site = Session.withSession $ \s -> do
r0 <- Session.get s (site "/cookies/set?foo=bar")
assertEqual "after set foo, foo set" (Just "bar")
(r0 ^? responseCookie "foo" . cookieValue)
assertEqual "a different accessor works" (Just "bar")
(r0 ^. responseBody ^? key "cookies" . key "foo")
r1 <- Session.get s (site "/cookies")
assertEqual "long after set foo, foo still set" (Just "bar")
(r1 ^? responseCookie "foo" . cookieValue)
r2 <- Session.get s (site "/cookies/set?baz=quux")
assertEqual "after set baz, foo still set" (Just "bar")
(r2 ^? responseCookie "foo" . cookieValue)
assertEqual "after set baz, baz set" (Just "quux")
(r2 ^? responseCookie "baz" . cookieValue)
r3 <- Session.get s (site "/cookies")
assertEqual "long after set baz, foo still set" (Just "bar")
(r3 ^? responseCookie "foo" . cookieValue)
assertEqual "long after set baz, baz still set" (Just "quux")
(r3 ^? responseCookie "baz" . cookieValue)
r4 <- Session.get s (site "/cookies/delete?foo")
assertEqual "after delete foo, foo deleted" Nothing
(r4 ^? responseCookie "foo" . cookieValue)
assertEqual "after delete foo, baz still set" (Just "quux")
(r4 ^? responseCookie "baz" . cookieValue)
r5 <- Session.get s (site "/cookies")
assertEqual "long after delete foo, foo still deleted" Nothing
(r5 ^? responseCookie "foo" . cookieValue)
assertEqual "long after delete foo, baz still set" (Just "quux")
(r5 ^? responseCookie "baz" . cookieValue)
getWithManager site = withManager $ \opts -> do
void $ Wreq.getWith opts (site "/get?a=b")
void $ Wreq.getWith opts (site "/get?b=c")
assertThrows :: (Show e, Exception e) => String -> (e -> IO ()) -> IO a -> IO ()
assertThrows desc inspect act = do
let myInspect e = inspect e `E.catch` \(ee :: E.PatternMatchFail) ->
assertFailure (desc <> ": unexpected exception (" <>
show e <> "): " <> show ee)
caught <- (act >> return False) `E.catch` \e -> myInspect e >> return True
unless caught (assertFailure desc)
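-- Usage sketch (illustrative, not part of the original test suite):
--
-- > assertThrows "bad URL throws" (\(InvalidUrlException _ _) -> return ()) $
-- >   Wreq.get "not-a-url"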
commonTestsWith verb site = [
testGroup "basic" [
testCase "get" $ basicGet verb site
, testCase "post" $ basicPost verb site
, testCase "head" $ basicHead verb site
, testCase "put" $ basicPut verb site
, testCase "delete" $ basicDelete verb site
, testCase "404" $ throwsStatusCode verb site
, testCase "headRedirect" $ headRedirect verb site
, testCase "redirectOverflow" $ redirectOverflow verb site
, testCase "invalidURL" $ invalidURL verb site
, testCase "funkyScheme" $ funkyScheme verb site
]
, testGroup "fancy" [
testCase "basic auth" $ getBasicAuth verb site
, testCase "redirect" $ getRedirect verb site
, testCase "params" $ getParams verb site
, testCase "headers" $ getHeaders verb site
, testCase "gzip" $ getGzip verb site
, testCase "json put" $ jsonPut verb site
, testCase "bytestring put" $ byteStringPut verb site
, testCase "cookiesSet" $ cookiesSet verb site
, testCase "getWithManager" $ getWithManager site
, testCase "cookieSession" $ cookieSession site
, testCase "getCheckStatus" $ getCheckStatus verb site
]
]
-- Snap responds incorrectly to HEAD (by sending a response body),
-- thereby killing http-client's ability to continue a session.
-- https://github.com/snapframework/snap-core/issues/192
snapHeadSessionBug site = Session.withSession $ \s -> do
basicHead (session s) site
-- will crash with (InvalidStatusLine "0")
basicGet (session s) site
httpbinTestsWith verb site = commonTestsWith verb site <> [
]
-- Tests that our local httpbin clone doesn't yet support.
httpbinTests verb = [testGroup "httpbin" [
testGroup "http" $ httpbinTestsWith verb ("http://httpbin.org" <>)
, testGroup "https" $ httpbinTestsWith verb ("https://httpbin.org" <>)
]]
-- Tests that httpbin.org doesn't support.
localTests verb site = commonTestsWith verb site <> [
testCase "oauth2 Bearer" $ getOAuth2 verb "Bearer" oauth2Bearer site
, testCase "oauth2 token" $ getOAuth2 verb "token" oauth2Token site
]
startServer = do
started <- newEmptyMVar
let go n | n >= 100 = putMVar started Nothing
| otherwise = do
let port = 8000 + n
startedUp p = putMVar started (Just ("http://localhost:" <> p))
mkCfg = return . setBind ("localhost") . setPort port .
setVerbose False .
setStartupHook (const (startedUp (show port)))
serve mkCfg `E.catch` \(_::E.IOException) -> go (n+1)
tid <- forkIO $ go 0
(,) tid <$> takeMVar started
testWith :: [Test] -> IO ()
testWith tests = do
(tid, mserv) <- startServer
Session.withSession $ \s ->
flip E.finally (killThread tid) .
defaultMain $ tests <>
[ testGroup "plain" $ httpbinTests basic
, testGroup "session" $ httpbinTests (session s)] <>
case mserv of
Nothing -> []
Just binding -> [
testGroup "localhost" [
testGroup "plain" $ localTests basic (binding <>)
, testGroup "session" $ localTests (session s) (binding <>)
]
]
| Feeniks/wreq | tests/UnitTests.hs | bsd-3-clause | 15,911 | 0 | 23 | 3,770 | 5,085 | 2,545 | 2,540 | -1 | -1 |
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
module VSim.Data.TInt where
import Data.Int
import Data.Bits
import Foreign.Storable
import Text.Printf
newtype TInt = TInt { tintToInt32 :: Int32 }
deriving (Eq, Ord, Num, Enum, Real, Integral, Storable, Bounded, Bits, PrintfArg)
instance Show TInt where
showsPrec d = showsPrec d . tintToInt32
instance Read TInt where
readsPrec d s = map (\(a, r) -> (TInt a, r)) $ readsPrec d s
sizeOfTInt :: Int
sizeOfTInt = fromIntegral $ sizeOf (undefined :: TInt)
| ierton/vsim | src/VSim/Data/TInt.hs | bsd-3-clause | 516 | 0 | 11 | 93 | 183 | 101 | 82 | 14 | 1 |
-- | doctest utilities
module Flat.Instances.Test (
tst,
tstBits,
asList,
flatBits,
allBits,
prettyShow,
module Data.Word,
) where
import Data.Word
import Flat.Bits (
asBytes,
bits,
paddedBits,
)
import Flat.Class (Flat (..))
import Flat.Run (
flat,
unflat,
)
import Flat.Types (NumBits)
import Text.PrettyPrint.HughesPJClass (prettyShow)
-- | Returns: result of flat/unflat test, encoding size in bits, byte encoding
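--
-- A doctest-style sketch (assuming the standard 'Flat' instance for 'Bool',
-- which encodes 'False' as a single 0 bit):
--
-- >>> tst False
-- (True,1,[0])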
tst :: (Eq a, Flat a) => a -> (Bool, NumBits, [Word8])
tst v = (unflat (flat v) == Right v, size v 0, showBytes v)
-- | Returns: result of flat/unflat test, encoding size in bits, bits encoding
tstBits :: (Eq a, Flat a) => a -> (Bool, NumBits, String)
tstBits v = (unflat (flat v) == Right v, Flat.Class.size v 0, flatBits v)
-- | Test that container is serialised as a List
asList :: (Eq a1, Eq a2, Flat a1, Flat a2) => (a2 -> a1) -> a2 -> Bool
asList f l = tst (f l) == tst l
flatBits :: Flat a => a -> String
flatBits = prettyShow . bits
allBits :: Flat a => a -> String
allBits = prettyShow . paddedBits
showBytes :: Flat a => a -> [Word8]
showBytes = asBytes . bits
| tittoassini/flat | src/Flat/Instances/Test.hs | bsd-3-clause | 1,146 | 0 | 9 | 254 | 414 | 232 | 182 | 31 | 1 |
-----------------------------------------------------------------------------
-- |
-- Module : Data.SBV.BitVectors.Polynomials
-- Copyright : (c) Levent Erkok
-- License : BSD3
-- Maintainer : [email protected]
-- Stability : experimental
--
-- Implementation of polynomial arithmetic
-----------------------------------------------------------------------------
{-# LANGUAGE CPP #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE PatternGuards #-}
{-# LANGUAGE TypeSynonymInstances #-}
module Data.SBV.Tools.Polynomial (Polynomial(..), crc, crcBV, ites, mdp, addPoly) where
import Data.Bits (Bits(..))
import Data.List (genericTake)
import Data.Maybe (fromJust)
import Data.Word (Word8, Word16, Word32, Word64)
import Data.SBV.BitVectors.Data
import Data.SBV.BitVectors.Model
import Data.SBV.BitVectors.Splittable
import Data.SBV.Utils.Boolean
-- | Implements polynomial addition, multiplication, division, and modulus operations
-- over GF(2^n). NB. Similar to 'sQuotRem', division by @0@ is interpreted as follows:
--
-- @x `pDivMod` 0 = (0, x)@
--
-- for all @x@ (including @0@)
--
-- Minimal complete definition: 'pMult', 'pDivMod', 'showPolynomial'
class (Num a, Bits a) => Polynomial a where
-- | Given bit-positions to be set, create a polynomial
-- For instance
--
-- @polynomial [0, 1, 3] :: SWord8@
--
-- will evaluate to @11@, since it sets the bits @0@, @1@, and @3@. Mathematicans would write this polynomial
-- as @x^3 + x + 1@. And in fact, 'showPoly' will show it like that.
polynomial :: [Int] -> a
-- | Add two polynomials in GF(2^n).
pAdd :: a -> a -> a
-- | Multiply two polynomials in GF(2^n), and reduce it by the irreducible specified by
-- the polynomial as specified by coefficients of the third argument. Note that the third
-- argument is specifically left in this form as it is usally in GF(2^(n+1)), which is not available in our
-- formalism. (That is, we would need SWord9 for SWord8 multiplication, etc.) Also note that we do not
-- support symbolic irreducibles, which is a minor shortcoming. (Most GF's will come with fixed irreducibles,
-- so this should not be a problem in practice.)
--
-- Passing [] for the third argument will multiply the polynomials and then ignore the higher bits that won't
-- fit into the resulting size.
pMult :: (a, a, [Int]) -> a
-- | Divide two polynomials in GF(2^n), see above note for division by 0.
pDiv :: a -> a -> a
-- | Compute modulus of two polynomials in GF(2^n), see above note for modulus by 0.
pMod :: a -> a -> a
-- | Division and modulus packed together.
pDivMod :: a -> a -> (a, a)
-- | Display a polynomial like a mathematician would (over the monomial @x@), with a type.
showPoly :: a -> String
-- | Display a polynomial like a mathematician would (over the monomial @x@), the first argument
-- controls if the final type is shown as well.
showPolynomial :: Bool -> a -> String
-- defaults.. Minumum complete definition: pMult, pDivMod, showPolynomial
polynomial = foldr (flip setBit) 0
pAdd = xor
pDiv x y = fst (pDivMod x y)
pMod x y = snd (pDivMod x y)
showPoly = showPolynomial False
instance Polynomial Word8 where {showPolynomial = sp; pMult = lift polyMult; pDivMod = liftC polyDivMod}
instance Polynomial Word16 where {showPolynomial = sp; pMult = lift polyMult; pDivMod = liftC polyDivMod}
instance Polynomial Word32 where {showPolynomial = sp; pMult = lift polyMult; pDivMod = liftC polyDivMod}
instance Polynomial Word64 where {showPolynomial = sp; pMult = lift polyMult; pDivMod = liftC polyDivMod}
instance Polynomial SWord8 where {showPolynomial b = liftS (sp b); pMult = polyMult; pDivMod = polyDivMod}
instance Polynomial SWord16 where {showPolynomial b = liftS (sp b); pMult = polyMult; pDivMod = polyDivMod}
instance Polynomial SWord32 where {showPolynomial b = liftS (sp b); pMult = polyMult; pDivMod = polyDivMod}
instance Polynomial SWord64 where {showPolynomial b = liftS (sp b); pMult = polyMult; pDivMod = polyDivMod}
lift :: SymWord a => ((SBV a, SBV a, [Int]) -> SBV a) -> (a, a, [Int]) -> a
lift f (x, y, z) = fromJust $ unliteral $ f (literal x, literal y, z)
liftC :: SymWord a => (SBV a -> SBV a -> (SBV a, SBV a)) -> a -> a -> (a, a)
liftC f x y = let (a, b) = f (literal x) (literal y) in (fromJust (unliteral a), fromJust (unliteral b))
liftS :: SymWord a => (a -> String) -> SBV a -> String
liftS f s
| Just x <- unliteral s = f x
| True = show s
-- | Pretty print as a polynomial
sp :: Bits a => Bool -> a -> String
sp st a
| null cs = '0' : t
| True = foldr (\x y -> sh x ++ " + " ++ y) (sh (last cs)) (init cs) ++ t
where t | st = " :: GF(2^" ++ show n ++ ")"
| True = ""
#if __GLASGOW_HASKELL__ >= 708
n = maybe (error "SBV.Polynomial.sp: Unexpected non-finite usage!") id (bitSizeMaybe a)
#else
n = bitSize a
#endif
is = [n-1, n-2 .. 0]
cs = map fst $ filter snd $ zip is (map (testBit a) is)
sh 0 = "1"
sh 1 = "x"
sh i = "x^" ++ show i
-- | Add two polynomials
addPoly :: [SBool] -> [SBool] -> [SBool]
addPoly xs [] = xs
addPoly [] ys = ys
addPoly (x:xs) (y:ys) = x <+> y : addPoly xs ys
-- | Run down a boolean condition over two lists. Note that this is
-- different than zipWith as shorter list is assumed to be filled with
-- false at the end (i.e., zero-bits); which nicely pads it when
-- considered as an unsigned number in little-endian form.
ites :: SBool -> [SBool] -> [SBool] -> [SBool]
ites s xs ys
| Just t <- unliteral s
= if t then xs else ys
| True
= go xs ys
where go [] [] = []
go [] (b:bs) = ite s false b : go [] bs
go (a:as) [] = ite s a false : go as []
go (a:as) (b:bs) = ite s a b : go as bs
-- | Multiply two polynomials and reduce by the third (concrete) irreducible, given by its coefficients.
-- See the remarks for the 'pMult' function for this design choice
polyMult :: (Num a, Bits a, SymWord a, FromBits (SBV a)) => (SBV a, SBV a, [Int]) -> SBV a
polyMult (x, y, red)
| isReal x
= error $ "SBV.polyMult: Received a real value: " ++ show x
| not (isBounded x)
= error $ "SBV.polyMult: Received infinite precision value: " ++ show x
| True
= fromBitsLE $ genericTake sz $ r ++ repeat false
where (_, r) = mdp ms rs
ms = genericTake (2*sz) $ mul (blastLE x) (blastLE y) [] ++ repeat false
rs = genericTake (2*sz) $ [if i `elem` red then true else false | i <- [0 .. foldr max 0 red] ] ++ repeat false
sz = intSizeOf x
mul _ [] ps = ps
mul as (b:bs) ps = mul (false:as) bs (ites b (as `addPoly` ps) ps)
polyDivMod :: (Num a, Bits a, SymWord a, FromBits (SBV a)) => SBV a -> SBV a -> (SBV a, SBV a)
polyDivMod x y
| isReal x
= error $ "SBV.polyDivMod: Received a real value: " ++ show x
| not (isBounded x)
= error $ "SBV.polyDivMod: Received infinite precision value: " ++ show x
| True
= ite (y .== 0) (0, x) (adjust d, adjust r)
where adjust xs = fromBitsLE $ genericTake sz $ xs ++ repeat false
sz = intSizeOf x
(d, r) = mdp (blastLE x) (blastLE y)
-- conservative over-approximation of the degree
degree :: [SBool] -> Int
degree xs = walk (length xs - 1) $ reverse xs
where walk n [] = n
walk n (b:bs)
| Just t <- unliteral b
= if t then n else walk (n-1) bs
| True
= n -- over-estimate
-- | Compute modulus/remainder of polynomials on bit-vectors.
mdp :: [SBool] -> [SBool] -> ([SBool], [SBool])
mdp xs ys = go (length ys - 1) (reverse ys)
where degTop = degree xs
go _ [] = error "SBV.Polynomial.mdp: Impossible happened; exhausted ys before hitting 0"
go n (b:bs)
| n == 0 = (reverse qs, rs)
| True = let (rqs, rrs) = go (n-1) bs
in (ites b (reverse qs) rqs, ites b rs rrs)
where degQuot = degTop - n
ys' = replicate degQuot false ++ ys
(qs, rs) = divx (degQuot+1) degTop xs ys'
-- return the element at index i; if not enough elements, return false
-- N.B. equivalent to '(xs ++ repeat false) !! i', but more efficient
idx :: [SBool] -> Int -> SBool
idx [] _ = false
idx (x:_) 0 = x
idx (_:xs) i = idx xs (i-1)
divx :: Int -> Int -> [SBool] -> [SBool] -> ([SBool], [SBool])
divx n _ xs _ | n <= 0 = ([], xs)
divx n i xs ys' = (q:qs, rs)
where q = xs `idx` i
xs' = ites q (xs `addPoly` ys') xs
(qs, rs) = divx (n-1) (i-1) xs' (tail ys')
-- | Compute CRCs over bit-vectors. The call @crcBV n m p@ computes
-- the CRC of the message @m@ with respect to polynomial @p@. The
-- inputs are assumed to be blasted big-endian. The number
-- @n@ specifies how many bits of CRC is needed. Note that @n@
-- is actually the degree of the polynomial @p@, and thus it seems
-- redundant to pass it in. However, in a typical proof context,
-- the polynomial can be symbolic, so we cannot compute the degree
-- easily. While this can be worked-around by generating code that
-- accounts for all possible degrees, the resulting code would
-- be unnecessarily big and complicated, and much harder to reason
-- with. (Also note that a CRC is just the remainder from the
-- polynomial division, but this routine is much faster in practice.)
--
-- NB. The @n@th bit of the polynomial @p@ /must/ be set for the CRC
-- to be computed correctly. Note that the polynomial argument 'p' will
-- not even have this bit present most of the time, as it will typically
-- contain bits @0@ through @n-1@ as usual in the CRC literature. The higher
-- order @n@th bit is simply assumed to be set, as it does not make
-- sense to use a polynomial of a lesser degree. This is usually not a problem
-- since CRC polynomials are designed and expressed this way.
--
-- NB. The literature on CRC's has many variants on how CRC's are computed.
-- We follow the painless guide (<http://www.ross.net/crc/download/crc_v3.txt>)
-- and compute the CRC as follows:
--
-- * Extend the message 'm' by adding 'n' 0 bits on the right
--
-- * Divide the polynomial thus obtained by the 'p'
--
-- * The remainder is the CRC value.
--
-- There are many variants on final XOR's, reversed polynomials etc., so
-- it is essential to double check you use the correct /algorithm/.
crcBV :: Int -> [SBool] -> [SBool] -> [SBool]
crcBV n m p = take n $ go (replicate n false) (m ++ replicate n false)
where mask = drop (length p - n) p
go c [] = c
go c (b:bs) = go next bs
where c' = drop 1 c ++ [b]
next = ite (head c) (zipWith (<+>) c' mask) c'
-- | Compute CRC's over polynomials, i.e., symbolic words. The first
-- 'Int' argument plays the same role as the one in the 'crcBV' function.
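--
-- For example (a hypothetical sketch, not taken from this module), a 3-bit CRC of
-- an 8-bit message over the polynomial @x^3 + x + 1@ could be computed as below;
-- only bits 1 and 0 are passed, since the degree-3 bit is implicit:
--
-- @
-- crc3 :: SWord8 -> SWord8
-- crc3 msg = crc 3 msg (polynomial [1, 0])
-- @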
crc :: (FromBits (SBV a), FromBits (SBV b), Num a, Num b, Bits a, Bits b, SymWord a, SymWord b) => Int -> SBV a -> SBV b -> SBV b
crc n m p
| isReal m || isReal p
= error $ "SBV.crc: Received a real value: " ++ show (m, p)
| not (isBounded m) || not (isBounded p)
= error $ "SBV.crc: Received an infinite precision value: " ++ show (m, p)
| True
= fromBitsBE $ replicate (sz - n) false ++ crcBV n (blastBE m) (blastBE p)
where sz = intSizeOf p
| Copilot-Language/sbv-for-copilot | Data/SBV/Tools/Polynomial.hs | bsd-3-clause | 11,405 | 0 | 14 | 2,772 | 3,159 | 1,685 | 1,474 | 140 | 5 |
{-# LANGUAGE RankNTypes #-}
{- |
Module : ./TopHybrid/Parse_AS.hs
License : GPLv2 or higher, see LICENSE.txt
Maintainer : [email protected]
Stability : provisional
Portability : portable
Description :
Parser for a hybridized arbitrary logic
-}
module TopHybrid.Parse_AS where
import Common.AnnoState
import Common.AS_Annotation
import Common.GlobalAnnotations (PrefixMap)
import Common.Token
import Data.Maybe
import qualified Data.Map as Map
import Text.ParserCombinators.Parsec
import Logic.Logic
import TopHybrid.AS_TopHybrid
import Control.Monad (liftM)
-- the top parser; parses an entire specification
thBasic :: (String -> AnyLogic) -> AParser st Spc_Wrap
thBasic getLogic =
do
asKey "baselogic"
logicName <- simpleId
thSpec $ getLogic $ show logicName
basicSpec :: (Syntax lid basic_spec s si sim) =>
lid -> Maybe (PrefixMap -> AParser st basic_spec)
basicSpec l = maybe (parse_basic_spec l) (Just . fst)
(parserAndPrinter Nothing l)
{- Parses the specification after knowing
the underlying logic -}
thSpec :: AnyLogic -> AParser st Spc_Wrap
thSpec (Logic l) =
do
asKey "Basic_Spec"
asKey "{"
s <- callParser l basicSpec Map.empty
asKey "}"
i <- many itemParser
fs <- sepBy (annoFormParser l s) anSemiOrComma
return $ Spc_Wrap l (Bspec i s) fs
{- Calls the underlying logic parser, but only if it exists. Otherwise
it will throw an error -}
callParser :: (Show a) => a -> (a -> Maybe x) -> x
callParser l f =
fromMaybe (error $ "Failed! No parser for logic " ++ show l) $ f l
-- Parses the declaration of nominals and modalities
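-- For instance (a sketch), "modalities m1, m2" and "nominals home, work"
-- are both accepted here.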
itemParser :: AParser st TH_BASIC_ITEM
itemParser =
do
asKey "modalities"
ms <- ids
return $ Simple_mod_decl ms
<|>
do
asKey "nominals"
ns <- ids
return $ Simple_nom_decl ns
where ids = sepBy simpleId anSemiOrComma
-- Formula parser with annotations
annoFormParser :: (Logic l sub bs f s sm si mo sy rw pf) =>
l -> bs -> AParser st (Annoted Frm_Wrap)
annoFormParser l b = allAnnoParser $ formParser l b
-- Just parses the formula, and wraps it in Frm_Wrap
formParser :: (Logic l sub bs f s sm si mo sy rw pf) =>
l -> bs -> AParser st Frm_Wrap
formParser l bs = liftM (Frm_Wrap l) $ topParser l bs
-- Parser of hybridization of hybridization of sentences
formParser' :: Spc_Wrap -> AParser st Frm_Wrap
formParser' (Spc_Wrap l b _) = liftM (Frm_Wrap l) $ topParser l (und b)
{- Parser of sentences.
The operators are left associative and, where defined, their priority
order is as follows : () > (not,@,[],<>) > /\ > \/ > (->,<->) -}
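{- For instance (a sketch): "p /\ q => r \/ s" is parsed as
"(p /\ q) => (r \/ s)", since /\ binds tighter than \/, which in turn
binds tighter than => and <=> -}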
topParser :: (Logic l sub bs f s sm si mo sy rw pf) =>
l -> bs -> AParser st (TH_FORMULA f)
topParser l bs = chainl1 fp1 impAndBiP
where fp1 = chainl1 fp2 disjP
fp2 = chainl1 (fParser l bs) conjP
{- Parsers for the binary operators; they are kept separate so that we can
get the desired precedence order -}
conjP :: AParser st (TH_FORMULA f -> TH_FORMULA f -> TH_FORMULA f)
conjP = asKey "/\\" >> return Conjunction
disjP :: AParser st (TH_FORMULA f -> TH_FORMULA f -> TH_FORMULA f)
disjP = asKey "\\/" >> return Disjunction
impAndBiP :: AParser st (TH_FORMULA f -> TH_FORMULA f -> TH_FORMULA f)
impAndBiP = (asKey "=>" >> return Implication) <|>
(asKey "<=>" >> return BiImplication)
-- ------------
-- Parser of sentences without the binary operators
fParser :: (Logic l sub bs f s sm si mo sy rw pf) =>
l -> bs -> AParser st (TH_FORMULA f)
fParser l bs =
do
asKey "("
f <- topParser l bs
asKey ")"
return $ Par f
<|>
do
asKey "not"
f <- fParser l bs <|> topParser l bs
return $ Neg f
<|>
do
asKey "@"
n <- simpleId
f <- fParser l bs <|> topParser l bs
return $ At n f
<|>
do
asKey "!"
n <- simpleId
f <- fParser l bs
return $ Uni n f
<|>
do
asKey "?"
n <- simpleId
f <- fParser l bs
return $ Exist n f
<|>
do
asKey "["
m <- simpleId
asKey "]"
f <- fParser l bs <|> topParser l bs
return $ Box m f
<|>
try (do
asKey "<"
m <- simpleId
asKey ">\""
f <- fParser l bs <|> topParser l bs
return $ Par $ Conjunction (Dia m f) (Box m f))
<|>
do
asKey "<"
m <- simpleId
asKey ">"
f <- fParser l bs <|> topParser l bs
return $ Dia m f
<|>
do
asKey "true"
return TrueA
<|>
do
asKey "false"
return FalseA
<|>
do
n <- simpleId
return $ Here n
<|>
do
asKey "{"
f <- callParser l parse_basic_sen bs
asKey "}"
return $ UnderLogic f
| spechub/Hets | TopHybrid/Parse_AS.hs | gpl-2.0 | 5,033 | 0 | 20 | 1,631 | 1,486 | 697 | 789 | 137 | 1 |
-- The intention is that this will be the new unit test framework.
-- Please add any working tests here. This file should do nothing
-- but import tests from other modules.
--
-- Stephen Blackheath, 2009
module Main where
import PackageTests.BenchmarkExeV10.Check
import PackageTests.BenchmarkOptions.Check
import PackageTests.BenchmarkStanza.Check
-- import PackageTests.BuildDeps.GlobalBuildDepsNotAdditive1.Check
-- import PackageTests.BuildDeps.GlobalBuildDepsNotAdditive2.Check
import PackageTests.BuildDeps.InternalLibrary0.Check
import PackageTests.BuildDeps.InternalLibrary1.Check
import PackageTests.BuildDeps.InternalLibrary2.Check
import PackageTests.BuildDeps.InternalLibrary3.Check
import PackageTests.BuildDeps.InternalLibrary4.Check
import PackageTests.BuildDeps.SameDepsAllRound.Check
import PackageTests.BuildDeps.TargetSpecificDeps1.Check
import PackageTests.BuildDeps.TargetSpecificDeps2.Check
import PackageTests.BuildDeps.TargetSpecificDeps3.Check
import PackageTests.BuildTestSuiteDetailedV09.Check
import PackageTests.PackageTester (PackageSpec(..), compileSetup)
import PackageTests.PathsModule.Executable.Check
import PackageTests.PathsModule.Library.Check
import PackageTests.PreProcess.Check
import PackageTests.TemplateHaskell.Check
import PackageTests.CMain.Check
import PackageTests.EmptyLib.Check
import PackageTests.TestOptions.Check
import PackageTests.TestStanza.Check
import PackageTests.TestSuiteExeV10.Check
import PackageTests.OrderFlags.Check
import Distribution.Compat.Exception (catchIO)
import Distribution.Simple.LocalBuildInfo (LocalBuildInfo(..))
import Distribution.Simple.Program.Types (programPath)
import Distribution.Simple.Program.Builtin (ghcProgram, ghcPkgProgram)
import Distribution.Simple.Program.Db (requireProgram)
import Distribution.Simple.Utils (cabalVersion, die, withFileContents)
import Distribution.Text (display)
import Distribution.Verbosity (normal)
import Distribution.Version (Version(Version))
import Data.Maybe (isJust)
import System.Directory (doesFileExist, getCurrentDirectory,
setCurrentDirectory)
import System.Environment (getEnv)
import System.FilePath ((</>))
import Test.Framework (Test, TestName, defaultMain, testGroup)
import Test.Framework.Providers.HUnit (hUnitTestToTests)
import qualified Test.HUnit as HUnit
hunit :: TestName -> HUnit.Test -> Test
hunit name test = testGroup name $ hUnitTestToTests test
tests :: Version -> PackageSpec -> FilePath -> FilePath -> Bool -> [Test]
tests version inplaceSpec ghcPath ghcPkgPath runningOnTravis =
[ hunit "BuildDeps/SameDepsAllRound"
(PackageTests.BuildDeps.SameDepsAllRound.Check.suite ghcPath)
-- The two following tests were disabled by Johan Tibell as
-- they have been failing for a long time:
-- , hunit "BuildDeps/GlobalBuildDepsNotAdditive1/"
-- (PackageTests.BuildDeps.GlobalBuildDepsNotAdditive1.Check.suite ghcPath)
-- , hunit "BuildDeps/GlobalBuildDepsNotAdditive2/"
-- (PackageTests.BuildDeps.GlobalBuildDepsNotAdditive2.Check.suite ghcPath)
, hunit "BuildDeps/InternalLibrary0"
(PackageTests.BuildDeps.InternalLibrary0.Check.suite version ghcPath)
, hunit "PreProcess" (PackageTests.PreProcess.Check.suite ghcPath)
, hunit "TestStanza" (PackageTests.TestStanza.Check.suite ghcPath)
-- ^ The Test stanza test will eventually be required
-- only for higher versions.
, hunit "TestSuiteExeV10/Test" (PackageTests.TestSuiteExeV10.Check.checkTest ghcPath)
, hunit "TestSuiteExeV10/TestWithHpc"
(PackageTests.TestSuiteExeV10.Check.checkTestWithHpc ghcPath)
, hunit "TestOptions" (PackageTests.TestOptions.Check.suite ghcPath)
, hunit "BenchmarkStanza" (PackageTests.BenchmarkStanza.Check.suite ghcPath)
-- ^ The benchmark stanza test will eventually be required
-- only for higher versions.
, hunit "BenchmarkExeV10/Test"
(PackageTests.BenchmarkExeV10.Check.checkBenchmark ghcPath)
, hunit "BenchmarkOptions" (PackageTests.BenchmarkOptions.Check.suite ghcPath)
, hunit "TemplateHaskell/vanilla"
(PackageTests.TemplateHaskell.Check.vanilla ghcPath)
, hunit "PathsModule/Executable"
(PackageTests.PathsModule.Executable.Check.suite ghcPath)
, hunit "PathsModule/Library" (PackageTests.PathsModule.Library.Check.suite ghcPath)
, hunit "EmptyLib/emptyLib"
(PackageTests.EmptyLib.Check.emptyLib ghcPath)
, hunit "BuildTestSuiteDetailedV09"
(PackageTests.BuildTestSuiteDetailedV09.Check.suite inplaceSpec ghcPath)
, hunit "OrderFlags"
(PackageTests.OrderFlags.Check.suite ghcPath)
] ++
-- These tests are expected to fail on Travis because hvr's PPA GHCs don't
-- include profiling and dynamic libs.
(if not runningOnTravis
then [ hunit "TemplateHaskell/profiling"
(PackageTests.TemplateHaskell.Check.profiling ghcPath)
, hunit "TemplateHaskell/dynamic"
(PackageTests.TemplateHaskell.Check.dynamic ghcPath)
]
else []) ++
-- These tests are only required to pass on cabal version >= 1.7
(if version >= Version [1, 7] []
then [ hunit "BuildDeps/TargetSpecificDeps1"
(PackageTests.BuildDeps.TargetSpecificDeps1.Check.suite ghcPath)
, hunit "BuildDeps/TargetSpecificDeps2"
(PackageTests.BuildDeps.TargetSpecificDeps2.Check.suite ghcPath)
, hunit "BuildDeps/TargetSpecificDeps3"
(PackageTests.BuildDeps.TargetSpecificDeps3.Check.suite ghcPath)
, hunit "BuildDeps/InternalLibrary1"
(PackageTests.BuildDeps.InternalLibrary1.Check.suite ghcPath)
, hunit "BuildDeps/InternalLibrary2"
(PackageTests.BuildDeps.InternalLibrary2.Check.suite ghcPath ghcPkgPath)
, hunit "BuildDeps/InternalLibrary3"
(PackageTests.BuildDeps.InternalLibrary3.Check.suite ghcPath ghcPkgPath)
, hunit "BuildDeps/InternalLibrary4"
(PackageTests.BuildDeps.InternalLibrary4.Check.suite ghcPath ghcPkgPath)
, hunit "PackageTests/CMain"
(PackageTests.CMain.Check.checkBuild ghcPath)
]
else [])
main :: IO ()
main = do
wd <- getCurrentDirectory
let dbFile = wd </> "dist/package.conf.inplace"
inplaceSpec = PackageSpec
{ directory = []
, configOpts = [ "--package-db=" ++ dbFile
, "--constraint=Cabal == " ++ display cabalVersion
]
}
putStrLn $ "Cabal test suite - testing cabal version " ++
display cabalVersion
lbi <- getPersistBuildConfig_ ("dist" </> "setup-config")
(ghc, _) <- requireProgram normal ghcProgram (withPrograms lbi)
(ghcPkg, _) <- requireProgram normal ghcPkgProgram (withPrograms lbi)
let ghcPath = programPath ghc
ghcPkgPath = programPath ghcPkg
putStrLn $ "Using ghc: " ++ ghcPath
putStrLn $ "Using ghc-pkg: " ++ ghcPkgPath
setCurrentDirectory "tests"
-- Are we running on Travis-CI?
runningOnTravis <- checkRunningOnTravis
-- Create a shared Setup executable to speed up Simple tests
compileSetup "." ghcPath
defaultMain (tests cabalVersion inplaceSpec
ghcPath ghcPkgPath runningOnTravis)
-- | Is the test suite running on the Travis-CI build bot?
checkRunningOnTravis :: IO Bool
checkRunningOnTravis = fmap isJust (lookupEnv "CABAL_TEST_RUNNING_ON_TRAVIS")
where
lookupEnv :: String -> IO (Maybe String)
lookupEnv name = (Just `fmap` getEnv name) `catchIO` const (return Nothing)
-- Like Distribution.Simple.Configure.getPersistBuildConfig but
-- doesn't check that the Cabal version matches, which it doesn't when
-- we run Cabal's own test suite, due to bootstrapping issues.
getPersistBuildConfig_ :: FilePath -> IO LocalBuildInfo
getPersistBuildConfig_ filename = do
exists <- doesFileExist filename
if not exists
then die missing
else withFileContents filename $ \str ->
case lines str of
[_header, rest] -> case reads rest of
[(bi,_)] -> return bi
_ -> die cantParse
_ -> die cantParse
where
missing = "Run the 'configure' command first."
cantParse = "Saved package config file seems to be corrupt. "
++ "Try re-running the 'configure' command."
| jwiegley/ghc-release | libraries/Cabal/cabal/tests/PackageTests.hs | gpl-3.0 | 8,351 | 0 | 17 | 1,494 | 1,476 | 844 | 632 | 136 | 4 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- Module : Network.AWS.IAM.ListSigningCertificates
-- Copyright : (c) 2013-2014 Brendan Hay <[email protected]>
-- License : This Source Code Form is subject to the terms of
-- the Mozilla Public License, v. 2.0.
-- A copy of the MPL can be found in the LICENSE file or
-- you can obtain it at http://mozilla.org/MPL/2.0/.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : experimental
-- Portability : non-portable (GHC extensions)
--
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- | Returns information about the signing certificates associated with the
-- specified user. If there are none, the action returns an empty list.
--
-- Although each user is limited to a small number of signing certificates,
-- you can still paginate the results using the 'MaxItems' and 'Marker' parameters.
--
-- If the 'UserName' field is not specified, the user name is determined
-- implicitly based on the AWS access key ID used to sign the request. Because
-- this action works for access keys under the AWS account, you can use this
-- action to manage root credentials even if the AWS account has no associated
-- users.
--
-- <http://docs.aws.amazon.com/IAM/latest/APIReference/API_ListSigningCertificates.html>
module Network.AWS.IAM.ListSigningCertificates
(
-- * Request
ListSigningCertificates
-- ** Request constructor
, listSigningCertificates
-- ** Request lenses
, lsc1Marker
, lsc1MaxItems
, lsc1UserName
-- * Response
, ListSigningCertificatesResponse
-- ** Response constructor
, listSigningCertificatesResponse
-- ** Response lenses
, lscr1Certificates
, lscr1IsTruncated
, lscr1Marker
) where
import Network.AWS.Prelude
import Network.AWS.Request.Query
import Network.AWS.IAM.Types
import qualified GHC.Exts
data ListSigningCertificates = ListSigningCertificates
{ _lsc1Marker :: Maybe Text
, _lsc1MaxItems :: Maybe Nat
, _lsc1UserName :: Maybe Text
} deriving (Eq, Ord, Read, Show)
-- | 'ListSigningCertificates' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'lsc1Marker' @::@ 'Maybe' 'Text'
--
-- * 'lsc1MaxItems' @::@ 'Maybe' 'Natural'
--
-- * 'lsc1UserName' @::@ 'Maybe' 'Text'
--
listSigningCertificates :: ListSigningCertificates
listSigningCertificates = ListSigningCertificates
{ _lsc1UserName = Nothing
, _lsc1Marker = Nothing
, _lsc1MaxItems = Nothing
}
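-- A hypothetical usage sketch (the values are invented for illustration, and the
-- '&' and '.~' operators are assumed to come from a lens library):
--
-- > listSigningCertificates & lsc1UserName .~ Just "Bob"
-- >                         & lsc1MaxItems .~ Just 20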
-- | Use this only when paginating results, and only in a subsequent request
-- after you've received a response where the results are truncated. Set it to
-- the value of the 'Marker' element in the response you just received.
lsc1Marker :: Lens' ListSigningCertificates (Maybe Text)
lsc1Marker = lens _lsc1Marker (\s a -> s { _lsc1Marker = a })
-- | Use this only when paginating results to indicate the maximum number of
-- certificate IDs you want in the response. If there are additional certificate
-- IDs beyond the maximum you specify, the 'IsTruncated' response element is 'true'.
-- This parameter is optional. If you do not include it, it defaults to 100.
lsc1MaxItems :: Lens' ListSigningCertificates (Maybe Natural)
lsc1MaxItems = lens _lsc1MaxItems (\s a -> s { _lsc1MaxItems = a }) . mapping _Nat
-- | The name of the user.
lsc1UserName :: Lens' ListSigningCertificates (Maybe Text)
lsc1UserName = lens _lsc1UserName (\s a -> s { _lsc1UserName = a })
data ListSigningCertificatesResponse = ListSigningCertificatesResponse
{ _lscr1Certificates :: List "member" SigningCertificate
, _lscr1IsTruncated :: Maybe Bool
, _lscr1Marker :: Maybe Text
} deriving (Eq, Read, Show)
-- | 'ListSigningCertificatesResponse' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'lscr1Certificates' @::@ ['SigningCertificate']
--
-- * 'lscr1IsTruncated' @::@ 'Maybe' 'Bool'
--
-- * 'lscr1Marker' @::@ 'Maybe' 'Text'
--
listSigningCertificatesResponse :: ListSigningCertificatesResponse
listSigningCertificatesResponse = ListSigningCertificatesResponse
{ _lscr1Certificates = mempty
, _lscr1IsTruncated = Nothing
, _lscr1Marker = Nothing
}
-- | A list of the user's signing certificate information.
lscr1Certificates :: Lens' ListSigningCertificatesResponse [SigningCertificate]
lscr1Certificates =
lens _lscr1Certificates (\s a -> s { _lscr1Certificates = a })
. _List
-- | A flag that indicates whether there are more certificate IDs to list. If
-- your results were truncated, you can make a subsequent pagination request
-- using the 'Marker' request parameter to retrieve more certificates in the list.
lscr1IsTruncated :: Lens' ListSigningCertificatesResponse (Maybe Bool)
lscr1IsTruncated = lens _lscr1IsTruncated (\s a -> s { _lscr1IsTruncated = a })
-- | If 'IsTruncated' is 'true', this element is present and contains the value to
-- use for the 'Marker' parameter in a subsequent pagination request.
lscr1Marker :: Lens' ListSigningCertificatesResponse (Maybe Text)
lscr1Marker = lens _lscr1Marker (\s a -> s { _lscr1Marker = a })
instance ToPath ListSigningCertificates where
toPath = const "/"
instance ToQuery ListSigningCertificates where
toQuery ListSigningCertificates{..} = mconcat
[ "Marker" =? _lsc1Marker
, "MaxItems" =? _lsc1MaxItems
, "UserName" =? _lsc1UserName
]
instance ToHeaders ListSigningCertificates
instance AWSRequest ListSigningCertificates where
type Sv ListSigningCertificates = IAM
type Rs ListSigningCertificates = ListSigningCertificatesResponse
request = post "ListSigningCertificates"
response = xmlResponse
instance FromXML ListSigningCertificatesResponse where
parseXML = withElement "ListSigningCertificatesResult" $ \x -> ListSigningCertificatesResponse
<$> x .@? "Certificates" .!@ mempty
<*> x .@? "IsTruncated"
<*> x .@? "Marker"
instance AWSPager ListSigningCertificates where
page rq rs
| stop (rs ^. lscr1IsTruncated) = Nothing
| otherwise = Just $ rq
& lsc1Marker .~ rs ^. lscr1Marker
| kim/amazonka | amazonka-iam/gen/Network/AWS/IAM/ListSigningCertificates.hs | mpl-2.0 | 6,637 | 0 | 14 | 1,320 | 800 | 475 | 325 | 83 | 1 |
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-matches #-}
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- |
-- Module : Network.AWS.EC2.ModifyImageAttribute
-- Copyright : (c) 2013-2015 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Modifies the specified attribute of the specified AMI. You can specify
-- only one attribute at a time.
--
-- AWS Marketplace product codes cannot be modified. Images with an AWS
-- Marketplace product code cannot be made public.
--
-- /See:/ <http://docs.aws.amazon.com/AWSEC2/latest/APIReference/ApiReference-query-ModifyImageAttribute.html AWS API Reference> for ModifyImageAttribute.
module Network.AWS.EC2.ModifyImageAttribute
(
-- * Creating a Request
modifyImageAttribute
, ModifyImageAttribute
-- * Request Lenses
, miaAttribute
, miaUserIds
, miaUserGroups
, miaValue
, miaLaunchPermission
, miaOperationType
, miaProductCodes
, miaDescription
, miaDryRun
, miaImageId
-- * Destructuring the Response
, modifyImageAttributeResponse
, ModifyImageAttributeResponse
) where
import Network.AWS.EC2.Types
import Network.AWS.EC2.Types.Product
import Network.AWS.Prelude
import Network.AWS.Request
import Network.AWS.Response
-- | /See:/ 'modifyImageAttribute' smart constructor.
data ModifyImageAttribute = ModifyImageAttribute'
{ _miaAttribute :: !(Maybe Text)
, _miaUserIds :: !(Maybe [Text])
, _miaUserGroups :: !(Maybe [Text])
, _miaValue :: !(Maybe Text)
, _miaLaunchPermission :: !(Maybe LaunchPermissionModifications)
, _miaOperationType :: !(Maybe OperationType)
, _miaProductCodes :: !(Maybe [Text])
, _miaDescription :: !(Maybe AttributeValue)
, _miaDryRun :: !(Maybe Bool)
, _miaImageId :: !Text
} deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Creates a value of 'ModifyImageAttribute' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'miaAttribute'
--
-- * 'miaUserIds'
--
-- * 'miaUserGroups'
--
-- * 'miaValue'
--
-- * 'miaLaunchPermission'
--
-- * 'miaOperationType'
--
-- * 'miaProductCodes'
--
-- * 'miaDescription'
--
-- * 'miaDryRun'
--
-- * 'miaImageId'
modifyImageAttribute
:: Text -- ^ 'miaImageId'
-> ModifyImageAttribute
modifyImageAttribute pImageId_ =
ModifyImageAttribute'
{ _miaAttribute = Nothing
, _miaUserIds = Nothing
, _miaUserGroups = Nothing
, _miaValue = Nothing
, _miaLaunchPermission = Nothing
, _miaOperationType = Nothing
, _miaProductCodes = Nothing
, _miaDescription = Nothing
, _miaDryRun = Nothing
, _miaImageId = pImageId_
}
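-- A hypothetical sketch for changing an AMI's description (values invented for
-- illustration; lens operators assumed to be in scope):
--
-- > modifyImageAttribute "ami-1a2b3c4d"
-- >     & miaAttribute .~ Just "description"
-- >     & miaValue .~ Just "my web server image"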
-- | The name of the attribute to modify.
miaAttribute :: Lens' ModifyImageAttribute (Maybe Text)
miaAttribute = lens _miaAttribute (\ s a -> s{_miaAttribute = a});
-- | One or more AWS account IDs. This is only valid when modifying the
-- 'launchPermission' attribute.
miaUserIds :: Lens' ModifyImageAttribute [Text]
miaUserIds = lens _miaUserIds (\ s a -> s{_miaUserIds = a}) . _Default . _Coerce;
-- | One or more user groups. This is only valid when modifying the
-- 'launchPermission' attribute.
miaUserGroups :: Lens' ModifyImageAttribute [Text]
miaUserGroups = lens _miaUserGroups (\ s a -> s{_miaUserGroups = a}) . _Default . _Coerce;
-- | The value of the attribute being modified. This is only valid when
-- modifying the 'description' attribute.
miaValue :: Lens' ModifyImageAttribute (Maybe Text)
miaValue = lens _miaValue (\ s a -> s{_miaValue = a});
-- | A launch permission modification.
miaLaunchPermission :: Lens' ModifyImageAttribute (Maybe LaunchPermissionModifications)
miaLaunchPermission = lens _miaLaunchPermission (\ s a -> s{_miaLaunchPermission = a});
-- | The operation type.
miaOperationType :: Lens' ModifyImageAttribute (Maybe OperationType)
miaOperationType = lens _miaOperationType (\ s a -> s{_miaOperationType = a});
-- | One or more product codes. After you add a product code to an AMI, it
-- can\'t be removed. This is only valid when modifying the 'productCodes'
-- attribute.
miaProductCodes :: Lens' ModifyImageAttribute [Text]
miaProductCodes = lens _miaProductCodes (\ s a -> s{_miaProductCodes = a}) . _Default . _Coerce;
-- | A description for the AMI.
miaDescription :: Lens' ModifyImageAttribute (Maybe AttributeValue)
miaDescription = lens _miaDescription (\ s a -> s{_miaDescription = a});
-- | Checks whether you have the required permissions for the action, without
-- actually making the request, and provides an error response. If you have
-- the required permissions, the error response is 'DryRunOperation'.
-- Otherwise, it is 'UnauthorizedOperation'.
miaDryRun :: Lens' ModifyImageAttribute (Maybe Bool)
miaDryRun = lens _miaDryRun (\ s a -> s{_miaDryRun = a});
-- | The ID of the AMI.
miaImageId :: Lens' ModifyImageAttribute Text
miaImageId = lens _miaImageId (\ s a -> s{_miaImageId = a});
instance AWSRequest ModifyImageAttribute where
type Rs ModifyImageAttribute =
ModifyImageAttributeResponse
request = postQuery eC2
response = receiveNull ModifyImageAttributeResponse'
instance ToHeaders ModifyImageAttribute where
toHeaders = const mempty
instance ToPath ModifyImageAttribute where
toPath = const "/"
instance ToQuery ModifyImageAttribute where
toQuery ModifyImageAttribute'{..}
= mconcat
["Action" =: ("ModifyImageAttribute" :: ByteString),
"Version" =: ("2015-04-15" :: ByteString),
"Attribute" =: _miaAttribute,
toQuery (toQueryList "UserId" <$> _miaUserIds),
toQuery (toQueryList "UserGroup" <$> _miaUserGroups),
"Value" =: _miaValue,
"LaunchPermission" =: _miaLaunchPermission,
"OperationType" =: _miaOperationType,
toQuery
(toQueryList "ProductCode" <$> _miaProductCodes),
"Description" =: _miaDescription,
"DryRun" =: _miaDryRun, "ImageId" =: _miaImageId]
-- | /See:/ 'modifyImageAttributeResponse' smart constructor.
data ModifyImageAttributeResponse =
ModifyImageAttributeResponse'
deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Creates a value of 'ModifyImageAttributeResponse' with the minimum fields required to make a request.
--
modifyImageAttributeResponse
:: ModifyImageAttributeResponse
modifyImageAttributeResponse = ModifyImageAttributeResponse'
| fmapfmapfmap/amazonka | amazonka-ec2/gen/Network/AWS/EC2/ModifyImageAttribute.hs | mpl-2.0 | 7,051 | 0 | 12 | 1,439 | 1,122 | 666 | 456 | 126 | 1 |
-- |
-- Module : Foundation.Parser
-- License : BSD-style
-- Maintainer : Haskell Foundation
-- Stability : experimental
-- Portability : portable
--
-- The current implementation is mainly, if not copy/pasted, inspired from
-- `memory`'s Parser.
--
-- Foundation Parser makes use of the Foundation's @Collection@ and
-- @Sequential@ classes to allow you to define generic parsers over any
-- @Sequential@ of input.
--
-- This way you can easily implement parsers over @LString@, @String@.
--
--
-- > flip parseOnly "[email protected]" $ do
-- > EmailAddress
-- >      <$> (takeWhile ((/=) '@') <* element '@')
-- > <*> takeAll
--
{-# LANGUAGE Rank2Types #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
module Foundation.Parser
( Parser
, parse
, parseFeed
, parseOnly
, -- * Result
Result(..)
, ParseError(..)
, reportError
, -- * Parser source
ParserSource(..)
, -- * combinator
peek
, element
, anyElement
, elements
, string
, satisfy
, satisfy_
, take
, takeWhile
, takeAll
, skip
, skipWhile
, skipAll
, (<|>)
, many
, some
, optional
, repeat, Condition(..), And(..)
) where
import Control.Applicative (Alternative, empty, (<|>), many, some, optional)
import Control.Monad (MonadPlus, mzero, mplus)
import Basement.Compat.Base
import Basement.Types.OffsetSize
import Foundation.Numerical
import Foundation.Collection hiding (take, takeWhile)
import qualified Foundation.Collection as C
import Foundation.String
-- Error handling -------------------------------------------------------------
-- | common parser error definition
data ParseError input
= NotEnough (CountOf (Element input))
-- ^ meaning the parser was short of @CountOf@ @Element@ of `input`.
| NotEnoughParseOnly
-- ^ The parser needed more data, only when using @parseOnly@
| ExpectedElement (Element input) (Element input)
-- ^ when using @element@
| Expected (Chunk input) (Chunk input)
-- ^ when using @elements@ or @string@
| Satisfy (Maybe String)
-- ^ the @satisfy@ or @satisfy_@ function failed,
deriving (Typeable)
instance (Typeable input, Show input) => Exception (ParseError input)
instance Show input => Show (ParseError input) where
show (NotEnough (CountOf sz)) = "NotEnough: missing " <> show sz <> " element(s)"
show NotEnoughParseOnly = "NotEnough, parse only"
show (ExpectedElement _ _) = "Expected _ but received _"
show (Expected _ _) = "Expected _ but received _"
show (Satisfy Nothing) = "Satisfy"
show (Satisfy (Just s)) = "Satisfy: " <> toList s
instance {-# OVERLAPPING #-} Show (ParseError String) where
show (NotEnough (CountOf sz)) = "NotEnough: missing " <> show sz <> " element(s)"
show NotEnoughParseOnly = "NotEnough, parse only"
show (ExpectedElement a b) = "Expected "<>show a<>" but received " <> show b
show (Expected a b) = "Expected "<>show a<>" but received " <> show b
show (Satisfy Nothing) = "Satisfy"
show (Satisfy (Just s)) = "Satisfy: " <> toList s
-- Results --------------------------------------------------------------------
-- | result of executing the `parser` over the given `input`
data Result input result
= ParseFailed (ParseError input)
-- ^ the parser failed with the given @ParserError@
| ParseOk (Chunk input) result
-- ^ the parser complete successfuly with the remaining @Chunk@
| ParseMore (Chunk input -> Result input result)
-- ^ the parser needs more input, pass an empty @Chunk@ or @mempty@
-- to tell the parser you don't have anymore inputs.
instance (Show k, Show input) => Show (Result input k) where
show (ParseFailed err) = "Parser failed: " <> show err
show (ParseOk _ k) = "Parser succeed: " <> show k
show (ParseMore _) = "Parser incomplete: need more"
instance Functor (Result input) where
fmap f r = case r of
ParseFailed err -> ParseFailed err
ParseOk rest a -> ParseOk rest (f a)
ParseMore more -> ParseMore (fmap f . more)
-- Parser Source --------------------------------------------------------------
class (Sequential input, IndexedCollection input) => ParserSource input where
type Chunk input
nullChunk :: input -> Chunk input -> Bool
appendChunk :: input -> Chunk input -> input
subChunk :: input -> Offset (Element input) -> CountOf (Element input) -> Chunk input
spanChunk :: input -> Offset (Element input) -> (Element input -> Bool) -> (Chunk input, Offset (Element input))
endOfParserSource :: ParserSource input => input -> Offset (Element input) -> Bool
endOfParserSource l off = off .==# length l
{-# INLINE endOfParserSource #-}
-- Parser ---------------------------------------------------------------------
data NoMore = More | NoMore
deriving (Show, Eq)
type Failure input result = input -> Offset (Element input) -> NoMore -> ParseError input -> Result input result
type Success input result' result = input -> Offset (Element input) -> NoMore -> result' -> Result input result
-- | Foundation's @Parser@ monad.
--
-- Its implementation is based on the parser in `memory`.
newtype Parser input result = Parser
{ runParser :: forall result'
. input -> Offset (Element input) -> NoMore
-> Failure input result'
-> Success input result result'
-> Result input result'
}
instance Functor (Parser input) where
fmap f fa = Parser $ \buf off nm err ok ->
runParser fa buf off nm err $ \buf' off' nm' a -> ok buf' off' nm' (f a)
{-# INLINE fmap #-}
instance ParserSource input => Applicative (Parser input) where
pure a = Parser $ \buf off nm _ ok -> ok buf off nm a
{-# INLINE pure #-}
fab <*> fa = Parser $ \buf0 off0 nm0 err ok ->
runParser fab buf0 off0 nm0 err $ \buf1 off1 nm1 ab ->
runParser_ fa buf1 off1 nm1 err $ \buf2 off2 nm2 -> ok buf2 off2 nm2 . ab
{-# INLINE (<*>) #-}
instance ParserSource input => Monad (Parser input) where
return = pure
{-# INLINE return #-}
m >>= k = Parser $ \buf off nm err ok ->
runParser m buf off nm err $ \buf' off' nm' a ->
runParser_ (k a) buf' off' nm' err ok
{-# INLINE (>>=) #-}
instance ParserSource input => MonadPlus (Parser input) where
mzero = error "Foundation.Parser.Internal.MonadPlus.mzero"
mplus f g = Parser $ \buf off nm err ok ->
runParser f buf off nm (\buf' _ nm' _ -> runParser g buf' off nm' err ok) ok
{-# INLINE mplus #-}
instance ParserSource input => Alternative (Parser input) where
empty = error "Foundation.Parser.Internal.Alternative.empty"
(<|>) = mplus
{-# INLINE (<|>) #-}
runParser_ :: ParserSource input
=> Parser input result
-> input
-> Offset (Element input)
-> NoMore
-> Failure input result'
-> Success input result result'
-> Result input result'
runParser_ parser buf off NoMore err ok = runParser parser buf off NoMore err ok
runParser_ parser buf off nm err ok
| endOfParserSource buf off = ParseMore $ \chunk ->
if nullChunk buf chunk
then runParser parser buf off NoMore err ok
else runParser parser (appendChunk buf chunk) off nm err ok
| otherwise = runParser parser buf off nm err ok
{-# INLINE runParser_ #-}
-- | Run a parser on an @initial@ input.
--
-- If the Parser needs more data than available, the @feeder@ function
-- is automatically called and fed to the More continuation.
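--
-- A hypothetical sketch, feeding a 'String' parser line by line from standard
-- input (@myParser@ is a placeholder):
--
-- > result <- parseFeed getLine myParser ""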
parseFeed :: (ParserSource input, Monad m)
=> m (Chunk input)
-> Parser input a
-> input
-> m (Result input a)
parseFeed feeder p initial = loop $ parse p initial
where loop (ParseMore k) = feeder >>= (loop . k)
loop r = return r
-- | Run a Parser on the given input and return a 'Result'
parse :: ParserSource input
=> Parser input a -> input -> Result input a
parse p s = runParser p s 0 More failure success
failure :: input -> Offset (Element input) -> NoMore -> ParseError input -> Result input r
failure _ _ _ = ParseFailed
{-# INLINE failure #-}
success :: ParserSource input => input -> Offset (Element input) -> NoMore -> r -> Result input r
success buf off _ = ParseOk rest
where
!rest = subChunk buf off (length buf `sizeSub` offsetAsSize off)
{-# INLINE success #-}
-- | parse only the given input
--
-- The left-over `Element input` will be ignored; if the parser calls for more
-- data it will be continuously fed with `Nothing` (up to 256 iterations).
--
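-- A hypothetical sketch:
--
-- > parseOnly (string "hello" *> takeAll) "hello world"
-- > -- Right " world"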
parseOnly :: (ParserSource input, Monoid (Chunk input))
=> Parser input a
-> input
-> Either (ParseError input) a
parseOnly p i = case runParser p i 0 NoMore failure success of
ParseFailed err -> Left err
ParseOk _ r -> Right r
ParseMore _ -> Left NotEnoughParseOnly
-- ------------------------------------------------------------------------- --
-- String Parser --
-- ------------------------------------------------------------------------- --
instance ParserSource String where
type Chunk String = String
nullChunk _ = null
{-# INLINE nullChunk #-}
appendChunk = mappend
{-# INLINE appendChunk #-}
subChunk c off sz = C.take sz $ C.drop (offsetAsSize off) c
{-# INLINE subChunk #-}
spanChunk buf off predicate =
let c = C.drop (offsetAsSize off) buf
(t, _) = C.span predicate c
in (t, off `offsetPlusE` length t)
{-# INLINE spanChunk #-}
instance ParserSource [a] where
type Chunk [a] = [a]
nullChunk _ = null
{-# INLINE nullChunk #-}
appendChunk = mappend
{-# INLINE appendChunk #-}
subChunk c off sz = C.take sz $ C.drop (offsetAsSize off) c
{-# INLINE subChunk #-}
spanChunk buf off predicate =
let c = C.drop (offsetAsSize off) buf
(t, _) = C.span predicate c
in (t, off `offsetPlusE` length t)
{-# INLINE spanChunk #-}
-- ------------------------------------------------------------------------- --
-- Helpers --
-- ------------------------------------------------------------------------- --
-- | helper function to report error when writing parsers
--
-- This way we can provide more detailed error when building custom
-- parsers and still avoid to use the naughty _fail_.
--
-- @
-- myParser :: Parser input Int
-- myParser = reportError $ Satisfy (Just "this function is not implemented...")
-- @
--
reportError :: ParseError input -> Parser input a
reportError pe = Parser $ \buf off nm err _ -> err buf off nm pe
-- | Get the next `Element input` from the parser
anyElement :: ParserSource input => Parser input (Element input)
anyElement = Parser $ \buf off nm err ok ->
case buf ! off of
Nothing -> err buf off nm $ NotEnough 1
Just x -> ok buf (succ off) nm x
{-# INLINE anyElement #-}
-- | peek the first element from the input source without consuming it
--
-- Returns 'Nothing' if there is no more input to parse.
--
peek :: ParserSource input => Parser input (Maybe (Element input))
peek = Parser $ \buf off nm err ok ->
case buf ! off of
Nothing -> runParser_ peekOnly buf off nm err ok
Just x -> ok buf off nm (Just x)
where
peekOnly = Parser $ \buf off nm _ ok ->
ok buf off nm (buf ! off)
element :: ( ParserSource input
, Eq (Element input)
, Element input ~ Element (Chunk input)
)
=> Element input
-> Parser input ()
element expectedElement = Parser $ \buf off nm err ok ->
case buf ! off of
Nothing -> err buf off nm $ NotEnough 1
Just x | expectedElement == x -> ok buf (succ off) nm ()
| otherwise -> err buf off nm $ ExpectedElement expectedElement x
{-# INLINE element #-}
elements :: ( ParserSource input, Sequential (Chunk input)
, Element (Chunk input) ~ Element input
, Eq (Chunk input)
)
=> Chunk input -> Parser input ()
elements = consumeEq
where
consumeEq :: ( ParserSource input
, Sequential (Chunk input)
, Element (Chunk input) ~ Element input
, Eq (Chunk input)
)
=> Chunk input -> Parser input ()
consumeEq expected = Parser $ \buf off nm err ok ->
if endOfParserSource buf off
then
err buf off nm $ NotEnough lenE
else
let !lenI = sizeAsOffset (length buf) - off
in if lenI >= lenE
then
let a = subChunk buf off lenE
in if a == expected
then ok buf (off + sizeAsOffset lenE) nm ()
else err buf off nm $ Expected expected a
else
let a = subChunk buf off lenI
(e', r) = splitAt lenI expected
in if a == e'
then runParser_ (consumeEq r) buf (off + sizeAsOffset lenI) nm err ok
else err buf off nm $ Expected e' a
where
!lenE = length expected
{-# NOINLINE consumeEq #-}
{-# INLINE elements #-}
-- | take one element if it satisfies the given predicate
satisfy :: ParserSource input => Maybe String -> (Element input -> Bool) -> Parser input (Element input)
satisfy desc predicate = Parser $ \buf off nm err ok ->
case buf ! off of
Nothing -> err buf off nm $ NotEnough 1
Just x | predicate x -> ok buf (succ off) nm x
| otherwise -> err buf off nm $ Satisfy desc
{-# INLINE satisfy #-}
-- | take one element if it satisfies the given predicate
satisfy_ :: ParserSource input => (Element input -> Bool) -> Parser input (Element input)
satisfy_ = satisfy Nothing
{-# INLINE satisfy_ #-}
take :: ( ParserSource input
, Sequential (Chunk input)
, Element input ~ Element (Chunk input)
)
=> CountOf (Element (Chunk input))
-> Parser input (Chunk input)
take n = Parser $ \buf off nm err ok ->
let lenI = sizeAsOffset (length buf) - off
in if endOfParserSource buf off && n > 0
then err buf off nm $ NotEnough n
else case n - lenI of
Just s | s > 0 -> let h = subChunk buf off lenI
in runParser_ (take s) buf (sizeAsOffset lenI) nm err $
\buf' off' nm' t -> ok buf' off' nm' (h <> t)
_ -> ok buf (off + sizeAsOffset n) nm (subChunk buf off n)
takeWhile :: ( ParserSource input, Sequential (Chunk input)
)
=> (Element input -> Bool)
-> Parser input (Chunk input)
takeWhile predicate = Parser $ \buf off nm err ok ->
if endOfParserSource buf off
then ok buf off nm mempty
else let (b1, off') = spanChunk buf off predicate
in if endOfParserSource buf off'
then runParser_ (takeWhile predicate) buf off' nm err
$ \buf' off'' nm' b1T -> ok buf' off'' nm' (b1 <> b1T)
else ok buf off' nm b1
-- | Take the remaining elements from the current position in the stream
takeAll :: (ParserSource input, Sequential (Chunk input)) => Parser input (Chunk input)
takeAll = getAll >> returnBuffer
where
returnBuffer :: ParserSource input => Parser input (Chunk input)
returnBuffer = Parser $ \buf off nm _ ok ->
let !lenI = length buf
!off' = sizeAsOffset lenI
!sz = off' - off
in ok buf off' nm (subChunk buf off sz)
{-# INLINE returnBuffer #-}
getAll :: (ParserSource input, Sequential (Chunk input)) => Parser input ()
getAll = Parser $ \buf off nm err ok ->
case nm of
NoMore -> ok buf off nm ()
More -> ParseMore $ \nextChunk ->
if nullChunk buf nextChunk
then ok buf off NoMore ()
else runParser getAll (appendChunk buf nextChunk) off nm err ok
{-# NOINLINE getAll #-}
{-# INLINE takeAll #-}
skip :: ParserSource input => CountOf (Element input) -> Parser input ()
skip n = Parser $ \buf off nm err ok ->
let lenI = sizeAsOffset (length buf) - off
in if endOfParserSource buf off && n > 0
then err buf off nm $ NotEnough n
else case n - lenI of
Just s | s > 0 -> runParser_ (skip s) buf (sizeAsOffset lenI) nm err ok
_ -> ok buf (off + sizeAsOffset n) nm ()
skipWhile :: ( ParserSource input, Sequential (Chunk input)
)
=> (Element input -> Bool)
-> Parser input ()
skipWhile predicate = Parser $ \buf off nm err ok ->
if endOfParserSource buf off
then ok buf off nm ()
else let (_, off') = spanChunk buf off predicate
in if endOfParserSource buf off'
then runParser_ (skipWhile predicate) buf off' nm err ok
else ok buf off' nm ()
-- | consume every chunk of the stream
--
skipAll :: (ParserSource input, Collection (Chunk input)) => Parser input ()
skipAll = flushAll
where
flushAll :: (ParserSource input, Collection (Chunk input)) => Parser input ()
flushAll = Parser $ \buf off nm err ok ->
let !off' = sizeAsOffset $ length buf in
case nm of
NoMore -> ok buf off' NoMore ()
More -> ParseMore $ \nextChunk ->
if null nextChunk
then ok buf off' NoMore ()
else runParser flushAll buf off nm err ok
{-# NOINLINE flushAll #-}
{-# INLINE skipAll #-}
string :: String -> Parser String ()
string = elements
{-# INLINE string #-}
data Condition = Between !And | Exactly !Word
deriving (Show, Eq, Typeable)
data And = And !Word !Word
deriving (Eq, Typeable)
instance Show And where
show (And a b) = show a <> " and " <> show b
-- | repeat the given parser a given number of times
--
-- Unlike @some@ or @many@, this operation lets you state precisely how
-- many times you wish a parser to be sequenced.
--
-- ## Repeat @Exactly@ a number of times
--
-- > repeat (Exactly 6) (takeWhile ((/=) ',') <* element ',')
--
-- ## Repeat @Between@ lower `@And@` upper times
--
-- > repeat (Between $ 1 `And` 10) (takeWhile ((/=) ',') <* element ',')
--
repeat :: ParserSource input
=> Condition -> Parser input a -> Parser input [a]
repeat (Exactly n) = repeatE n
repeat (Between a) = repeatA a
repeatE :: (ParserSource input)
=> Word -> Parser input a -> Parser input [a]
repeatE 0 _ = return []
repeatE n p = (:) <$> p <*> repeatE (n-1) p
repeatA :: (ParserSource input)
=> And -> Parser input a -> Parser input [a]
repeatA (And 0 0) _ = return []
repeatA (And 0 n) p = ((:) <$> p <*> repeatA (And 0 (n-1)) p) <|> return []
repeatA (And l u) p = (:) <$> p <*> repeatA (And (l-1) (u-1)) p
| vincenthz/hs-foundation | foundation/Foundation/Parser.hs | bsd-3-clause | 19,244 | 0 | 20 | 5,560 | 5,510 | 2,816 | 2,694 | -1 | -1 |
{-# LANGUAGE BangPatterns, CPP, GeneralizedNewtypeDeriving, OverloadedStrings,
Rank2Types, RecordWildCards, TypeFamilies #-}
-- |
-- Module : Data.Attoparsec.Internal.Types
-- Copyright : Bryan O'Sullivan 2007-2015
-- License : BSD3
--
-- Maintainer : [email protected]
-- Stability : experimental
-- Portability : unknown
--
-- Simple, efficient parser combinators, loosely based on the Parsec
-- library.
module Data.Attoparsec.Internal.Types
(
Parser(..)
, State
, Failure
, Success
, Pos(..)
, IResult(..)
, More(..)
, (<>)
, Chunk(..)
) where
#if !MIN_VERSION_base(4,8,0)
import Control.Applicative (Applicative(..), (<$>))
import Data.Monoid (Monoid(..))
#endif
import Control.Applicative (Alternative(..))
import Control.DeepSeq (NFData(rnf))
import Control.Monad (MonadPlus(..))
import Data.Word (Word8)
import Data.ByteString (ByteString)
import qualified Data.ByteString as BS
import Data.ByteString.Internal (w2c)
import Data.Text (Text)
import qualified Data.Text as Text
import Data.Text.Unsafe (Iter(..))
import Prelude hiding (getChar, succ)
import qualified Data.Attoparsec.ByteString.Buffer as B
import qualified Data.Attoparsec.Text.Buffer as T
newtype Pos = Pos { fromPos :: Int }
deriving (Eq, Ord, Show, Num)
-- | The result of a parse. This is parameterised over the type @i@
-- of string that was processed.
--
-- This type is an instance of 'Functor', where 'fmap' transforms the
-- value in a 'Done' result.
data IResult i r =
Fail i [String] String
-- ^ The parse failed. The @i@ parameter is the input that had
-- not yet been consumed when the failure occurred. The
-- @[@'String'@]@ is a list of contexts in which the error
-- occurred. The 'String' is the message describing the error, if
-- any.
| Partial (i -> IResult i r)
-- ^ Supply this continuation with more input so that the parser
-- can resume. To indicate that no more input is available, pass
-- an empty string to the continuation.
--
-- __Note__: if you get a 'Partial' result, do not call its
-- continuation more than once.
| Done i r
-- ^ The parse succeeded. The @i@ parameter is the input that had
-- not yet been consumed (if any) when the parse succeeded.
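-- A driver sketch (added for illustration; not part of this module): feed a
-- list of input chunks to a result, calling each 'Partial' continuation at
-- most once and closing the parser with 'mempty' when the chunks run out.
feedChunks :: Monoid i => IResult i r -> [i] -> IResult i r
feedChunks (Partial k) (c:cs) = feedChunks (k c) cs
feedChunks (Partial k) []     = k mempty  -- empty input signals "no more input"
feedChunks r           _      = r         -- 'Fail' or 'Done': nothing left to feed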
instance (Show i, Show r) => Show (IResult i r) where
showsPrec d ir = showParen (d > 10) $
case ir of
(Fail t stk msg) -> showString "Fail" . f t . f stk . f msg
(Partial _) -> showString "Partial _"
(Done t r) -> showString "Done" . f t . f r
where f :: Show a => a -> ShowS
f x = showChar ' ' . showsPrec 11 x
instance (NFData i, NFData r) => NFData (IResult i r) where
rnf (Fail t stk msg) = rnf t `seq` rnf stk `seq` rnf msg
rnf (Partial _) = ()
rnf (Done t r) = rnf t `seq` rnf r
{-# INLINE rnf #-}
instance Functor (IResult i) where
fmap _ (Fail t stk msg) = Fail t stk msg
fmap f (Partial k) = Partial (fmap f . k)
fmap f (Done t r) = Done t (f r)
-- | The core parser type. This is parameterised over the type @i@
-- of string being processed.
--
-- This type is an instance of the following classes:
--
-- * 'Monad', where 'fail' throws an exception (i.e. fails) with an
-- error message.
--
-- * 'Functor' and 'Applicative', which follow the usual definitions.
--
-- * 'MonadPlus', where 'mzero' fails (with no error message) and
-- 'mplus' executes the right-hand parser if the left-hand one
-- fails. When the parser on the right executes, the input is reset
-- to the same state as the parser on the left started with. (In
-- other words, attoparsec is a backtracking parser that supports
-- arbitrary lookahead.)
--
-- * 'Alternative', which follows 'MonadPlus'.
newtype Parser i a = Parser {
runParser :: forall r.
State i -> Pos -> More
-> Failure i (State i) r
-> Success i (State i) a r
-> IResult i r
}
type family State i
type instance State ByteString = B.Buffer
type instance State Text = T.Buffer
type Failure i t r = t -> Pos -> More -> [String] -> String
-> IResult i r
type Success i t a r = t -> Pos -> More -> a -> IResult i r
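-- A minimal sketch (added for illustration; not part of this module): a parser
-- written directly against the continuation-passing representation above. It
-- consumes no input and succeeds with the current 'More' state.
currentMore :: Parser i More
currentMore = Parser $ \t pos more _lose succ -> succ t pos more more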
-- | Have we read all available input?
data More = Complete | Incomplete
deriving (Eq, Show)
instance Monoid More where
mappend c@Complete _ = c
mappend _ m = m
mempty = Incomplete
instance Monad (Parser i) where
fail err = Parser $ \t pos more lose _succ -> lose t pos more [] msg
where msg = "Failed reading: " ++ err
{-# INLINE fail #-}
return v = Parser $ \t pos more _lose succ -> succ t pos more v
{-# INLINE return #-}
m >>= k = Parser $ \t !pos more lose succ ->
let succ' t' !pos' more' a = runParser (k a) t' pos' more' lose succ
in runParser m t pos more lose succ'
{-# INLINE (>>=) #-}
plus :: Parser i a -> Parser i a -> Parser i a
plus f g = Parser $ \t pos more lose succ ->
let lose' t' _pos' more' _ctx _msg = runParser g t' pos more' lose succ
in runParser f t pos more lose' succ
instance MonadPlus (Parser i) where
mzero = fail "mzero"
{-# INLINE mzero #-}
mplus = plus
instance Functor (Parser i) where
fmap f p = Parser $ \t pos more lose succ ->
let succ' t' pos' more' a = succ t' pos' more' (f a)
in runParser p t pos more lose succ'
{-# INLINE fmap #-}
apP :: Parser i (a -> b) -> Parser i a -> Parser i b
apP d e = do
b <- d
a <- e
return (b a)
{-# INLINE apP #-}
instance Applicative (Parser i) where
pure = return
{-# INLINE pure #-}
(<*>) = apP
{-# INLINE (<*>) #-}
-- These definitions are equal to the defaults, but this
-- way the optimizer doesn't have to work so hard to figure
-- that out.
(*>) = (>>)
{-# INLINE (*>) #-}
x <* y = x >>= \a -> y >> return a
{-# INLINE (<*) #-}
instance Monoid (Parser i a) where
mempty = fail "mempty"
{-# INLINE mempty #-}
mappend = plus
{-# INLINE mappend #-}
instance Alternative (Parser i) where
empty = fail "empty"
{-# INLINE empty #-}
(<|>) = plus
{-# INLINE (<|>) #-}
many v = many_v
where many_v = some_v <|> pure []
some_v = (:) <$> v <*> many_v
{-# INLINE many #-}
some v = some_v
where
many_v = some_v <|> pure []
some_v = (:) <$> v <*> many_v
{-# INLINE some #-}
(<>) :: (Monoid m) => m -> m -> m
(<>) = mappend
{-# INLINE (<>) #-}
-- | A common interface for input chunks.
class Monoid c => Chunk c where
type ChunkElem c
-- | Test if the chunk is empty.
nullChunk :: c -> Bool
-- | Append chunk to a buffer.
pappendChunk :: State c -> c -> State c
-- | Position at the end of a buffer. The first argument is ignored.
atBufferEnd :: c -> State c -> Pos
-- | Return the buffer element at the given position along with its length.
bufferElemAt :: c -> Pos -> State c -> Maybe (ChunkElem c, Int)
-- | Map an element to the corresponding character.
-- The first argument is ignored.
chunkElemToChar :: c -> ChunkElem c -> Char
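-- A generic helper (added for illustration; not part of this module): look up
-- the element at a buffer position and convert it using only the class methods.
peekElemChar :: Chunk c => c -> Pos -> State c -> Maybe Char
peekElemChar c pos st = chunkElemToChar c . fst <$> bufferElemAt c pos st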
instance Chunk ByteString where
type ChunkElem ByteString = Word8
nullChunk = BS.null
{-# INLINE nullChunk #-}
pappendChunk = B.pappend
{-# INLINE pappendChunk #-}
atBufferEnd _ = Pos . B.length
{-# INLINE atBufferEnd #-}
bufferElemAt _ (Pos i) buf
| i < B.length buf = Just (B.unsafeIndex buf i, 1)
| otherwise = Nothing
{-# INLINE bufferElemAt #-}
chunkElemToChar _ = w2c
{-# INLINE chunkElemToChar #-}
instance Chunk Text where
type ChunkElem Text = Char
nullChunk = Text.null
{-# INLINE nullChunk #-}
pappendChunk = T.pappend
{-# INLINE pappendChunk #-}
atBufferEnd _ = Pos . T.length
{-# INLINE atBufferEnd #-}
bufferElemAt _ (Pos i) buf
| i < T.length buf = let Iter c l = T.iter buf i in Just (c, l)
| otherwise = Nothing
{-# INLINE bufferElemAt #-}
chunkElemToChar _ = id
{-# INLINE chunkElemToChar #-}
| beni55/attoparsec | Data/Attoparsec/Internal/Types.hs | bsd-3-clause | 8,092 | 0 | 15 | 2,146 | 2,077 | 1,141 | 936 | 163 | 1 |
module ListQual (boo) where
{-@ qualif BadAppend(v:[a], xs:[a], ys:[a]): len v = len xs + len ys @-}
append [] ys = ys
append (x:xs) ys = x : append xs ys
{-@ boo :: {v:[Int] | len v = 2} @-}
boo :: [Int]
boo = append [1] [2]
| ssaavedra/liquidhaskell | tests/pos/listqual.hs | bsd-3-clause | 233 | 0 | 7 | 57 | 73 | 41 | 32 | 5 | 1 |
{-|
Module : IRTS.DumpBC
Description : Dump Idris bytecode in a human-readable textual format.
License : BSD3
Maintainer : The Idris Community.
-}
module IRTS.DumpBC where
import Idris.Core.TT
import IRTS.Bytecode
import IRTS.Simplified
import Data.List
interMap :: [a] -> [b] -> (a -> [b]) -> [b]
interMap xs y f = concat (intersperse y (map f xs))
indent :: Int -> String
indent n = replicate (n*4) ' '
serializeReg :: Reg -> String
serializeReg (L n) = "L" ++ show n
serializeReg (T n) = "T" ++ show n
serializeReg r = show r
serializeCase :: Show a => Int -> (a, [BC]) -> String
serializeCase n (x, bcs) =
indent n ++ show x ++ ":\n" ++ interMap bcs "\n" (serializeBC (n + 1))
serializeDefault :: Int -> [BC] -> String
serializeDefault n bcs =
indent n ++ "default:\n" ++ interMap bcs "\n" (serializeBC (n + 1))
serializeBC :: Int -> BC -> String
serializeBC n bc = indent n ++
case bc of
ASSIGN a b ->
"ASSIGN " ++ serializeReg a ++ " " ++ serializeReg b
ASSIGNCONST a b ->
"ASSIGNCONST " ++ serializeReg a ++ " " ++ show b
UPDATE a b ->
"UPDATE " ++ serializeReg a ++ " " ++ serializeReg b
MKCON a Nothing b xs ->
"MKCON " ++ serializeReg a ++ " " ++ show b ++ " [" ++ (interMap xs ", " serializeReg) ++ "]"
MKCON a (Just r) b xs ->
"MKCON@" ++ serializeReg r ++ " " ++ serializeReg a ++ " " ++ show b ++ " [" ++ (interMap xs ", " serializeReg) ++ "]"
CASE safe r cases def ->
"CASE " ++ serializeReg r ++ ":\n" ++ interMap cases "\n" (serializeCase (n + 1)) ++
maybe "" (\def' -> "\n" ++ serializeDefault (n + 1) def') def
PROJECT a b c ->
"PROJECT " ++ serializeReg a ++ " " ++ show b ++ " " ++ show c
PROJECTINTO a b c ->
"PROJECTINTO " ++ serializeReg a ++ " " ++ serializeReg b ++ " " ++ show c
CONSTCASE r cases def ->
"CONSTCASE " ++ serializeReg r ++ ":\n" ++ interMap cases "\n" (serializeCase (n + 1)) ++
maybe "" (\def' -> "\n" ++ serializeDefault (n + 1) def') def
CALL x -> "CALL " ++ show x
TAILCALL x -> "TAILCALL " ++ show x
FOREIGNCALL r ret name args ->
"FOREIGNCALL " ++ serializeReg r ++ " \"" ++ show name ++ "\" " ++ show ret ++
" [" ++ interMap args ", " (\(ty, r) -> serializeReg r ++ " : " ++ show ty) ++ "]"
SLIDE n -> "SLIDE " ++ show n
REBASE -> "REBASE"
RESERVE n -> "RESERVE " ++ show n
RESERVENOALLOC n -> "RESERVENOALLOC " ++ show n
ADDTOP n -> "ADDTOP " ++ show n
TOPBASE n -> "TOPBASE " ++ show n
BASETOP n -> "BASETOP " ++ show n
STOREOLD -> "STOREOLD"
OP a b c ->
"OP " ++ serializeReg a ++ " " ++ show b ++ " [" ++ interMap c ", " serializeReg ++ "]"
NULL r -> "NULL " ++ serializeReg r
ERROR s -> "ERROR \"" ++ s ++ "\"" -- FIXME: s may contain quotes
-- Issue #1596
serialize :: [(Name, [BC])] -> String
serialize decls =
interMap decls "\n\n" serializeDecl
where
serializeDecl :: (Name, [BC]) -> String
serializeDecl (name, bcs) =
show name ++ ":\n" ++ interMap bcs "\n" (serializeBC 1)
dumpBC :: [(Name, SDecl)] -> String -> IO ()
dumpBC c output = writeFile output $ serialize $ map toBC c
| kojiromike/Idris-dev | src/IRTS/DumpBC.hs | bsd-3-clause | 3,256 | 0 | 17 | 943 | 1,289 | 632 | 657 | 67 | 23 |
module String where
main = putStrLn str where str = "hello, world"
| kmate/HaRe | old/testing/introNewDef/String_AstOut.hs | bsd-3-clause | 68 | 0 | 6 | 13 | 19 | 11 | 8 | 2 | 1 |
-- We can use Template Haskell (TH) to generate instances of the
-- FromJSON and ToJSON classes automatically. This is the fastest way
-- to add JSON support for a type.
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE OverloadedStrings #-}
import Data.Aeson (decode, encode)
import Data.Aeson.TH (deriveJSON, defaultOptions)
import qualified Data.ByteString.Lazy.Char8 as BL
data Coord = Coord { x :: Double, y :: Double }
deriving (Show)
-- This splice will derive instances of ToJSON and FromJSON for us.
$(deriveJSON defaultOptions ''Coord)
main :: IO ()
main = do
let req = decode "{\"x\":3.0,\"y\":-1.0}" :: Maybe Coord
print req
let reply = Coord 123.4 20
BL.putStrLn (encode reply)
| plaprade/aeson | examples/TemplateHaskell.hs | bsd-3-clause | 716 | 1 | 10 | 136 | 156 | 84 | 72 | 14 | 1 |
{-# LANGUAGE TemplateHaskell, QuasiQuotes #-}
{-# LANGUAGE TypeFamilies, EmptyDataDecls, GADTs #-}
{-# LANGUAGE TypeSynonymInstances, MultiParamTypeClasses, GeneralizedNewtypeDeriving #-}
module Main where
import qualified Database.Redis as R
import Database.Persist
import Database.Persist.Redis
import Database.Persist.TH
import Language.Haskell.TH.Syntax
import Control.Monad.IO.Class (liftIO)
import Data.Text (Text, pack, unpack)
let redisSettings = mkPersistSettings (ConT ''RedisBackend)
in share [mkPersist redisSettings] [persistLowerCase|
Person
name String
age Int
deriving Show
|]
d :: R.ConnectInfo
d = R.defaultConnectInfo
host :: Text
host = pack $ R.connectHost d
redisConf :: RedisConf
redisConf = RedisConf host (R.connectPort d) Nothing 10
mkKey :: (Monad m, PersistEntity val) => Text -> m (Key val)
mkKey s = case keyFromValues [PersistText s] of
Right z -> return z
Left a -> fail (unpack a)
main :: IO ()
main =
withRedisConn redisConf $ runRedisPool $ do
_ <- liftIO $ print "Inserting..."
s <- insert $ Person "Test" 12
_ <- liftIO $ print ("Received the key" ++ show s)
key <- mkKey (pack "person_test")
insertKey key $ Person "Test2" 45
repsert s (Person "Test3" 55)
g <- get key :: RedisT IO (Maybe Person)
liftIO $ print g
delete s
return () | jasonzoladz/persistent | persistent-redis/tests/basic-test.hs | mit | 1,385 | 0 | 13 | 300 | 426 | 215 | 211 | 36 | 2 |
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RankNTypes #-}
--
-- Licensed to the Apache Software Foundation (ASF) under one
-- or more contributor license agreements. See the NOTICE file
-- distributed with this work for additional information
-- regarding copyright ownership. The ASF licenses this file
-- to you under the Apache License, Version 2.0 (the
-- "License"); you may not use this file except in compliance
-- with the License. You may obtain a copy of the License at
--
-- http://www.apache.org/licenses/LICENSE-2.0
--
-- Unless required by applicable law or agreed to in writing,
-- software distributed under the License is distributed on an
-- "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-- KIND, either express or implied. See the License for the
-- specific language governing permissions and limitations
-- under the License.
--
module Thrift
( module Thrift.Transport
, module Thrift.Protocol
, AppExnType(..)
, AppExn(..)
, readAppExn
, writeAppExn
, ThriftException(..)
) where
import Control.Exception
import Data.Int
import Data.Text.Lazy ( Text, pack, unpack )
import Data.Text.Lazy.Encoding
import Data.Typeable ( Typeable )
import qualified Data.HashMap.Strict as Map
import Thrift.Protocol
import Thrift.Transport
import Thrift.Types
data ThriftException = ThriftException
deriving ( Show, Typeable )
instance Exception ThriftException
data AppExnType
= AE_UNKNOWN
| AE_UNKNOWN_METHOD
| AE_INVALID_MESSAGE_TYPE
| AE_WRONG_METHOD_NAME
| AE_BAD_SEQUENCE_ID
| AE_MISSING_RESULT
| AE_INTERNAL_ERROR
| AE_PROTOCOL_ERROR
| AE_INVALID_TRANSFORM
| AE_INVALID_PROTOCOL
| AE_UNSUPPORTED_CLIENT_TYPE
deriving ( Eq, Show, Typeable )
instance Enum AppExnType where
toEnum 0 = AE_UNKNOWN
toEnum 1 = AE_UNKNOWN_METHOD
toEnum 2 = AE_INVALID_MESSAGE_TYPE
toEnum 3 = AE_WRONG_METHOD_NAME
toEnum 4 = AE_BAD_SEQUENCE_ID
toEnum 5 = AE_MISSING_RESULT
toEnum 6 = AE_INTERNAL_ERROR
toEnum 7 = AE_PROTOCOL_ERROR
toEnum 8 = AE_INVALID_TRANSFORM
toEnum 9 = AE_INVALID_PROTOCOL
toEnum 10 = AE_UNSUPPORTED_CLIENT_TYPE
toEnum t = error $ "Invalid AppExnType " ++ show t
fromEnum AE_UNKNOWN = 0
fromEnum AE_UNKNOWN_METHOD = 1
fromEnum AE_INVALID_MESSAGE_TYPE = 2
fromEnum AE_WRONG_METHOD_NAME = 3
fromEnum AE_BAD_SEQUENCE_ID = 4
fromEnum AE_MISSING_RESULT = 5
fromEnum AE_INTERNAL_ERROR = 6
fromEnum AE_PROTOCOL_ERROR = 7
fromEnum AE_INVALID_TRANSFORM = 8
fromEnum AE_INVALID_PROTOCOL = 9
fromEnum AE_UNSUPPORTED_CLIENT_TYPE = 10
data AppExn = AppExn { ae_type :: AppExnType, ae_message :: String }
deriving ( Show, Typeable )
instance Exception AppExn
writeAppExn :: (Protocol p, Transport t) => p t -> AppExn -> IO ()
writeAppExn pt ae = writeVal pt $ TStruct $ Map.fromList
[ (1, ("message", TString $ encodeUtf8 $ pack $ ae_message ae))
, (2, ("type", TI32 $ fromIntegral $ fromEnum (ae_type ae)))
]
readAppExn :: (Protocol p, Transport t) => p t -> IO AppExn
readAppExn pt = do
let typemap = Map.fromList [("message",(1,T_STRING)),("type",(2,T_I32))]
TStruct fields <- readVal pt $ T_STRUCT typemap
return $ readAppExnFields fields
readAppExnFields :: Map.HashMap Int16 (Text, ThriftVal) -> AppExn
readAppExnFields fields = AppExn{
ae_message = maybe undefined unwrapMessage $ Map.lookup 1 fields,
ae_type = maybe undefined unwrapType $ Map.lookup 2 fields
}
where
unwrapMessage (_, TString s) = unpack $ decodeUtf8 s
unwrapMessage _ = undefined
unwrapType (_, TI32 i) = toEnum $ fromIntegral i
unwrapType _ = undefined
| chjp2046/fbthrift | thrift/lib/hs/Thrift.hs | apache-2.0 | 3,765 | 0 | 13 | 784 | 831 | 458 | 373 | 80 | 3 |
module T5664 where
import Foreign
import Foreign.C
data D = D
newtype I = I CInt
foreign import ccall "dynamic"
mkFun1 :: FunPtr (CInt -> IO ()) -> CInt -> IO ()
foreign import ccall "dynamic"
mkFun2 :: FunPtr (I -> IO ()) -> CInt -> IO ()
foreign import ccall "dynamic"
mkFun3 :: FunPtr (D -> IO ()) -> CInt -> IO ()
foreign import ccall "wrapper"
mkCallBack1 :: IO CInt -> IO (FunPtr (IO CInt))
foreign import ccall "wrapper"
mkCallBack2 :: IO CInt -> IO (FunPtr (IO I))
foreign import ccall "wrapper"
mkCallBack3 :: IO CInt -> IO (FunPtr (IO D))
| urbanslug/ghc | testsuite/tests/ffi/should_fail/T5664.hs | bsd-3-clause | 569 | 0 | 11 | 123 | 249 | 128 | 121 | 17 | 0 |
{-# LANGUAGE ViewPatterns #-}
{-# LANGUAGE PatternSynonyms #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE OverloadedStrings #-}
module Unison.Codebase.Editor.SlurpResult where
import Unison.Prelude
import Unison.Codebase.Editor.SlurpComponent (SlurpComponent(..))
import Unison.Name ( Name )
import Unison.Parser ( Ann )
import Unison.Var (Var)
import qualified Data.Map as Map
import qualified Data.Set as Set
import qualified Unison.Codebase.Editor.SlurpComponent as SC
import qualified Unison.DataDeclaration as DD
import qualified Unison.DeclPrinter as DeclPrinter
import qualified Unison.HashQualified as HQ
import qualified Unison.Name as Name
import qualified Unison.Names2 as Names
import qualified Unison.PrettyPrintEnv as PPE
import qualified Unison.Referent as Referent
import qualified Unison.TypePrinter as TP
import qualified Unison.UnisonFile as UF
import qualified Unison.Util.Monoid as Monoid
import qualified Unison.Util.Pretty as P
import qualified Unison.Util.Relation as R
import qualified Unison.Var as Var
-- `oldRefNames` are the previously existing names for the old reference
-- (these names will all be pointed to a new reference)
-- `newRefNames` are the previously existing names for the new reference
-- (the reference that all the old names will point to after the update)
data Aliases
= AddAliases (Set Name)
| UpdateAliases { oldRefNames :: Set Name
, newRefNames :: Set Name }
deriving (Show, Eq, Ord)
data SlurpResult v = SlurpResult {
-- The file that we tried to add from
originalFile :: UF.TypecheckedUnisonFile v Ann
-- Extra definitions that were added to satisfy transitive closure,
-- beyond what the user specified.
, extraDefinitions :: SlurpComponent v
-- Previously existed only in the file; now added to the codebase.
, adds :: SlurpComponent v
-- Exists in the branch and the file, with the same name and contents.
, duplicates :: SlurpComponent v
-- Not added to codebase due to the name already existing
-- in the branch with a different definition.
, collisions :: SlurpComponent v
-- Not added to codebase due to the name existing
-- in the branch with a conflict (two or more definitions).
, conflicts :: SlurpComponent v
-- Names that already exist in the branch, but whose definitions
-- in `originalFile` are treated as updates.
, updates :: SlurpComponent v
-- Names of terms in `originalFile` that couldn't be updated because
-- they refer to existing constructors. (User should instead do a find/replace,
-- a constructor rename, or refactor the type that the name comes from).
, termExistingConstructorCollisions :: Set v
, constructorExistingTermCollisions :: Set v
-- -- Already defined in the branch, but with a different name.
, termAlias :: Map v Aliases
, typeAlias :: Map v Aliases
, defsWithBlockedDependencies :: SlurpComponent v
} deriving (Show)
-- Returns the set of constructor names for type names in the given `Set`.
constructorsFor :: Var v => Set v -> UF.TypecheckedUnisonFile v Ann -> Set v
constructorsFor types uf = let
names = UF.typecheckedToNames0 uf
typesRefs = Set.unions $ Names.typesNamed names . Name.fromVar <$> toList types
ctorNames = R.filterRan isOkCtor (Names.terms names)
isOkCtor (Referent.Con r _ _) | Set.member r typesRefs = True
isOkCtor _ = False
in Set.map Name.toVar $ R.dom ctorNames
-- Remove `removed` from the slurp result, and move any defns with transitive
-- dependencies on the removed component into `defsWithBlockedDependencies`.
-- Also removes `removed` from `extraDefinitions`.
subtractComponent :: forall v. Var v => SlurpComponent v -> SlurpResult v -> SlurpResult v
subtractComponent removed sr =
sr { adds = SC.difference (adds sr) (removed <> blocked)
, updates = SC.difference (updates sr) (removed <> blocked)
, defsWithBlockedDependencies = blocked
, extraDefinitions = SC.difference (extraDefinitions sr) blocked
}
where
-- for each v in adds, move to blocked if transitive dependency in removed
blocked = defsWithBlockedDependencies sr <>
SC.difference (blockedTerms <> blockedTypes) removed
uf = originalFile sr
constructorsFor v = case UF.lookupDecl v uf of
Nothing -> mempty
Just (_, e) -> Set.fromList . DD.constructorVars $ either DD.toDataDecl id e
blockedTypes = foldMap doType . SC.types $ adds sr <> updates sr where
-- include this type if it or any of its dependencies are removed
doType :: v -> SlurpComponent v
doType v =
if null (Set.intersection (SC.types removed) (SC.types (SC.closeWithDependencies uf vc)))
&& null (Set.intersection (SC.terms removed) (constructorsFor v))
then mempty else vc
where vc = mempty { types = Set.singleton v }
blockedTerms = foldMap doTerm . SC.terms $ adds sr <> updates sr where
doTerm :: v -> SlurpComponent v
doTerm v =
if mempty == SC.intersection removed (SC.closeWithDependencies uf vc)
then mempty else vc
where vc = mempty { terms = Set.singleton v }
-- Move `updates` to `collisions`, and move any dependents of those updates to `*WithBlockedDependencies`.
-- Subtract stuff from `extraDefinitions` that isn't in `adds` or `updates`
disallowUpdates :: forall v. Var v => SlurpResult v -> SlurpResult v
disallowUpdates sr =
let sr2 = subtractComponent (updates sr) sr
in sr2 { collisions = collisions sr2 <> updates sr }
isNonempty :: Ord v => SlurpResult v -> Bool
isNonempty s = Monoid.nonEmpty (adds s) || Monoid.nonEmpty (updates s)
data Status =
Add | Update | Duplicate | Collision | Conflicted |
TermExistingConstructorCollision | ConstructorExistingTermCollision |
ExtraDefinition | BlockedDependency
deriving (Ord,Eq,Show)
isFailure :: Status -> Bool
isFailure s = case s of
TermExistingConstructorCollision -> True
ConstructorExistingTermCollision -> True
BlockedDependency -> True
Collision -> True
Conflicted -> True
_ -> False
prettyStatus :: Status -> P.Pretty P.ColorText
prettyStatus s = case s of
Add -> "added"
Update -> "updated"
Collision -> "needs update"
Conflicted -> "conflicted"
Duplicate -> "duplicate"
TermExistingConstructorCollision -> "term/ctor collision"
ConstructorExistingTermCollision -> "ctor/term collision"
BlockedDependency -> "blocked"
ExtraDefinition -> "extra dependency"
type IsPastTense = Bool
prettyVar :: Var v => v -> P.Pretty P.ColorText
prettyVar = P.text . Var.name
aliasesToShow :: Int
aliasesToShow = 5
pretty
:: forall v
. Var v
=> IsPastTense
-> PPE.PrettyPrintEnv
-> SlurpResult v
-> P.Pretty P.ColorText
pretty isPast ppe sr =
let
tms = UF.hashTerms (originalFile sr)
goodIcon = P.green "⍟ "
badIcon = P.red "x "
plus = P.green " "
oxfordAliases shown sz end =
P.oxfordCommasWith end $ (P.shown <$> shown) ++ case sz of
0 -> []
n -> [P.shown n <> " more"]
okType v = (plus <>) $ case UF.lookupDecl v (originalFile sr) of
Just (_, dd) ->
P.syntaxToColor (DeclPrinter.prettyDeclHeader (HQ.unsafeFromVar v) dd)
<> if null aliases
then mempty
else P.newline <> P.indentN 2 (P.lines aliases)
where aliases = aliasesMessage . Map.lookup v $ typeAlias sr
Nothing -> P.bold (prettyVar v) <> P.red " (Unison bug, unknown type)"
aliasesMessage aliases = case aliases of
Nothing -> []
Just (AddAliases (splitAt aliasesToShow . toList -> (shown, rest))) ->
[ P.indentN 2 . P.wrap $
P.hiBlack "(also named " <> oxfordAliases
shown
(length rest)
(P.hiBlack ")")
]
Just (UpdateAliases oldNames newNames) ->
let oldMessage =
let (shown, rest) = splitAt aliasesToShow $ toList oldNames
sz = length oldNames
in P.indentN
2
( P.wrap
$ P.hiBlack
( "(The old definition "
<> (if isPast then "was" else "is")
<> " also named "
)
<> oxfordAliases shown (length rest) (P.hiBlack ".")
<> P.hiBlack
(case (sz, isPast) of
(1, True ) -> "I updated this name too.)"
(1, False) -> "I'll update this name too.)"
(_, True ) -> "I updated these names too.)"
(_, False) -> "I'll update these names too.)"
)
)
newMessage =
let (shown, rest) = splitAt aliasesToShow $ toList newNames
sz = length rest
in P.indentN
2
( P.wrap
$ P.hiBlack "(The new definition is already named "
<> oxfordAliases shown sz (P.hiBlack " as well.)")
)
in (if null oldNames then mempty else [oldMessage])
++ (if null newNames then mempty else [newMessage])
-- The second field in the result is an optional second column.
okTerm :: v -> [(P.Pretty P.ColorText, Maybe (P.Pretty P.ColorText))]
okTerm v = case Map.lookup v tms of
Nothing ->
[(P.bold (prettyVar v), Just $ P.red "(Unison bug, unknown term)")]
Just (_, _, ty) ->
( plus <> P.bold (prettyVar v)
, Just $ ": " <> P.indentNAfterNewline 2 (TP.pretty ppe ty)
)
: ((, Nothing) <$> aliases)
where
aliases = fmap (P.indentN 2) . aliasesMessage . Map.lookup v $ termAlias sr
ok _ _ sc | SC.isEmpty sc = mempty
ok past present sc =
let header = goodIcon <> P.indentNAfterNewline
2
(P.wrap (if isPast then past else present))
updatedTypes = P.lines $ okType <$> toList (SC.types sc)
updatedTerms = P.mayColumn2 . (=<<) okTerm . Set.toList $ SC.terms sc
in header <> "\n\n" <> P.linesNonEmpty [updatedTypes, updatedTerms]
okToUpdate = ok
(P.green "I've updated these names to your new definition:")
( P.green
$ "These names already exist. You can `update` them "
<> "to your new definition:"
)
okToAdd = ok (P.green "I've added these definitions:")
(P.green "These new definitions are ok to `add`:")
notOks _past _present sr | isOk sr = mempty
notOks past present sr =
let
header = badIcon <> P.indentNAfterNewline
2
(P.wrap (if isPast then past else present))
typeLineFor status v = case UF.lookupDecl v (originalFile sr) of
Just (_, dd) ->
( prettyStatus status
, P.syntaxToColor
$ DeclPrinter.prettyDeclHeader (HQ.unsafeFromVar v) dd
)
Nothing ->
( prettyStatus status
, prettyVar v <> P.red (P.wrap " (Unison bug, unknown type)")
)
typeMsgs =
P.column2
$ (typeLineFor Conflicted <$> toList (types (conflicts sr)))
++ (typeLineFor Collision <$> toList (types (collisions sr)))
++ ( typeLineFor BlockedDependency
<$> toList (types (defsWithBlockedDependencies sr))
)
termLineFor status v = case Map.lookup v tms of
Just (_ref, _tm, ty) ->
( prettyStatus status
, P.bold (P.text $ Var.name v)
, ": " <> P.indentNAfterNewline 6 (TP.pretty ppe ty)
)
Nothing -> (prettyStatus status, P.text (Var.name v), "")
termMsgs =
P.column3sep " "
$ (termLineFor Conflicted <$> toList (terms (conflicts sr)))
++ (termLineFor Collision <$> toList (terms (collisions sr)))
++ ( termLineFor TermExistingConstructorCollision
<$> toList (termExistingConstructorCollisions sr)
)
++ ( termLineFor ConstructorExistingTermCollision
<$> toList (constructorExistingTermCollisions sr)
)
++ ( termLineFor BlockedDependency
<$> toList (terms (defsWithBlockedDependencies sr))
)
in
header
<> "\n\n"
<> P.hiBlack " Reason"
<> "\n"
<> P.indentN 2 (P.linesNonEmpty [typeMsgs, termMsgs])
<> "\n\n"
<> P.indentN
2
(P.column2 [("Tip:", "Use `help filestatus` to learn more.")])
dups = Set.toList (SC.terms (duplicates sr) <> SC.types (duplicates sr))
more i =
"... "
<> P.bold (P.shown i)
<> P.hiBlack " more."
<> "Try moving these below the `---` \"fold\" in your file."
in
P.sepNonEmpty
"\n\n"
[ if SC.isEmpty (duplicates sr)
then mempty
else
(if isPast
then "⊡ Ignored previously added definitions: "
else "⊡ Previously added definitions will be ignored: "
)
<> P.indentNAfterNewline
2
(P.wrap $ P.excerptSep' (Just 7)
more
" "
(P.hiBlack . prettyVar <$> dups)
)
, okToAdd (adds sr)
, okToUpdate (updates sr)
, notOks
(P.red "These definitions failed:")
(P.wrap $ P.red "These definitions would fail on `add` or `update`:")
sr
]
isOk :: Ord v => SlurpResult v -> Bool
isOk SlurpResult {..} =
SC.isEmpty collisions &&
SC.isEmpty conflicts &&
Set.null termExistingConstructorCollisions &&
Set.null constructorExistingTermCollisions &&
SC.isEmpty defsWithBlockedDependencies
isAllDuplicates :: Ord v => SlurpResult v -> Bool
isAllDuplicates SlurpResult {..} =
SC.isEmpty adds &&
SC.isEmpty updates &&
SC.isEmpty extraDefinitions &&
SC.isEmpty collisions &&
SC.isEmpty conflicts &&
Map.null typeAlias &&
Map.null termAlias &&
Set.null termExistingConstructorCollisions &&
Set.null constructorExistingTermCollisions &&
SC.isEmpty defsWithBlockedDependencies
-- stack repl
--
-- λ> import Unison.Util.Pretty
-- λ> import Unison.Codebase.Editor.SlurpResult
-- λ> putStrLn $ toANSI 80 ex
ex :: P.Pretty P.ColorText
ex = P.indentN 2 $ P.lines ["",
P.green "▣ I've added these definitions: ", "",
P.indentN 2 . P.column2 $ [("a", "Nat"), ("map", "(a -> b) -> [a] -> [b]")],
"",
P.green "▣ I've updated these definitions: ", "",
P.indentN 2 . P.column2 $ [("c", "Nat"), ("flatMap", "(a -> [b]) -> [a] -> [b]")],
"",
P.wrap $ P.red "x" <> P.bold "These definitions couldn't be added:", "",
P.indentN 2 $
P.lines [
P.column2 [(P.hiBlack
"Reason for failure Symbol ", P.hiBlack "Type"),
("ctor/term collision foo ", "Nat"),
("failed dependency zoot ", "[a] -> [a] -> [a]"),
("term/ctor collision unique type Foo ", "f x")],
"", "Tip: use `help filestatus` to learn more."
],
"",
"⊡ Ignoring previously added definitions: " <>
P.indentNAfterNewline 2 (
P.hiBlack (P.wrap $ P.sep " " ["zonk", "anotherOne", "List.wrangle", "oatbag", "blarg", "mcgee", P.group "ability Woot"])),
""
]
| unisonweb/platform | parser-typechecker/src/Unison/Codebase/Editor/SlurpResult.hs | mit | 15,671 | 0 | 28 | 4,664 | 3,916 | 2,043 | 1,873 | -1 | -1 |
-- Copyright 2015 Ian D. Bollinger
--
-- Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-- http://www.apache.org/licenses/LICENSE-2.0> or the MIT license <LICENSE-MIT
-- or http://opensource.org/licenses/MIT>, at your option. This file may not be
-- copied, modified, or distributed except according to those terms.
{-# LANGUAGE CPP #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE OverloadedStrings #-}
module Nomegen.CLI (
main,
) where
#if __GLASGOW_HASKELL__ < 710
import Control.Applicative ((<$>), (<*>))
#endif
import Control.Monad (replicateM_)
import Data.Char (toUpper)
import Data.Monoid ((<>))
import System.IO (Handle, IOMode (WriteMode), openFile, stdout)
import Data.Text (Text)
import qualified Data.Text as Text
import qualified Data.Text.IO as Text
import qualified Options.Applicative.Builder as Opt
import Options.Applicative.Common (Parser)
import Options.Applicative.Extra (execParser, helper)
import Nomegen (
Nomicon, explainYamlParseException, generate, nameToText, yamlDeserializer,
)
data Options = Options {
_output :: !(IO Handle),
number :: !Int,
lowerCase :: !Bool,
lexiconFile :: !String
}
main :: IO ()
main = do
x <- execParser opts
yamlDeserializer (lexiconFile x) >>= \case
Left err -> putStrLn $ explainYamlParseException err
Right lexicon' -> printWords x lexicon'
where
opts = Opt.info (helper <*> options)
( Opt.fullDesc
<> Opt.progDesc "generate N random names using FILE"
<> Opt.header "nomegen: a random name generator"
)
printWords :: Options -> Nomicon -> IO ()
printWords options' nomicon =
replicateM_ (number options') $
Text.putStrLn . format . nameToText =<< generate nomicon
where
format
| lowerCase options' = id
| otherwise = capitalize
options :: Parser Options
options = Options
-- TODO: translate IO exception into Left arm of EitherT.
<$> Opt.option (flip openFile WriteMode <$> Opt.str)
( Opt.short 'o'
<> Opt.long "out"
<> Opt.metavar "OUT"
<> Opt.help "File to write the output"
<> Opt.value (return stdout)
)
<*> Opt.option Opt.auto
( Opt.short 'n'
<> Opt.long "number"
<> Opt.metavar "N"
<> Opt.help "Number of words to generate (default: 1)"
<> Opt.value (1 :: Int)
)
<*> Opt.switch
( Opt.short 'l'
<> Opt.long "lowercase"
<> Opt.help "Do not capitalize output"
)
<*> Opt.strArgument
( Opt.metavar "FILE"
<> Opt.help "The Nomicon file to use"
)
capitalize :: Text -> Text
capitalize = Text.cons . toUpper . Text.head <*> Text.tail
| ianbollinger/nomegen | cli/Nomegen/CLI.hs | mit | 2,723 | 0 | 16 | 658 | 665 | 358 | 307 | 71 | 2 |
{-# LANGUAGE OverloadedStrings #-}
module Unison.Test.TypeParser where
import Test.Tasty
import Unison.Parser (Result(..))
import Unison.Parsers (parseType)
import Unison.Type (Type)
import qualified Unison.Type as T
-- import Test.Tasty.SmallCheck as SC
-- import Test.Tasty.QuickCheck as QC
import Test.Tasty.HUnit
import Unison.Symbol (Symbol)
import Unison.View (DFO)
parseV :: (String, Type (Symbol DFO)) -> TestTree
parseV (s,expected) =
testCase ("`" ++ s ++ "`") $ case parseType s of
Fail _ _ -> assertFailure "parse failure"
Succeed a _ _ -> assertEqual "mismatch" expected a
tests :: TestTree
tests = testGroup "TypeParser" $ fmap parseV strings
where
strings :: [(String, Type (Symbol DFO))]
strings =
[ ("Number", T.lit T.Number)
, ("Text", T.lit T.Text)
, ("Vector", T.lit T.Vector)
, ("Remote", T.builtin "Remote")
, ("Foo", foo)
, ("Foo -> Foo", T.arrow foo foo)
, ("a -> a", T.arrow a a)
, ("Foo -> Foo -> Foo", T.arrow foo (T.arrow foo foo))
, ("Foo -> (Foo -> Foo)", T.arrow foo (T.arrow foo foo))
, ("(Foo -> Foo) -> Foo", T.arrow (T.arrow foo foo) foo)
, ("Vector Foo", T.vectorOf foo)
, ("forall a . a -> a", forall_aa)
, ("forall a. a -> a", forall_aa)
, ("(forall a . a) -> Number", T.forall' ["a"] (T.v' "a") `T.arrow` T.lit T.Number)
]
a = T.v' "a"
foo = T.v' "Foo"
forall_aa = T.forall' ["a"] (T.arrow a a)
main :: IO ()
main = defaultMain tests
| nightscape/platform | shared/tests/Unison/Test/TypeParser.hs | mit | 1,605 | 0 | 13 | 455 | 550 | 306 | 244 | 38 | 2 |
--The MIT License (MIT)
--
--Copyright (c) 2016-2017 Steffen Michels ([email protected])
--
--Permission is hereby granted, free of charge, to any person obtaining a copy of
--this software and associated documentation files (the "Software"), to deal in
--the Software without restriction, including without limitation the rights to use,
--copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the
--Software, and to permit persons to whom the Software is furnished to do so,
--subject to the following conditions:
--
--The above copyright notice and this permission notice shall be included in all
--copies or substantial portions of the Software.
--
--THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
--IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
--FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
--COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
--IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
--CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
{-# LANGUAGE CPP #-}
#if __GLASGOW_HASKELL__ >= 800
{-# LANGUAGE Strict #-}
#endif
module Interval
( IntervalLimit(..)
, IntervalLimitPoint(..)
, Infinitesimal(..)
, LowerUpper(..)
, Interval(..)
, toPoint
, pointRational
, corners
, rat2IntervLimPoint
, nullInfte
, (~>), (~>=), (~<), (~<=)
) where
import Data.Hashable (Hashable)
import GHC.Generics (Generic)
import Data.HashMap (Map)
import qualified Data.HashMap as Map
import Numeric (fromRat)
import Data.Foldable (foldl')
import TextShow
import Data.Monoid ((<>))
data IntervalLimit = Inf | Open Rational | Closed Rational deriving (Eq, Generic, Ord)
instance TextShow IntervalLimit where
showb Inf = "inf"
showb (Open r) = "Open " <> showb (fromRat r :: Float)
showb (Closed r) = "Closed " <> showb (fromRat r :: Float)
data LowerUpper = Lower | Upper
data IntervalLimitPoint = PosInf | NegInf | Indet
| Point Rational Infinitesimal
deriving (Eq)
instance TextShow IntervalLimitPoint where
showb PosInf = "+inf"
showb NegInf = "-inf"
showb Indet = "?"
showb (Point r infte) = showb (fromRat r :: Float) <> "^" <> showb infte
data Infinitesimal = InfteNull | InftePlus | InfteMinus | InfteIndet deriving Eq
instance TextShow Infinitesimal where
showb InfteNull = "0"
showb InftePlus = "+"
showb InfteMinus = "-"
showb InfteIndet = "?"
instance Hashable IntervalLimit
data Interval = Interval IntervalLimit IntervalLimit deriving (Eq, Generic, Ord)
instance Hashable Interval
toPoint :: LowerUpper -> IntervalLimit -> IntervalLimitPoint
toPoint _ (Closed p) = Point p InfteNull
toPoint Lower (Open p) = Point p InftePlus
toPoint Upper (Open p) = Point p InfteMinus
toPoint Lower Inf = NegInf
toPoint Upper Inf = PosInf
pointRational :: IntervalLimitPoint -> Maybe Rational
pointRational (Point r _) = Just r
pointRational _ = Nothing
--TODO: complete definition
instance Num IntervalLimitPoint
where
x + y = case (x, y) of
(Point x' ix, Point y' iy) -> Point (x' + y') $ case (ix, iy) of
(InfteIndet, _ ) -> InfteIndet
(_, InfteIndet) -> InfteIndet
(InfteNull, iy' ) -> iy'
(ix', InfteNull ) -> ix'
_ | ix == iy -> ix
_ -> InfteIndet
(Indet, _ ) -> Indet
(_, Indet ) -> Indet
(PosInf, NegInf) -> Indet
(NegInf, PosInf) -> Indet
(PosInf, _ ) -> PosInf
(_, PosInf) -> PosInf
(NegInf, _ ) -> NegInf
(_, NegInf) -> NegInf
_ * _ = error "undefined: * for IntervalLimitPoint"
abs _ = error "undefined: abs for IntervalLimitPoint"
signum _ = error "undefined: signum for IntervalLimitPoint"
fromInteger i = Point (fromInteger i) InfteNull
negate Indet = Indet
negate PosInf = NegInf
negate NegInf = PosInf
negate (Point x infte) = Point (-x) $ negInfte infte
where
negInfte InfteMinus = InftePlus
negInfte InftePlus = InfteMinus
negInfte infte' = infte'
instance Ord IntervalLimitPoint
where
Indet <= _ = error "Ord IntervalLimitPoint: undefined for Indet"
_ <= Indet = error "Ord IntervalLimitPoint: undefined for Indet"
NegInf <= _ = True
_ <= NegInf = False
_ <= PosInf = True
PosInf <= _ = False
Point x infteX <= Point y infteY
| x == y = infteX <= infteY
| otherwise = x <= y
instance Ord Infinitesimal
where
InfteIndet <= _ = error "Ord Infinitesimal: undefined for InfteIndet"
_ <= InfteIndet = error "Ord Infinitesimal: undefined for InfteIndet2"
InfteMinus <= _ = True
InfteNull <= InfteNull = True
InfteNull <= InftePlus = True
InftePlus <= InftePlus = True
_ <= _ = False
nullInfte :: IntervalLimitPoint -> IntervalLimitPoint
nullInfte (Point p _) = Point p InfteNull
nullInfte p = p
data IntervalLimitPointOrd = Lt | Gt | Eq | IndetOrd deriving (Eq, Ord)
compareIntervalPoints :: IntervalLimitPoint -> IntervalLimitPoint -> IntervalLimitPointOrd
compareIntervalPoints x y = case (x,y) of
(Indet, _ ) -> IndetOrd
(_, Indet ) -> IndetOrd
(x', y') | x' == y' -> Eq
(NegInf, _ ) -> Lt
(PosInf, _ ) -> Gt
(_, NegInf) -> Gt
(_, PosInf) -> Lt
(Point x' ix, Point y' iy)
| o /= Eq -> o
| otherwise -> case (ix, iy) of
(InfteIndet, _ ) -> IndetOrd
(_, InfteIndet) -> IndetOrd
_ | ix == iy -> Eq
(InfteMinus, _ ) -> Lt
(InfteNull, InftePlus ) -> Lt
_ -> Gt
where
o = ordRat x' y'
where
ordRat x' y' = case compare x' y' of
LT -> Lt
GT -> Gt
EQ -> Eq
infix 4 ~<
(~<) :: IntervalLimitPoint -> IntervalLimitPoint -> Maybe Bool
x ~< y
| oneArgIndet x y = Nothing
| otherwise = Just $ compareIntervalPoints x y == Lt
infix 4 ~<=
(~<=) :: IntervalLimitPoint -> IntervalLimitPoint -> Maybe Bool
x ~<= y
| oneArgIndet x y = Nothing
| otherwise = let c = compareIntervalPoints x y in Just $ c == Lt || c == Eq
infix 4 ~>
(~>) :: IntervalLimitPoint -> IntervalLimitPoint -> Maybe Bool
x ~> y
| oneArgIndet x y = Nothing
| otherwise = Just $ compareIntervalPoints x y == Gt
infix 4 ~>=
(~>=) :: IntervalLimitPoint -> IntervalLimitPoint -> Maybe Bool
x ~>= y
| oneArgIndet x y = Nothing
| otherwise = let c = compareIntervalPoints x y in Just $ c == Gt || c == Eq
oneArgIndet :: IntervalLimitPoint -> IntervalLimitPoint -> Bool
oneArgIndet Indet _ = True
oneArgIndet _ Indet = True
oneArgIndet _ _ = False
corners :: (Ord k, Hashable k) => [(k, Interval)] -> [Map k IntervalLimitPoint]
corners choices = foldl'
( \crnrs (pf, Interval l u) ->
[Map.insert pf (Interval.toPoint Lower l) c | c <- crnrs] ++ [Map.insert pf (Interval.toPoint Upper u) c | c <- crnrs]
)
[Map.fromList [(firstRf, Interval.toPoint Lower firstLower)], Map.fromList [(firstRf, Interval.toPoint Upper firstUpper)]]
otherConditions
where
((firstRf, Interval firstLower firstUpper):otherConditions) = choices
rat2IntervLimPoint :: Rational -> IntervalLimitPoint
rat2IntervLimPoint r = Point r InfteNull
| SteffenMichels/IHPMC | src/Interval.hs | mit | 7,929 | 0 | 16 | 2,374 | 2,168 | 1,139 | 1,029 | 159 | 15 |
{-# LANGUAGE InstanceSigs #-}
{-# OPTIONS
-Wall
-Wno-missing-signatures
-Wno-type-defaults
#-}
module Main where
import Data.Semigroup
-- Today's topic: applicative functors
{-
class Functor f => Applicative f where
pure :: a -> f a
(<*>) :: f (a -> b) -> f a -> f b
  -- I.e., inside the functor there is not just an a, but some function from a to b.
It has the following laws:
1. f <*> pure x == fmap ($ x) f
2. pure f <*> x == fmap f x
-}
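-- A quick sanity check of the two laws above using Maybe (added sketch):
lawInterchange :: Bool
lawInterchange = (Just (+ 1) <*> pure 2) == fmap ($ 2) (Just (+ 1))  -- True
lawFmap :: Bool
lawFmap = (pure (+ 1) <*> Just 2) == fmap (+ 1) (Just 2)             -- True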
-- Now let's write a DSL for validation...
data ValidationResult a
= Errors [String]
| Ok a [String]
deriving Show
newtype Validation src dest
= Validation { validate :: src -> ValidationResult dest }
instance Functor (Validation src) where
fmap :: (a -> b) -> Validation src a -> Validation src b
fmap f (Validation g) = Validation $ \x -> case g x of
Errors es -> Errors es
Ok v ws -> Ok (f v) ws
instance Semigroup (Validation a b) where
(Validation v1) <> (Validation v2) =
Validation $ \x -> case (v1 x, v2 x) of
(Errors e1, Errors e2) -> Errors (e1 ++ e2)
    (Ok _ w1 , Ok x' w2) -> Ok x' (w1 ++ w2)
(Errors e1, _) -> Errors e1
(_ , Errors e2) -> Errors e2
instance Applicative (Validation src) where
pure x = Validation $ \_ -> Ok x []
Validation vf <*> Validation vx = Validation $ \x ->
case (vf x, vx x) of
(Errors e1, Errors e2) -> Errors (e1 ++ e2)
(Ok f w1 , Ok v w2) -> Ok (f v) (w1 ++ w2)
(Errors e1, _) -> Errors e1
(_ , Errors e2) -> Errors e2
ageV :: Validation Int Int
ageV = check "Negative age!" (> 0)
<> note "Maybe too young!" (> 20)
<> note "Maybe too old!" (< 80)
{-
ageV = Validation $ \x ->
if x < 0 then Errors ["Negative age!"]
else if x < 18 then Ok x ["Maybe too young"]
else Ok x []
-}
check, note :: String -> (a -> Bool) -> Validation a a
check err test = Validation $ \x ->
if test x then Ok x []
else Errors [err]
note warning test = Validation $ \x ->
Ok x $ if test x
then []
else [warning]
nameV :: Validation String String
nameV = check "Empty name!" (not . null)
data User = User
{ userName :: String
, userAge :: Int
}
field :: (a -> b) -> Validation b c -> Validation a c
field f (Validation g) = Validation $ \x -> g (f x)
userV :: Validation User User
userV = User
<$> field userName nameV
<*> field userAge ageV
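-- Demonstration (added sketch): because <*> runs every field validation and
-- concatenates the error lists, an invalid user reports all failures at once.
badUser :: ValidationResult User
badUser = validate userV (User "" (-3))
-- ==> Errors ["Empty name!","Negative age!"]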
main = do
print a
where
a = validate ageV 3
| aquatir/remember_java_api | code-sample-haskell/typed_fp_basics_cource/06_applicative/code.hs | mit | 2,593 | 0 | 13 | 728 | 849 | 439 | 410 | 56 | 2 |
--------------------------------------------------------------------------------
-- | Data types for logging
{-# LANGUAGE OverloadedStrings #-}
module Web.SocketIO.Types.Log (Log(..), Serializable(..)) where
--------------------------------------------------------------------------------
import System.Console.ANSI
import Web.SocketIO.Types.String
--------------------------------------------------------------------------------
-- | Logger
data Log = Error ByteString
| Warn ByteString
| Info ByteString
| Debug ByteString
deriving (Eq, Show)
instance Serializable Log where
    serialize (Error message) = fromString $ "   " ++ (paint Red $ "[error] " ++ fromByteString message)
serialize (Warn message) = fromString $ " " ++ (paint Yellow $ "[warn] " ++ fromByteString message)
serialize (Info message) = fromString $ " " ++ (paint Cyan $ "[info] " ++ fromByteString message)
serialize (Debug message) = fromString $ " " ++ (paint Black $ "[debug] " ++ fromByteString message)
--------------------------------------------------------------------------------
-- | helper function
paint :: Color -> String -> String
paint color s = setSGRCode [SetColor Foreground Vivid color] ++ s ++ setSGRCode []
| banacorn/socket.io-haskell | Web/SocketIO/Types/Log.hs | mit | 1,282 | 0 | 11 | 220 | 299 | 161 | 138 | 16 | 1 |
{-# LANGUAGE EmptyDataDecls,
FlexibleContexts,
GADTs,
GeneralizedNewtypeDeriving,
MultiParamTypeClasses,
OverloadedStrings,
QuasiQuotes,
TemplateHaskell,
TypeFamilies #-}
module Scarlet.Entry where
import Control.Monad.IO.Class (liftIO)
import Database.Persist
import Database.Persist.Sqlite
import Database.Persist.TH
import Data.Time.Clock (UTCTime)
share [mkPersist sqlSettings, mkSave "entityDefs"]
[persistLowerCase|
Entry
ctime UTCTime
uri String
title String
content String
byline String
language String
directives String
deriving Show
|]
| rhwlo/Scarlet | src/Scarlet/Entry.hs | mit | 764 | 0 | 7 | 269 | 71 | 44 | 27 | 17 | 0 |