code | repo_name | path | license | size | n_ast_errors | ast_max_depth | n_whitespaces | n_ast_nodes | n_ast_terminals | n_ast_nonterminals | loc | cycloplexity
---|---|---|---|---|---|---|---|---|---|---|---|---|
module Types where
data Header = Header {
getData :: String
} deriving (Show)
data HeaderNew = HeaderNew {
requestQuery :: (String,String), -- (Method, Full query)
gzipFlag :: Bool,
userAgent :: Maybe String,
doesItEnd :: Bool
} deriving (Show)
data Input = Input {
server :: Maybe String,
board :: Maybe String,
post :: Maybe String,
raw :: Maybe String,
sid :: Maybe String,
start :: Maybe Int,
end :: Maybe Int,
last :: Maybe Int,
keepFirst :: Maybe Bool,
remFirst :: Maybe Bool,
error :: Maybe Bool,
errorMessage :: Maybe String
--headOnly :: Maybe Bool,
--isItGet :: Maybe Bool
} deriving (Show)
| Cipherwraith/Rokka | Types.hs | gpl-2.0 | 760 | 0 | 9 | 262 | 202 | 118 | 84 | 24 | 0 |
module Crystal.Pretty (pretty, prettyD) where
import Data.List
import Text.PrettyPrint
import Crystal.AST
pretty expr = renderStyle style{lineLength=150} $ prettyE expr
prettyD (decls, expr) =
renderStyle style{lineLength=150} $
vcat (map toDecl decls) $+$ prettyE expr
where toDecl (id, Expr l (Lambda args r body)) = appl [text "define", renderArgs (id:args) r, prettyE body]
toDecl ("_", value) = prettyE value
toDecl (id, value) = appl [text "define", text id, prettyE value]
prettyE (Expr l (Ref ident)) = text ident
prettyE (Expr l (Appl e args)) = appl $ map prettyE (e:args)
prettyE (Expr l (If cond cons alt)) =
case alt of
Expr _ (Lit LitVoid) -> appl (text "if" : map prettyE [cond, cons])
_ -> appl (text "if" : map prettyE [cond, cons, alt])
prettyE (Expr l (Let bds bod)) = appl [op, parens (vcat $ map (\(i,e) -> appl [text i, prettyBE e]) bds) , prettyBE bod]
where op = if length bds > 1 then text "let*" else text "let"
prettyE (Expr l (LetRec bds bod)) = appl [text "letrec" , parens (vcat $ map (\(i,e) -> appl [text i, prettyBE e]) bds) , prettyBE bod]
prettyE (Expr l (Lambda args r body)) = appl [text "lambda", renderArgs args r, prettyBE body]
prettyE (Expr l (Begin body)) = appl (text "begin" : map prettyBE body)
prettyE (Expr l (Lit lit)) = prettyL False lit
prettyBE (Expr l (Begin exps)) = vcat $ map prettyBE exps
prettyBE other = prettyE other
prettyL _ (LitChar c) = text "#\\" <> text [c]
prettyL _ (LitString s) = text "\"" <> escape s <> text "\""
prettyL l (LitSymbol s) = quoted l <> text s
prettyL _ (LitInt i) = int (fromIntegral i)
prettyL _ (LitFloat f) = double f
prettyL _ (LitBool True) = text "#t"
prettyL _ (LitBool False) = text "#f"
prettyL _ (LitVoid) = text "(void)"
prettyL l (LitList els) = quoted l <> parens (hsep $ map (prettyL True) els)
prettyL l (LitPair x y) = quoted l <> parens (prettyL True x <+> text "." <+> prettyL True y)
appl (x:xs) = parens (x <+> sep xs)
quoted l = if l then empty else text "'"
escape = text . concatMap (\x -> if x == '\n' then "\\n" else [x])
renderArgs :: [Ident] -> Maybe Ident -> Doc
renderArgs vs Nothing = parens (sep $ map text vs)
renderArgs [] (Just r) = text r
renderArgs vs (Just r) = parens (sep (map text vs ++ [text "."] ++ [text r]))
| Botje/crystal | Crystal/Pretty.hs | gpl-2.0 | 2,421 | 0 | 15 | 592 | 1,221 | 609 | 612 | 42 | 3 |
{- |
Module : $Header$
Description : extraction of the sign from the frames
Copyright : (c) Francisc-Nicolae Bungiu, Felix Gabriel Mance
License : GPLv2 or higher, see LICENSE.txt
Maintainer : [email protected]
Stability : provisional
Portability : portable
Extraction of all the entities in the ontology
-}
module OWL2.Extract where
import OWL2.AS
import OWL2.MS
import OWL2.Sign
import Control.Monad
import Common.Lib.State
import qualified Data.Set as Set
fromObjPropExpr :: ObjectPropertyExpression -> State Sign ()
fromObjPropExpr = addEntity . Entity ObjectProperty . objPropToIRI
fromDataPropExpr :: DataPropertyExpression -> State Sign ()
fromDataPropExpr = addEntity . Entity DataProperty
fromIndividual :: Individual -> State Sign ()
fromIndividual ind =
unless (iriType ind == NodeID) $ addEntity $ Entity NamedIndividual ind
fromAnnoProp :: AnnotationProperty -> State Sign ()
fromAnnoProp = addEntity . Entity AnnotationProperty
fromLiteral :: Literal -> State Sign ()
fromLiteral l = case l of
Literal _ ty -> case ty of
Typed u -> addEntity $ Entity Datatype u
_ -> return ()
_ -> return ()
fromDType :: Datatype -> State Sign ()
fromDType dt = unless (isDatatypeKey dt) $ addEntity $ Entity Datatype dt
-- | Adds the DataRange to the Signature and returns it as a State Sign ()
fromDataRange :: DataRange -> State Sign ()
fromDataRange dr = case dr of
DataJunction _ lst -> mapM_ fromDataRange lst
DataComplementOf r -> fromDataRange r
DataOneOf cs -> mapM_ fromLiteral cs
DataType r fcs -> do
fromDType r
mapM_ (fromLiteral . snd) fcs
-- | Adds the Fact to the Signature and returns it as a State Sign()
fromFact :: Fact -> State Sign ()
fromFact f = case f of
ObjectPropertyFact _ obe ind -> do
fromObjPropExpr obe
fromIndividual ind
DataPropertyFact _ dpe _ ->
fromDataPropExpr dpe
-- | Adds the Description to the Signature. Returns it as a State
fromDescription :: ClassExpression -> State Sign ()
fromDescription desc = case desc of
Expression u ->
unless (isThing u) $ addEntity $ Entity Class u
ObjectJunction _ ds -> mapM_ fromDescription ds
ObjectComplementOf d -> fromDescription d
ObjectOneOf is -> mapM_ fromIndividual is
ObjectValuesFrom _ opExpr d -> do
fromObjPropExpr opExpr
fromDescription d
ObjectHasSelf opExpr -> fromObjPropExpr opExpr
ObjectHasValue opExpr i -> do
fromObjPropExpr opExpr
fromIndividual i
ObjectCardinality (Cardinality _ _ opExpr md) -> do
fromObjPropExpr opExpr
maybe (return ()) fromDescription md
DataValuesFrom _ dExp r -> do
fromDataPropExpr dExp
fromDataRange r
DataHasValue dExp c -> do
fromDataPropExpr dExp
fromLiteral c
DataCardinality (Cardinality _ _ dExp mr) -> do
fromDataPropExpr dExp
maybe (return ()) fromDataRange mr
fromAnno :: Annotation -> State Sign ()
fromAnno (Annotation as apr _) = do
fromAnnoProp apr
fromAnnos as
fromAnnos :: Annotations -> State Sign ()
fromAnnos = mapM_ fromAnno
fromAnnoList :: (a -> State Sign ()) -> AnnotatedList a -> State Sign ()
fromAnnoList f al = do
fromAnnos $ concatMap fst al
mapM_ (f . snd) al
{- | Adds possible ListFrameBits to the Signature by calling
bottom level functions -}
fromLFB :: Maybe Relation -> ListFrameBit -> State Sign ()
fromLFB r lfb = case lfb of
AnnotationBit ab ->
unless (r `elem` [Just (DRRelation ADomain), Just (DRRelation ARange)])
$ fromAnnoList fromAnnoProp ab
ExpressionBit al -> fromAnnoList fromDescription al
ObjectBit anob -> fromAnnoList fromObjPropExpr anob
DataBit dlst -> fromAnnoList fromDataPropExpr dlst
IndividualSameOrDifferent anind -> fromAnnoList fromIndividual anind
ObjectCharacteristics al -> fromAnnos $ concatMap fst al
DataPropRange dr -> fromAnnoList fromDataRange dr
IndividualFacts fct -> fromAnnoList fromFact fct
fromAFB :: AnnFrameBit -> State Sign ()
fromAFB afb = case afb of
AnnotationFrameBit _ -> return ()
DataFunctional -> return ()
DatatypeBit dr -> fromDataRange dr
ClassDisjointUnion cls -> mapM_ fromDescription cls
ClassHasKey obe dpe -> do
mapM_ fromObjPropExpr obe
mapM_ fromDataPropExpr dpe
ObjectSubPropertyChain ope -> mapM_ fromObjPropExpr ope
{- | Calls the completion of Signature based on
case separation of ListFrameBit and AnnotationFrameBit -}
fromFB :: Extended -> FrameBit -> State Sign ()
fromFB ext fb = case fb of
ListFrameBit rel lfb -> do
fromExt ext
fromLFB rel lfb
AnnFrameBit an anf -> do
fromAnnos an
fromAFB anf
case anf of
AnnotationFrameBit Assertion -> case ext of
Misc _ -> return ()
_ -> fromExt ext
_ -> fromExt ext
fromFrame :: Frame -> State Sign ()
fromFrame (Frame ex fblist) = mapM_ (fromFB ex) fblist
fromExt :: Extended -> State Sign ()
fromExt ext = case ext of
SimpleEntity e -> addEntity e
ObjectEntity op -> fromObjPropExpr op
ClassEntity ce -> fromDescription ce
Misc ans -> fromAnnos ans
{- | Top level function: takes the OntologyDocument and completes
the signature by calling completeSignForFrame -}
extractSign :: OntologyDocument -> State Sign ()
extractSign = mapM_ fromFrame . ontFrames . ontology
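-- A usage sketch (assuming an 'OntologyDocument' value doc is at hand):
--   execState (extractSign doc) emptySign :: Sign
-- collects every entity of the ontology into a signature; 'signToFrames'
-- below uses the same execState/emptySign pattern.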
toDecl :: Sign -> [Frame]
toDecl s =
let cls = map (Entity Class) $ Set.toList (concepts s)
dt = map (Entity Datatype) $ Set.toList (datatypes s)
op = map (Entity ObjectProperty) $ Set.toList (objectProperties s)
dp = map (Entity DataProperty) $ Set.toList (dataProperties s)
i = map (Entity NamedIndividual) $ Set.toList (individuals s)
ans = map (Entity AnnotationProperty) $ Set.toList (annotationRoles s)
in map (\ c -> Frame (mkExtendedEntity c)
[AnnFrameBit [] $ AnnotationFrameBit Declaration])
(cls ++ dt ++ op ++ dp ++ i ++ ans)
signToFrames :: [Frame] -> [Frame]
signToFrames f = let s = mapM_ fromFrame f in toDecl $ execState s emptySign
| nevrenato/HetsAlloy | OWL2/Extract.hs | gpl-2.0 | 6,031 | 0 | 17 | 1,309 | 1,836 | 860 | 976 | 133 | 11 |
{-# OPTIONS_GHC -Wno-missing-export-lists #-}
module Main where
import Prelude hiding (id, putStrLn)
import Data.Monoid ((<>))
import Data.Text (intercalate)
import Data.Text.IO (putStrLn)
import Data.Version (showVersion)
import Database.SQLite.Simple
import Options.Applicative
import Paths_pinboard_notes_backup (version)
import Pinboard
import System.Exit (exitFailure)
import Text.PrettyPrint.ANSI.Leijen (Doc, vsep)
import Types
import Utils (pluralize, putStrLnErr)
-- * Command line parsing
data ProgramOptions = ProgramOptions { o_apiToken :: String
, o_verbosity :: Verbosity
, o_databasePath :: String
}
optionsParser :: Options.Applicative.Parser ProgramOptions
optionsParser = ProgramOptions
<$> strOption (short 't'
<> long "token"
<> metavar "TOKEN"
<> help tokenHelp)
<*> flag Standard Verbose (short 'v'
<> long "verbose"
<> help verboseHelp)
<*> argument str (metavar "FILE"
<> help pathHelp
<> action "file")
where tokenHelp = "Your API token (e.g. maciej:abc123456). "
<> "You can find this at <https://pinboard.in/settings/password>."
verboseHelp = "Display detailed progress information."
pathHelp = "Filename of the SQLite database where your notes will be stored. "
<> "This file will be created if it does not already exist. "
<> "Notes are always stored in a table called \"notes\"."
addVersionOption :: Options.Applicative.Parser (a -> a)
addVersionOption = infoOption ("pnbackup " <> showVersion version)
(long "version"
<> help "Show the version number"
<> hidden)
footerText :: Doc
footerText = vsep [ "For more information, see \"man pnbackup\"."
, ""
, "Copyright © 2016–2017, 2019, 2021 Benjamin D. Esham"
, ""
, "License GPLv3+: GNU GPL version 3 or later <http://gnu.org/licenses/gpl.html>."
, "This is free software: you are free to change and redistribute it."
, "There is NO WARRANTY, to the extent permitted by law."
]
commandLineOptions :: ParserInfo ProgramOptions
commandLineOptions = info (addVersionOption <*> helper <*> optionsParser) parserInfo
where parserInfo = fullDesc
<> header "pnbackup -- Back up the notes you've saved to Pinboard"
<> footerDoc (Just footerText)
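-- A typical invocation, reusing the example token from the help text above
-- (the file name is arbitrary; it is created if it does not already exist):
--   pnbackup -t maciej:abc123456 notes.sqlite3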
-- * Main
main :: IO ()
main = execParser commandLineOptions >>= main'
main' :: ProgramOptions -> IO ()
main' (ProgramOptions apiToken verbosity databasePath) = do
conn <- open databasePath
execute_ conn createTableQuery
result <- runPinboard apiToken verbosity $ backUpNotes conn
case result of
Left err -> putStrLnErr ("pnbackup: " <> err) >> exitFailure
Right result' -> displayResult result'
createTableQuery :: Query
createTableQuery = mconcat [ "CREATE TABLE IF NOT EXISTS notes "
, "(id TEXT NOT NULL UNIQUE, "
, "title TEXT NOT NULL, "
, "text TEXT NOT NULL, "
, "hash TEXT NOT NULL, "
, "created DATETIME NOT NULL, "
, "updated DATETIME NOT NULL)"
]
displayResult :: ApplicationResult -> IO ()
displayResult (ApplicationResult upToDate updated new deleted) = do
putStrLn $ intercalate ", " [ updatedString
, newString
, deletedString
, upToDateString
] <> "."
where upToDateString = pluralize "note already up-to-date" "notes already up-to-date" upToDate
updatedString = pluralize "note updated" "notes updated" updated
newString = pluralize "new note" "new notes" new
deletedString = pluralize "note deleted" "notes deleted" deleted
| bdesham/pinboard-notes-backup | src/Main.hs | gpl-3.0 | 4,304 | 0 | 13 | 1,504 | 713 | 380 | 333 | 83 | 2 |
{---------------------------------------------------------------------}
{- Copyright 2015, 2016 Nathan Bloomfield -}
{- -}
{- This file is part of Feivel. -}
{- -}
{- Feivel is free software: you can redistribute it and/or modify -}
{- it under the terms of the GNU General Public License version 3, -}
{- as published by the Free Software Foundation. -}
{- -}
{- Feivel is distributed in the hope that it will be useful, but -}
{- WITHOUT ANY WARRANTY; without even the implied warranty of -}
{- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -}
{- GNU General Public License for more details. -}
{- -}
{- You should have received a copy of the GNU General Public License -}
{- along with Feivel. If not, see <http://www.gnu.org/licenses/>. -}
{---------------------------------------------------------------------}
module Feivel.Grammar.Int where
import Feivel.Grammar.Util
data IntExprLeaf
int str bool rat mod
list mat tup poly mac
a
= IntConst Integer
| IntVar Key
| IntMacro [(Type, Key, a)] a -- Expr, MacTo ZZ
| IntAtPos list int
| IntAtIdx mat int int
| IntAtSlot tup int
| IntIfThenElse bool int int
-- Arithmetic
| IntAdd int int
| IntSub int int
| IntMult int int
| IntQuo int int
| IntMod int int
| IntPow int int
| IntGCD int int
| IntLCM int int
| IntMin int int
| IntMax int int
| IntChoose int int
| IntNeg int
| IntAbs int
| IntRad int
| IntSqPart int
| IntSqFreePart int
-- String
| StrLength str
-- Rational
| RatNumer rat
| RatDenom rat
| RatFloor rat
-- List
| ListLen list
| IntRand list
| IntSum list
| IntProd list
| IntMaxim list
| IntMinim list
| IntGCDiv list
| IntLCMul list
-- Matrix
| MatNumRows mat
| MatNumCols mat
| MatRank mat
-- Polynomial
| IntContent poly
| PolyDegree Type poly
-- Stats
| IntObserveUniform int int
| IntObserveBinomial int rat
| IntObservePoisson rat
-- Casts
| IntCastStr str
deriving (Eq, Show)
| nbloomf/feivel | src/Feivel/Grammar/Int.hs | gpl-3.0 | 2,474 | 0 | 8 | 931 | 339 | 215 | 124 | 51 | 0 |
{-# LANGUAGE TypeSynonymInstances , OverlappingInstances #-}
module Database.Design.Ampersand.ADL1.Expression (
subst
,foldlMapExpression,foldrMapExpression
,primitives,isMp1, isEEps
,isPos,isNeg, deMorganERad, deMorganECps, deMorganEUni, deMorganEIsc, notCpl, isCpl
,exprIsc2list, exprUni2list, exprCps2list, exprRad2list, exprPrd2list
,insParentheses)
where
import Database.Design.Ampersand.Basics (uni)
import Database.Design.Ampersand.Core.AbstractSyntaxTree
--import Debug.Trace
-- fatal :: Int -> String -> a
-- fatal = fatalMsg "ADL1.Expression"
-- | subst is used to replace each occurrence of a relation
-- with an expression. The parameter expr will therefore be applied to an
--   expression of the form EDcD decl.
subst :: (Declaration,Expression) -> Expression -> Expression
subst (decl,expr) = subs
where
subs (EEqu (l,r)) = EEqu (subs l,subs r)
subs (EImp (l,r)) = EImp (subs l,subs r)
subs (EIsc (l,r)) = EIsc (subs l,subs r)
subs (EUni (l,r)) = EUni (subs l,subs r)
subs (EDif (l,r)) = EDif (subs l,subs r)
subs (ELrs (l,r)) = ELrs (subs l,subs r)
subs (ERrs (l,r)) = ERrs (subs l,subs r)
subs (EDia (l,r)) = EDia (subs l,subs r)
subs (ECps (l,r)) = ECps (subs l,subs r)
subs (ERad (l,r)) = ERad (subs l,subs r)
subs (EPrd (l,r)) = EPrd (subs l,subs r)
subs (EKl0 e ) = EKl0 (subs e)
subs (EKl1 e ) = EKl1 (subs e)
subs (EFlp e ) = EFlp (subs e)
subs (ECpl e ) = ECpl (subs e)
subs (EBrk e) = EBrk (subs e)
subs e@(EDcD d ) | d==decl = expr
| otherwise = e
subs e@EDcI{} = e
subs e@EEps{} = e
subs e@EDcV{} = e
subs e@EMp1{} = e
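-- A small usage sketch (r, s and t are hypothetical declarations; the result
-- follows directly from the equations above):
--   subst (r, EDcD s .:. EDcD t) (ECpl (EDcD r))  ==  ECpl (EDcD s .:. EDcD t)
-- i.e. every occurrence of EDcD r is replaced, also inside complements,
-- converses and compositions.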
foldlMapExpression :: (a -> r -> a) -> (Declaration->r) -> a -> Expression -> a
foldlMapExpression f = foldrMapExpression f' where f' x y = f y x
foldrMapExpression :: (r -> a -> a) -> (Declaration->r) -> a -> Expression -> a
foldrMapExpression f g a (EEqu (l,r)) = foldrMapExpression f g (foldrMapExpression f g a l) r
foldrMapExpression f g a (EImp (l,r)) = foldrMapExpression f g (foldrMapExpression f g a l) r
foldrMapExpression f g a (EIsc (l,r)) = foldrMapExpression f g (foldrMapExpression f g a l) r
foldrMapExpression f g a (EUni (l,r)) = foldrMapExpression f g (foldrMapExpression f g a l) r
foldrMapExpression f g a (EDif (l,r)) = foldrMapExpression f g (foldrMapExpression f g a l) r
foldrMapExpression f g a (ELrs (l,r)) = foldrMapExpression f g (foldrMapExpression f g a l) r
foldrMapExpression f g a (ERrs (l,r)) = foldrMapExpression f g (foldrMapExpression f g a l) r
foldrMapExpression f g a (EDia (l,r)) = foldrMapExpression f g (foldrMapExpression f g a l) r
foldrMapExpression f g a (ECps (l,r)) = foldrMapExpression f g (foldrMapExpression f g a l) r
foldrMapExpression f g a (ERad (l,r)) = foldrMapExpression f g (foldrMapExpression f g a l) r
foldrMapExpression f g a (EPrd (l,r)) = foldrMapExpression f g (foldrMapExpression f g a l) r
foldrMapExpression f g a (EKl0 e) = foldrMapExpression f g a e
foldrMapExpression f g a (EKl1 e) = foldrMapExpression f g a e
foldrMapExpression f g a (EFlp e) = foldrMapExpression f g a e
foldrMapExpression f g a (ECpl e) = foldrMapExpression f g a e
foldrMapExpression f g a (EBrk e) = foldrMapExpression f g a e
foldrMapExpression f g a (EDcD d) = f (g d) a
foldrMapExpression _ _ a EDcI{} = a
foldrMapExpression _ _ a EEps{} = a
foldrMapExpression _ _ a EDcV{} = a
foldrMapExpression _ _ a EMp1{} = a
primitives :: Expression -> [Expression]
primitives expr =
case expr of
(EEqu (l,r)) -> primitives l `uni` primitives r
(EImp (l,r)) -> primitives l `uni` primitives r
(EIsc (l,r)) -> primitives l `uni` primitives r
(EUni (l,r)) -> primitives l `uni` primitives r
(EDif (l,r)) -> primitives l `uni` primitives r
(ELrs (l,r)) -> primitives l `uni` primitives r
(ERrs (l,r)) -> primitives l `uni` primitives r
(EDia (l,r)) -> primitives l `uni` primitives r
(ECps (l,r)) -> primitives l `uni` primitives r
(ERad (l,r)) -> primitives l `uni` primitives r
(EPrd (l,r)) -> primitives l `uni` primitives r
(EKl0 e) -> primitives e
(EKl1 e) -> primitives e
(EFlp e) -> primitives e
(ECpl e) -> primitives e
(EBrk e) -> primitives e
EDcD{} -> [expr]
EDcI{} -> [expr]
EEps{} -> [] -- Since EEps is inserted for typing reasons only, we do not consider it a primitive.
EDcV{} -> [expr]
EMp1{} -> [expr]
-- | The rule of De Morgan requires care with respect to the complement.
-- The following function provides a function to manipulate with De Morgan correctly.
deMorganERad :: Expression -> Expression
deMorganERad (ECpl (ERad (l,r)))
= notCpl (deMorganERad l) .:. notCpl (deMorganERad r)
deMorganERad (ERad (l,r))
= notCpl (notCpl (deMorganERad l) .:. notCpl (deMorganERad r))
deMorganERad e = e
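-- A worked instance (r and s are hypothetical declarations; EDcD terms are
-- left untouched by the catch-all case above):
--   deMorganERad (ECpl (ERad (EDcD r, EDcD s)))
--     ==  notCpl (EDcD r) .:. notCpl (EDcD s)
-- i.e. the complement of a relative addition becomes a composition of
-- complements.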
deMorganECps :: Expression -> Expression
deMorganECps (ECpl (ECps (l,r)))
= notCpl (deMorganECps l) .!. notCpl (deMorganECps r)
deMorganECps (ECps (l,r))
= notCpl (notCpl (deMorganECps l) .!. notCpl (deMorganECps r))
deMorganECps e = e
deMorganEUni :: Expression -> Expression
deMorganEUni (ECpl (EUni (l,r)))
= notCpl (deMorganEUni l) ./\. notCpl (deMorganEUni r)
deMorganEUni (EUni (l,r))
= notCpl (notCpl (deMorganEUni l) ./\. notCpl (deMorganEUni r))
deMorganEUni e = e
deMorganEIsc :: Expression -> Expression
deMorganEIsc (ECpl (EIsc (l,r)))
= notCpl (deMorganEIsc l) .\/. notCpl (deMorganEIsc r)
deMorganEIsc (EIsc (l,r))
= notCpl (notCpl (deMorganEIsc l) .\/. notCpl (deMorganEIsc r))
deMorganEIsc e = e
notCpl :: Expression -> Expression
notCpl (ECpl e) = e
notCpl e = ECpl e
isCpl :: Expression -> Bool
isCpl (ECpl{}) = True
isCpl _ = False
isPos :: Expression -> Bool
isPos = not . isNeg
isNeg :: Expression -> Bool
isNeg = isCpl
isMp1 :: Expression -> Bool
isMp1 EMp1{} = True
isMp1 _ = False
isEEps :: Expression -> Bool
isEEps EEps{} = True
isEEps _ = False
exprIsc2list, exprUni2list, exprCps2list, exprRad2list, exprPrd2list :: Expression -> [Expression]
exprIsc2list (EIsc (l,r)) = exprIsc2list l++exprIsc2list r
exprIsc2list r = [r]
exprUni2list (EUni (l,r)) = exprUni2list l++exprUni2list r
exprUni2list r = [r]
exprCps2list (ECps (l,r)) = exprCps2list l++exprCps2list r
exprCps2list r = [r]
exprRad2list (ERad (l,r)) = exprRad2list l++exprRad2list r
exprRad2list r = [r]
exprPrd2list (EPrd (l,r)) = exprPrd2list l++exprPrd2list r
exprPrd2list r = [r]
insParentheses :: Expression -> Expression
insParentheses expr = insPar 0 expr
where
wrap :: Integer -> Integer -> Expression -> Expression
wrap i j e' = if i<=j then e' else EBrk (insPar 0 e')
insPar :: Integer -> Expression -> Expression
insPar i (EEqu (l,r)) = wrap i 0 (insPar 1 l .==. insPar 1 r)
insPar i (EImp (l,r)) = wrap i 0 (insPar 1 l .|-. insPar 1 r)
insPar i x@EIsc{} = wrap i 2 (foldr1 (./\.) [insPar 3 e | e<-exprIsc2list x ])
insPar i x@EUni{} = wrap i 2 (foldr1 (.\/.) [insPar 3 e | e<-exprUni2list x ])
insPar i (EDif (l,r)) = wrap i 4 (insPar 5 l .-. insPar 5 r)
insPar i (ELrs (l,r)) = wrap i 6 (insPar 7 l ./. insPar 7 r)
insPar i (ERrs (l,r)) = wrap i 6 (insPar 7 l .\. insPar 7 r)
insPar i (EDia (l,r)) = wrap i 6 (insPar 7 l .<>. insPar 7 r)
insPar i x@ECps{} = wrap i 8 (foldr1 (.:.) [insPar 9 e | e<-exprCps2list x ])
insPar i x@ERad{} = wrap i 8 (foldr1 (.!.) [insPar 9 e | e<-exprRad2list x ])
insPar i x@EPrd{} = wrap i 8 (foldr1 (.*.) [insPar 9 e | e<-exprPrd2list x ])
insPar _ (EKl0 e) = EKl0 (insPar 10 e)
insPar _ (EKl1 e) = EKl1 (insPar 10 e)
insPar _ (EFlp e) = EFlp (insPar 10 e)
insPar _ (ECpl e) = ECpl (insPar 10 e)
insPar i (EBrk e) = insPar i e
insPar _ x = x
{-
insPar 0 (r/\s/\t/\x/\y |- p)
=
wrap 0 0 (insPar 1 (r/\s/\t/\x/\y) |- insPar 1 p)
=
insPar 1 (r/\s/\t/\x/\y) |- insPar 1 p
=
wrap 1 2 (foldr1 f [insPar 3 e | e<-exprIsc2list (r/\s/\t/\x/\y) ]) |- p where f x y = EIsc (x,y)
=
foldr1 f [insPar 3 e | e<-exprIsc2list (r/\s/\t/\x/\y) ] |- p where f x y = EIsc (x,y)
=
foldr1 f [insPar 3 e | e<-[r,s,t,x,y] ] |- p where f x y = EIsc (x,y)
=
foldr1 f [insPar 3 r,insPar 3 s,insPar 3 t,insPar 3 x,insPar 3 y] |- p where f x y = EIsc (x,y)
=
foldr1 f [r,s,t,x,y] |- p where f x y = EIsc (x,y)
=
r/\s/\t/\x/\y |- p
insPar 0 (r;s;t;x;y |- p)
=
wrap 0 0 (insPar 1 (r;s;t;x;y) |- insPar 1 p)
=
insPar 1 (r;s;t;x;y) |- insPar 1 p
=
wrap 1 8 (insPar 8 r ; insPar 8 (s;t;x;y)) |- p
=
r; insPar 8 (s;t;x;y) |- p
=
r; wrap 8 8 (insPar 8 s; insPar 8 (t;x;y)) |- p
=
r; insPar 8 s; insPar 8 (t;x;y) |- p
=
r; s; insPar 8 (t;x;y) |- p
-}
| guoy34/ampersand | src/Database/Design/Ampersand/ADL1/Expression.hs | gpl-3.0 | 9,217 | 0 | 13 | 2,451 | 3,699 | 1,909 | 1,790 | 155 | 21 |
module GetImages
(getActiveGraphImage, getTimetableImage, randomName) where
import TimetableImageCreator (renderTable)
import qualified Data.Map as M
import System.Random
import Svg.Generator
import ImageConversion
import Happstack.Server (Request, rqCookies, cookieValue)
import Data.List.Utils (replace)
-- | If there is an active graph available, an image of that graph is created,
-- otherwise the Computer Science graph is created as a default.
-- Either way, the resulting graph's .svg and .png names are returned.
getActiveGraphImage :: Request -> IO (String, String)
getActiveGraphImage req = do
let cookies = M.fromList $ rqCookies req
graphName =
replace "-" " " $
maybe "Computer-Science" cookieValue (M.lookup "active-graph" cookies)
getGraphImage graphName (M.map cookieValue cookies)
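-- A usage sketch (the basename is a freshly generated random number; the
-- values shown are only illustrative):
--   (svgName, pngName) <- getActiveGraphImage req
--   -- e.g. svgName == "42.svg", pngName == "42.png"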
-- | Creates an image, and returns the name of the svg used to create the
-- image and the name of the image
getGraphImage :: String -> M.Map String String -> IO (String, String)
getGraphImage graphName courseMap = do
rand <- randomName
let svgFilename = rand ++ ".svg"
imageFilename = rand ++ ".png"
buildSVG graphName courseMap svgFilename True
createImageFile svgFilename imageFilename
return (svgFilename, imageFilename)
-- | Creates an image, and returns the name of the svg used to create the
-- image and the name of the image
getTimetableImage :: String -> String -> IO (String, String)
getTimetableImage courses session = do
rand <- randomName
let svgFilename = rand ++ ".svg"
imageFilename = rand ++ ".png"
renderTable svgFilename courses session
createImageFile svgFilename imageFilename
return (svgFilename, imageFilename)
-- | Generate a string containing random integers
randomName :: IO String
randomName = do
gen <- newStdGen
let (rand, _) = next gen
return (show rand)
| miameng/courseography | app/GetImages.hs | gpl-3.0 | 1,906 | 0 | 14 | 378 | 414 | 215 | 199 | 37 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- Module : Network.AWS.CognitoSync.DescribeDataset
-- Copyright : (c) 2013-2014 Brendan Hay <[email protected]>
-- License : This Source Code Form is subject to the terms of
-- the Mozilla Public License, v. 2.0.
-- A copy of the MPL can be found in the LICENSE file or
-- you can obtain it at http://mozilla.org/MPL/2.0/.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : experimental
-- Portability : non-portable (GHC extensions)
--
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- | Gets meta data about a dataset by identity and dataset name. With Amazon
-- Cognito Sync, each identity has access only to its own data. Thus, the
-- credentials used to make this API call need to have access to the identity
-- data.
--
-- DescribeDataset can be called with temporary user credentials provided by
-- Cognito Identity or with developer credentials. You should use Cognito
-- Identity credentials to make this API call.
--
-- <http://docs.aws.amazon.com/cognitosync/latest/APIReference/API_DescribeDataset.html>
module Network.AWS.CognitoSync.DescribeDataset
(
-- * Request
DescribeDataset
-- ** Request constructor
, describeDataset
-- ** Request lenses
, ddDatasetName
, ddIdentityId
, ddIdentityPoolId
-- * Response
, DescribeDatasetResponse
-- ** Response constructor
, describeDatasetResponse
-- ** Response lenses
, ddrDataset
) where
import Network.AWS.Data (Object)
import Network.AWS.Prelude
import Network.AWS.Request.RestJSON
import Network.AWS.CognitoSync.Types
import qualified GHC.Exts
data DescribeDataset = DescribeDataset
{ _ddDatasetName :: Text
, _ddIdentityId :: Text
, _ddIdentityPoolId :: Text
} deriving (Eq, Ord, Read, Show)
-- | 'DescribeDataset' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'ddDatasetName' @::@ 'Text'
--
-- * 'ddIdentityId' @::@ 'Text'
--
-- * 'ddIdentityPoolId' @::@ 'Text'
--
describeDataset :: Text -- ^ 'ddIdentityPoolId'
-> Text -- ^ 'ddIdentityId'
-> Text -- ^ 'ddDatasetName'
-> DescribeDataset
describeDataset p1 p2 p3 = DescribeDataset
{ _ddIdentityPoolId = p1
, _ddIdentityId = p2
, _ddDatasetName = p3
}
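-- A request sketch (all identifiers are hypothetical):
--   describeDataset "us-east-1:pool-id" "us-east-1:identity-id" "my-dataset"
-- All three fields are required; they can be read or rewritten afterwards via
-- 'ddIdentityPoolId', 'ddIdentityId' and 'ddDatasetName'.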
-- | A string of up to 128 characters. Allowed characters are a-z, A-Z, 0-9, '_'
-- (underscore), '-' (dash), and '.' (dot).
ddDatasetName :: Lens' DescribeDataset Text
ddDatasetName = lens _ddDatasetName (\s a -> s { _ddDatasetName = a })
-- | A name-spaced GUID (for example,
-- us-east-1:23EC4050-6AEA-7089-A2DD-08002EXAMPLE) created by Amazon Cognito.
-- GUID generation is unique within a region.
ddIdentityId :: Lens' DescribeDataset Text
ddIdentityId = lens _ddIdentityId (\s a -> s { _ddIdentityId = a })
-- | A name-spaced GUID (for example,
-- us-east-1:23EC4050-6AEA-7089-A2DD-08002EXAMPLE) created by Amazon Cognito.
-- GUID generation is unique within a region.
ddIdentityPoolId :: Lens' DescribeDataset Text
ddIdentityPoolId = lens _ddIdentityPoolId (\s a -> s { _ddIdentityPoolId = a })
newtype DescribeDatasetResponse = DescribeDatasetResponse
{ _ddrDataset :: Maybe Dataset
} deriving (Eq, Read, Show)
-- | 'DescribeDatasetResponse' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'ddrDataset' @::@ 'Maybe' 'Dataset'
--
describeDatasetResponse :: DescribeDatasetResponse
describeDatasetResponse = DescribeDatasetResponse
{ _ddrDataset = Nothing
}
-- | Meta data for a collection of data for an identity. An identity can have
-- multiple datasets. A dataset can be general or associated with a particular
-- entity in an application (like a saved game). Datasets are automatically
-- created if they don't exist. Data is synced by dataset, and a dataset can
-- hold up to 1MB of key-value pairs.
ddrDataset :: Lens' DescribeDatasetResponse (Maybe Dataset)
ddrDataset = lens _ddrDataset (\s a -> s { _ddrDataset = a })
instance ToPath DescribeDataset where
toPath DescribeDataset{..} = mconcat
[ "/identitypools/"
, toText _ddIdentityPoolId
, "/identities/"
, toText _ddIdentityId
, "/datasets/"
, toText _ddDatasetName
]
instance ToQuery DescribeDataset where
toQuery = const mempty
instance ToHeaders DescribeDataset
instance ToJSON DescribeDataset where
toJSON = const (toJSON Empty)
instance AWSRequest DescribeDataset where
type Sv DescribeDataset = CognitoSync
type Rs DescribeDataset = DescribeDatasetResponse
request = get
response = jsonResponse
instance FromJSON DescribeDatasetResponse where
parseJSON = withObject "DescribeDatasetResponse" $ \o -> DescribeDatasetResponse
<$> o .:? "Dataset"
| romanb/amazonka | amazonka-cognito-sync/gen/Network/AWS/CognitoSync/DescribeDataset.hs | mpl-2.0 | 5,301 | 0 | 9 | 1,129 | 608 | 374 | 234 | 73 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.Compute.Interconnects.List
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Retrieves the list of interconnects available to the specified project.
--
-- /See:/ <https://developers.google.com/compute/docs/reference/latest/ Compute Engine API Reference> for @compute.interconnects.list@.
module Network.Google.Resource.Compute.Interconnects.List
(
-- * REST Resource
InterconnectsListResource
-- * Creating a Request
, interconnectsList
, InterconnectsList
-- * Request Lenses
, ilReturnPartialSuccess
, ilOrderBy
, ilProject
, ilFilter
, ilPageToken
, ilMaxResults
) where
import Network.Google.Compute.Types
import Network.Google.Prelude
-- | A resource alias for @compute.interconnects.list@ method which the
-- 'InterconnectsList' request conforms to.
type InterconnectsListResource =
"compute" :>
"v1" :>
"projects" :>
Capture "project" Text :>
"global" :>
"interconnects" :>
QueryParam "returnPartialSuccess" Bool :>
QueryParam "orderBy" Text :>
QueryParam "filter" Text :>
QueryParam "pageToken" Text :>
QueryParam "maxResults" (Textual Word32) :>
QueryParam "alt" AltJSON :>
Get '[JSON] InterconnectList
-- | Retrieves the list of interconnects available to the specified project.
--
-- /See:/ 'interconnectsList' smart constructor.
data InterconnectsList =
InterconnectsList'
{ _ilReturnPartialSuccess :: !(Maybe Bool)
, _ilOrderBy :: !(Maybe Text)
, _ilProject :: !Text
, _ilFilter :: !(Maybe Text)
, _ilPageToken :: !(Maybe Text)
, _ilMaxResults :: !(Textual Word32)
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'InterconnectsList' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'ilReturnPartialSuccess'
--
-- * 'ilOrderBy'
--
-- * 'ilProject'
--
-- * 'ilFilter'
--
-- * 'ilPageToken'
--
-- * 'ilMaxResults'
interconnectsList
:: Text -- ^ 'ilProject'
-> InterconnectsList
interconnectsList pIlProject_ =
InterconnectsList'
{ _ilReturnPartialSuccess = Nothing
, _ilOrderBy = Nothing
, _ilProject = pIlProject_
, _ilFilter = Nothing
, _ilPageToken = Nothing
, _ilMaxResults = 500
}
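-- A request sketch (project id is hypothetical; assumes the usual lens
-- operators (&) and (.~) are in scope):
--   interconnectsList "my-project" & ilMaxResults .~ 100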
-- | Opt-in for partial success behavior which provides partial results in
-- case of failure. The default value is false.
ilReturnPartialSuccess :: Lens' InterconnectsList (Maybe Bool)
ilReturnPartialSuccess
= lens _ilReturnPartialSuccess
(\ s a -> s{_ilReturnPartialSuccess = a})
-- | Sorts list results by a certain order. By default, results are returned
-- in alphanumerical order based on the resource name. You can also sort
-- results in descending order based on the creation timestamp using
-- \`orderBy=\"creationTimestamp desc\"\`. This sorts results based on the
-- \`creationTimestamp\` field in reverse chronological order (newest
-- result first). Use this to sort resources like operations so that the
-- newest operation is returned first. Currently, only sorting by \`name\`
-- or \`creationTimestamp desc\` is supported.
ilOrderBy :: Lens' InterconnectsList (Maybe Text)
ilOrderBy
= lens _ilOrderBy (\ s a -> s{_ilOrderBy = a})
-- | Project ID for this request.
ilProject :: Lens' InterconnectsList Text
ilProject
= lens _ilProject (\ s a -> s{_ilProject = a})
-- | A filter expression that filters resources listed in the response. The
-- expression must specify the field name, a comparison operator, and the
-- value that you want to use for filtering. The value must be a string, a
-- number, or a boolean. The comparison operator must be either \`=\`,
-- \`!=\`, \`>\`, or \`\<\`. For example, if you are filtering Compute
-- Engine instances, you can exclude instances named \`example-instance\`
-- by specifying \`name != example-instance\`. You can also filter nested
-- fields. For example, you could specify \`scheduling.automaticRestart =
-- false\` to include instances only if they are not scheduled for
-- automatic restarts. You can use filtering on nested fields to filter
-- based on resource labels. To filter on multiple expressions, provide
-- each separate expression within parentheses. For example: \`\`\`
-- (scheduling.automaticRestart = true) (cpuPlatform = \"Intel Skylake\")
-- \`\`\` By default, each expression is an \`AND\` expression. However,
-- you can include \`AND\` and \`OR\` expressions explicitly. For example:
-- \`\`\` (cpuPlatform = \"Intel Skylake\") OR (cpuPlatform = \"Intel
-- Broadwell\") AND (scheduling.automaticRestart = true) \`\`\`
ilFilter :: Lens' InterconnectsList (Maybe Text)
ilFilter = lens _ilFilter (\ s a -> s{_ilFilter = a})
-- | Specifies a page token to use. Set \`pageToken\` to the
-- \`nextPageToken\` returned by a previous list request to get the next
-- page of results.
ilPageToken :: Lens' InterconnectsList (Maybe Text)
ilPageToken
= lens _ilPageToken (\ s a -> s{_ilPageToken = a})
-- | The maximum number of results per page that should be returned. If the
-- number of available results is larger than \`maxResults\`, Compute
-- Engine returns a \`nextPageToken\` that can be used to get the next page
-- of results in subsequent list requests. Acceptable values are \`0\` to
-- \`500\`, inclusive. (Default: \`500\`)
ilMaxResults :: Lens' InterconnectsList Word32
ilMaxResults
= lens _ilMaxResults (\ s a -> s{_ilMaxResults = a})
. _Coerce
instance GoogleRequest InterconnectsList where
type Rs InterconnectsList = InterconnectList
type Scopes InterconnectsList =
'["https://www.googleapis.com/auth/cloud-platform",
"https://www.googleapis.com/auth/compute",
"https://www.googleapis.com/auth/compute.readonly"]
requestClient InterconnectsList'{..}
= go _ilProject _ilReturnPartialSuccess _ilOrderBy
_ilFilter
_ilPageToken
(Just _ilMaxResults)
(Just AltJSON)
computeService
where go
= buildClient
(Proxy :: Proxy InterconnectsListResource)
mempty
| brendanhay/gogol | gogol-compute/gen/Network/Google/Resource/Compute/Interconnects/List.hs | mpl-2.0 | 7,048 | 0 | 19 | 1,538 | 756 | 452 | 304 | 107 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- Module : Network.AWS.Glacier.SetVaultAccessPolicy
-- Copyright : (c) 2013-2014 Brendan Hay <[email protected]>
-- License : This Source Code Form is subject to the terms of
-- the Mozilla Public License, v. 2.0.
-- A copy of the MPL can be found in the LICENSE file or
-- you can obtain it at http://mozilla.org/MPL/2.0/.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : experimental
-- Portability : non-portable (GHC extensions)
--
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- | This operation configures an access policy for a vault and will overwrite an
-- existing policy. To configure a vault access policy, send a PUT request to
-- the 'access-policy' subresource of the vault. An access policy is specific to a
-- vault and is also called a vault subresource. You can set one access policy
-- per vault and the policy can be up to 20 KB in size. For more information
-- about vault access policies, see <http://docs.aws.amazon.com/amazonglacier/latest/dev/vault-access-policy.html Amazon Glacier Access Control with VaultAccess Policies>.
--
-- <http://docs.aws.amazon.com/amazonglacier/latest/dev/api-SetVaultAccessPolicy.html>
module Network.AWS.Glacier.SetVaultAccessPolicy
(
-- * Request
SetVaultAccessPolicy
-- ** Request constructor
, setVaultAccessPolicy
-- ** Request lenses
, svapAccountId
, svapPolicy
, svapVaultName
-- * Response
, SetVaultAccessPolicyResponse
-- ** Response constructor
, setVaultAccessPolicyResponse
) where
import Network.AWS.Data (Object)
import Network.AWS.Prelude
import Network.AWS.Request.RestJSON
import Network.AWS.Glacier.Types
import qualified GHC.Exts
data SetVaultAccessPolicy = SetVaultAccessPolicy
{ _svapAccountId :: Text
, _svapPolicy :: Maybe VaultAccessPolicy
, _svapVaultName :: Text
} deriving (Eq, Read, Show)
-- | 'SetVaultAccessPolicy' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'svapAccountId' @::@ 'Text'
--
-- * 'svapPolicy' @::@ 'Maybe' 'VaultAccessPolicy'
--
-- * 'svapVaultName' @::@ 'Text'
--
setVaultAccessPolicy :: Text -- ^ 'svapAccountId'
-> Text -- ^ 'svapVaultName'
-> SetVaultAccessPolicy
setVaultAccessPolicy p1 p2 = SetVaultAccessPolicy
{ _svapAccountId = p1
, _svapVaultName = p2
, _svapPolicy = Nothing
}
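-- A request sketch (vault name is hypothetical; "-" makes Glacier use the
-- account of the signing credentials, see 'svapAccountId'; assumes the usual
-- lens operators are in scope):
--   setVaultAccessPolicy "-" "my-vault" & svapPolicy ?~ somePolicy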
-- | The 'AccountId' value is the AWS account ID of the account that owns the vault.
-- You can either specify an AWS account ID or optionally a single '-'
-- (hyphen), in which case Amazon Glacier uses the AWS account ID associated
-- with the credentials used to sign the request. If you use an account ID, do
-- not include any hyphens ('-') in the ID.
svapAccountId :: Lens' SetVaultAccessPolicy Text
svapAccountId = lens _svapAccountId (\s a -> s { _svapAccountId = a })
-- | The vault access policy as a JSON string.
svapPolicy :: Lens' SetVaultAccessPolicy (Maybe VaultAccessPolicy)
svapPolicy = lens _svapPolicy (\s a -> s { _svapPolicy = a })
-- | The name of the vault.
svapVaultName :: Lens' SetVaultAccessPolicy Text
svapVaultName = lens _svapVaultName (\s a -> s { _svapVaultName = a })
data SetVaultAccessPolicyResponse = SetVaultAccessPolicyResponse
deriving (Eq, Ord, Read, Show, Generic)
-- | 'SetVaultAccessPolicyResponse' constructor.
setVaultAccessPolicyResponse :: SetVaultAccessPolicyResponse
setVaultAccessPolicyResponse = SetVaultAccessPolicyResponse
instance ToPath SetVaultAccessPolicy where
toPath SetVaultAccessPolicy{..} = mconcat
[ "/"
, toText _svapAccountId
, "/vaults/"
, toText _svapVaultName
, "/access-policy"
]
instance ToQuery SetVaultAccessPolicy where
toQuery = const mempty
instance ToHeaders SetVaultAccessPolicy
instance ToJSON SetVaultAccessPolicy where
toJSON SetVaultAccessPolicy{..} = object
[ "policy" .= _svapPolicy
]
instance AWSRequest SetVaultAccessPolicy where
type Sv SetVaultAccessPolicy = Glacier
type Rs SetVaultAccessPolicy = SetVaultAccessPolicyResponse
request = put
response = nullResponse SetVaultAccessPolicyResponse
| romanb/amazonka | amazonka-glacier/gen/Network/AWS/Glacier/SetVaultAccessPolicy.hs | mpl-2.0 | 4,734 | 0 | 9 | 989 | 510 | 311 | 199 | 64 | 1 |
{- arch-tag: FTP server support
Copyright (C) 2004 John Goerzen <[email protected]>
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU Lesser General Public License as published by
the Free Software Foundation; either version 2.1 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
-}
{- |
Module : Network.FTP.Server
Copyright : Copyright (C) 2004 John Goerzen
License : GNU LGPL, version 2.1 or above
Maintainer : John Goerzen <[email protected]>
Stability : experimental
Portability: systems with networking
This module provides a server-side interface to the File Transfer Protocol
as defined by:
* RFC959, basic protocol
* RFC1123, clarifications
* RFC1579, passive mode discussion
Written by John Goerzen, jgoerzen\@complete.org
This is a modular FTP server implementation in pure Haskell. It is highly
adaptable to many different tasks, and can serve up not only real files
and directories, but also virtually any data structure you could represent
as a filesystem. It does this by using the
"System.IO.HVFS" and "System.IO.HVIO" modules.
In addition, basic networking and multitasking configuration is handled
via "Network.SocketServer" and logging via
"System.Log.Logger".
This module is believed to be secure, but it is not believed to be robust enough
for use on a public FTP server. In particular, it may be vulnerable to denial
of service attacks due to no timeouts or restrictions on data size, and
error catching is not yet completely pervasive. These will be fixed in time.
Your patches would also be welcomed.
Here is an example server that serves up the entire local filesystem
in a read-only manner:
>import Network.FTP.Server
>import Network.SocketServer
>import System.Log.Logger
>import System.IO.HVFS
>import System.IO.HVFS.Combinators
>
>main = do
> updateGlobalLogger "" (setLevel DEBUG)
> updateGlobalLogger "Network.FTP.Server" (setLevel DEBUG)
> let opts = (simpleTCPOptions 12345) {reuse = True}
> serveTCPforever opts $
> threadedHandler $
> loggingHandler "" INFO $
> handleHandler $
> anonFtpHandler (HVFSReadOnly SystemFS)
Hint: if you want to serve up only part of a filesystem, see
'System.IO.HVFS.Combinators.newHVFSChroot'.
-}
module Network.FTP.Server(
anonFtpHandler
)
where
import Network.FTP.Server.Parser
import Network.FTP.Client.Parser
import Network.BSD
import Network.Socket
import qualified Network
import System.IO.Utils
import System.IO.Error
import System.Log.Logger
import Network.Utils
import Network.SocketServer
import Data.String.Utils
import System.IO.HVIO
import System.IO.HVFS
import System.IO.HVFS.InstanceHelpers
import System.IO.HVFS.Utils
import Text.Printf
import Data.Char
import Data.IORef
import Data.List
import Control.Exception(finally)
import System.IO
data DataType = ASCII | Binary
deriving (Eq, Show)
data AuthState = NoAuth
| User String
| Authenticated String
deriving (Eq, Show)
data DataChan = NoChannel
| PassiveMode SocketServer
| PortMode SockAddr
data FTPState = FTPState
{ auth :: IORef AuthState,
datatype :: IORef DataType,
rename :: IORef (Maybe String),
datachan :: IORef DataChan,
local :: SockAddr,
remote :: SockAddr}
data FTPServer = forall a. HVFSOpenable a => FTPServer Handle a FTPState
s_crlf = "\r\n"
logname = "Network.FTP.Server"
ftpPutStrLn :: FTPServer -> String -> IO ()
ftpPutStrLn (FTPServer h _ _) text =
do hPutStr h (text ++ s_crlf)
hFlush h
{- | Send a reply code, handling multi-line text as necessary. -}
sendReply :: FTPServer -> Int -> String -> IO ()
sendReply h codei text =
let codes = printf "%03d" codei
writethis [] = ftpPutStrLn h (codes ++ " ")
writethis [item] = ftpPutStrLn h (codes ++ " " ++ item)
writethis (item:xs) = do ftpPutStrLn h (codes ++ "-" ++ item)
writethis xs
in
writethis (map (rstrip) (lines text))
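-- For example, a two-line reply text
--   sendReply h 214 "first line\nsecond line"
-- is written to the control connection as
--   214-first line
--   214 second line
-- (continuation lines carry a '-' after the code, the final line a space).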
{- | Main FTP handler; pass the result of applying this to one argument to
'Network.SocketServer.handleHandler' -}
anonFtpHandler :: forall a. HVFSOpenable a => a -> Handle -> SockAddr -> SockAddr -> IO ()
anonFtpHandler f h saremote salocal =
let serv r = FTPServer h f r
in
traplogging logname NOTICE "" $
do authr <- newIORef (NoAuth)
typer <- newIORef ASCII
renamer <- newIORef (Nothing::Maybe String)
chanr <- newIORef (NoChannel)
let s = serv (FTPState {auth = authr, datatype = typer,
rename = renamer, datachan = chanr,
local = salocal, remote = saremote})
sendReply s 220 "Welcome to Network.FTP.Server."
commandLoop s
type CommandHandler = FTPServer -> String -> IO Bool
data Command = Command String (CommandHandler, (String, String))
instance Eq Command where
(Command x _) == (Command y _) = x == y
instance Ord Command where
compare (Command x _) (Command y _) = compare x y
trapIOError :: FTPServer -> IO a -> (a -> IO Bool) -> IO Bool
trapIOError h testAction remainingAction =
do result <- tryIOError testAction
case result of
Left err -> do sendReply h 550 (show err)
return True
Right result -> remainingAction result
forceLogin :: CommandHandler -> CommandHandler
forceLogin func h@(FTPServer _ _ state) args =
do state <- readIORef (auth state)
case state of
Authenticated _ -> func h args
x -> do sendReply h 530 "Command not possible in non-authenticated state."
return True
commands :: [Command]
commands =
[(Command "HELP" (cmd_help, help_help))
,(Command "QUIT" (cmd_quit, help_quit))
,(Command "USER" (cmd_user, help_user))
,(Command "PASS" (cmd_pass, help_pass))
,(Command "CWD" (forceLogin cmd_cwd, help_cwd))
,(Command "CDUP" (forceLogin cmd_cdup, help_cdup))
,(Command "TYPE" (forceLogin cmd_type, help_type))
,(Command "NOOP" (forceLogin cmd_noop, help_noop))
,(Command "RNFR" (forceLogin cmd_rnfr, help_rnfr))
,(Command "RNTO" (forceLogin cmd_rnto, help_rnto))
,(Command "DELE" (forceLogin cmd_dele, help_dele))
,(Command "RMD" (forceLogin cmd_rmd, help_rmd))
,(Command "MKD" (forceLogin cmd_mkd, help_mkd))
,(Command "PWD" (forceLogin cmd_pwd, help_pwd))
,(Command "MODE" (forceLogin cmd_mode, help_mode))
,(Command "STRU" (forceLogin cmd_stru, help_stru))
,(Command "PASV" (forceLogin cmd_pasv, help_pasv))
,(Command "PORT" (forceLogin cmd_port, help_port))
,(Command "RETR" (forceLogin cmd_retr, help_retr))
,(Command "STOR" (forceLogin cmd_stor, help_stor))
,(Command "STAT" (forceLogin cmd_stat, help_stat))
,(Command "SYST" (forceLogin cmd_syst, help_syst))
,(Command "NLST" (forceLogin cmd_nlst, help_nlst))
,(Command "LIST" (forceLogin cmd_list, help_list))
]
commandLoop :: FTPServer -> IO ()
commandLoop h@(FTPServer fh _ _) =
let errorhandler e = do noticeM logname
("Closing due to error: " ++ (show e))
hClose fh
return False
in do continue <- (flip catchIOError) errorhandler
(do x <- parseCommand fh
case x of
Left err -> do sendReply h 500 $
" Couldn't parse command: " ++ (show err)
return True
Right (cmd, args) ->
case lookupC cmd commands of
Nothing -> do sendReply h 502 $
"Unrecognized command " ++ cmd
return True
Just (Command _ hdlr) -> (fst hdlr) h args
)
if continue
then commandLoop h
else return ()
lookupC cmd cl = find (\(Command x _) -> x == cmd) cl
help_quit =
("Terminate the session",
"")
cmd_quit :: CommandHandler
cmd_quit h args =
do sendReply h 221 "OK, Goodbye."
return False
help_user =
("Provide a username",
unlines $
["USER username will provide the username for authentication."
,"It should be followed by a PASS command to finish the authentication."
])
cmd_user :: CommandHandler
cmd_user h@(FTPServer _ _ state) passedargs =
let args = strip passedargs
in
case args of
"anonymous" -> do sendReply h 331 "User name accepted; send password."
writeIORef (auth state) (User args)
return True
_ -> do sendReply h 530 "Unrecognized user name; please try \"anonymous\""
writeIORef (auth state) NoAuth
return True
help_pass =
("Provide a password",
"PASS password will provide the password for authentication.")
cmd_pass :: CommandHandler
cmd_pass h@(FTPServer _ _ state) passedargs =
do curstate <- readIORef (auth state)
case curstate of
User "anonymous" ->
do sendReply h 230 "Anonymous login successful."
writeIORef (auth state) (Authenticated "anonymous")
infoM logname "Anonymous authentication successful"
return True
_ -> do sendReply h 530 "Out of sequence PASS command"
return True
help_cwd =
("Change working directory",
unlines $
["Syntax: CWD cwd"
,""
,"Changes the working directory to the specified item"])
cmd_cwd :: CommandHandler
cmd_cwd h@(FTPServer _ fs _) args =
do trapIOError h (vSetCurrentDirectory fs args)
$ \_ -> do
newdir <- vGetCurrentDirectory fs
sendReply h 250 $ "New directory now " ++ newdir
return True
help_cdup =
("Change to parent directory", "Same as CWD ..")
cmd_cdup h _ = cmd_cwd h ".."
help_type =
("Change the type of data transfer", "Valid args are A, AN, and I")
cmd_type :: CommandHandler
cmd_type h@(FTPServer _ _ state) args =
let changetype newt =
do oldtype <- readIORef (datatype state)
writeIORef (datatype state) newt
sendReply h 200 $ "Type changed from " ++ show oldtype ++
" to " ++ show newt
return True
in case args of
"I" -> changetype Binary
"L 8" -> changetype Binary
"A" -> changetype ASCII
"AN" -> changetype ASCII
"AT" -> changetype ASCII
_ -> do sendReply h 504 $ "Type \"" ++ args ++ "\" not supported."
return True
closeconn :: FTPServer -> IO ()
closeconn h@(FTPServer _ _ state) =
do dc <- readIORef (datachan state)
writeIORef (datachan state) NoChannel
help_port = ("Initiate a port-mode connection", "")
cmd_port :: CommandHandler
cmd_port h@(FTPServer _ _ state) args =
let doIt clientsa =
do writeIORef (datachan state) (PortMode clientsa)
str <- showSockAddr clientsa
sendReply h 200 $ "OK, later I will connect to " ++ str
return True
in
do closeconn h -- Close any existing connection
trapIOError h (fromPortString args) $ (\clientsa ->
case clientsa of
SockAddrInet _ ha ->
case (local state) of
SockAddrInet _ ha2 -> if ha /= ha2
then do sendReply h 501 "Will only connect to same client as command channel."
return True
else doIt clientsa
_ -> do sendReply h 501 "Require IPv4 on client"
return True
_ -> do sendReply h 501 "Require IPv4 in specified address"
return True
)
runDataChan :: FTPServer -> (FTPServer -> Socket -> IO ()) -> IO ()
runDataChan h@(FTPServer _ _ state) func =
do chan <- readIORef (datachan state)
case chan of
NoChannel -> fail "Can't connect when no data channel exists"
PassiveMode ss -> do finally (handleOne ss (\sock _ _ -> func h sock))
(do closeSocketServer ss
closeconn h
)
PortMode sa -> do proto <- getProtocolNumber "tcp"
s <- socket AF_INET Stream proto
connect s sa
finally (func h s) $ closeconn h
help_pasv = ("Initiate a passive-mode connection", "")
cmd_pasv :: CommandHandler
cmd_pasv h@(FTPServer _ _ state) args =
do closeconn h -- Close any existing connection
addr <- case (local state) of
(SockAddrInet _ ha) -> return ha
_ -> fail "Require IPv4 sockets"
let ssopts = InetServerOptions
{ listenQueueSize = 1,
portNumber = aNY_PORT,
interface = addr,
reuse = False,
family = AF_INET,
sockType = Stream,
protoStr = "tcp"
}
ss <- setupSocketServer ssopts
sa <- getSocketName (sockSS ss)
portstring <- toPortString sa
sendReply h 227 $ "Entering passive mode (" ++ portstring ++ ")"
writeIORef (datachan state) (PassiveMode ss)
return True
help_noop = ("Do nothing", "")
cmd_noop :: CommandHandler
cmd_noop h _ =
do sendReply h 200 "OK"
return True
help_rnfr = ("Specify FROM name for a file rename", "")
cmd_rnfr :: CommandHandler
cmd_rnfr h@(FTPServer _ _ state) args =
if length args < 1
then do sendReply h 501 "Filename required"
return True
else do writeIORef (rename state) (Just args)
sendReply h 350 "Noted rename from name; please send RNTO."
return True
help_stor = ("Upload a file", "")
cmd_stor :: CommandHandler
cmd_stor h@(FTPServer _ fs state) args =
let datamap :: [String] -> [String]
datamap instr =
let linemap :: String -> String
linemap x = if endswith "\r" x
then take ((length x) - 1) x
else x
in map linemap instr
runit fhencap _ sock =
case fhencap of
HVFSOpenEncap fh ->
do readh <- socketToHandle sock ReadMode
mode <- readIORef (datatype state)
case mode of
ASCII -> finally (hLineInteract readh fh datamap)
(hClose readh)
Binary -> finally (do vSetBuffering fh (BlockBuffering (Just 4096))
hCopy readh fh
) (hClose readh)
in
if length args < 1
then do sendReply h 501 "Filename required"
return True
else trapIOError h (vOpen fs args WriteMode)
(\fhencap ->
trapIOError h (do sendReply h 150 "File OK; about to open data channel"
runDataChan h (runit fhencap)
)
(\_ ->
do case fhencap of
HVFSOpenEncap fh -> vClose fh
sendReply h 226 "Closing data connection; transfer complete."
return True
)
)
rtransmitString :: String -> FTPServer -> Socket -> IO ()
rtransmitString thestr (FTPServer _ _ state) sock =
let fixlines :: [String] -> [String]
fixlines x = map (\y -> y ++ "\r") x
copyit h =
hPutStr h $ unlines . fixlines . lines $ thestr
in
do writeh <- socketToHandle sock WriteMode
hSetBuffering writeh (BlockBuffering (Just 4096))
mode <- readIORef (datatype state)
case mode of
ASCII -> finally (copyit writeh)
(hClose writeh)
Binary -> finally (hPutStr writeh thestr)
(hClose writeh)
rtransmitH :: HVFSOpenEncap -> FTPServer -> Socket -> IO ()
rtransmitH fhencap h sock =
case fhencap of
HVFSOpenEncap fh ->
finally (do c <- vGetContents fh
rtransmitString c h sock
) (vClose fh)
genericTransmit :: FTPServer -> a -> (a -> FTPServer -> Socket -> IO ()) -> IO Bool
genericTransmit h dat func =
trapIOError h
(do sendReply h 150 "I'm going to open the data channel now."
runDataChan h (func dat)
) (\_ ->
do sendReply h 226 "Closing data connection; transfer complete."
return True
)
genericTransmitHandle :: FTPServer -> HVFSOpenEncap -> IO Bool
genericTransmitHandle h dat =
genericTransmit h dat rtransmitH
genericTransmitString :: FTPServer -> String -> IO Bool
genericTransmitString h dat =
genericTransmit h dat rtransmitString
help_retr = ("Retrieve a file", "")
cmd_retr :: CommandHandler
cmd_retr h@(FTPServer _ fs state) args =
if length args < 1
then do sendReply h 501 "Filename required"
return True
else trapIOError h (vOpen fs args ReadMode)
(\fhencap -> genericTransmitHandle h fhencap)
help_rnto = ("Specify TO name for a file rename", "")
cmd_rnto :: CommandHandler
cmd_rnto h@(FTPServer _ fs state) args =
if length args < 1
then do sendReply h 501 "Filename required"
return True
else do fr <- readIORef (rename state)
case fr of
Nothing -> do sendReply h 503 "RNFR required before RNTO"
return True
Just fromname ->
do writeIORef (rename state) Nothing
trapIOError h (vRenameFile fs fromname args)
$ \_ -> do sendReply h 250
("File " ++ fromname ++
" renamed to " ++ args)
return True
help_dele = ("Delete files", "")
cmd_dele :: CommandHandler
cmd_dele h@(FTPServer _ fs _) args =
if length args < 1
then do sendReply h 501 "Filename required"
return True
else trapIOError h (vRemoveFile fs args) $
\_ -> do sendReply h 250 $ "File " ++ args ++ " deleted."
return True
help_nlst = ("Get plain listing of files", "")
cmd_nlst :: CommandHandler
cmd_nlst h@(FTPServer _ fs _) args =
let fn = case args of
"" -> "."
x -> x
in
trapIOError h (vGetDirectoryContents fs fn)
(\l -> genericTransmitString h (unlines l))
help_list = ("Get an annotated listing of files", "")
cmd_list :: CommandHandler
cmd_list h@(FTPServer _ fs _) args =
let fn = case args of
"" -> "."
x -> x
in
trapIOError h (lsl fs fn)
(\l -> genericTransmitString h l)
help_rmd = ("Remove directory", "")
cmd_rmd :: CommandHandler
cmd_rmd h@(FTPServer _ fs _) args =
if length args < 1
then do sendReply h 501 "Filename required"
return True
else trapIOError h (vRemoveDirectory fs args) $
\_ -> do sendReply h 250 $ "Directory " ++ args ++ " removed."
return True
help_mkd = ("Make directory", "")
cmd_mkd :: CommandHandler
cmd_mkd h@(FTPServer _ fs _) args =
if length args < 1
then do sendReply h 501 "Filename required"
return True
else trapIOError h (vCreateDirectory fs args) $
\_ -> do newname <- getFullPath fs args
sendReply h 257 $ "\"" ++ newname ++ "\" created."
return True
help_pwd = ("Print working directory", "")
cmd_pwd :: CommandHandler
cmd_pwd h@(FTPServer _ fs _) _ =
do d <- vGetCurrentDirectory fs
sendReply h 257 $ "\"" ++ d ++ "\" is the current working directory."
return True
help_mode = ("Provided for compatibility only", "")
cmd_mode :: CommandHandler
cmd_mode h args =
case args of
"S" -> do sendReply h 200 "Mode is Stream."
return True
x -> do sendReply h 504 $ "Mode \"" ++ x ++ "\" not supported."
return True
help_stru = ("Provided for compatibility only", "")
cmd_stru :: CommandHandler
cmd_stru h args =
case args of
"F" -> do sendReply h 200 "Structure is File."
return True
x -> do sendReply h 504 $ "Structure \"" ++ x ++ "\" not supported."
return True
help_syst = ("Display system type", "")
cmd_syst :: CommandHandler
cmd_syst h _ =
-- I have no idea what this L8 means, but everyone else seems to do
-- this, so I do too..
do sendReply h 215 "UNIX Type: L8"
return True
help_stat = ("Display sever statistics", "")
cmd_stat :: CommandHandler
cmd_stat h@(FTPServer _ _ state) _ =
do loc <- showSockAddr (local state)
rem <- showSockAddr (remote state)
auth <- readIORef (auth state)
datm <- readIORef (datatype state)
sendReply h 211 $ unlines $
[" *** Sever statistics and information"
," *** Please type HELP for more details"
,""
,"Server Software : MissingH, http://quux.org/devel/missingh"
,"Connected From : " ++ rem
,"Connected To : " ++ loc
,"Data Transfer Type : " ++ (show datm)
,"Auth Status : " ++ (show auth)
,"End of status."]
return True
help_help =
("Display help on available commands",
"When called without arguments, shows a summary of available system\n"
++ "commands. When called with an argument, shows detailed information\n"
++ "on that specific command.")
cmd_help :: CommandHandler
cmd_help h@(FTPServer _ _ state) args =
let genericreply addr = unlines $
[" --- General Help Response ---"
,""
,"Welcome to the FTP server, " ++ addr ++ "."
,"This server is implemented as the Network.FTP.Server"
,"component of the MissingH library. The MissingH library"
,"is available from http://quux.org/devel/missingh."
,""
,""
,"I know of the following commands:"
,concatMap (\ (Command name (_, (summary, _))) -> printf "%-10s %s\n" name summary)
(sort commands)
,""
,"You may type \"HELP command\" for more help on a specific command."
]
in
if args == ""
then do sastr <- showSockAddr (remote state)
sendReply h 214 (genericreply sastr)
return True
else let newargs = map toUpper args
in case lookupC newargs commands of
Nothing -> do
sendReply h 214 $ "No help for \"" ++ newargs
++ "\" is available.\nPlese send HELP"
++ " without arguments for a list of\n"
++ "valid commands."
return True
Just (Command _ (_, (summary, detail))) ->
do sendReply h 214 $ newargs ++ ": " ++ summary ++
"\n\n" ++ detail
return True
| icetortoise/ftphs | src/Network/FTP/Server.hs | lgpl-2.1 | 25,147 | 0 | 25 | 9,083 | 5,898 | 2,929 | 2,969 | -1 | -1 |
module ViperVM.Backends.OpenCL.Processor (
Processor, initProc,
procLibrary, procContext, procDevice,
procQueue, procID, procName, procVendor,
procCapabilities, procMemories,
programCompile
) where
import ViperVM.Backends.OpenCL.Types
import ViperVM.Backends.OpenCL.Loader
import ViperVM.Backends.OpenCL.Query
import ViperVM.Backends.OpenCL.CommandQueue
import ViperVM.Backends.OpenCL.Memory
import ViperVM.Platform.ProcessorCapabilities
import ViperVM.Platform.Compilation
import Data.Set (Set,fromList)
import Text.Printf
import Control.Applicative ( (<$>) )
import Data.List ( intersect )
data Processor = Processor {
procLibrary :: OpenCLLibrary,
procContext :: CLContext,
procDevice :: CLDeviceID,
procQueue :: CLCommandQueue,
procID :: String,
procName :: String,
procVendor :: String,
procCapabilities :: Set ProcessorCapability
}
instance Eq Processor where
(==) p1 p2 = procID p1 == procID p2
instance Ord Processor where
compare p1 p2 = compare (procID p1) (procID p2)
instance Show Processor where
show p = "{" ++ procID p ++ "}"
-- | Initialize an OpenCL processor
initProc :: OpenCLLibrary -> CLContext -> CLDeviceID -> (Int,Int) -> IO Processor
initProc lib ctx dev (pfIdx,devIdx) = do
devProps <- clGetDeviceQueueProperties lib dev
let props = intersect [CL_QUEUE_OUT_OF_ORDER_EXEC_MODE_ENABLE] devProps
cq <- clCreateCommandQueue lib ctx dev props
name <- clGetDeviceName lib dev
vendor <- clGetDeviceVendor lib dev
let pid = printf "OpenCL %d %d" pfIdx devIdx
caps <- retrieveCapabilities lib dev
return $ Processor {
procLibrary = lib,
procContext = ctx,
procDevice = dev,
procQueue = cq,
procID = pid,
procName = name,
procVendor = vendor,
procCapabilities = caps
}
-- | Retrieve capabilities of an OpenCL processor
retrieveCapabilities :: OpenCLLibrary -> CLDeviceID -> IO (Set ProcessorCapability)
retrieveCapabilities lib dev = do
extensions <- clGetDeviceExtensions lib dev
return . fromList $
if "cl_khr_fp64" `elem` extensions
then [DoubleFloatingPoint]
else []
-- | Retrieve attached memory
procMemories :: Processor -> IO [Memory]
procMemories p = return <$> initMemory (procLibrary p) (procContext p) (procDevice p)
-- | Compile a program
programCompile :: CLProgram -> [Processor] -> IO [CompilationResult]
programCompile _p _procs = undefined -- TODO: not yet implemented
| hsyl20/HViperVM | lib/ViperVM/Backends/OpenCL/Processor.hs | lgpl-3.0 | 2,448 | 0 | 12 | 469 | 652 | 362 | 290 | 61 | 2 |
{-# LANGUAGE OverloadedStrings #-}
module LastFM
(
scrobbleItem
, getLastfmSession
) where
import System.IO
import Data.Aeson.Types
import Data.List (find)
import Data.Text (Text, pack)
import Data.Aeson hiding (json)
import Data.Int (Int64)
import System.Directory
import qualified Data.List.NonEmpty as NEL
import qualified Data.Text.IO as TIO
import Lastfm
import Lastfm.Track (item, scrobble)
import Lastfm.Response (Secret)
import Lastfm.Authentication (link, getSession, getToken)
import Flac
getToken' :: Value -> Parser String
getToken' o = parseJSON o >>= (.: "token")
getSession' :: Value -> Parser Text
getSession' o = parseJSON o >>= (.: "session") >>= (.: "key")
getLastfmToken :: Text -> IO (Maybe String)
getLastfmToken key = withConnection $ \conn -> do
r <- lastfm conn $ getToken <*> apiKey key <* json
case r of
Right t ->
return $ parseMaybe getToken' t
Left e ->
return Nothing
getSessionFile :: IO FilePath
getSessionFile = do
c <- getXdgDirectory XdgConfig "hmscrobbler"
createDirectoryIfMissing False c
return $ c ++ "/lastfm_session"
writeSession :: Text -> IO ()
writeSession s = do
f <- getSessionFile
o <- openFile f WriteMode
TIO.hPutStr o s
hClose o
readSession :: IO (Maybe Text)
readSession = do
f <- getSessionFile
e <- doesFileExist f
if e then do
co <- TIO.readFile f
return $ Just co
else return Nothing
getValue :: VorbisCommentItem -> Text
getValue (VorbisCommentItem _ v) = v
getItem :: [VorbisCommentItem] -> Text -> Maybe Text
getItem cs i = do
a <- find (\(VorbisCommentItem k v) -> k == i) cs
Just $ getValue a
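-- Build a last.fm scrobble item from Vorbis comments; the ARTIST, TITLE and
-- ALBUM tags must all be present, otherwise this yields Nothing.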
vorbisCommentsToScrobble :: VorbisComments -> Maybe (Request f (Timestamp -> Scrobble))
vorbisCommentsToScrobble (VorbisComments _ comments) = do
ar <- getItem comments "ARTIST"
tl <- getItem comments "TITLE"
al <- getItem comments "ALBUM"
Just $ item <*> artist ar <*> track tl <* album al
scrobbleItem :: VorbisComments -> Text -> Text -> Text -> Int64 -> IO (Either LastfmError Value)
scrobbleItem cs key secret session ts =
case vorbisCommentsToScrobble cs of
Just item -> withConnection $ \conn ->
lastfm conn . sign (Secret secret) $ scrobble (NEL.fromList [item <*> timestamp ts]) <*>
apiKey key <*> sessionKey session <* json
Nothing -> return $ Left $ LastfmBadResponse "unable to extract metadata"
getLastfmSession :: Text -> Text -> IO (Maybe Text)
getLastfmSession key secret = do
s <- readSession
case s of
Just se -> return $ Just se
Nothing -> getLastfmSession' key secret
getLastfmSession' :: Text -> Text -> IO (Maybe Text)
getLastfmSession' key secret = withConnection $ \conn -> do
to <- getLastfmToken key
case to of
Just t -> do
let b = pack t
putStrLn $ "approve: " ++ link (apiKey key <* token b)
_ <- getChar
r <- lastfm conn . sign (Secret secret) $ getSession <*> token b <*> apiKey key <* json
case r of
Right p ->
case parseMaybe getSession' p of
Just s -> do
writeSession s
return $ Just s
Nothing -> return Nothing
Left e ->
return Nothing
| amir/hmscrobbler | src/LastFM.hs | unlicense | 3,183 | 0 | 23 | 739 | 1,134 | 554 | 580 | 93 | 3 |
module HaskHOL.Lib.CalcNum.Pre where
import HaskHOL.Core hiding (base)
import HaskHOL.Deductive
import HaskHOL.Lib.Nums
import HaskHOL.Lib.Arith
import HaskHOL.Lib.WF
-- Build up lookup table for numeral conversions.
tmZero, tmBIT0, tmBIT1, tmM, tmN, tmP, tmAdd, tmSuc :: WFCtxt thry => HOL cls thry HOLTerm
tmZero = serve [wf| _0 |]
tmBIT0 = serve [wf| BIT0 |]
tmBIT1 = serve [wf| BIT1 |]
tmM = serve [wf| m:num |]
tmN = serve [wf| n:num |]
tmP = serve [wf| p:num |]
tmAdd = serve [wf| (+) |]
tmSuc = serve [wf| SUC |]
mkClauses :: WFCtxt thry => Bool -> HOLTerm -> HOL cls thry (HOLThm, Int)
mkClauses sucflag t =
do tmSuc' <- tmSuc
tm <- if sucflag then mkComb tmSuc' t else return t
th1 <- runConv (convPURE_REWRITE
[thmARITH_ADD, thmARITH_SUC, thmARITH_0]) tm
tm1 <- patadj =<< rand (concl th1)
tmAdd' <- toHTm tmAdd
tmP' <- toHTm tmP
tmM' <- toHTm tmM
if not (tmAdd' `freeIn` tm1)
then return (th1, if tmM' `freeIn` tm1 then 0 else 1)
else do ptm <- rand =<< rand =<< rand =<< rand tm1
ptm' <- mkEq ptm tmP'
tmc <- mkEq ptm' =<< mkEq tm =<< subst [(ptm, tmP')] tm1
th <- ruleEQT_ELIM =<<
runConv (convREWRITE [ thmARITH_ADD
, thmARITH_SUC
, thmARITH_0
, thmBITS_INJ]) tmc
return (th, if tmSuc' `freeIn` tm1 then 3 else 2)
where patadj :: WFCtxt thry => HOLTerm -> HOL cls thry HOLTerm
patadj tm =
do tms <- mapM (pairMapM toHTm)
[ (serve [wf| SUC m |], serve [wf| SUC (m + _0) |])
, (serve [wf| SUC n |], serve [wf| SUC (_0 + n) |])]
subst tms tm
starts :: WFCtxt thry => HOL cls thry [HOLTerm]
starts =
do ms <- bases tmM
ns <- bases tmN
allpairsV (\ mtm ntm -> mkComb (mkComb tmAdd mtm) ntm) ms ns
where allpairsV :: Monad m => (a -> b -> m c) -> [a] -> [b] -> m [c]
allpairsV _ [] _ = return []
allpairsV f (h:t) ys =
do t' <- allpairsV f t ys
foldrM (\ x a -> do h' <- f h x
return (h' : a)) t' ys
bases :: (WFCtxt thry, HOLTermRep tm cls thry)
=> tm -> HOL cls thry [HOLTerm]
bases pv =
do v <- toHTm pv
v0 <- mkComb tmBIT0 v
v1 <- mkComb tmBIT1 v
part2 <- mapM (`mkCompnumeral` v) [8..15]
part1 <- mapM (subst [(v1, v0)]) part2
tmZero' <- toHTm tmZero
part0 <- mapM (`mkCompnumeral` tmZero') [0..15]
return $! part0 ++ part1 ++ part2
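-- Build the composite numeral term for k on top of the given base term using
-- BIT0/BIT1; e.g. (illustrative) mkCompnumeral 5 base yields
-- BIT1 (BIT0 (BIT1 base)).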
mkCompnumeral :: WFCtxt thry => Int -> HOLTerm -> HOL cls thry HOLTerm
mkCompnumeral 0 base = return base
mkCompnumeral k base =
do t <- mkCompnumeral (k `div` 2) base
if k `mod` 2 == 1
then mkComb tmBIT1 t
else mkComb tmBIT0 t
convNUM_SHIFT_pths0' :: WFCtxt thry => HOL cls thry HOLThm
convNUM_SHIFT_pths0' = cacheProof "convNUM_SHIFT_pths0'" ctxtWF .
prove [txt| (n = _0 + p * b <=>
BIT0(BIT0(BIT0(BIT0 n))) =
_0 + BIT0(BIT0(BIT0(BIT0 p))) * b) /\
(n = _0 + p * b <=>
BIT1(BIT0(BIT0(BIT0 n))) =
BIT1 _0 + BIT0(BIT0(BIT0(BIT0 p))) * b) /\
(n = _0 + p * b <=>
BIT0(BIT1(BIT0(BIT0 n))) =
BIT0(BIT1 _0) + BIT0(BIT0(BIT0(BIT0 p))) * b) /\
(n = _0 + p * b <=>
BIT1(BIT1(BIT0(BIT0 n))) =
BIT1(BIT1 _0) + BIT0(BIT0(BIT0(BIT0 p))) * b) /\
(n = _0 + p * b <=>
BIT0(BIT0(BIT1(BIT0 n))) =
BIT0(BIT0(BIT1 _0)) + BIT0(BIT0(BIT0(BIT0 p))) * b) /\
(n = _0 + p * b <=>
BIT1(BIT0(BIT1(BIT0 n))) =
BIT1(BIT0(BIT1 _0)) + BIT0(BIT0(BIT0(BIT0 p))) * b) /\
(n = _0 + p * b <=>
BIT0(BIT1(BIT1(BIT0 n))) =
BIT0(BIT1(BIT1 _0)) + BIT0(BIT0(BIT0(BIT0 p))) * b) /\
(n = _0 + p * b <=>
BIT1(BIT1(BIT1(BIT0 n))) =
BIT1(BIT1(BIT1 _0)) + BIT0(BIT0(BIT0(BIT0 p))) * b) /\
(n = _0 + p * b <=>
BIT0(BIT0(BIT0(BIT1 n))) =
BIT0(BIT0(BIT0(BIT1 _0))) + BIT0(BIT0(BIT0(BIT0 p))) * b) /\
(n = _0 + p * b <=>
BIT1(BIT0(BIT0(BIT1 n))) =
BIT1(BIT0(BIT0(BIT1 _0))) + BIT0(BIT0(BIT0(BIT0 p))) * b) /\
(n = _0 + p * b <=>
BIT0(BIT1(BIT0(BIT1 n))) =
BIT0(BIT1(BIT0(BIT1 _0))) + BIT0(BIT0(BIT0(BIT0 p))) * b) /\
(n = _0 + p * b <=>
BIT1(BIT1(BIT0(BIT1 n))) =
BIT1(BIT1(BIT0(BIT1 _0))) + BIT0(BIT0(BIT0(BIT0 p))) * b) /\
(n = _0 + p * b <=>
BIT0(BIT0(BIT1(BIT1 n))) =
BIT0(BIT0(BIT1(BIT1 _0))) + BIT0(BIT0(BIT0(BIT0 p))) * b) /\
(n = _0 + p * b <=>
BIT1(BIT0(BIT1(BIT1 n))) =
BIT1(BIT0(BIT1(BIT1 _0))) + BIT0(BIT0(BIT0(BIT0 p))) * b) /\
(n = _0 + p * b <=>
BIT0(BIT1(BIT1(BIT1 n))) =
BIT0(BIT1(BIT1(BIT1 _0))) + BIT0(BIT0(BIT0(BIT0 p))) * b) /\
(n = _0 + p * b <=>
BIT1(BIT1(BIT1(BIT1 n))) =
BIT1(BIT1(BIT1(BIT1 _0))) + BIT0(BIT0(BIT0(BIT0 p))) * b) |] $
tacSUBST1 (ruleMESON [defNUMERAL] [txt| _0 = 0 |]) `_THEN`
tacMP (ruleREWRITE [ruleGSYM thmMULT_2] thmBIT0) `_THEN`
tacMP (ruleREWRITE [ruleGSYM thmMULT_2] thmBIT1) `_THEN`
tacABBREV [txt| two = 2 |] `_THEN`
_DISCH_THEN (\ th -> tacREWRITE [th]) `_THEN`
_DISCH_THEN (\ th -> tacREWRITE [th]) `_THEN`
_FIRST_X_ASSUM (tacSUBST1 . ruleSYM) `_THEN`
tacREWRITE [ thmADD_CLAUSES, thmSUC_INJ
, thmEQ_MULT_LCANCEL, thmARITH_EQ
, ruleGSYM thmLEFT_ADD_DISTRIB, ruleGSYM thmMULT_ASSOC ]
convNUM_UNSHIFT_puths1' :: WFCtxt thry => HOL cls thry HOLThm
convNUM_UNSHIFT_puths1' = cacheProof "convNUM_UNSHIFT_puths1'" ctxtWF .
prove [txt| (a + p * b = n <=>
BIT0(BIT0(BIT0(BIT0 a))) + BIT0(BIT0(BIT0(BIT0 p))) * b =
BIT0(BIT0(BIT0(BIT0 n)))) /\
(a + p * b = n <=>
BIT1(BIT0(BIT0(BIT0 a))) + BIT0(BIT0(BIT0(BIT0 p))) * b =
BIT1(BIT0(BIT0(BIT0 n)))) /\
(a + p * b = n <=>
BIT0(BIT1(BIT0(BIT0 a))) + BIT0(BIT0(BIT0(BIT0 p))) * b =
BIT0(BIT1(BIT0(BIT0 n)))) /\
(a + p * b = n <=>
BIT1(BIT1(BIT0(BIT0 a))) + BIT0(BIT0(BIT0(BIT0 p))) * b =
BIT1(BIT1(BIT0(BIT0 n)))) /\
(a + p * b = n <=>
BIT0(BIT0(BIT1(BIT0 a))) + BIT0(BIT0(BIT0(BIT0 p))) * b =
BIT0(BIT0(BIT1(BIT0 n)))) /\
(a + p * b = n <=>
BIT1(BIT0(BIT1(BIT0 a))) + BIT0(BIT0(BIT0(BIT0 p))) * b =
BIT1(BIT0(BIT1(BIT0 n)))) /\
(a + p * b = n <=>
BIT0(BIT1(BIT1(BIT0 a))) + BIT0(BIT0(BIT0(BIT0 p))) * b =
BIT0(BIT1(BIT1(BIT0 n)))) /\
(a + p * b = n <=>
BIT1(BIT1(BIT1(BIT0 a))) + BIT0(BIT0(BIT0(BIT0 p))) * b =
BIT1(BIT1(BIT1(BIT0 n)))) /\
(a + p * b = n <=>
BIT0(BIT0(BIT0(BIT1 a))) + BIT0(BIT0(BIT0(BIT0 p))) * b =
BIT0(BIT0(BIT0(BIT1 n)))) /\
(a + p * b = n <=>
BIT1(BIT0(BIT0(BIT1 a))) + BIT0(BIT0(BIT0(BIT0 p))) * b =
BIT1(BIT0(BIT0(BIT1 n)))) /\
(a + p * b = n <=>
BIT0(BIT1(BIT0(BIT1 a))) + BIT0(BIT0(BIT0(BIT0 p))) * b =
BIT0(BIT1(BIT0(BIT1 n)))) /\
(a + p * b = n <=>
BIT1(BIT1(BIT0(BIT1 a))) + BIT0(BIT0(BIT0(BIT0 p))) * b =
BIT1(BIT1(BIT0(BIT1 n)))) /\
(a + p * b = n <=>
BIT0(BIT0(BIT1(BIT1 a))) + BIT0(BIT0(BIT0(BIT0 p))) * b =
BIT0(BIT0(BIT1(BIT1 n)))) /\
(a + p * b = n <=>
BIT1(BIT0(BIT1(BIT1 a))) + BIT0(BIT0(BIT0(BIT0 p))) * b =
BIT1(BIT0(BIT1(BIT1 n)))) /\
(a + p * b = n <=>
BIT0(BIT1(BIT1(BIT1 a))) + BIT0(BIT0(BIT0(BIT0 p))) * b =
BIT0(BIT1(BIT1(BIT1 n)))) /\
(a + p * b = n <=>
BIT1(BIT1(BIT1(BIT1 a))) + BIT0(BIT0(BIT0(BIT0 p))) * b =
BIT1(BIT1(BIT1(BIT1 n)))) |] $
tacSUBST1 (ruleMESON [defNUMERAL] [txt| _0 = 0 |]) `_THEN`
tacMP (ruleREWRITE [ruleGSYM thmMULT_2] thmBIT0) `_THEN`
tacMP (ruleREWRITE [ruleGSYM thmMULT_2] thmBIT1) `_THEN`
tacABBREV [txt| two = 2 |] `_THEN`
_DISCH_THEN (\ th -> tacREWRITE[th]) `_THEN`
_DISCH_THEN (\ th -> tacREWRITE[th]) `_THEN`
_FIRST_X_ASSUM (tacSUBST1 . ruleSYM) `_THEN`
tacREWRITE [ thmADD_CLAUSES, thmSUC_INJ
, thmEQ_MULT_LCANCEL, thmARITH_EQ
, ruleGSYM thmLEFT_ADD_DISTRIB
, ruleGSYM thmMULT_ASSOC
]
| ecaustin/haskhol-math | src/HaskHOL/Lib/CalcNum/Pre.hs | bsd-2-clause | 9,511 | 0 | 16 | 3,788 | 1,543 | 831 | 712 | -1 | -1 |
module Main where
import System.Console.CmdArgs
import Application.Hoodle.Database.ProgType
import Application.Hoodle.Database.Command
main :: IO ()
main = do
putStrLn "hoodle-db"
param <- cmdArgs mode
commandLineProcess param | wavewave/hoodle-db | exe/hoodle-db.hs | bsd-2-clause | 238 | 0 | 8 | 35 | 61 | 33 | 28 | 9 | 1 |
module NeighboursSpec where
import Neighbours
import Test.Hspec
spec :: Spec
spec = describe "neighbours" $ do
describe "leftCell" $ do
it "the left cell of a left-most cell is always dead (top)" $ do
leftCell 0 0 [[1,0,0],[0,0,0],[0,0,0]] `shouldBe` 0
it "the left cell of a left-most cell is always dead (middle)" $ do
leftCell 1 0 [[0,0,0],[1,0,0],[0,0,0]] `shouldBe` 0
it "the left cell of a left-most cell is always dead (buttom)" $ do
leftCell 2 0 [[0,0,0],[0,0,0],[1,0,0]] `shouldBe` 0
it "has a living left neighbour" $ do
leftCell 0 1 [[1,1,0],[0,0,0],[0,0,0]] `shouldBe` 1
it "has a living left neighbour" $ do
leftCell 1 1 [[0,0,0],[1,1,0],[0,0,0]] `shouldBe` 1
it "has a living left neighbour" $ do
leftCell 2 1 [[0,0,0],[0,0,0],[1,1,0]] `shouldBe` 1
it "has a living left neighbour" $ do
leftCell 0 2 [[0,1,1],[0,0,0],[0,0,0]] `shouldBe` 1
it "has a living left neighbour" $ do
leftCell 1 2 [[0,0,0],[0,1,1],[0,0,0]] `shouldBe` 1
it "has a living left neighbour" $ do
leftCell 2 2 [[0,0,0],[0,0,0],[0,1,1]] `shouldBe` 1
describe "rightCell" $ do
it "the right cell of a right-most cell is always dead (top)" $ do
rightCell 0 2 [[0,0,1],[0,0,0],[0,0,0]] `shouldBe` 0
it "the right cell of a right-most cell is always dead (middle)" $ do
rightCell 1 2 [[0,0,0],[0,0,1],[0,0,0]] `shouldBe` 0
it "the right cell of a right-most cell is always dead (buttom)" $ do
rightCell 2 2 [[0,0,0],[0,0,0],[0,0,1]] `shouldBe` 0
it "has a living right neighbour" $ do
rightCell 0 1 [[0,1,1],[0,0,0],[0,0,0]] `shouldBe` 1
it "has a living right neighbour" $ do
rightCell 1 1 [[0,0,0],[0,1,1],[0,0,0]] `shouldBe` 1
it "has a living right neighbour" $ do
rightCell 2 1 [[0,0,0],[0,0,0],[0,1,1]] `shouldBe` 1
it "has a living right neighbour" $ do
rightCell 0 0 [[1,1,0],[0,0,0],[0,0,0]] `shouldBe` 1
it "has a living right neighbour" $ do
rightCell 1 0 [[0,0,0],[1,1,0],[0,0,0]] `shouldBe` 1
it "has a living right neighbour" $ do
rightCell 2 0 [[0,0,0],[0,0,0],[1,1,0]] `shouldBe` 1
describe "upperCell" $ do
it "the upper cell of a top-most cell is always dead" $ do
upperCell 0 0 [[1,0,0],[0,0,0],[0,0,0]] `shouldBe` 0
it "the upper cell of a top-most cell is always dead" $ do
upperCell 0 1 [[0,1,0],[0,0,0],[0,0,0]] `shouldBe` 0
it "the upper cell of a top-most cell is always dead" $ do
upperCell 0 2 [[0,0,1],[0,0,0],[0,0,0]] `shouldBe` 0
it "has a living upper neighbour" $ do
upperCell 1 0 [[1,0,0],[1,0,0],[0,0,0]] `shouldBe` 1
it "has a living upper neighbour" $ do
upperCell 1 1 [[0,1,0],[0,1,0],[0,0,0]] `shouldBe` 1
it "has a living upper neighbour" $ do
upperCell 1 2 [[0,0,1],[0,0,1],[0,0,0]] `shouldBe` 1
it "has a living upper neighbour" $ do
upperCell 2 0 [[0,0,0],[1,0,0],[1,0,0]] `shouldBe` 1
it "has a living upper neighbour" $ do
upperCell 2 1 [[0,0,0],[0,1,0],[0,1,0]] `shouldBe` 1
it "has a living upper neighbour" $ do
upperCell 2 2 [[0,0,0],[0,0,1],[0,0,1]] `shouldBe` 1
describe "lowerCell" $ do
it "the lower cell of a bottom-most cell is always dead" $ do
lowerCell 2 0 [[0,0,0],[0,0,0],[1,0,0]] `shouldBe` 0
it "the lower cell of a bottom-most cell is always dead" $ do
lowerCell 2 1 [[0,0,0],[0,0,0],[0,1,0]] `shouldBe` 0
it "the lower cell of a bottom-most cell is always dead" $ do
lowerCell 2 2 [[0,0,0],[0,0,0],[0,0,1]] `shouldBe` 0
it "has a living lower neighbour" $ do
lowerCell 0 0 [[1,0,0],[1,0,0],[0,0,0]] `shouldBe` 1
it "has a living lower neighbour" $ do
lowerCell 0 1 [[0,1,0],[0,1,0],[0,0,0]] `shouldBe` 1
it "has a living lower neighbour" $ do
lowerCell 0 2 [[0,0,1],[0,0,1],[0,0,0]] `shouldBe` 1
it "has a living lower neighbour" $ do
lowerCell 1 0 [[0,0,0],[1,0,0],[1,0,0]] `shouldBe` 1
it "has a living lower neighbour" $ do
lowerCell 1 1 [[0,0,0],[0,1,0],[0,1,0]] `shouldBe` 1
it "has a living lower neighbour" $ do
lowerCell 1 2 [[0,0,0],[0,0,1],[0,0,1]] `shouldBe` 1
describe "upperLeftCell" $ do
it "the upper-left cell of a left-most cell is always dead" $ do
upperLeftCell 0 0 [[1,0,0],[0,0,0],[0,0,0]] `shouldBe` 0
it "the upper-left cell of a left-most cell is always dead" $ do
upperLeftCell 1 0 [[0,0,0],[1,0,0],[0,0,0]] `shouldBe` 0
it "the upper-left cell of a left-most cell is always dead" $ do
      upperLeftCell 2 0 [[0,0,0],[0,0,0],[1,0,0]] `shouldBe` 0
it "has a living neighbour" $ do
upperLeftCell 1 1 [[1,0,0],[0,1,0],[0,0,0]] `shouldBe` 1
it "has a living neighbour" $ do
upperLeftCell 2 1 [[0,0,0],[1,0,0],[0,1,0]] `shouldBe` 1
it "has a living neighbour" $ do
upperLeftCell 1 2 [[0,1,0],[0,0,1],[0,0,0]] `shouldBe` 1
it "has a living neighbour" $ do
upperLeftCell 2 2 [[0,0,0],[0,1,0],[0,0,1]] `shouldBe` 1
describe "lowerRightCell" $ do
it "the lower-right cell of a right-most cell is always dead" $ do
lowerRightCell 0 2 [[0,0,1],[0,0,0],[0,0,0]] `shouldBe` 0
it "the lower-right cell of a right-most cell is always dead" $ do
lowerRightCell 1 2 [[0,0,0],[0,0,1],[0,0,0]] `shouldBe` 0
it "the lower-right cell of a right-most cell is always dead" $ do
lowerRightCell 2 2 [[0,0,0],[0,0,0],[0,0,1]] `shouldBe` 0
it "has a living neighbour" $ do
lowerRightCell 0 0 [[1,0,0],[0,1,0],[0,0,0]] `shouldBe` 1
it "has a living neighbour" $ do
lowerRightCell 1 0 [[0,0,0],[1,0,0],[0,1,0]] `shouldBe` 1
it "has a living neighbour" $ do
lowerRightCell 0 1 [[0,1,0],[0,0,1],[0,0,0]] `shouldBe` 1
it "has a living neighbour" $ do
lowerRightCell 1 1 [[0,0,0],[0,1,0],[0,0,1]] `shouldBe` 1
describe "upperRightCell" $ do
it "the upper-right cell of a right-most cell is always dead" $ do
upperRightCell 0 2 [[0,0,1],[0,0,0],[0,0,0]] `shouldBe` 0
it "the upper-right cell of a right-most cell is always dead" $ do
upperRightCell 1 2 [[0,0,0],[0,0,1],[0,0,0]] `shouldBe` 0
it "the upper-right cell of a right-most cell is always dead" $ do
upperRightCell 2 2 [[0,0,0],[0,0,0],[0,0,1]] `shouldBe` 0
it "has a living neighbour" $ do
upperRightCell 1 1 [[0,0,1],[0,1,0],[0,0,0]] `shouldBe` 1
it "has a living neighbour" $ do
upperRightCell 1 0 [[0,1,0],[1,0,0],[0,0,0]] `shouldBe` 1
it "has a living neighbour" $ do
upperRightCell 2 1 [[0,0,0],[0,0,1],[0,1,0]] `shouldBe` 1
it "has a living neighbour" $ do
upperRightCell 2 0 [[0,0,0],[0,1,0],[1,0,0]] `shouldBe` 1
describe "lowerLeftCell" $ do
it "the lower-left cell of a left-most cell is always dead" $ do
lowerLeftCell 0 0 [[1,0,0],[0,0,0],[0,0,0]] `shouldBe` 0
it "the lower-left cell of a left-most cell is always dead" $ do
lowerLeftCell 1 0 [[0,0,0],[1,0,0],[0,0,0]] `shouldBe` 0
it "the lower-left cell of a left-most cell is always dead" $ do
lowerLeftCell 2 0 [[0,0,0],[0,0,0],[1,0,0]] `shouldBe` 0
it "has a living neighbour" $ do
lowerLeftCell 0 1 [[0,1,0],[1,0,0],[0,0,0]] `shouldBe` 1
it "has a living neighbour" $ do
lowerLeftCell 0 2 [[0,0,1],[0,1,0],[0,0,0]] `shouldBe` 1
it "has a living neighbour" $ do
lowerLeftCell 1 1 [[0,0,0],[0,1,0],[1,0,0]] `shouldBe` 1
it "has a living neighbour" $ do
lowerLeftCell 1 2 [[0,0,0],[0,0,1],[0,1,0]] `shouldBe` 1
describe "neighboursOfCell" $ do
it "a cell with two neighbours (left, right)" $ do
neighboursOfCell 0 1 [[1,1,1],[0,0,0],[0,0,0]] `shouldBe` 2
it "a cell with two neighbours (up, down)" $ do
neighboursOfCell 1 1 [[0,1,0],[0,1,0],[0,1,0]] `shouldBe` 2
it "a cell with eight neighbours (all directions)" $ do
neighboursOfCell 1 1 [[1,1,1],[1,1,1],[1,1,1]] `shouldBe` 8
it "left corner cell with three neighbours" $ do
neighboursOfCell 0 0 [[1,1,0],[1,1,0],[0,0,0]] `shouldBe` 3
it "left corner cell with three neighbours" $ do
neighboursOfCell 2 0 [[0,0,0],[1,1,0],[1,1,0]] `shouldBe` 3
it "right corner cell with three neighbours" $ do
neighboursOfCell 0 2 [[0,1,1],[0,1,1],[0,0,0]] `shouldBe` 3
it "right corner cell with three neighbours" $ do
neighboursOfCell 2 2 [[0,0,0],[0,1,1],[0,1,1]] `shouldBe` 3
describe "zipBoardWithIndices" $ do
it "add indices" $ do
zipBoardWithIndices 0 0 [[0,0,0],[0,0,0],[0,0,0]] `shouldBe` [[(0,0,0),(0,1,0),(0,2,0)],
[(1,0,0),(1,1,0),(1,2,0)],
[(2,0,0),(2,1,0),(2,2,0)]]
describe "zipRowWithIndices" $ do
it "add indices" $ do
zipRowWithIndices 1 0 [0,0,0] `shouldBe` [(1,0,0), (1,1,0), (1,2,0)]
describe "neighbours" $ do
it "no living cells -> no neighbours" $ do
neighbours [[0,0,0],[0,0,0],[0,0,0]] `shouldBe` [[0,0,0],[0,0,0],[0,0,0]]
it "neighbours in a row" $ do
neighbours [[1,1,1],[0,0,0],[0,0,0]] `shouldBe` [[1,2,1],[2,3,2],[0,0,0]]
it "neighbours in a col" $ do
neighbours [[0,1,0],[0,1,0],[0,1,0]] `shouldBe` [[2,1,2],[3,2,3],[2,1,2]]
describe "nextGeneration" $ do
it "no living cells -> no living cells" $ do
nextGeneration [[0,0,0],[0,0,0],[0,0,0]] `shouldBe` [[0,0,0],[0,0,0],[0,0,0]]
describe "notInBounds" $ do
it "col too small" $ do
notInBounds 0 (-1) [[0,0,0],[0,0,0],[0,0,0]] `shouldBe` True
it "row too small" $ do
notInBounds (-1) 0 [[0,0,0],[0,0,0],[0,0,0]] `shouldBe` True
it "col and row too small" $ do
notInBounds (-1) (-1) [[0,0,0],[0,0,0],[0,0,0]] `shouldBe` True
it "col too big" $ do
notInBounds 0 3 [[0,0,0],[0,0,0],[0,0,0]] `shouldBe` True
it "row too big" $ do
notInBounds 3 0 [[0,0,0],[0,0,0],[0,0,0]] `shouldBe` True
it "col and row too big" $ do
notInBounds 3 3 [[0,0,0],[0,0,0],[0,0,0]] `shouldBe` True
| supersven/game-of-life | test/NeighboursSpec.hs | bsd-2-clause | 10,080 | 0 | 17 | 2,371 | 5,589 | 3,245 | 2,344 | 187 | 1 |
{-# LANGUAGE DeriveGeneric, FlexibleContexts #-}
-- | The following module is responsible for general types used
-- throughout the system.
module Torrent (
-- * Types
InfoHash
, PeerId
, AnnounceURL
, TorrentState(..)
, TorrentInfo(..)
, PieceNum
, PieceSize
, PieceMap
, PiecesDoneMap
, PieceInfo(..)
, BlockSize
, Block(..)
, Capabilities(..)
-- * Interface
, bytesLeft
, defaultBlockSize
, defaultOptimisticSlots
, defaultPort
, mkPeerId
, mkTorrentInfo
)
where
import Control.Applicative
import Control.DeepSeq
import Data.Array
import Data.List
import Data.Maybe (fromMaybe)
import qualified Data.ByteString as B
import qualified Data.Map as M
import Data.Word
import GHC.Generics
import Numeric
import System.Random
import System.Random.Shuffle
import Test.QuickCheck
import Protocol.BCode
import Digest
import Version
-- | The type of Infohashes as used in torrents. These are identifiers
-- of torrents
type InfoHash = Digest
-- | The peerId is the ID of a client. It is used to identify clients
-- from each other
type PeerId = String
-- | The internal type of Announce URLs
type AnnounceURL = B.ByteString
-- | Internal type for a torrent. It identifies a torrent in various places of the system.
data TorrentInfo = TorrentInfo {
infoHash :: InfoHash,
pieceCount :: Int, -- Number of pieces in torrent
announceURLs :: [[AnnounceURL]]
} deriving Show
data TorrentState = Seeding | Leeching
deriving (Show, Generic)
instance NFData TorrentState
----------------------------------------------------------------------
-- Capabilities
data Capabilities = Fast | Extended
deriving (Show, Eq)
-- PIECES
----------------------------------------------------------------------
type PieceNum = Int
type PieceSize = Int
data PieceInfo = PieceInfo {
offset :: !Integer, -- ^ Offset of the piece, might be greater than Int
len :: !Integer, -- ^ Length of piece; usually a small value
      digest :: !B.ByteString -- ^ Digest of piece; taken from the .torrent file
} deriving (Eq, Show)
type PieceMap = Array PieceNum PieceInfo
-- | The PiecesDoneMap is a map which is true if we have the piece and false otherwise
type PiecesDoneMap = M.Map PieceNum Bool
-- | Return the amount of bytes left on a torrent given what pieces are done and the
-- map of the shape of the torrent in question.
bytesLeft :: PiecesDoneMap -> PieceMap -> Integer
bytesLeft done pm =
foldl' (\accu (k,v) ->
case M.lookup k done of
Just False -> (len v) + accu
_ -> accu) 0 $ Data.Array.assocs pm
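-- For example (illustrative): with a piece map whose piece 1 has length 16384,
-- bytesLeft (M.fromList [(0, True), (1, False)]) pieceMap == 16384.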
-- BLOCKS
----------------------------------------------------------------------
type BlockSize = Int
data Block = Block { blockOffset :: !Int -- ^ offset of this block within the piece
, blockSize :: !BlockSize -- ^ size of this block within the piece
} deriving (Eq, Ord, Show)
instance NFData Block where
rnf (Block bo sz) = rnf bo `seq` rnf sz `seq` ()
instance Arbitrary Block where
arbitrary = Block <$> pos <*> pos
where pos = choose (0, 4294967296 - 1)
defaultBlockSize :: BlockSize
defaultBlockSize = 16384 -- Bytes
-- | Default number of optimistic slots
defaultOptimisticSlots :: Int
defaultOptimisticSlots = 2
-- | Default port to communicate on
defaultPort :: Word16
defaultPort = 1579
-- | Convert a BCode block into its corresponding TorrentInfo block, perhaps
-- failing in the process.
mkTorrentInfo :: BCode -> IO TorrentInfo
mkTorrentInfo bc = do
(ann, np) <- case queryInfo bc of Nothing -> fail "Could not create torrent info"
Just x -> return x
ih <- hashInfoDict bc
let alist = fromMaybe [[ann]] $ announceList bc
    -- BEP012 says that lists of URLs inside each tier must be shuffled
gen <- newStdGen
let alist' = map (\xs -> shuffle' xs (length xs) gen) alist
return TorrentInfo { infoHash = ih, pieceCount = np, announceURLs = alist'}
where
queryInfo b =
do ann <- announce b
np <- numberPieces b
return (ann, np)
-- | Create a new PeerId for this client
mkPeerId :: StdGen -> PeerId
mkPeerId gen = header ++ take (20 - length header) ranString
where randomList :: Int -> StdGen -> [Int]
randomList n = take n . unfoldr (Just . random)
rs = randomList 10 gen
ranString = concatMap (\i -> showHex (abs i) "") rs
header = "-CT" ++ protoVersion ++ "-"
| abhin4v/combinatorrent | src/Torrent.hs | bsd-2-clause | 4,565 | 0 | 15 | 1,103 | 972 | 553 | 419 | 110 | 2 |
{-# LANGUAGE PatternSynonyms #-}
--------------------------------------------------------------------------------
-- |
-- Module : Graphics.GL.APPLE.TextureRange
-- Copyright : (c) Sven Panne 2019
-- License : BSD3
--
-- Maintainer : Sven Panne <[email protected]>
-- Stability : stable
-- Portability : portable
--
--------------------------------------------------------------------------------
module Graphics.GL.APPLE.TextureRange (
-- * Extension Support
glGetAPPLETextureRange,
gl_APPLE_texture_range,
-- * Enums
pattern GL_STORAGE_CACHED_APPLE,
pattern GL_STORAGE_PRIVATE_APPLE,
pattern GL_STORAGE_SHARED_APPLE,
pattern GL_TEXTURE_RANGE_LENGTH_APPLE,
pattern GL_TEXTURE_RANGE_POINTER_APPLE,
pattern GL_TEXTURE_STORAGE_HINT_APPLE,
-- * Functions
glGetTexParameterPointervAPPLE,
glTextureRangeAPPLE
) where
import Graphics.GL.ExtensionPredicates
import Graphics.GL.Tokens
import Graphics.GL.Functions
| haskell-opengl/OpenGLRaw | src/Graphics/GL/APPLE/TextureRange.hs | bsd-3-clause | 956 | 0 | 5 | 124 | 85 | 60 | 25 | 15 | 0 |
import Data.Binary (decode)
import Data.Binary.Put (putWord16host, runPut)
import Data.Word (Word8)
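-- Detect the host byte order: write 42 as a host-order Word16 (two bytes) and
-- decode only the first byte back; on a little-endian machine that byte is 42.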
littleEndian :: Bool
littleEndian = (decode . runPut $ putWord16host 42 :: Word8) == 42
main :: IO ()
main | littleEndian = putStrLn "LittleEndian"
| otherwise = putStrLn "BigEndian"
| nikai3d/ce-challenges | moderate/endian.hs | bsd-3-clause | 293 | 0 | 8 | 50 | 102 | 54 | 48 | 8 | 1 |
module Monad.Free where
import qualified Prelude as P
import Data.Constraint
import Data.Proxy
import Data.Tagged
import Category
import Functor
import Coproduct
import Monad
import NatTr
import NatTr.Coproduct
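-- The free monad over an endofunctor f of (->): a tree whose internal nodes
-- are f-shaped layers (Free) and whose leaves are plain values (Pure).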
data Free f a where
Free :: FMap f (Free f a) -> Free f a
Pure :: EndoFunctorOf f (->) => a -> Free f a
freeT :: EndoFunctorOf f (->) => NatTr (->) (->) (f :.: Ftag (Free f)) (Ftag (Free f))
freeT = NatTr (Tagged Free)
pureT :: EndoFunctorOf f (->) => NatTr (->) (->) Id (Ftag (Free f))
pureT = NatTr (Tagged Pure)
unfreeT :: forall f. EndoFunctorOf f (->) => NatTr (->) (->) (Ftag (Free f)) ((f :.: Ftag (Free f)) :+: Id)
unfreeT = NatTr (Tagged t) where
t :: forall a. Free f a -> FMap ((f :.: Ftag (Free f)) :+: Id) a
t (Free f) = appNat inj1 f
t (Pure a) = appNat inj2 a
instance EndoFunctorOf f (->) => P.Functor (Free f) where
fmap t = go where
go (Free f) = Free (proxy morphMap (Proxy :: Proxy f) go f)
go (Pure a) = Pure (t a)
instance EndoFunctorOf f (->) => P.Monad (Free f) where
f >>= t = go f where
go (Free x) = Free (proxy morphMap (Proxy :: Proxy f) go x)
go (Pure a) = t a
return = Pure
data FreeM = FreeM
instance Functor FreeM ('KProxy :: KProxy (* -> *)) where
type Domain FreeM = NatTr (->) (->)
type Codomain FreeM = MonadMorph (->)
type FMap FreeM f = Ftag (Free f)
morphMap = Tagged (\t -> case observeObjects t of Dict -> f t) where
f t = MonadMorph go where
go = coproduct (freeT . compNat t go) pureT . unfreeT
| ian-mi/extended-categories | Monad/Free.hs | bsd-3-clause | 1,560 | 1 | 16 | 399 | 778 | 402 | 376 | -1 | -1 |
{-# LANGUAGE NoMonomorphismRestriction #-}
module Main (main) where
import Test.QuickCheck
import Test.Framework (Test, defaultMain, testGroup)
import Test.Framework.Providers.QuickCheck2 (testProperty)
import Test.XBeeTestSupport
import Data.Word
import Data.ByteString (ByteString,pack,unpack)
import qualified Data.ByteString as BS
import Data.Serialize
import System.Hardware.XBee.Command
import Control.Monad
ser :: Serialize s => s -> [Word8]
ser = BS.unpack . runPut . put
serParseTest s = runGet get (runPut $ put s) == Right s
parse :: Serialize s => [Word8] -> Either String s
parse = runGet get . BS.pack
-- FrameId
frameIdLoopsAroundAfter255 = nextFrame (frameForId 255) == frameId
frameIdSerializeParse :: Word8 -> Bool
frameIdSerializeParse v = serParseTest (frameForId v)
frameIdParseWord8 w = runGet get (BS.singleton w) == Right (frameForId w)
-- Command name
commandNameSerializeParse a b = serParseTest (commandName a b)
commandNameExampleDH = parse [0x44, 0x48] == Right (commandName 'D' 'H')
-- Modem status
modemStatusSerialize =
ser HardwareReset == [0]
&& ser WatchdogTimerReset == [1]
&& ser Associated == [2]
&& ser Disassociated == [3]
&& ser SyncLost == [4]
&& ser CoordinatorRealignment == [5]
&& ser CoordinatorStarted == [6]
modemStatusSerializeParse :: ModemStatus -> Bool
modemStatusSerializeParse = serParseTest
-- Command status
commandStatusSerialize =
ser CmdOK == [0]
&& ser CmdError == [1]
&& ser CmdInvalidCommand == [2]
&& ser CmdInvalidParameter == [3]
commandStatusSerializeParse :: CommandStatus -> Bool
commandStatusSerializeParse = serParseTest
-- Address
address64SerializeParse :: Address64 -> Bool
address64SerializeParse = serParseTest
address16SerializeParse :: Address16 -> Bool
address16SerializeParse = serParseTest
-- Transmit status
transmitStatusSerialize =
ser TransmitSuccess == [0]
&& ser TransmitNoAck == [1]
&& ser TransmitCcaFailure == [2]
&& ser TransmitPurged == [3]
transmitStatusSerializeParse :: TransmitStatus -> Bool
transmitStatusSerializeParse = serParseTest
-- Signal Strength
signalStrengthSerializeParse :: SignalStrength -> Bool
signalStrengthSerializeParse = serParseTest
signalStrengthExample = parse [0x28] == Right (fromDbm (-40))
-- Command In
modemStatusUpdateSerializeParse s = serParseTest (ModemStatusUpdate s)
modemStatusUpdateParseExample = parse [0x8A, 0x01] == Right (ModemStatusUpdate WatchdogTimerReset)
atCommandResponseSerializeParse f cmd st val = serParseTest (ATCommandResponse f cmd st val)
atCommandResponseExample = parse [0x88, 0x52, 0x4D, 0x59, 0x00, 0x23, 0x12] ==
Right (ATCommandResponse (frameForId 0x52) (commandName 'M' 'Y') CmdOK $ pack [0x23, 0x12])
remoteAtCommandResponseSerializeParse f a64 a16 cmd st val = serParseTest $
RemoteATCommandResponse f a64 a16 cmd st val
remoteAtCommandResponseExample = parse [0x97, 0x52,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xFF,
0xFF, 0xFE,
0x4D, 0x59, 0x00, 0x23, 0x12] ==
Right (RemoteATCommandResponse (frameForId 0x52) broadcastAddress disabledAddress (commandName 'M' 'Y') CmdOK $ pack [0x23, 0x12])
transmitResponseSerializeParse f s = serParseTest (TransmitResponse f s)
transmitResponseExample = parse [0x89, 0x10, 0x00] ==
Right (TransmitResponse (frameForId 0x10) TransmitSuccess)
receive64SerializeParse from ss ack bc d = serParseTest $ Receive64 from ss ack bc d
receive64Example :: ByteString -> Bool
receive64Example d = parse ([0x80,
0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08,
0x28, 0x00] ++ unpack d) ==
Right (Receive64 (Address64 0x0102030405060708) (fromDbm (-40)) False False d)
receive16SerializeParse from ss ack bc d = serParseTest $ Receive16 from ss ack bc d
receive16Example :: ByteString -> Bool
receive16Example d = parse ([0x81,
0x01, 0x02,
0x28, 0x00] ++ unpack d) ==
Right (Receive16 (Address16 0x0102) (fromDbm (-40)) False False d)
-- Command Out
someData = pack [5..8]
atCommandSerializeParse f cmd d = serParseTest (ATCommand f cmd d)
atCommandExample = parse [0x08, 0x01, 0x44, 0x4C, 0x05, 0x06, 0x07, 0x08] ==
Right (ATCommand (frameForId 1) (commandName 'D' 'L') someData)
atQueueCommandSerializeParse f cmd d = serParseTest (ATQueueCommand f cmd d)
atQueueCommandExample = parse [0x09, 0x01, 0x44, 0x4C, 0x05, 0x06, 0x07, 0x08] ==
Right (ATQueueCommand (frameForId 1) (commandName 'D' 'L') someData)
remoteAtCommand64SerializeParse f adr b cmd d = serParseTest (RemoteATCommand64 f adr b cmd d)
remoteAtCommand64Example = parse [0x17, 0x01,
0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08,
0xFF, 0xFE,
0x02,
0x44, 0x4C,
0x05, 0x06, 0x07, 0x08] ==
Right (RemoteATCommand64 (frameForId 1) (Address64 0x0102030405060708) True (commandName 'D' 'L') someData)
remoteAtCommand16SerializeParse f adr b cmd d = serParseTest (RemoteATCommand16 f adr b cmd d)
remoteAtCommand16Example = parse [0x17, 0x01,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xFF,
0x01, 0x02,
0x02,
0x44, 0x4C,
0x05, 0x06, 0x07, 0x08] ==
Right (RemoteATCommand16 (frameForId 1) (Address16 0x0102) True (commandName 'D' 'L') someData)
transmit64SerializeParse f adr da bc d = serParseTest (Transmit64 f adr da bc d)
transmit64Example = parse [0x00, 0x02,
0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08,
0x00,
0x09, 0x08, 0x07] ==
Right (Transmit64 (frameForId 2) (Address64 0x0102030405060708) False False $ pack [0x09, 0x08, 0x07])
transmit16SerializeParse f adr da bc d = serParseTest (Transmit16 f adr da bc d)
transmit16Example = parse [0x01, 0x03,
0x01, 0x02,
0x00,
0x09, 0x08, 0x07] ==
Right (Transmit16 (frameForId 3) (Address16 0x0102) False False $ pack [0x09, 0x08, 0x07])
--Main
main = defaultMain tests
tests :: [Test]
tests = [
testGroup "FrameId" [
testProperty "loops around after 255" frameIdLoopsAroundAfter255,
testProperty "serialize and parse yield original value" frameIdSerializeParse,
testProperty "can parse single Word8" frameIdParseWord8
],
testGroup "ModemStatus" [
testProperty "values are correctly serialized" modemStatusSerialize,
testProperty "serialize and then parse yields original value" modemStatusSerializeParse
],
testGroup "CommandName" [
testProperty "serialize and then parse yields original value" commandNameSerializeParse,
testProperty "value DH is parsed correctly" commandNameExampleDH
],
testGroup "CommandStatus" [
testProperty "values are correctly serialized" commandStatusSerialize,
testProperty "serialize and then parse yields original value" commandStatusSerializeParse
],
testGroup "SignalStrength" [
testProperty "serialize and then parse yields original value" signalStrengthSerializeParse,
testProperty "value -40 (=0x28) is parsed correctly" signalStrengthExample
],
testGroup "TransmitStatus" [
testProperty "values are correctly serialized" transmitStatusSerialize,
testProperty "serialize and then parse yields original value" transmitStatusSerializeParse
],
testGroup "Address64" [
testProperty "serialize and then parse yields original value" address64SerializeParse
],
testGroup "Address16" [
testProperty "serialize and then parse yields original value" address16SerializeParse
],
testGroup "CommandIn" [
testProperty "ModemStatusUpdate serialize & parse yields original" modemStatusUpdateSerializeParse,
testProperty "ModemStatusUpdate example works" modemStatusUpdateParseExample,
testProperty "ATCommandResponse serialize & parse yields original" atCommandResponseSerializeParse,
testProperty "ATCommandResponse example works" atCommandResponseExample,
testProperty "RemoteATCommandResponse serialize & parse yields original" remoteAtCommandResponseSerializeParse,
testProperty "RemoteATCommandResponse example works" remoteAtCommandResponseExample,
testProperty "TransmitResponse serialize & parse yields original" transmitResponseSerializeParse,
testProperty "TransmitResponse example works" transmitResponseExample,
testProperty "Receive64 serialize & parse yields original" receive64SerializeParse,
testProperty "Receive64 example works" receive64Example,
testProperty "Receive16 serialize & parse yields original" receive16SerializeParse,
testProperty "Receive16 example works" receive16Example
],
testGroup "CommandOut" [
testProperty "ATCommand serialize & parse yields original" atCommandSerializeParse,
testProperty "ATCommand example works" atCommandExample,
testProperty "ATQueueCommand serialize & parse yields original" atQueueCommandSerializeParse,
testProperty "ATQueueCommand example works" atQueueCommandExample,
testProperty "RemoteATCommand64 serialize & parse yields original" remoteAtCommand64SerializeParse,
testProperty "RemoteATCommand64 example works" remoteAtCommand64Example,
testProperty "RemoteATCommand16 serialize & parse yields original" remoteAtCommand16SerializeParse,
testProperty "RemoteATCommand16 example works" remoteAtCommand16Example,
testProperty "Transmit64 serialize & parse yields original" transmit64SerializeParse,
testProperty "Transmit64 example works" transmit64Example,
testProperty "Transmit16 serialize & parse yields original" transmit16SerializeParse,
testProperty "Transmit16 example works" transmit16Example
]]
| msiegenthaler/haskell-xbee | test/CommandTest.hs | bsd-3-clause | 9,904 | 0 | 18 | 1,973 | 2,375 | 1,268 | 1,107 | 168 | 1 |
-- Shake Generator for wiki pages
{-# LANGUAGE CPP #-}
module Main where
import Prelude hiding ((*>))
import Control.Concurrent
import Data.Char
import qualified Data.List as L
import Development.Shake hiding (doesFileExist)
import qualified Development.Shake as Shake
import Development.Shake.FilePath
import System.Directory
import System.IO
import System.Process
import Web.Browser
-- To test: ghci wiki-suite/Draw_Canvas.hs -idist/build/autogen/:.:wiki-suite
import qualified Arc
import qualified Bezier_Curve
import qualified Bounce
import qualified Circle
import qualified Clipping_Region
import qualified Color_Fill
import qualified Color_Square
import qualified Custom_Shape
import qualified Custom_Transform
import qualified Draw_Canvas
import qualified Draw_Device
import qualified Draw_Image
import qualified Favicon
import qualified Font_Size_and_Style
import qualified Get_Image_Data_URL
import qualified Global_Alpha
import qualified Global_Composite_Operations
import qualified Grayscale
import qualified Image_Crop
import qualified Image_Loader
import qualified Image_Size
import qualified Is_Point_In_Path
import qualified Key_Read
import qualified Line
import qualified Line_Cap
import qualified Line_Color
import qualified Line_Join
import qualified Line_Width
import qualified Linear_Gradient
import qualified Load_Image_Data_URL
import qualified Load_Image_Data_URL_2
import qualified Miter_Limit
import qualified Path
import qualified Pattern
import qualified Quadratic_Curve
import qualified Radial_Gradient
import qualified Rectangle
import qualified Red_Line
import qualified Rotate_Transform
import qualified Rotating_Square
import qualified Rounded_Corners
import qualified Scale_Transform
import qualified Semicircle
import qualified Shadow
import qualified Square
import qualified Text_Align
import qualified Text_Baseline
import qualified Text_Color
import qualified Text_Metrics
import qualified Text_Stroke
import qualified Text_Wrap
import qualified Tic_Tac_Toe
import qualified Translate_Transform
import System.Environment
main :: IO ()
main = do
args <- getArgs
main2 args
main2 :: [String] -> IO ()
main2 ["Arc"] = Arc.main
main2 ["Bezier_Curve"] = Bezier_Curve.main
main2 ["Bounce"] = Bounce.main
main2 ["Circle"] = Circle.main
main2 ["Clipping_Region"] = Clipping_Region.main
main2 ["Color_Fill"] = Color_Fill.main
main2 ["Color_Square"] = Color_Square.main
main2 ["Custom_Shape"] = Custom_Shape.main
main2 ["Draw_Canvas"] = Draw_Canvas.main
main2 ["Draw_Device"] = Draw_Device.main
main2 ["Draw_Image"] = Draw_Image.main
main2 ["Favicon"] = Favicon.main
main2 ["Font_Size_and_Style"] = Font_Size_and_Style.main
main2 ["Get_Image_Data_URL"] = Get_Image_Data_URL.main
main2 ["Global_Alpha"] = Global_Alpha.main
main2 ["Global_Composite_Operations"] = Global_Composite_Operations.main
main2 ["Grayscale"] = Grayscale.main
main2 ["Image_Crop"] = Image_Crop.main
main2 ["Image_Loader"] = Image_Loader.main
main2 ["Miter_Limit"] = Miter_Limit.main
main2 ["Image_Size"] = Image_Size.main
main2 ["Is_Point_In_Path"] = Is_Point_In_Path.main
main2 ["Key_Read"] = Key_Read.main
main2 ["Line"] = Line.main
main2 ["Line_Cap"] = Line_Cap.main
main2 ["Line_Color"] = Line_Color.main
main2 ["Line_Join"] = Line_Join.main
main2 ["Line_Width"] = Line_Width.main
main2 ["Linear_Gradient"] = Linear_Gradient.main
main2 ["Load_Image_Data_URL"] = Load_Image_Data_URL.main
main2 ["Load_Image_Data_URL_2"] = Load_Image_Data_URL_2.main
main2 ["Path"] = Path.main
main2 ["Pattern"] = Pattern.main
main2 ["Quadratic_Curve"] = Quadratic_Curve.main
main2 ["Radial_Gradient"] = Radial_Gradient.main
main2 ["Rectangle"] = Rectangle.main
main2 ["Red_Line"] = Red_Line.main
main2 ["Rotating_Square"] = Rotating_Square.main
main2 ["Rounded_Corners"] = Rounded_Corners.main
main2 ["Semicircle"] = Semicircle.main
main2 ["Shadow"] = Shadow.main
main2 ["Square"] = Square.main
main2 ["Text_Align"] = Text_Align.main
main2 ["Text_Baseline"] = Text_Baseline.main
main2 ["Text_Color"] = Text_Color.main
main2 ["Text_Metrics"] = Text_Metrics.main
main2 ["Text_Stroke"] = Text_Stroke.main
main2 ["Text_Wrap"] = Text_Wrap.main
main2 ["Tic_Tac_Toe"] = Tic_Tac_Toe.main
main2 ["Translate_Transform"] = Translate_Transform.main
main2 ["Scale_Transform"] = Scale_Transform.main
main2 ["Rotate_Transform"] = Rotate_Transform.main
main2 ["Custom_Transform"] = Custom_Transform.main
main2 ["clean"] = do
_ <- createProcess $ shell "rm blank-canvas.wiki/images/*.png blank-canvas.wiki/images/*.gif blank-canvas.wiki/examples/*.hs"
return ()
main2 args = shakeArgs shakeOptions $ do
if null args then do
want ["blank-canvas.wiki/images/" ++ nm ++ ".gif" | nm <- movies ]
want ["blank-canvas.wiki/images/" ++ nm ++ ".png" | nm <- examples ++ tutorial]
want ["blank-canvas.wiki/examples/" ++ nm ++ ".hs" | nm <- movies ++ examples ++ tutorial]
want ["blank-canvas.wiki/" ++ toMinus nm ++ ".md" | nm <- movies ++ examples ++ tutorial]
else return ()
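  -- Rule for the wiki images: (re)run the named example via
  -- "stack exec wiki-suite <name>" and point a browser at the local
  -- blank-canvas page so the example can render (Text_Wrap is rendered a
  -- second time in hd mode).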
["blank-canvas.wiki/images/*.png", "blank-canvas.wiki/images/*.gif"] |%> \out -> do
let nm = takeBaseName out
liftIO $ print (out,nm)
liftIO $ removeFiles ("blank-canvas.wiki/tmp") ["*.png"]
need [ "blank-canvas.wiki/" ++ toMinus nm ++ ".md" ]
let haskell_file = nm ++ ".hs"
let haskell_path = wiki_suite ++ "/" ++ haskell_file
need [ haskell_path, "blank-canvas.wiki/examples/" ++ haskell_file ]
liftIO $ print nm
txt <- Shake.readFile' $ haskell_path
let (w,h) = head $
[ case words ln of
[_,_,_,n] -> read n
_ -> (512,384)
| ln <- lines txt
, "import" `L.isPrefixOf` ln && "Wiki" `L.isInfixOf` ln
] ++ [(512,384) :: (Int, Int)]
sequence_ [
do (_,_,_,ghc) <- liftIO $
createProcess (proc "stack" ["exec","wiki-suite",nm])
-- wait a second, for things to start
liftIO $ threadDelay (1 * 1000 * 1000)
_ <-liftIO $ openBrowser $ "http://localhost:3000/?height=" ++ show (h) ++ "&width=" ++ show (w) ++ hd
-- wait for haskell program to stop
liftIO $ waitForProcess ghc | hd <- [("")] ++ if nm == "Text_Wrap" then [("&hd")] else [] ]
return ()
"blank-canvas.wiki/examples/*.hs" %> \ out -> do
liftIO $ print "*hs"
liftIO $ print out
let haskell_file = takeFileName out
liftIO $ print "before read file"
txt <- Shake.readFile' $ wiki_suite ++ "/" ++ haskell_file
liftIO $ print "after read file"
let new = reverse
$ dropWhile (all isSpace)
$ reverse
[ if "module" `L.isPrefixOf` ln
then "module Main where"
else ln
| ln <- lines txt
, not ("wiki $" `L.isInfixOf` ln) -- remove the wiki stuff
, not ("import" `L.isPrefixOf` ln && "Wiki" `L.isInfixOf` ln)
]
writeFileChanged out (unlines $ map (untabify 0) new)
"blank-canvas.wiki/*.md" %> \ out -> do
b <- Shake.doesFileExist out
-- liftIO $ print b
txts <- liftIO $ if b then do
h <- openFile out ReadMode
let loop = do
b' <- hIsEOF h
if b'
then return []
else do
ln <- hGetLine h
lns <- loop
return (ln : lns)
txts <- loop
hClose h
return txts
else return []
-- liftIO $ print txts
let p = not . (code_header `L.isPrefixOf`)
let textToKeep = takeWhile p txts
let haskell_file = map (\ c -> if c == '-' then '_' else c)
$ replaceExtension (takeFileName out) ".hs"
liftIO $ print haskell_file
txt <- Shake.readFile' $ "blank-canvas.wiki/examples/" ++ haskell_file
let new = unlines $
[ t | t <- textToKeep
] ++
[code_header] ++
lines txt ++
[code_footer]
-- liftIO $ putStrLn new
writeFileChanged out new
-- to clean: rm images/*png images/*gif examples/*hs
-- */
movies :: [String]
movies = ["Rotating_Square","Tic_Tac_Toe","Bounce","Key_Read","Square"]
examples :: [String]
examples = ["Red_Line","Favicon"]
++ ["Color_Square"]
tutorial :: [String]
tutorial = ["Line", "Line_Width", "Line_Color", "Line_Cap","Miter_Limit"]
++ ["Arc","Quadratic_Curve","Bezier_Curve"]
++ ["Path","Line_Join","Rounded_Corners","Is_Point_In_Path"]
++ ["Custom_Shape","Rectangle","Circle","Semicircle"]
++ ["Color_Fill","Linear_Gradient","Radial_Gradient","Pattern"]
++ ["Draw_Image","Image_Size","Image_Crop","Image_Loader", "Draw_Canvas", "Draw_Device"]
++ ["Font_Size_and_Style","Text_Color","Text_Stroke","Text_Align","Text_Baseline","Text_Metrics","Text_Wrap"]
++ ["Translate_Transform","Scale_Transform","Rotate_Transform","Custom_Transform"]
++ ["Shadow","Global_Alpha","Clipping_Region","Global_Composite_Operations"]
++ ["Grayscale","Get_Image_Data_URL","Load_Image_Data_URL"]
++ ["Load_Image_Data_URL_2"]
wiki_dir :: String
wiki_dir = "."
toMinus :: String -> String
toMinus = map (\ c -> if c == '_' then '-' else c)
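-- Expand tabs to spaces using 8-column tab stops; e.g. (illustrative)
-- untabify 0 "a\tb" == "a" ++ replicate 7 ' ' ++ "b".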
untabify :: Int -> String -> String
untabify _ [] = []
untabify n (c:cs) | c == '\t' = let t = 8 - n `mod` 8 in take t (cycle " ") ++ untabify (n + t) cs
| otherwise = c : untabify (n + 1) cs
code_header :: String
code_header = "````Haskell"
code_footer :: String
code_footer = "````"
wiki_suite :: String
wiki_suite = "wiki-suite"
| ku-fpg/blank-canvas | wiki-suite/Main.hs | bsd-3-clause | 10,241 | 38 | 25 | 2,590 | 2,622 | 1,443 | 1,179 | 236 | 8 |
-- Copyright 2019 Google LLC
--
-- Use of this source code is governed by a BSD-style
-- license that can be found in the LICENSE file or at
-- https://developers.google.com/open-source/licenses/bsd
{-# LANGUAGE CPP #-}
module GHC.SourceGen.Type.Internal where
import GHC.Hs (GhcPs)
#if MIN_VERSION_ghc(9,0,0)
import GHC.Hs.Type as Types
import GHC.Types.SrcLoc (unLoc)
#else
import GHC.Hs.Type as Types
import SrcLoc (unLoc)
#endif
import GHC.SourceGen.Syntax.Internal
mkQTyVars :: [HsTyVarBndr'] -> LHsQTyVars'
mkQTyVars vars = withPlaceHolder
$ noExt (withPlaceHolder HsQTvs)
$ map mkLocated vars
sigType :: HsType' -> LHsSigType'
#if MIN_VERSION_ghc(9,2,0)
sigType = mkLocated . noExt HsSig (noExt HsOuterImplicit) . mkLocated
#else
sigType = withPlaceHolder . noExt (withPlaceHolder Types.HsIB) . builtLoc
#endif
-- TODO: GHC >= 8.6 provides parenthesizeHsType. For consistency with
-- older versions, we're implementing our own parenthesis-wrapping.
-- Once we stop supporting GHC-8.4, we can switch to that implementation.
parenthesizeTypeForApp, parenthesizeTypeForOp, parenthesizeTypeForFun
:: LHsType GhcPs -> LHsType GhcPs
parenthesizeTypeForApp t
| needsParenForApp (unLoc t) = parTy t
| otherwise = t
parenthesizeTypeForOp t
| needsParenForOp (unLoc t) = parTy t
| otherwise = t
parenthesizeTypeForFun t
| needsParenForFun (unLoc t) = parTy t
| otherwise = t
needsParenForFun, needsParenForOp, needsParenForApp
:: HsType' -> Bool
needsParenForFun t = case t of
HsForAllTy{} -> True
HsQualTy{} -> True
HsFunTy{} -> True
_ -> False
needsParenForOp t = case t of
HsOpTy{} -> True
_ -> needsParenForFun t
needsParenForApp t = case t of
HsAppTy {} -> True
_ -> needsParenForOp t
parTy :: LHsType GhcPs -> LHsType GhcPs
parTy = mkLocated . withEpAnnNotUsed HsParTy
sigWcType :: HsType' -> LHsSigWcType'
sigWcType = noExt (withPlaceHolder Types.HsWC) . sigType
wcType :: HsType' -> LHsWcType'
wcType = noExt (withPlaceHolder Types.HsWC) . mkLocated
patSigType :: HsType' -> HsPatSigType'
#if MIN_VERSION_ghc(9,2,0)
patSigType = withEpAnnNotUsed mkHsPatSigType . mkLocated
#elif MIN_VERSION_ghc(9,0,0)
patSigType = mkHsPatSigType . builtLoc
#else
patSigType = sigWcType
#endif
| google/ghc-source-gen | src/GHC/SourceGen/Type/Internal.hs | bsd-3-clause | 2,294 | 0 | 10 | 410 | 490 | 259 | 231 | 44 | 4 |
module Language.Haskell.GhcMod.Boot where
import Control.Applicative ((<$>))
import CoreMonad (liftIO)
import Language.Haskell.GhcMod.Browse
import Language.Haskell.GhcMod.Flag
import Language.Haskell.GhcMod.Lang
import Language.Haskell.GhcMod.List
import Language.Haskell.GhcMod.Monad
import Language.Haskell.GhcMod.Types
-- | Printing necessary information for front-end booting.
bootInfo :: Options -> IO String
bootInfo opt = runGhcMod opt $ boot
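-- Usage sketch (illustrative): @bootInfo opt >>= putStr@, for some
-- @opt :: Options@, prints the module, language and flag lists followed by
-- the browse output of the pre-browsed modules.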
-- | Printing necessary information for front-end booting.
boot :: GhcMod String
boot = do
opt <- options
mods <- modules
langs <- liftIO $ listLanguages opt
flags <- liftIO $ listFlags opt
pre <- concat <$> mapM browse preBrowsedModules
return $ mods ++ langs ++ flags ++ pre
preBrowsedModules :: [String]
preBrowsedModules = [
"Prelude"
, "Control.Applicative"
, "Control.Exception"
, "Control.Monad"
, "Data.Char"
, "Data.List"
, "Data.Maybe"
, "System.IO"
]
| darthdeus/ghc-mod-ng | Language/Haskell/GhcMod/Boot.hs | bsd-3-clause | 974 | 0 | 10 | 167 | 229 | 131 | 98 | 29 | 1 |
{-# LANGUAGE TypeFamilies, TypeOperators #-}
-------------------------------------------------------------------------------------------
-- |
-- Module : Control.Category.Object
-- Copyright: 2010-2012 Edward Kmett
-- License : BSD
--
-- Maintainer : Edward Kmett <[email protected]>
-- Stability : experimental
-- Portability: non-portable (either class-associated types or MPTCs with fundeps)
--
-- This module declares the 'HasTerminalObject' and 'HasInitialObject' classes.
--
-- These are both special cases of the idea of a (co)limit.
-------------------------------------------------------------------------------------------
module Control.Categorical.Object
( HasTerminalObject(..)
, HasInitialObject(..)
) where
import Control.Category
-- | The @Category (~>)@ has a terminal object @Terminal (~>)@ such that for all objects @a@ in @(~>)@,
-- there exists a unique morphism from @a@ to @Terminal (~>)@.
class Category k => HasTerminalObject k where
type Terminal k :: *
terminate :: a `k` Terminal k
-- | The @Category (~>)@ has an initial (coterminal) object @Initial (~>)@ such that for all objects
-- @a@ in @(~>)@, there exists a unique morphism from @Initial (~>) @ to @a@.
class Category k => HasInitialObject k where
type Initial k :: *
initiate :: Initial k `k` a
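-- Illustrative instance sketches for the ordinary function category (not part
-- of this module; Void and absurd come from Data.Void):
--
-- > instance HasTerminalObject (->) where
-- >   type Terminal (->) = ()
-- >   terminate = const ()
-- >
-- > instance HasInitialObject (->) where
-- >   type Initial (->) = Void
-- >   initiate = absurd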
| ekmett/categories | old/src/Control/Categorical/Object.hs | bsd-3-clause | 1,318 | 2 | 8 | 211 | 120 | 77 | 43 | 11 | 0 |
import Test.Cabal.Prelude
main = cabalTest $ do
expectBroken 4477 $ do
cabal' "new-run" ["foo"] >>= assertOutputContains "Hello World"
| mydaum/cabal | cabal-testsuite/PackageTests/NewBuild/T4477/cabal.test.hs | bsd-3-clause | 147 | 0 | 13 | 30 | 44 | 21 | 23 | 4 | 1 |
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE ImplicitParams #-}
module MLModules where
import Prelude hiding (Monoid)
-- We can represent module dependencies with our fake modules!
-- This means submodules!
--
-- The only catch is that I cannot give type associators in different classes
-- the same name, because these type families are globally scoped.
class Semigroup m where
type T m
(<>) :: (?m :: m) => T m -> T m -> T m
-- | Monoid is a submodule of Semigroup
class Semigroup m => Monoid m where
type T' m
zero :: (?m :: m) => T' m
data Add = Add
instance Semigroup Add where
type T Add = Int
(<>) = (+)
instance Monoid Add where
type T' Add = Int
zero = 0
foo :: Int
foo = zero <> zero
where
?m = Add
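-- Another (illustrative) "module" implementing the same signatures with
-- multiplication; the identity is still called 'zero' because the class fixes
-- that name:
--
-- > data Mul = Mul
-- > instance Semigroup Mul where
-- >   type T Mul = Int
-- >   (<>) = (*)
-- > instance Monoid Mul where
-- >   type T' Mul = Int
-- >   zero = 1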
| sleexyz/haskell-fun | MLModulesSubmodule.hs | bsd-3-clause | 794 | 14 | 6 | 185 | 184 | 108 | 76 | 22 | 1 |
module PackageTests.Tests(tests) where
import PackageTests.PackageTester
import qualified PackageTests.BenchmarkStanza.Check
import qualified PackageTests.TestStanza.Check
import qualified PackageTests.DeterministicAr.Check
import qualified PackageTests.TestSuiteTests.ExeV10.Check
import Control.Monad
import Data.Version
import Test.Tasty (TestTree, testGroup, mkTimeout, localOption)
import Test.Tasty.HUnit (testCase)
-- TODO: turn this into a "test-defining writer monad".
-- This will let us handle scoping gracefully.
tests :: SuiteConfig -> [TestTree]
tests config =
tail [ undefined
---------------------------------------------------------------------
-- * External tests
-- Test that Cabal parses 'benchmark' sections correctly
, tc "BenchmarkStanza" PackageTests.BenchmarkStanza.Check.suite
-- Test that Cabal parses 'test' sections correctly
, tc "TestStanza" PackageTests.TestStanza.Check.suite
    -- Test that Cabal deterministically generates object archives
, tc "DeterministicAr" PackageTests.DeterministicAr.Check.suite
---------------------------------------------------------------------
-- * Test suite tests
, testGroup "TestSuiteTests"
-- Test exitcode-stdio-1.0 test suites (and HPC)
[ testGroup "ExeV10"
(PackageTests.TestSuiteTests.ExeV10.Check.tests config)
-- Test detailed-0.9 test suites
, testGroup "LibV09" $
let
tcs :: FilePath -> TestM a -> TestTree
tcs name m
= testCase name (runTestM config ("TestSuiteTests/LibV09")
(Just name) m)
in -- Test if detailed-0.9 builds correctly
[ tcs "Build" $ cabal_build ["--enable-tests"]
-- Tests for #2489, stdio deadlock
, localOption (mkTimeout $ 10 ^ (8 :: Int))
. tcs "Deadlock" $ do
cabal_build ["--enable-tests"]
shouldFail $ cabal "test" []
]
]
---------------------------------------------------------------------
-- * Inline tests
-- Test if exitcode-stdio-1.0 benchmark builds correctly
, tc "BenchmarkExeV10" $ cabal_build ["--enable-benchmarks"]
-- Test --benchmark-option(s) flags on ./Setup bench
, tc "BenchmarkOptions" $ do
cabal_build ["--enable-benchmarks"]
cabal "bench" [ "--benchmark-options=1 2 3" ]
cabal "bench" [ "--benchmark-option=1"
, "--benchmark-option=2"
, "--benchmark-option=3"
]
-- Test --test-option(s) flags on ./Setup test
, tc "TestOptions" $ do
cabal_build ["--enable-tests"]
cabal "test" ["--test-options=1 2 3"]
cabal "test" [ "--test-option=1"
, "--test-option=2"
, "--test-option=3"
]
-- Test attempt to have executable depend on internal
-- library, but cabal-version is too old.
, tc "BuildDeps/InternalLibrary0" $ do
r <- shouldFail $ cabal' "configure" []
-- Should tell you how to enable the desired behavior
let sb = "library which is defined within the same package."
assertOutputContains sb r
-- Test executable depends on internal library.
, tc "BuildDeps/InternalLibrary1" $ cabal_build []
    -- Test that internal library is preferred to an installed one
-- with the same name and version
, tc "BuildDeps/InternalLibrary2" $ internal_lib_test "internal"
    -- Test that internal library is preferred to an installed one
-- with the same name and LATER version
, tc "BuildDeps/InternalLibrary3" $ internal_lib_test "internal"
-- Test that an explicit dependency constraint which doesn't
-- match the internal library causes us to use external library
, tc "BuildDeps/InternalLibrary4" $ internal_lib_test "installed"
-- Test "old build-dep behavior", where we should get the
-- same package dependencies on all targets if cabal-version
-- is sufficiently old.
, tc "BuildDeps/SameDepsAllRound" $ cabal_build []
-- Test "new build-dep behavior", where each target gets
-- separate dependencies. This tests that an executable
-- dep does not leak into the library.
, tc "BuildDeps/TargetSpecificDeps1" $ do
cabal "configure" []
r <- shouldFail $ cabal' "build" []
assertBool "error should be in MyLibrary.hs" $
resultOutput r =~ "^MyLibrary.hs:"
assertBool "error should be \"Could not find module `Text\\.PrettyPrint\"" $
resultOutput r =~ "Could not find module.*Text\\.PrettyPrint"
-- This is a control on TargetSpecificDeps1; it should
-- succeed.
, tc "BuildDeps/TargetSpecificDeps2" $ cabal_build []
-- Test "new build-dep behavior", where each target gets
    -- separate dependencies. This tests that a library
-- dep does not leak into the executable.
, tc "BuildDeps/TargetSpecificDeps3" $ do
cabal "configure" []
r <- shouldFail $ cabal' "build" []
assertBool "error should be in lemon.hs" $
resultOutput r =~ "^lemon.hs:"
assertBool "error should be \"Could not find module `Text\\.PrettyPrint\"" $
resultOutput r =~ "Could not find module.*Text\\.PrettyPrint"
-- Test that Paths module is generated and available for executables.
, tc "PathsModule/Executable" $ cabal_build []
-- Test that Paths module is generated and available for libraries.
, tc "PathsModule/Library" $ cabal_build []
-- Check that preprocessors (hsc2hs) are run
, tc "PreProcess" $ cabal_build ["--enable-tests", "--enable-benchmarks"]
-- Check that preprocessors that generate extra C sources are handled
, tc "PreProcessExtraSources" $ cabal_build ["--enable-tests", "--enable-benchmarks"]
-- Test building a vanilla library/executable which uses Template Haskell
, tc "TemplateHaskell/vanilla" $ cabal_build []
-- Test building a profiled library/executable which uses Template Haskell
-- (Cabal has to build the non-profiled version first)
, tc "TemplateHaskell/profiling" $ cabal_build ["--enable-library-profiling", "--enable-profiling"]
-- Test building a dynamic library/executable which uses Template
-- Haskell
, tc "TemplateHaskell/dynamic" $ cabal_build ["--enable-shared", "--enable-executable-dynamic"]
-- Test building an executable whose main() function is defined in a C
-- file
, tc "CMain" $ cabal_build []
-- Test build when the library is empty, for #1241
, tc "EmptyLib" $
withPackage "empty" $ cabal_build []
-- Test that "./Setup haddock" works correctly
, tc "Haddock" $ do
dist_dir <- distDir
let haddocksDir = dist_dir </> "doc" </> "html" </> "Haddock"
cabal "configure" []
cabal "haddock" []
let docFiles
= map (haddocksDir </>)
["CPP.html", "Literate.html", "NoCPP.html", "Simple.html"]
mapM_ (assertFindInFile "For hiding needles.") docFiles
-- Test that Haddock with a newline in synopsis works correctly, #3004
, tc "HaddockNewline" $ do
cabal "configure" []
cabal "haddock" []
-- Test that Cabal properly orders GHC flags passed to GHC (when
    -- there are multiple ghc-options fields).
, tc "OrderFlags" $ cabal_build []
-- Test that reexported modules build correctly
-- TODO: should also test that they import OK!
, tc "ReexportedModules" $ do
whenGhcVersion (>= Version [7,9] []) $ cabal_build []
-- Test that Cabal computes different IPIDs when the source changes.
, tc "UniqueIPID" . withPackageDb $ do
withPackage "P1" $ cabal "configure" []
withPackage "P2" $ cabal "configure" []
withPackage "P1" $ cabal "build" []
withPackage "P1" $ cabal "build" [] -- rebuild should work
r1 <- withPackage "P1" $ cabal' "register" ["--print-ipid", "--inplace"]
withPackage "P2" $ cabal "build" []
r2 <- withPackage "P2" $ cabal' "register" ["--print-ipid", "--inplace"]
let exIPID s = takeWhile (/= '\n') $
head . filter (isPrefixOf $ "UniqueIPID-0.1-") $ (tails s)
when ((exIPID $ resultOutput r1) == (exIPID $ resultOutput r2)) $
assertFailure $ "cabal has not calculated different Installed " ++
"package ID when source is changed."
, tc "DuplicateModuleName" $ do
cabal_build ["--enable-tests"]
r1 <- shouldFail $ cabal' "test" ["foo"]
assertOutputContains "test B" r1
assertOutputContains "test A" r1
r2 <- shouldFail $ cabal' "test" ["foo2"]
assertOutputContains "test C" r2
assertOutputContains "test A" r2
, tc "TestNameCollision" $ do
withPackageDb $ do
withPackage "parent" $ cabal_install []
withPackage "child" $ do
cabal_build ["--enable-tests"]
cabal "test" []
-- Test that Cabal can choose flags to disable building a component when that
-- component's dependencies are unavailable. The build should succeed without
-- requiring the component's dependencies or imports.
, tc "BuildableField" $ do
r <- cabal' "configure" ["-v"]
assertOutputContains "Flags chosen: build-exe=False" r
cabal "build" []
]
where
-- Shared test function for BuildDeps/InternalLibrary* tests.
internal_lib_test expect = withPackageDb $ do
withPackage "to-install" $ cabal_install []
cabal_build []
r <- runExe' "lemon" []
assertEqual
("executable should have linked with the " ++ expect ++ " library")
("foofoomyLibFunc " ++ expect)
(concat $ lines (resultOutput r))
tc :: FilePath -> TestM a -> TestTree
tc name m
= testCase name (runTestM config name Nothing m)
| lukexi/cabal | Cabal/tests/PackageTests/Tests.hs | bsd-3-clause | 9,655 | 21 | 19 | 2,245 | 1,571 | 813 | 758 | 134 | 1 |
-- |
-- Support for source code annotation feature of GHC. That is the ANN pragma.
--
-- (c) The University of Glasgow 2006
-- (c) The GRASP/AQUA Project, Glasgow University, 1992-1998
--
module Annotations (
-- * Main Annotation data types
Annotation(..),
AnnTarget(..), CoreAnnTarget,
getAnnTargetName_maybe,
-- * AnnEnv for collecting and querying Annotations
AnnEnv,
mkAnnEnv, extendAnnEnvList, plusAnnEnv, emptyAnnEnv, findAnns,
deserializeAnns
) where
import Module ( Module )
import Name
import Outputable
import Serialized
import UniqFM
import Unique
import Data.Maybe
import Data.Typeable
import Data.Word ( Word8 )
-- | Represents an annotation after it has been sufficiently desugared from
-- its initial form of 'HsDecls.AnnDecl'
data Annotation = Annotation {
ann_target :: CoreAnnTarget, -- ^ The target of the annotation
ann_value :: Serialized -- ^ 'Serialized' version of the annotation that
-- allows recovery of its value or can
-- be persisted to an interface file
}
-- | An annotation target
data AnnTarget name
= NamedTarget name -- ^ We are annotating something with a name:
-- a type or identifier
| ModuleTarget Module -- ^ We are annotating a particular module
-- | The kind of annotation target found in the middle end of the compiler
type CoreAnnTarget = AnnTarget Name
instance Functor AnnTarget where
fmap f (NamedTarget nm) = NamedTarget (f nm)
fmap _ (ModuleTarget mod) = ModuleTarget mod
-- | Get the 'name' of an annotation target if it exists.
getAnnTargetName_maybe :: AnnTarget name -> Maybe name
getAnnTargetName_maybe (NamedTarget nm) = Just nm
getAnnTargetName_maybe _ = Nothing
instance Uniquable name => Uniquable (AnnTarget name) where
getUnique (NamedTarget nm) = getUnique nm
getUnique (ModuleTarget mod) = deriveUnique (getUnique mod) 0
-- deriveUnique prevents OccName uniques clashing with NamedTarget
instance Outputable name => Outputable (AnnTarget name) where
ppr (NamedTarget nm) = text "Named target" <+> ppr nm
ppr (ModuleTarget mod) = text "Module target" <+> ppr mod
instance Outputable Annotation where
ppr ann = ppr (ann_target ann)
-- | A collection of annotations
-- Can't use a type synonym or we hit bug #2412 due to source import
newtype AnnEnv = MkAnnEnv (UniqFM [Serialized])
-- | An empty annotation environment.
emptyAnnEnv :: AnnEnv
emptyAnnEnv = MkAnnEnv emptyUFM
-- | Construct a new annotation environment that contains the list of
-- annotations provided.
mkAnnEnv :: [Annotation] -> AnnEnv
mkAnnEnv = extendAnnEnvList emptyAnnEnv
-- | Add the given annotations to the environment.
extendAnnEnvList :: AnnEnv -> [Annotation] -> AnnEnv
extendAnnEnvList (MkAnnEnv env) anns
= MkAnnEnv $ addListToUFM_C (++) env $
map (\ann -> (getUnique (ann_target ann), [ann_value ann])) anns
-- | Union two annotation environments.
plusAnnEnv :: AnnEnv -> AnnEnv -> AnnEnv
plusAnnEnv (MkAnnEnv env1) (MkAnnEnv env2) = MkAnnEnv $ plusUFM_C (++) env1 env2
-- | Find the annotations attached to the given target as 'Typeable'
-- values of your choice. If no deserializer is specified,
-- only transient annotations will be returned.
findAnns :: Typeable a => ([Word8] -> a) -> AnnEnv -> CoreAnnTarget -> [a]
findAnns deserialize (MkAnnEnv ann_env)
= (mapMaybe (fromSerialized deserialize))
. (lookupWithDefaultUFM ann_env [])
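-- A usage sketch (assuming a 'Data'-based deserializer such as
-- 'deserializeWithData' from the "Serialized" module is in scope, and that the
-- annotations were serialized as 'String's):
--
-- > findAnns deserializeWithData ann_env (NamedTarget name) :: [String]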
-- | Deserialize all annotations of a given type. This happens lazily, that is
-- no deserialization will take place until the [a] is actually demanded and
-- the [a] can also be empty (the UniqFM is not filtered).
deserializeAnns :: Typeable a => ([Word8] -> a) -> AnnEnv -> UniqFM [a]
deserializeAnns deserialize (MkAnnEnv ann_env)
= mapUFM (mapMaybe (fromSerialized deserialize)) ann_env
| nomeata/ghc | compiler/main/Annotations.hs | bsd-3-clause | 4,023 | 0 | 12 | 913 | 745 | 410 | 335 | 55 | 1 |
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# OPTIONS -fno-warn-orphans #-}
module PropGenerators
( arbitraryPropWithVarsAndSize
, arbitraryPropWithVars
, boundShrinkProp
) where
import PropositionalPrelude
import Prop
import Test.QuickCheck (Arbitrary, arbitrary, shrink, Gen, oneof, elements,
choose, sized)
-- | Generate an arbitrary proposition.
arbitraryPropWithVarsAndSize
:: forall v.
[v] -- ^ the possible values to use for variables
-> Int -- ^ some natural number for the size of the generated term
-> Gen (Prop v)
arbitraryPropWithVarsAndSize [] = const $ elements [PFalse, PTrue]
arbitraryPropWithVarsAndSize vs = go
where
go :: Int -> Gen (Prop v)
go maxNodes
| maxNodes <= 1 = oneof terms
| maxNodes <= 2 = oneof (terms ++ [ PNot <$> go (maxNodes - 1)])
| maxNodes <= 3 = oneof (terms ++ [ PNot <$> go (maxNodes - 1)
, binary PXor, binary PXnor ])
| otherwise = oneof (terms ++ [ PNot <$> go (maxNodes - 1)
, binary PXor, binary PXnor
, binary PAnd, binary PNand
, binary POr, binary PNor
, ternary PIte ] )
where
terms :: [Gen (Prop v)]
terms = map return (PFalse : PTrue : map PVar vs)
binary cons = cons <$> go (maxNodes `div` 2) <*> go (maxNodes `div` 2)
ternary cons = cons <$> go (maxNodes `div` 3) <*> go (maxNodes `div` 3) <*> go (maxNodes `div` 3)
arbitraryPropWithVars :: [v] -> Gen (Prop v)
arbitraryPropWithVars = sized . arbitraryPropWithVarsAndSize
boundShrinkProp :: (Ord v, Arbitrary v) => Int -> Prop v -> [Prop v]
boundShrinkProp bound prop
| bound <= 0 = []
| otherwise =
case prop of
PFalse -> []
PTrue -> []
PVar i -> [ PVar i' | i' <- shrink i ] ++ [ PFalse, PTrue ]
PNot p -> p : boundShrinkProp bound' p ++
[ PNot p' | p' <- boundShrinkProp bound' p ] ++
[ PFalse, PTrue ]
PAnd p1 p2 -> bin PAnd p1 p2
PNand p1 p2 -> bin PNand p1 p2
POr p1 p2 -> bin POr p1 p2
PNor p1 p2 -> bin PNor p1 p2
PXor p1 p2 -> bin PXor p1 p2
PXnor p1 p2 -> bin PXnor p1 p2
PIte p1 p2 p3 -> PFalse : PTrue : p1 : p2 : p3 :
[ PIte p1' p2' p3' | p1' <- boundShrinkProp bound' p1
, p2' <- boundShrinkProp bound' p2
, p3' <- boundShrinkProp bound' p3
]
where
bound' = pred bound
bin op p1 p2 = [ op p1' p2' | p1' <- boundShrinkProp bound' p1
, p2' <- boundShrinkProp bound' p2 ] ++
[ op p1' p2 | p1' <- boundShrinkProp bound' p1 ] ++
[ op p1 p2' | p2' <- boundShrinkProp bound' p2 ] ++
[ p1, p2, PFalse, PTrue ]
instance Arbitrary (Prop Int) where
--arbitrary = do
-- nVars <- choose (0, 10)
-- mVar <- choose (nVars, 100)
-- indexes <- vectorOf nVars (choose (0, mVar))
-- arbitraryPropWithVars indexes
arbitrary = do
mVar <- choose (0, 14)
arbitraryPropWithVars [0..mVar]
shrink prop = do
let newVars = zip (vars prop) [0..]
p <- [fmap (\v -> fromJust (lookup v newVars)) prop, prop]
boundShrinkProp 3 p
| bradlarsen/hs-cudd | test/PropGenerators.hs | bsd-3-clause | 3,517 | 0 | 15 | 1,293 | 1,115 | 567 | 548 | 70 | 11 |
-- Copyright (c) 2016-present, Facebook, Inc.
-- All rights reserved.
--
-- This source code is licensed under the BSD-style license found in the
-- LICENSE file in the root directory of this source tree.
module Duckling.Numeral.KO.Tests
( tests ) where
import Prelude
import Data.String
import Test.Tasty
import Duckling.Dimensions.Types
import Duckling.Numeral.KO.Corpus
import Duckling.Testing.Asserts
tests :: TestTree
tests = testGroup "KO Tests"
[ makeCorpusTest [Seal Numeral] corpus
]
| facebookincubator/duckling | tests/Duckling/Numeral/KO/Tests.hs | bsd-3-clause | 504 | 0 | 9 | 78 | 79 | 50 | 29 | 11 | 1 |
#!/usr/local/bin/runhaskell
{-# LANGUAGE DeriveDataTypeable #-}
import Text.Hastache
import Text.Hastache.Context
import qualified Data.Text.Lazy.IO as TL
import Data.Data
import Data.Generics
main = hastacheStr defaultConfig (encodeStr template) context >>= TL.putStrLn
-- begin example
data Book = Book {
title :: String,
publicationYear :: Integer
} deriving (Data, Typeable)
data Life = Life {
born :: Integer,
died :: Integer
} deriving (Data, Typeable)
data Writer = Writer {
name :: String,
life :: Life,
books :: [Book]
} deriving (Data, Typeable)
template = concat [
"Name: {{name}} ({{life.born}} - {{life.died}})\n",
"{{#life}}\n",
"Born: {{born}}\n",
"Died: {{died}}\n",
"{{/life}}\n",
"Bibliography:\n",
"{{#books}}\n",
" {{title}} ({{publicationYear}})\n",
"{{/books}}\n"
]
context = mkGenericContext Writer {
name = "Mikhail Bulgakov",
life = Life 1891 1940,
books = [
Book "Heart of a Dog" 1987,
Book "Notes of a country doctor" 1926,
Book "The Master and Margarita" 1967]
}
| lymar/hastache | examples/genericsBig.hs | bsd-3-clause | 1,235 | 12 | 9 | 381 | 240 | 140 | 100 | 37 | 1 |
module Main where
import Scraper
import Formatter
import System.Environment (getArgs)
import Data.Maybe
import Text.HTML.Scalpel
import Control.Monad (when)
import Data.List
import System.Console.ParseArgs
run :: String -> String -> IO()
run outputpath uri = do
putStrLn "Scraping..."
maybeitems <- scrapeURL uri items
items <- return $ fromMaybe [] maybeitems
res <- return . name2doc $ items
writeFile outputpath res
putStrLn $ "Done. Output is in " ++ outputpath
options = [Arg "outfile" (Just 'o') (Just "out") (argDataDefaulted "output file path" ArgtypeString "./out.txt") "output file.",
Arg "url" Nothing Nothing (argDataRequired "url" ArgtypeString) "URL to parse"]
main :: IO()
main = do
a <- parseArgsIO ArgsComplete options
let url = fromJust $ getArg a "url"
let outfile = fromJust $ getArg a "outfile"
run outfile url
| tsukimizake/haddock2anki | app/Main.hs | bsd-3-clause | 891 | 1 | 10 | 181 | 279 | 141 | 138 | 25 | 1 |
{-# OPTIONS_GHC -Wall #-}
module Main where
import Data.Proxy
import OfflinePlay
import qualified Punter.Connector as Connector
main :: IO ()
main = runPunterOffline (Proxy :: Proxy Connector.Punter)
| nobsun/icfpc2017 | hs/app/connector.hs | bsd-3-clause | 202 | 0 | 8 | 29 | 50 | 30 | 20 | 7 | 1 |
import TestDatas (int1, stringHello)
import Test.QuickCheck.Simple (Test, boolTest, qcTest, defaultMain)
main :: IO ()
main = defaultMain tests
prop_int1 :: Bool
prop_int1 = int1 == 1
prop_stringHello :: Bool
prop_stringHello = stringHello == "Hello 2017-01-02 12:34:56"
prop_show_read :: Int -> Bool
prop_show_read i = read (show i) == (i :: Int)
tests :: [Test]
tests = [ boolTest "int1" prop_int1
, boolTest "stringHello" prop_stringHello
, qcTest "show read" prop_show_read
]
| khibino/travis-ci-haskell | pkg-a/tests/useQuickCheckSimple.hs | bsd-3-clause | 531 | 0 | 8 | 121 | 151 | 84 | 67 | 14 | 1 |
{-# LANGUAGE BangPatterns #-}
{-# LANGUAGE CPP #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE ForeignFunctionInterface #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE Trustworthy #-}
{-# LANGUAGE TupleSections #-}
{-|
HAProxy proxying protocol support (see
<http://haproxy.1wt.eu/download/1.5/doc/proxy-protocol.txt>) for applications
using io-streams. The proxy protocol allows information about a networked peer
(like remote address and port) to be propagated through a forwarding proxy that
is configured to speak this protocol.
This approach is safer than other alternatives like injecting a special HTTP
header (like "X-Forwarded-For") because the data is sent out of band, requests
without the proxy header fail, and proxy data cannot be spoofed by the client.
-}
module System.IO.Streams.Network.HAProxy
(
-- * Proxying requests.
behindHAProxy
, behindHAProxyWithLocalInfo
, decodeHAProxyHeaders
-- * Information about proxied requests.
, ProxyInfo
, socketToProxyInfo
, makeProxyInfo
, getSourceAddr
, getDestAddr
, getFamily
, getSocketType
) where
------------------------------------------------------------------------------
import Control.Applicative ((<|>))
import Control.Monad (void, when)
import Data.Attoparsec.ByteString (anyWord8)
import Data.Attoparsec.ByteString.Char8 (Parser, char, decimal, skipWhile, string, take, takeWhile1)
import Data.Bits (unsafeShiftR, (.&.))
import qualified Data.ByteString as S8
import Data.ByteString.Char8 (ByteString)
import qualified Data.ByteString.Char8 as S
import qualified Data.ByteString.Unsafe as S
import Data.Word (Word16, Word32, Word8)
import Foreign.C.Types (CUInt (..), CUShort (..))
import Foreign.Ptr (castPtr)
import Foreign.Storable (peek)
import qualified Network.Socket as N
import Prelude hiding (take)
import System.IO.Streams (InputStream, OutputStream)
import qualified System.IO.Streams as Streams
import qualified System.IO.Streams.Attoparsec as Streams
import System.IO.Streams.Network.Internal.Address (getSockAddr)
import System.IO.Unsafe (unsafePerformIO)
#if !MIN_VERSION_base(4,8,0)
import Control.Applicative ((<$>))
#endif
------------------------------------------------------------------------------
------------------------------------------------------------------------------
-- | Make a 'ProxyInfo' from a connected socket.
socketToProxyInfo :: N.Socket -> N.SockAddr -> IO ProxyInfo
socketToProxyInfo s sa = do
da <- N.getSocketName s
let (N.MkSocket _ _ !sty _ _) = s
return $! makeProxyInfo sa da (addrFamily sa) sty
------------------------------------------------------------------------------
-- | Parses the proxy headers emitted by HAProxy and runs a user action with
-- the origin/destination socket addresses provided by HAProxy. Will throw a
-- 'Sockets.ParseException' if the protocol header cannot be parsed properly.
--
-- We support version 1.5 of the protocol (both the "old" text protocol and the
-- "new" binary protocol.). Typed data fields after the addresses are not (yet)
-- supported.
--
behindHAProxy :: N.Socket -- ^ A socket you've just accepted
-> N.SockAddr -- ^ and its peer address
-> (ProxyInfo
-> InputStream ByteString
-> OutputStream ByteString
-> IO a)
-> IO a
behindHAProxy socket sa m = do
pinfo <- socketToProxyInfo socket sa
sockets <- Streams.socketToStreams socket
behindHAProxyWithLocalInfo pinfo sockets m
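-- A minimal usage sketch ('sock', 'peerAddr' and the handler body are
-- hypothetical); the handler receives the peer information recovered from the
-- proxy header together with the socket streams:
--
-- > behindHAProxy sock peerAddr $ \proxyInfo is os ->
-- >     print (getSourceAddr proxyInfo, getDestAddr proxyInfo)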
------------------------------------------------------------------------------
-- | Like 'behindHAProxy', but allows the socket addresses and input/output
-- streams to be passed in instead of created based on an input 'Socket'.
-- Useful for unit tests.
--
behindHAProxyWithLocalInfo
:: ProxyInfo -- ^ local socket info
-> (InputStream ByteString, OutputStream ByteString) -- ^ socket streams
-> (ProxyInfo
-> InputStream ByteString
-> OutputStream ByteString
-> IO a) -- ^ user function
-> IO a
behindHAProxyWithLocalInfo localProxyInfo (is, os) m = do
proxyInfo <- decodeHAProxyHeaders localProxyInfo is
m proxyInfo is os
------------------------------------------------------------------------------
decodeHAProxyHeaders :: ProxyInfo -> (InputStream ByteString) -> IO ProxyInfo
decodeHAProxyHeaders localProxyInfo is0 = do
-- 536 bytes as per spec
is <- Streams.throwIfProducesMoreThan 536 is0
(!isOld, !mbOldInfo) <- Streams.parseFromStream
(((True,) <$> parseOldHaProxy)
<|> return (False, Nothing)) is
if isOld
then maybe (return localProxyInfo)
(\(srcAddr, srcPort, destAddr, destPort, f) -> do
(_, s) <- getSockAddr srcPort srcAddr
(_, d) <- getSockAddr destPort destAddr
return $! makeProxyInfo s d f $ getSocketType localProxyInfo)
mbOldInfo
else Streams.parseFromStream (parseNewHaProxy localProxyInfo) is
------------------------------------------------------------------------------
-- | Stores information about the proxied request.
data ProxyInfo = ProxyInfo {
_sourceAddr :: N.SockAddr
, _destAddr :: N.SockAddr
, _family :: N.Family
, _sockType :: N.SocketType
} deriving (Show)
------------------------------------------------------------------------------
-- | Gets the 'N.Family' of the proxied request (i.e. IPv4/IPv6/Unix domain
-- sockets).
getFamily :: ProxyInfo -> N.Family
getFamily p = _family p
------------------------------------------------------------------------------
-- | Gets the 'N.SocketType' of the proxied request (UDP/TCP).
getSocketType :: ProxyInfo -> N.SocketType
getSocketType p = _sockType p
------------------------------------------------------------------------------
-- | Gets the network address of the source node for this request (i.e. the
-- client).
getSourceAddr :: ProxyInfo -> N.SockAddr
getSourceAddr p = _sourceAddr p
------------------------------------------------------------------------------
-- | Gets the network address of the destination node for this request (i.e.
-- the address the client connected to).
getDestAddr :: ProxyInfo -> N.SockAddr
getDestAddr p = _destAddr p
------------------------------------------------------------------------------
-- | Makes a 'ProxyInfo' object.
makeProxyInfo :: N.SockAddr -- ^ the source address
-> N.SockAddr -- ^ the destination address
-> N.Family -- ^ the socket family
-> N.SocketType -- ^ the socket type
-> ProxyInfo
makeProxyInfo srcAddr destAddr f st = ProxyInfo srcAddr destAddr f st
------------------------------------------------------------------------------
parseFamily :: Parser (Maybe N.Family)
parseFamily = (string "TCP4" >> return (Just N.AF_INET))
<|> (string "TCP6" >> return (Just N.AF_INET6))
<|> (string "UNKNOWN" >> return Nothing)
------------------------------------------------------------------------------
parseOldHaProxy :: Parser (Maybe (ByteString, Int, ByteString, Int, N.Family))
parseOldHaProxy = do
string "PROXY "
gotFamily <- parseFamily
case gotFamily of
Nothing -> skipWhile (/= '\r') >> string "\r\n" >> return Nothing
(Just f) -> do
char ' '
srcAddress <- takeWhile1 (/= ' ')
char ' '
destAddress <- takeWhile1 (/= ' ')
char ' '
srcPort <- decimal
char ' '
destPort <- decimal
string "\r\n"
return $! Just $! (srcAddress, srcPort, destAddress, destPort, f)
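-- For reference, a header in the "old" text format accepted by the parser
-- above looks like this (addresses and ports are illustrative):
--
-- > PROXY TCP4 192.168.0.1 192.168.0.11 56324 443\r\n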
------------------------------------------------------------------------------
protocolHeader :: ByteString
protocolHeader = S8.pack [ 0x0D, 0x0A, 0x0D, 0x0A, 0x00, 0x0D
, 0x0A, 0x51, 0x55, 0x49, 0x54, 0x0A ]
{-# NOINLINE protocolHeader #-}
------------------------------------------------------------------------------
parseNewHaProxy :: ProxyInfo -> Parser ProxyInfo
parseNewHaProxy localProxyInfo = do
string protocolHeader
versionAndCommand <- anyWord8
let version = (versionAndCommand .&. 0xF0) `unsafeShiftR` 4
let command = (versionAndCommand .&. 0xF) :: Word8
when (version /= 0x2) $ fail $ "Invalid protocol version: " ++ show version
when (command > 1) $ fail $ "Invalid command: " ++ show command
protocolAndFamily <- anyWord8
let family = (protocolAndFamily .&. 0xF0) `unsafeShiftR` 4
let protocol = (protocolAndFamily .&. 0xF) :: Word8
-- VALUES FOR FAMILY
-- 0x0 : AF_UNSPEC : the connection is forwarded for an unknown,
-- unspecified or unsupported protocol. The sender should use this family
-- when sending LOCAL commands or when dealing with unsupported protocol
-- families. The receiver is free to accept the connection anyway and use
-- the real endpoint addresses or to reject it. The receiver should ignore
-- address information.
-- 0x1 : AF_INET : the forwarded connection uses the AF_INET address family
-- (IPv4). The addresses are exactly 4 bytes each in network byte order,
-- followed by transport protocol information (typically ports).
-- 0x2 : AF_INET6 : the forwarded connection uses the AF_INET6 address
-- family (IPv6). The addresses are exactly 16 bytes each in network byte
-- order, followed by transport protocol information (typically ports).
--
-- 0x3 : AF_UNIX : the forwarded connection uses the AF_UNIX address family
-- (UNIX). The addresses are exactly 108 bytes each.
socketType <- toSocketType protocol
addressLen <- ntohs <$> snarf16
case () of
!_ | command == 0x0 || family == 0x0 || protocol == 0x0 -- LOCAL
-> handleLocal addressLen
| family == 0x1 -> handleIPv4 addressLen socketType
| family == 0x2 -> handleIPv6 addressLen socketType
#ifndef WINDOWS
| family == 0x3 -> handleUnix addressLen socketType
#endif
| otherwise -> fail $ "Bad family " ++ show family
where
toSocketType 0 = return $! N.Stream
toSocketType 1 = return $! N.Stream
toSocketType 2 = return $! N.Datagram
toSocketType _ = fail "bad protocol"
handleLocal addressLen = do
-- skip N bytes and return the original addresses
when (addressLen > 500) $ fail $ "suspiciously long address "
++ show addressLen
void $ take (fromIntegral addressLen)
return localProxyInfo
handleIPv4 addressLen socketType = do
when (addressLen < 12) $ fail $ "bad address length "
++ show addressLen
++ " for IPv4"
let nskip = addressLen - 12
srcAddr <- snarf32
destAddr <- snarf32
srcPort <- ntohs <$> snarf16
destPort <- ntohs <$> snarf16
void $ take $ fromIntegral nskip
-- Note: we actually want the brain-dead constructors here
let sa = N.SockAddrInet (fromIntegral srcPort) srcAddr
let sb = N.SockAddrInet (fromIntegral destPort) destAddr
return $! makeProxyInfo sa sb (addrFamily sa) socketType
handleIPv6 addressLen socketType = do
let scopeId = 0 -- means "reserved", kludge alert!
let flow = 0
when (addressLen < 36) $ fail $ "bad address length "
++ show addressLen
++ " for IPv6"
let nskip = addressLen - 36
s1 <- ntohl <$> snarf32
s2 <- ntohl <$> snarf32
s3 <- ntohl <$> snarf32
s4 <- ntohl <$> snarf32
d1 <- ntohl <$> snarf32
d2 <- ntohl <$> snarf32
d3 <- ntohl <$> snarf32
d4 <- ntohl <$> snarf32
sp <- ntohs <$> snarf16
dp <- ntohs <$> snarf16
void $ take $ fromIntegral nskip
let sa = N.SockAddrInet6 (fromIntegral sp) flow (s1, s2, s3, s4) scopeId
let sb = N.SockAddrInet6 (fromIntegral dp) flow (d1, d2, d3, d4) scopeId
return $! makeProxyInfo sa sb (addrFamily sa) socketType
#ifndef WINDOWS
handleUnix addressLen socketType = do
when (addressLen < 216) $ fail $ "bad address length "
++ show addressLen
++ " for unix"
addr1 <- take 108
addr2 <- take 108
void $ take $ fromIntegral $ addressLen - 216
let sa = N.SockAddrUnix (toUnixPath addr1)
let sb = N.SockAddrUnix (toUnixPath addr2)
return $! makeProxyInfo sa sb (addrFamily sa) socketType
toUnixPath = S.unpack . fst . S.break (=='\x00')
#endif
foreign import ccall unsafe "iostreams_ntohs" c_ntohs :: CUShort -> CUShort
foreign import ccall unsafe "iostreams_ntohl" c_ntohl :: CUInt -> CUInt
ntohs :: Word16 -> Word16
ntohs = fromIntegral . c_ntohs . fromIntegral
ntohl :: Word32 -> Word32
ntohl = fromIntegral . c_ntohl . fromIntegral
snarf32 :: Parser Word32
snarf32 = do
s <- take 4
return $! unsafePerformIO $! S.unsafeUseAsCString s $ peek . castPtr
snarf16 :: Parser Word16
snarf16 = do
s <- take 2
return $! unsafePerformIO $! S.unsafeUseAsCString s $ peek . castPtr
addrFamily :: N.SockAddr -> N.Family
addrFamily s = case s of
(N.SockAddrInet _ _) -> N.AF_INET
(N.SockAddrInet6 _ _ _ _) -> N.AF_INET6
#ifndef WINDOWS
(N.SockAddrUnix _ ) -> N.AF_UNIX
#endif
_ -> error "unknown family"
| 23Skidoo/io-streams-haproxy | src/System/IO/Streams/Network/HAProxy.hs | bsd-3-clause | 14,487 | 0 | 17 | 4,118 | 2,728 | 1,429 | 1,299 | 220 | 4 |
module CNC.IntegrationTests where
import CNC.Declarative
import CNC.HCode
import Data.Complex
import Control.Monad
flake_side = do
[p1,p2,p3,p4,p5] <- declarePoints 5
xSize p1 p2 1
len p2 p3 1
len p3 p4 1
xAngle p2 p3 (pi / 3)
xAngle p3 p4 (- pi / 3)
xSize p4 p5 1
renderPolygon :: Int -> Path -> HCode ()
renderPolygon n path = render_side 0 path n
where
render_side p0 path 0 = return ()
render_side p0 path k = do
let d = deltaPath path
renderPath (posToExpr p0) path
render_side (p0 + d) (rotate (- 2*pi / fromIntegral n) path) (k-1)
test1 = do putStrLn "a snowflake"
print $ (figure flake_side)
putHCode $ renderPolygon 3 (figure flake_side)
integrationTests = do
putStrLn "Integration tests"
test1 | akamaus/gcodec | test/CNC/IntegrationTests.hs | bsd-3-clause | 777 | 0 | 15 | 194 | 328 | 158 | 170 | 26 | 2 |
{-# LANGUAGE DerivingStrategies #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE InstanceSigs #-}
{-# LANGUAGE TypeSynonymInstances #-}
{-# LANGUAGE ViewPatterns #-}
module EZConfig (spec) where
import Control.Arrow (first, (>>>))
import Data.Coerce
import Foreign.C.Types (CUInt(..))
import Test.Hspec
import Test.Hspec.QuickCheck
import Test.QuickCheck
import XMonad
import XMonad.Prelude
import XMonad.Util.EZConfig
import XMonad.Util.Parser
spec :: Spec
spec = do
prop "prop_decodePreservation" prop_decodePreservation
prop "prop_encodePreservation" prop_encodePreservation
context "parseKey" $ do
let prepare = unzip . map (first surround)
testParseKey (ns, ks) = traverse (runParser parseKey) ns `shouldBe` Just ks
it "parses all regular keys" $ testParseKey (unzip regularKeys )
it "parses all function keys" $ testParseKey (prepare functionKeys )
it "parses all special keys" $ testParseKey (prepare specialKeys )
it "parses all multimedia keys" $ testParseKey (prepare multimediaKeys)
context "parseModifier" $ do
it "parses all combinations of modifiers" $
nub . map sort <$> traverse (runParser (many $ parseModifier def))
modifiers
`shouldBe` Just [[ shiftMask, controlMask
, mod1Mask, mod1Mask -- def M and M1
, mod2Mask, mod3Mask, mod4Mask, mod5Mask
]]
-- Checking for regressions
describe "readKeySequence" $
it "Fails on the non-existent key M-10" $
readKeySequence def "M-10" `shouldBe` Nothing
-- | Parsing preserves all info that printing does.
prop_encodePreservation :: KeyString -> Property
prop_encodePreservation (coerce -> s) = parse s === (parse . pp =<< parse s)
where parse = runParser (parseKeySequence def)
pp = unwords . map keyToString
-- | Printing preserves all info that parsing does.
prop_decodePreservation :: NonEmptyList (AKeyMask, AKeySym) -> Property
prop_decodePreservation (getNonEmpty >>> coerce -> xs) =
Just (pp xs) === (fmap pp . parse $ pp xs)
where parse = runParser (parseKeySequence def)
pp = unwords . map keyToString
-- | QuickCheck can handle the 8! combinations just fine.
modifiers :: [String]
modifiers = map concat $ permutations mods
mods :: [String]
mods = ["M-", "C-", "S-", "M1-", "M2-", "M3-", "M4-", "M5-"]
surround :: String -> String
surround s = "<" <> s <> ">"
-----------------------------------------------------------------------
-- Newtypes and Arbitrary instances
newtype AKeyMask = AKeyMask KeyMask
deriving newtype (Show)
instance Arbitrary AKeyMask where
arbitrary :: Gen AKeyMask
arbitrary = fmap (coerce . sum . nub) . listOf . elements $
[noModMask, shiftMask, controlMask, mod1Mask, mod2Mask, mod3Mask, mod4Mask, mod5Mask]
newtype AKeySym = AKeySym KeySym
deriving newtype (Show)
instance Arbitrary AKeySym where
arbitrary :: Gen AKeySym
arbitrary = elements . coerce . map snd $ regularKeys <> allSpecialKeys
newtype KeyString = KeyString String
deriving newtype (Show)
instance Arbitrary KeyString where
arbitrary :: Gen KeyString
arbitrary = coerce . unwords <$> listOf keybinding
where
keybinding :: Gen String
keybinding = do
let keyStr = map fst $ regularKeys <> allSpecialKeys
mks <- nub <$> listOf (elements ("" : mods))
k <- elements keyStr
ks <- listOf . elements $ keyStr
pure $ concat mks <> k <> " " <> unwords ks
| xmonad/xmonad-contrib | tests/EZConfig.hs | bsd-3-clause | 3,567 | 0 | 17 | 807 | 935 | 491 | 444 | 75 | 1 |
module Wow1 where
import Haskore.Melody
import Haskore.Music as M
import Haskore.Basic.Duration as D
import Haskore.Basic.Duration
import Snippet
import Prelude as P
import Haskore.Music.GeneralMIDI
import Haskore.Basic.Interval
flute_base = [
cs 1 qn, fs 1 qn, fs 1 en, e 1 en, fs 1 hn, fs 1 en, gs 1 en
]
flute_var_1 = [
a 1 qn, b 1 qn, a 1 qn, gs 1 en, fs 1 en, e 1 hn
]
flute_var_2 = [
a 1 qn, e 1 en, cs 1 en, a 1 qn, gs 1 dhn
]
flute_var_3 = [
a 1 qn, fs 1 qn, a 1 qn, b 1 dqn, a 1 en, gs 1 qn, fs 1 dwn
]
t1 = play_with PanFlute . line $ map (\x -> x ()) $
concat [
flute_base,
flute_var_1,
flute_base,
flute_var_2,
flute_base,
flute_var_1,
flute_var_3
]
wow_1 = export_to' "wow_1" 1 $ changeTempo 3 $ transpose octave $ t1
main = wow_1 | nfjinjing/haskore-guide | src/wow_1.hs | bsd-3-clause | 806 | 0 | 11 | 217 | 363 | 196 | 167 | 28 | 1 |
{-# language CPP #-}
-- | = Name
--
-- XR_MSFT_unbounded_reference_space - instance extension
--
-- = Specification
--
-- See
-- <https://www.khronos.org/registry/OpenXR/specs/1.0/html/xrspec.html#XR_MSFT_unbounded_reference_space XR_MSFT_unbounded_reference_space>
-- in the main specification for complete information.
--
-- = Registered Extension Number
--
-- 39
--
-- = Revision
--
-- 1
--
-- = Extension and Version Dependencies
--
-- - Requires OpenXR 1.0
--
-- = See Also
--
-- No cross-references are available
--
-- = Document Notes
--
-- For more information, see the
-- <https://www.khronos.org/registry/OpenXR/specs/1.0/html/xrspec.html#XR_MSFT_unbounded_reference_space OpenXR Specification>
--
-- This page is a generated document. Fixes and changes should be made to
-- the generator scripts, not directly.
module OpenXR.Extensions.XR_MSFT_unbounded_reference_space ( MSFT_unbounded_reference_space_SPEC_VERSION
, pattern MSFT_unbounded_reference_space_SPEC_VERSION
, MSFT_UNBOUNDED_REFERENCE_SPACE_EXTENSION_NAME
, pattern MSFT_UNBOUNDED_REFERENCE_SPACE_EXTENSION_NAME
) where
import Data.String (IsString)
type MSFT_unbounded_reference_space_SPEC_VERSION = 1
-- No documentation found for TopLevel "XR_MSFT_unbounded_reference_space_SPEC_VERSION"
pattern MSFT_unbounded_reference_space_SPEC_VERSION :: forall a . Integral a => a
pattern MSFT_unbounded_reference_space_SPEC_VERSION = 1
type MSFT_UNBOUNDED_REFERENCE_SPACE_EXTENSION_NAME = "XR_MSFT_unbounded_reference_space"
-- No documentation found for TopLevel "XR_MSFT_UNBOUNDED_REFERENCE_SPACE_EXTENSION_NAME"
pattern MSFT_UNBOUNDED_REFERENCE_SPACE_EXTENSION_NAME :: forall a . (Eq a, IsString a) => a
pattern MSFT_UNBOUNDED_REFERENCE_SPACE_EXTENSION_NAME = "XR_MSFT_unbounded_reference_space"
| expipiplus1/vulkan | openxr/src/OpenXR/Extensions/XR_MSFT_unbounded_reference_space.hs | bsd-3-clause | 2,016 | 0 | 8 | 429 | 145 | 99 | 46 | -1 | -1 |
module Libv10
( main'
) where
-- v10 : Fill in the division team mapping and other data
import Control.Monad
import qualified Data.Map.Strict as Map
import Data.List
import Data.Maybe
import Data.Ord
import System.Environment
import Text.Parsec
import Text.Parsec.String
import Text.Printf
-- data ----------------------------------------------------
-- Ranking information - team rank, team name
data Ranking = Ranking
{ rRank :: Int
, rTeam :: String
} deriving Show
-- Result information - division, average, standard deviation
data Result = Result
{ rDivision :: String
, rAverage :: Double
, rStdDev :: Double
} deriving Show
-- parsing -------------------------------------------------
-- Parse out a single ranking from the file
rankingParser :: Parser Ranking
rankingParser = do
many space
rank <- many1 digit
many space
team <- many1 alphaNum
newline
return (Ranking (read rank) team)
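-- For example, an input line such as "  1  PATRIOTS\n" parses to
-- Ranking 1 "PATRIOTS".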
-- Parse out all of the rankings
rankingsParser :: Parser [Ranking]
rankingsParser = many1 rankingParser
-- Take a file and produce a list of team rankings
parseRankings :: String -> IO [Ranking]
parseRankings file =
parseFromFile rankingsParser file >>= either undefined return
-- calculating ---------------------------------------------
-- >> AFC divisions
afcEast, afcWest, afcSouth, afcNorth :: String
afcEast = "AFC East"
afcWest = "AFC West"
afcSouth = "AFC South"
afcNorth = "AFC North"
-- >> NFC divisions
nfcEast, nfcWest, nfcSouth, nfcNorth :: String
nfcEast = "NFC East"
nfcWest = "NFC West"
nfcSouth = "NFC South"
nfcNorth = "NFC North"
-- >> Map teams to their divisions
teamDivisionMap :: Map.Map String String
teamDivisionMap = Map.fromList
[ ("PATRIOTS", afcEast)
, ("JETS", afcEast)
, ("DOLPHINS", afcEast)
, ("BILLS", afcEast)
, ("BENGALS", afcNorth)
, ("BROWNS", afcNorth)
, ("STEELERS", afcNorth)
, ("RAVENS", afcNorth)
, ("BRONCOS", afcWest)
, ("RAIDERS", afcWest)
, ("CHARGERS", afcWest)
, ("CHIEFS", afcWest)
, ("JAGUARS", afcSouth)
, ("TITANS", afcSouth)
, ("TEXANS", afcSouth)
, ("COLTS", afcSouth)
, ("FALCONS", nfcSouth)
, ("PANTHERS", nfcSouth)
, ("BUCCANEERS", nfcSouth)
, ("SAINTS", nfcSouth)
, ("PACKERS", nfcNorth)
, ("VIKINGS", nfcNorth)
, ("LIONS", nfcNorth)
, ("BEARS", nfcNorth)
, ("COWBOYS", nfcEast)
, ("REDSKINS", nfcEast)
, ("GIANTS", nfcEast)
, ("EAGLES", nfcEast)
, ("CARDINALS", nfcWest)
, ("RAMS", nfcWest)
, ("49ERS", nfcWest)
, ("SEAHAWKS", nfcWest)
]
-- Look up a team's division - assume well-formed data files :o
teamToDivision :: String -> String
teamToDivision team =
fromJust (Map.lookup team teamDivisionMap)
-- Convert a ranking to a (division, rank) tuple
rankingToDivision :: Ranking -> (String, Int)
rankingToDivision ranking =
(teamToDivision (rTeam ranking), rRank ranking)
-- Group sort - collect values with identical keys
groupSort :: Ord k => [(k, v)] -> [(k, [v])]
groupSort kvs = Map.toList (Map.fromListWith (++) [(k, [v]) | (k, v) <- kvs])
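-- For example, groupSort [(1,'a'),(2,'b'),(1,'c')] yields [(1,"ca"),(2,"b")]:
-- values sharing a key are collected (in reverse insertion order, since
-- Map.fromListWith prepends later values) and keys come out in ascending order.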
-- Turn the rankings into a (division, [rank]) tuple
rankingsToDivisions :: [Ranking] -> [(String, [Int])]
rankingsToDivisions rankings =
groupSort (map rankingToDivision rankings)
-- Average a number of rankings
average :: [Int] -> Double
average rankings = realToFrac (sum rankings) / genericLength rankings
-- Standard deviation of rankings group
stdDev :: [Int] -> Double
stdDev rankings = sqrt (sum (map f rankings) / genericLength rankings) where
f ranking = (realToFrac ranking - average rankings) ** 2
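-- Quick sanity checks for the two helpers above: average [1,2,3] == 2.0, and
-- (with the population formula used here) stdDev [2,4,4,4,5,5,7,9] == 2.0.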
-- Produce a result from division and [rank]
calculateResult :: (String, [Int]) -> Result
calculateResult (division, rankings) =
Result division (average rankings) (stdDev rankings)
-- Take teams rankings and calculate results
calculateResults :: [Ranking] -> [Result]
calculateResults rankings =
sortBy (comparing rAverage) (map calculateResult (rankingsToDivisions rankings))
-- main ----------------------------------------------------
-- Combine the parsing and calculating
parseAndCalculate :: String -> IO [Result]
parseAndCalculate file = do
rankings <- parseRankings file
return (calculateResults rankings)
-- Collect results from all the files
parseAndCalculateAll :: [String] -> IO [(String, [Result])]
parseAndCalculateAll files =
forM files $ \file -> do
results <- parseAndCalculate file
return (file, results)
-- Print out a result
printResult :: Result -> IO ()
printResult result =
printf "%-10s %10.2f %10.2f\n" (rDivision result) (rAverage result) (rStdDev result)
-- Print out all the results
inputsToOutputs :: [(String, [Result])] -> IO ()
inputsToOutputs results =
forM_ results $ \(file, results') -> do
putStrLn file
forM_ results' printResult
putStrLn ""
-- Program Entry - pass file args as inputs
main' :: IO ()
main' = do
args <- getArgs
results <- parseAndCalculateAll args
inputsToOutputs results
| mfine/hs-talks | src/Libv10.hs | bsd-3-clause | 5,032 | 0 | 11 | 966 | 1,316 | 752 | 564 | 122 | 1 |
{-# LANGUAGE DeriveDataTypeable #-}
{-# OPTIONS_GHC -fno-warn-missing-fields #-}
{-# OPTIONS_GHC -fno-warn-missing-signatures #-}
{-# OPTIONS_GHC -fno-warn-name-shadowing #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
{-# OPTIONS_GHC -fno-warn-unused-matches #-}
-----------------------------------------------------------------
-- Autogenerated by Thrift Compiler (0.7.0-dev) --
-- --
-- DO NOT EDIT UNLESS YOU ARE SURE YOU KNOW WHAT YOU ARE DOING --
-----------------------------------------------------------------
module Thrift.Content_Consts where
import Prelude ( Bool(..), Enum, Double, String, Maybe(..),
Eq, Show, Ord,
return, length, IO, fromIntegral, fromEnum, toEnum,
(&&), (||), (==), (++), ($), (-) )
import Control.Exception
import Data.ByteString.Lazy
import Data.Int
import Data.Typeable ( Typeable )
import qualified Data.Map as Map
import qualified Data.Set as Set
import Thrift
import Thrift.Content_Types
| csabahruska/GFXDemo | Thrift/Content_Consts.hs | bsd-3-clause | 1,066 | 0 | 6 | 240 | 152 | 109 | 43 | 19 | 0 |
-- |
-- Module : $Header$
-- Copyright : (c) 2013-2015 Galois, Inc.
-- License : BSD3
-- Maintainer : [email protected]
-- Stability : provisional
-- Portability : portable
{-# LANGUAGE Safe #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE PatternGuards #-}
module Cryptol.TypeCheck.Solver.CrySAT
( withScope, withSolver
, assumeProps, simplifyProps, getModel
, check
, Solver, logger
, DefinedProp(..)
, debugBlock
, DebugLog(..)
, prepareConstraints
) where
import qualified Cryptol.TypeCheck.AST as Cry
import Cryptol.TypeCheck.InferTypes(Goal(..), SolverConfig(..))
import qualified Cryptol.TypeCheck.Subst as Cry
import Cryptol.TypeCheck.Solver.Numeric.AST
import Cryptol.TypeCheck.Solver.Numeric.ImportExport
import Cryptol.TypeCheck.Solver.Numeric.Defined
import Cryptol.TypeCheck.Solver.Numeric.Simplify
import Cryptol.TypeCheck.Solver.Numeric.SimplifyExpr(crySimpExpr)
import Cryptol.TypeCheck.Solver.Numeric.NonLin
import Cryptol.TypeCheck.Solver.Numeric.SMT
import Cryptol.Utils.PP -- ( Doc )
import Cryptol.Utils.Panic ( panic )
import MonadLib
import Data.Maybe ( mapMaybe, fromMaybe )
import Data.Either ( partitionEithers )
import Data.List(nub)
import qualified Data.Map as Map
import Data.Foldable ( any, all )
import qualified Data.Set as Set
import Data.IORef ( IORef, newIORef, readIORef, modifyIORef',
atomicModifyIORef' )
import Prelude hiding (any,all)
import qualified SimpleSMT as SMT
-- | We use this to remember what we simplified
newtype SimpProp = SimpProp { unSimpProp :: Prop }
simpProp :: Prop -> SimpProp
simpProp p = SimpProp (crySimplify p)
-- | 'dpSimpProp' and 'dpSimpExprProp' should be logically equivalent
-- to each other, and to whatever 'a' represents (usually 'a' is a 'Goal').
data DefinedProp a = DefinedProp
{ dpData :: a
-- ^ Optional data to associate with prop.
-- Often, the original `Goal` from which the prop was extracted.
, dpSimpProp :: SimpProp
{- ^ Fully simplified: may mention ORs, and named non-linear terms.
These are what we send to the prover, and we don't attempt to
convert them back into Cryptol types. -}
, dpSimpExprProp :: Prop
{- ^ A version of the proposition where just the expression terms
have been simplified. These should not contain ORs or named non-linear
     terms because we want to import them back into Cryptol types. -}
}
instance HasVars SimpProp where
apSubst su (SimpProp p) = do p1 <- apSubst su p
let p2 = crySimplify p1
guard (p1 /= p2)
return (SimpProp p2)
apSubstDefinedProp :: (Prop -> a -> a) ->
Subst -> DefinedProp a -> Maybe (DefinedProp a)
apSubstDefinedProp updCt su DefinedProp { .. } =
do s1 <- apSubst su dpSimpProp
return $ case apSubst su dpSimpExprProp of
Nothing -> DefinedProp { dpSimpProp = s1, .. }
Just p1 -> DefinedProp { dpSimpProp = s1
, dpSimpExprProp = p1
, dpData = updCt p1 dpData
}
{- | Check if the given constraint is guaranteed to be well-defined.
This means that it cannot be instantiated in a way that would result in
undefined behaviour.
This estimate is conservative:
  * if we return `Right`, then the property is definitely well-defined
* if we return `Left`, then we don't know if the property is well-defined
If the property is well-defined, then we also simplify it.
-}
checkDefined1 :: Solver -> (Prop -> a -> a) ->
(a,Prop) -> IO (Either (a,Prop) (DefinedProp a))
checkDefined1 s updCt (ct,p) =
do proved <- prove s defCt
return $
if proved
then Right $
case crySimpPropExprMaybe p of
Nothing -> DefinedProp { dpData = ct
, dpSimpExprProp = p
, dpSimpProp = simpProp p
}
Just p' -> DefinedProp { dpData = updCt p' ct
, dpSimpExprProp = p'
, dpSimpProp = simpProp p'
}
else Left (ct,p)
where
SimpProp defCt = simpProp (cryDefinedProp p)
prepareConstraints ::
Solver -> (Prop -> a -> a) ->
[(a,Prop)] -> IO (Either [a] ([a], [DefinedProp a], Subst, [Prop]))
prepareConstraints s updCt cs =
do res <- mapM (checkDefined1 s updCt) cs
let (unknown,ok) = partitionEithers res
goStep1 unknown ok Map.empty []
where
getImps ok = withScope s $
do mapM_ (assert s . dpSimpProp) ok
check s
mapEither f = partitionEithers . map f
apSuUnk su (x,p) =
case apSubst su p of
Nothing -> Left (x,p)
Just p1 -> Right (updCt p1 x, p1)
apSuOk su p = case apSubstDefinedProp updCt su p of
Nothing -> Left p
Just p1 -> Right p1
apSubst' su x = fromMaybe x (apSubst su x)
goStep1 unknown ok su sgs =
do let (ok1, moreSu) = improveByDefnMany updCt ok
go unknown ok1 (composeSubst moreSu su) (map (apSubst' moreSu) sgs)
go unknown ok su sgs =
do mb <- getImps ok
case mb of
Nothing ->
do bad <- minimizeContradictionSimpDef s ok
return (Left bad)
Just (imps,subGoals)
| not (null okNew) -> goStep1 unknown (okNew ++ okOld) newSu newGs
| otherwise ->
do res <- mapM (checkDefined1 s updCt) unkNew
let (stillUnk,nowOk) = partitionEithers res
if null nowOk
then return (Right ( map fst (unkNew ++ unkOld)
, ok, newSu, newGs))
else goStep1 (stillUnk ++ unkOld) (nowOk ++ ok) newSu newGs
where (okOld, okNew) = mapEither (apSuOk imps) ok
(unkOld,unkNew) = mapEither (apSuUnk imps) unknown
newSu = composeSubst imps su
newGs = nub (subGoals ++ map (apSubst' su) sgs)
-- XXX: inefficient
-- | Simplify a bunch of well-defined properties.
-- * Eliminates properties that are implied by the rest.
-- * Does not modify the set of assumptions.
simplifyProps :: Solver -> [DefinedProp a] -> IO [a]
simplifyProps s props =
debugBlock s "Simplifying properties" $
withScope s (go [] props)
where
go survived [] = return survived
go survived (DefinedProp { dpSimpProp = SimpProp PTrue } : more) =
go survived more
go survived (p : more) =
case dpSimpProp p of
SimpProp PTrue -> go survived more
SimpProp p' ->
do proved <- withScope s $ do mapM_ (assert s . dpSimpProp) more
prove s p'
if proved
then go survived more
else do assert s (SimpProp p')
go (dpData p : survived) more
-- | Add the given constraints as assumptions.
-- * We assume that the constraints are well-defined.
-- * Modifies the set of assumptions.
assumeProps :: Solver -> [Cry.Prop] -> IO [SimpProp]
assumeProps s props =
do let ps = mapMaybe exportProp props
let simpProps = map simpProp (map cryDefinedProp ps ++ ps)
mapM_ (assert s) simpProps
return simpProps
-- XXX: Instead of asserting one at a time, perhaps we should
-- assert a conjunction. That way, we could simplify the whole thing
-- in one go, and would avoid having to assert 'true' many times.
-- | Given a list of propositions that together lead to a contradiction,
-- find a sub-set that still leads to a contradiction (but is smaller).
minimizeContradictionSimpDef :: Solver -> [DefinedProp a] -> IO [a]
minimizeContradictionSimpDef s ps = start [] ps
where
start bad todo =
do res <- SMT.check (solver s)
case res of
SMT.Unsat -> return (map dpData bad)
_ -> do solPush s
go bad [] todo
go _ _ [] = panic "minimizeContradiction"
$ ("No contradiction" : map (show . ppProp . dpSimpExprProp) ps)
go bad prev (d : more) =
do assert s (dpSimpProp d)
res <- SMT.check (solver s)
case res of
SMT.Unsat -> do solPop s
assert s (dpSimpProp d)
start (d : bad) prev
_ -> go bad (d : prev) more
improveByDefnMany :: (Prop -> a -> a) ->
[DefinedProp a] -> ([DefinedProp a], Subst)
improveByDefnMany updCt = go [] Map.empty
where
mbSu su x = case apSubstDefinedProp updCt su x of
Nothing -> Left x
Just y -> Right y
go todo su (p : ps) =
let p1 = fromMaybe p (apSubstDefinedProp updCt su p)
in case improveByDefn p1 of
Just (x,e) ->
go todo (composeSubst (Map.singleton x e) su) ps
-- `p` is solved, so ignore
Nothing -> go (p1 : todo) su ps
go todo su [] =
let (same,changed) = partitionEithers (map (mbSu su) todo)
in case changed of
[] -> (same, fmap crySimpExpr su)
_ -> go same su changed
{- | If we see an equation: `?x = e`, and:
* ?x is a unification variable
* `e` is "zonked" (substitution is fully applied)
* ?x does not appear in `e`.
then, we can improve `?x` to `e`.
-}
improveByDefn :: DefinedProp a -> Maybe (Name,Expr)
improveByDefn p =
case dpSimpExprProp p of
Var x :== e
| isUV x -> tryToBind x e
e :== Var x
| isUV x -> tryToBind x e
_ -> Nothing
where
tryToBind x e
| x `Set.member` cryExprFVS e = Nothing
| otherwise = Just (x,e)
isUV (UserName (Cry.TVFree {})) = True
isUV _ = False
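-- For instance, if the simplified proposition is `?x == a + 5`, where `?x` is a
-- unification variable not occurring in `a + 5`, the improvement binds `?x` to
-- `a + 5` (a hypothetical example written in surface syntax rather than with
-- the internal 'Expr' constructors).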
{- | Attempt to find a substituion that, when applied, makes all of the
given properties hold. -}
getModel :: Solver -> [Cry.Prop] -> IO (Maybe Cry.Subst)
getModel s props = withScope s $
do ps <- assumeProps s props
res <- SMT.check (solver s)
let vars = Set.toList $ Set.unions $ map (cryPropFVS . unSimpProp) ps
case res of
SMT.Sat ->
do vs <- getVals (solver s) vars
-- This is guaranteed to be a model only for the *linear*
-- properties, so now we check if it works for the rest too.
let su1 = fmap K vs
ps1 = [ fromMaybe p (apSubst su1 p) | SimpProp p <- ps ]
ok p = case crySimplify p of
PTrue -> True
_ -> False
su2 = Cry.listSubst
[ (x, numTy v) | (UserName x, v) <- Map.toList vs ]
return (guard (all ok ps1) >> return su2)
_ -> return Nothing
where
numTy Inf = Cry.tInf
numTy (Nat k) = Cry.tNum k
--------------------------------------------------------------------------------
-- | An SMT solver, and some info about declared variables.
data Solver = Solver
{ solver :: SMT.Solver
-- ^ The actual solver
, declared :: IORef VarInfo
-- ^ Information about declared variables, and assumptions in scope.
, logger :: SMT.Logger
-- ^ For debugging
}
-- | Keeps track of declared variables and non-linear terms.
data VarInfo = VarInfo
{ curScope :: Scope
, otherScopes :: [Scope]
} deriving Show
data Scope = Scope
{ scopeNames :: [Name]
-- ^ Variables declared in this scope (not counting the ones from
-- previous scopes).
, scopeNonLinS :: NonLinS
{- ^ These are the non-linear terms mentioned in the assertions
that are currently asserted (including ones from previous scopes). -}
} deriving Show
scopeEmpty :: Scope
scopeEmpty = Scope { scopeNames = [], scopeNonLinS = initialNonLinS }
scopeElem :: Name -> Scope -> Bool
scopeElem x Scope { .. } = x `elem` scopeNames
scopeInsert :: Name -> Scope -> Scope
scopeInsert x Scope { .. } = Scope { scopeNames = x : scopeNames, .. }
-- | Given a *simplified* prop, separate linear and non-linear parts
-- and return the linear ones.
scopeAssert :: SimpProp -> Scope -> ([SimpProp],Scope)
scopeAssert (SimpProp p) Scope { .. } =
let (ps1,s1) = nonLinProp scopeNonLinS p
in (map SimpProp ps1, Scope { scopeNonLinS = s1, .. })
-- | No scopes.
viEmpty :: VarInfo
viEmpty = VarInfo { curScope = scopeEmpty, otherScopes = [] }
-- | Check if a name is in any of the scopes.
viElem :: Name -> VarInfo -> Bool
viElem x VarInfo { .. } = any (x `scopeElem`) (curScope : otherScopes)
-- | Add a name to a scope.
viInsert :: Name -> VarInfo -> VarInfo
viInsert x VarInfo { .. } = VarInfo { curScope = scopeInsert x curScope, .. }
-- | Add an assertion to the current scope. Returns the linear part.
viAssert :: SimpProp -> VarInfo -> (VarInfo, [SimpProp])
viAssert p VarInfo { .. } = ( VarInfo { curScope = s1, .. }, p1)
where (p1, s1) = scopeAssert p curScope
-- | Enter a scope.
viPush :: VarInfo -> VarInfo
viPush VarInfo { .. } =
VarInfo { curScope = scopeEmpty { scopeNonLinS = scopeNonLinS curScope }
, otherScopes = curScope : otherScopes
}
-- | Exit a scope.
viPop :: VarInfo -> VarInfo
viPop VarInfo { .. } = case otherScopes of
c : cs -> VarInfo { curScope = c, otherScopes = cs }
_ -> panic "viPop" ["no more scopes"]
-- | All declared names that have not been "marked".
-- These are the variables whose values we are interested in.
viUnmarkedNames :: VarInfo -> [ Name ]
viUnmarkedNames VarInfo { .. } = concatMap scopeNames scopes
where scopes = curScope : otherScopes
-- | All known non-linear terms.
getNLSubst :: Solver -> IO Subst
getNLSubst Solver { .. } =
do VarInfo { .. } <- readIORef declared
return $ nonLinSubst $ scopeNonLinS curScope
-- | Execute a computation with a fresh solver instance.
withSolver :: SolverConfig -> (Solver -> IO a) -> IO a
withSolver SolverConfig { .. } k =
do logger <- if solverVerbose > 0 then SMT.newLogger 0 else return quietLogger
let smtDbg = if solverVerbose > 1 then Just logger else Nothing
solver <- SMT.newSolver solverPath solverArgs smtDbg
_ <- SMT.setOptionMaybe solver ":global-decls" "false"
SMT.setLogic solver "QF_LIA"
declared <- newIORef viEmpty
a <- k Solver { .. }
_ <- SMT.stop solver
return a
where
quietLogger = SMT.Logger { SMT.logMessage = \_ -> return ()
, SMT.logLevel = return 0
, SMT.logSetLevel= \_ -> return ()
, SMT.logTab = return ()
, SMT.logUntab = return ()
}
solPush :: Solver -> IO ()
solPush Solver { .. } =
do SMT.push solver
SMT.logTab logger
modifyIORef' declared viPush
solPop :: Solver -> IO ()
solPop Solver { .. } =
do modifyIORef' declared viPop
SMT.logUntab logger
SMT.pop solver
-- | Execute a computation in a new solver scope.
withScope :: Solver -> IO a -> IO a
withScope s k =
do solPush s
a <- k
solPop s
return a
-- | Declare a variable.
declareVar :: Solver -> Name -> IO ()
declareVar s@Solver { .. } a =
do done <- fmap (a `viElem`) (readIORef declared)
unless done $
do e <- SMT.declare solver (smtName a) SMT.tInt
let fin_a = smtFinName a
fin <- SMT.declare solver fin_a SMT.tBool
SMT.assert solver (SMT.geq e (SMT.int 0))
nlSu <- getNLSubst s
modifyIORef' declared (viInsert a)
case Map.lookup a nlSu of
Nothing -> return ()
Just e' ->
do let finDef = crySimplify (Fin e')
mapM_ (declareVar s) (Set.toList (cryPropFVS finDef))
SMT.assert solver $
SMT.eq fin (ifPropToSmtLib (desugarProp finDef))
-- | Add an assertion to the current context.
-- INVARIANT: Assertion is simplified.
assert :: Solver -> SimpProp -> IO ()
assert _ (SimpProp PTrue) = return ()
assert s@Solver { .. } p@(SimpProp p0) =
do debugLog s ("Assuming: " ++ show (ppProp p0))
ps1' <- atomicModifyIORef' declared (viAssert p)
let ps1 = map unSimpProp ps1'
vs = Set.toList $ Set.unions $ map cryPropFVS ps1
mapM_ (declareVar s) vs
mapM_ (SMT.assert solver . ifPropToSmtLib . desugarProp) ps1
-- | Try to prove a property. The result is 'True' when we are sure that
-- the property holds, and 'False' otherwise. In other words, getting `False`
-- *does not* mean that the proposition does not hold.
prove :: Solver -> Prop -> IO Bool
prove _ PTrue = return True
prove s@(Solver { .. }) p =
debugBlock s ("Proving: " ++ show (ppProp p)) $
withScope s $
do assert s (simpProp (Not p))
res <- SMT.check solver
case res of
SMT.Unsat -> debugLog s "Proved" >> return True
SMT.Unknown -> debugLog s "Not proved" >> return False -- We are not sure
SMT.Sat -> debugLog s "Not proved" >> return False
          -- XXX: If the answer is Sat, it is possible that this is
          -- a fake example, as we need to evaluate the nonLinear constraints.
-- If they are all satisfied, then we have a genuine counter example.
-- Otherwise, we could look for another one...
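-- | A hypothetical convenience wrapper (not part of the original API), showing
-- how 'withSolver', 'withScope', 'assert' and 'prove' compose: prove a goal
-- under a set of hypotheses, inside a fresh scope of a fresh solver instance.
proveUnder :: SolverConfig -> [Prop] -> Prop -> IO Bool
proveUnder cfg hyps goal =
  withSolver cfg $ \s ->
    withScope s $
      do mapM_ (assert s . simpProp) hyps  -- assume the hypotheses
         prove s goal                      -- 'True' is sound; 'False' only means "not proved"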
{- | Check if the current set of assumptions is satisfiable, and find
some facts that must hold in any models of the current assumptions.
Returns `Nothing` if the currently asserted constraints are known to
be unsatisfiable.
Returns `Just (su, sub-goals)` if the current set is satisfiable.
* The `su` is a substitution that may be applied to the current constraint
    set without losing generality.
* The `sub-goals` are additional constraints that must hold if the
constraint set is to be satisfiable.
-}
check :: Solver -> IO (Maybe (Subst, [Prop]))
check s@Solver { .. } =
do res <- SMT.check solver
case res of
SMT.Unsat ->
do debugLog s "Not satisfiable"
return Nothing
SMT.Unknown ->
do debugLog s "Unknown"
return (Just (Map.empty, []))
SMT.Sat ->
do debugLog s "Satisfiable"
(impMap,sideConds) <- debugBlock s "Computing improvements"
(getImpSubst s)
return (Just (impMap, sideConds))
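-- | A hypothetical driver (illustration only): run 'check' once and log the
-- improving substitution and side conditions, if any, using the same logging
-- helpers as the rest of this module.
checkAndLog :: Solver -> IO ()
checkAndLog s =
  do mb <- check s
     case mb of
       Nothing -> debugLog s "assumptions are unsatisfiable"
       Just (su, sideConds) ->
         do debugBlock s "improving substitution" $
              mapM_ (debugLog s . show . ppProp)
                    [ Var x :== e | (x,e) <- Map.toList su ]
            debugBlock s "side conditions" $ debugLog s sideConds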
{- | Assuming that we are in a satisfiable state, try to compute an
improving substitution. We also return additional constraints that must
hold for the currently asserted propositions to hold.
-}
getImpSubst :: Solver -> IO (Subst,[Prop])
getImpSubst s@Solver { .. } =
do names <- viUnmarkedNames `fmap` readIORef declared
m <- getVals solver names
(impSu,sideConditions) <- cryImproveModel solver logger m
nlSu <- getNLSubst s
let isNonLinName (SysName {}) = True
isNonLinName (UserName {}) = False
(nlFacts, vFacts) = Map.partitionWithKey (\k _ -> isNonLinName k) impSu
(vV, vNL) = Map.partition noNLVars vFacts
nlSu1 = fmap (doAppSubst vV) nlSu
(vNL_su,vNL_eqs) = Map.partitionWithKey goodDef
$ fmap (doAppSubst nlSu1) vNL
nlSu2 = fmap (doAppSubst vNL_su) nlSu1
nlLkp x = case Map.lookup x nlSu2 of
Just e -> e
Nothing -> panic "getImpSubst"
[ "Missing NL variable:", show x ]
allSides =
[ Var a :== e | (a,e) <- Map.toList vNL_eqs ] ++
[ nlLkp x :== doAppSubst nlSu2 e | (x,e) <- Map.toList nlFacts ] ++
[ doAppSubst nlSu2 si | si <- sideConditions ]
theImpSu = composeSubst vNL_su vV
     debugBlock s "Improvements" $
do debugBlock s "substitution" $
mapM_ (debugLog s . dump) (Map.toList theImpSu)
debugBlock s "side-conditions" $ debugLog s allSides
return (theImpSu, allSides)
where
goodDef k e = not (k `Set.member` cryExprFVS e)
isNLVar (SysName _) = True
isNLVar _ = False
noNLVars e = all (not . isNLVar) (cryExprFVS e)
dump (x,e) = show (ppProp (Var x :== e))
--------------------------------------------------------------------------------
debugBlock :: Solver -> String -> IO a -> IO a
debugBlock s@Solver { .. } name m =
do debugLog s name
SMT.logTab logger
a <- m
SMT.logUntab logger
return a
class DebugLog t where
debugLog :: Solver -> t -> IO ()
debugLogList :: Solver -> [t] -> IO ()
debugLogList s ts = case ts of
[] -> debugLog s "(none)"
_ -> mapM_ (debugLog s) ts
instance DebugLog Char where
debugLog s x = SMT.logMessage (logger s) (show x)
debugLogList s x = SMT.logMessage (logger s) x
instance DebugLog a => DebugLog [a] where
debugLog = debugLogList
instance DebugLog a => DebugLog (Maybe a) where
debugLog s x = case x of
Nothing -> debugLog s "(nothing)"
Just a -> debugLog s a
instance DebugLog Doc where
debugLog s x = debugLog s (show x)
instance DebugLog Cry.Type where
debugLog s x = debugLog s (pp x)
instance DebugLog Goal where
debugLog s x = debugLog s (goal x)
instance DebugLog Cry.Subst where
debugLog s x = debugLog s (pp x)
instance DebugLog Prop where
debugLog s x = debugLog s (ppProp x)
| ntc2/cryptol | src/Cryptol/TypeCheck/Solver/CrySAT.hs | bsd-3-clause | 21,839 | 0 | 22 | 6,846 | 5,956 | 3,024 | 2,932 | 399 | 5 |
module GHCJS.DOM.HTMLInputElement where
data HTMLInputElement = HTMLInputElement
class IsHTMLInputElement a
instance IsHTMLInputElement HTMLInputElement
ghcjs_dom_html_input_element_step_up = undefined
htmlInputElementStepUp = undefined
ghcjs_dom_html_input_element_step_down = undefined
htmlInputElementStepDown = undefined
ghcjs_dom_html_input_element_check_validity = undefined
htmlInputElementCheckValidity = undefined
ghcjs_dom_html_input_element_set_custom_validity = undefined
htmlInputElementSetCustomValidity = undefined
ghcjs_dom_html_input_element_select = undefined
htmlInputElementSelect = undefined
ghcjs_dom_html_input_element_set_range_text = undefined
htmlInputElementSetRangeText = undefined
ghcjs_dom_html_input_element_set_range_text4 = undefined
htmlInputElementSetRangeText4 = undefined
ghcjs_dom_html_input_element_set_value_for_user = undefined
htmlInputElementSetValueForUser = undefined
ghcjs_dom_html_input_element_set_accept = undefined
htmlInputElementSetAccept = undefined
ghcjs_dom_html_input_element_get_accept = undefined
htmlInputElementGetAccept = undefined
ghcjs_dom_html_input_element_set_alt = undefined
htmlInputElementSetAlt = undefined
ghcjs_dom_html_input_element_get_alt = undefined
htmlInputElementGetAlt = undefined
ghcjs_dom_html_input_element_set_autocomplete = undefined
htmlInputElementSetAutocomplete = undefined
ghcjs_dom_html_input_element_get_autocomplete = undefined
htmlInputElementGetAutocomplete = undefined
ghcjs_dom_html_input_element_set_autofocus = undefined
htmlInputElementSetAutofocus = undefined
ghcjs_dom_html_input_element_get_autofocus = undefined
htmlInputElementGetAutofocus = undefined
ghcjs_dom_html_input_element_set_default_checked = undefined
htmlInputElementSetDefaultChecked = undefined
ghcjs_dom_html_input_element_get_default_checked = undefined
htmlInputElementGetDefaultChecked = undefined
ghcjs_dom_html_input_element_set_checked = undefined
htmlInputElementSetChecked = undefined
ghcjs_dom_html_input_element_get_checked = undefined
htmlInputElementGetChecked = undefined
ghcjs_dom_html_input_element_set_dir_name = undefined
htmlInputElementSetDirName = undefined
ghcjs_dom_html_input_element_get_dir_name = undefined
htmlInputElementGetDirName = undefined
ghcjs_dom_html_input_element_set_disabled = undefined
htmlInputElementSetDisabled = undefined
ghcjs_dom_html_input_element_get_disabled = undefined
htmlInputElementGetDisabled = undefined
ghcjs_dom_html_input_element_get_form = undefined
htmlInputElementGetForm = undefined
ghcjs_dom_html_input_element_set_files = undefined
htmlInputElementSetFiles = undefined
ghcjs_dom_html_input_element_get_files = undefined
htmlInputElementGetFiles = undefined
ghcjs_dom_html_input_element_set_form_action = undefined
htmlInputElementSetFormAction = undefined
ghcjs_dom_html_input_element_get_form_action = undefined
htmlInputElementGetFormAction = undefined
ghcjs_dom_html_input_element_set_form_enctype = undefined
htmlInputElementSetFormEnctype = undefined
ghcjs_dom_html_input_element_get_form_enctype = undefined
htmlInputElementGetFormEnctype = undefined
ghcjs_dom_html_input_element_set_form_method = undefined
htmlInputElementSetFormMethod = undefined
ghcjs_dom_html_input_element_get_form_method = undefined
htmlInputElementGetFormMethod = undefined
ghcjs_dom_html_input_element_set_form_no_validate = undefined
htmlInputElementSetFormNoValidate = undefined
ghcjs_dom_html_input_element_get_form_no_validate = undefined
htmlInputElementGetFormNoValidate = undefined
ghcjs_dom_html_input_element_set_form_target = undefined
htmlInputElementSetFormTarget = undefined
ghcjs_dom_html_input_element_get_form_target = undefined
htmlInputElementGetFormTarget = undefined
ghcjs_dom_html_input_element_set_height = undefined
htmlInputElementSetHeight = undefined
ghcjs_dom_html_input_element_get_height = undefined
htmlInputElementGetHeight = undefined
ghcjs_dom_html_input_element_set_indeterminate = undefined
htmlInputElementSetIndeterminate = undefined
ghcjs_dom_html_input_element_get_indeterminate = undefined
htmlInputElementGetIndeterminate = undefined
ghcjs_dom_html_input_element_get_list = undefined
htmlInputElementGetList = undefined
ghcjs_dom_html_input_element_set_max = undefined
htmlInputElementSetMax = undefined
ghcjs_dom_html_input_element_get_max = undefined
htmlInputElementGetMax = undefined
ghcjs_dom_html_input_element_set_max_length = undefined
htmlInputElementSetMaxLength = undefined
ghcjs_dom_html_input_element_get_max_length = undefined
htmlInputElementGetMaxLength = undefined
ghcjs_dom_html_input_element_set_min = undefined
htmlInputElementSetMin = undefined
ghcjs_dom_html_input_element_get_min = undefined
htmlInputElementGetMin = undefined
ghcjs_dom_html_input_element_set_multiple = undefined
htmlInputElementSetMultiple = undefined
ghcjs_dom_html_input_element_get_multiple = undefined
htmlInputElementGetMultiple = undefined
ghcjs_dom_html_input_element_set_name = undefined
htmlInputElementSetName = undefined
ghcjs_dom_html_input_element_get_name = undefined
htmlInputElementGetName = undefined
ghcjs_dom_html_input_element_set_pattern = undefined
htmlInputElementSetPattern = undefined
ghcjs_dom_html_input_element_get_pattern = undefined
htmlInputElementGetPattern = undefined
ghcjs_dom_html_input_element_set_placeholder = undefined
htmlInputElementSetPlaceholder = undefined
ghcjs_dom_html_input_element_get_placeholder = undefined
htmlInputElementGetPlaceholder = undefined
ghcjs_dom_html_input_element_set_read_only = undefined
htmlInputElementSetReadOnly = undefined
ghcjs_dom_html_input_element_get_read_only = undefined
htmlInputElementGetReadOnly = undefined
ghcjs_dom_html_input_element_set_required = undefined
htmlInputElementSetRequired = undefined
ghcjs_dom_html_input_element_get_required = undefined
htmlInputElementGetRequired = undefined
ghcjs_dom_html_input_element_set_size = undefined
htmlInputElementSetSize = undefined
ghcjs_dom_html_input_element_get_size = undefined
htmlInputElementGetSize = undefined
ghcjs_dom_html_input_element_set_src = undefined
htmlInputElementSetSrc = undefined
ghcjs_dom_html_input_element_get_src = undefined
htmlInputElementGetSrc = undefined
ghcjs_dom_html_input_element_set_step = undefined
htmlInputElementSetStep = undefined
ghcjs_dom_html_input_element_get_step = undefined
htmlInputElementGetStep = undefined
ghcjs_dom_html_input_element_set_default_value = undefined
htmlInputElementSetDefaultValue = undefined
ghcjs_dom_html_input_element_get_default_value = undefined
htmlInputElementGetDefaultValue = undefined
ghcjs_dom_html_input_element_set_value = undefined
htmlInputElementSetValue = undefined
ghcjs_dom_html_input_element_get_value = undefined
htmlInputElementGetValue = undefined
ghcjs_dom_html_input_element_set_value_as_number = undefined
htmlInputElementSetValueAsNumber = undefined
ghcjs_dom_html_input_element_get_value_as_number = undefined
htmlInputElementGetValueAsNumber = undefined
ghcjs_dom_html_input_element_set_width = undefined
htmlInputElementSetWidth = undefined
ghcjs_dom_html_input_element_get_width = undefined
htmlInputElementGetWidth = undefined
ghcjs_dom_html_input_element_get_will_validate = undefined
htmlInputElementGetWillValidate = undefined
ghcjs_dom_html_input_element_get_validity = undefined
htmlInputElementGetValidity = undefined
ghcjs_dom_html_input_element_get_validation_message = undefined
htmlInputElementGetValidationMessage = undefined
ghcjs_dom_html_input_element_get_labels = undefined
htmlInputElementGetLabels = undefined
ghcjs_dom_html_input_element_set_align = undefined
htmlInputElementSetAlign = undefined
ghcjs_dom_html_input_element_get_align = undefined
htmlInputElementGetAlign = undefined
ghcjs_dom_html_input_element_set_use_map = undefined
htmlInputElementSetUseMap = undefined
ghcjs_dom_html_input_element_get_use_map = undefined
htmlInputElementGetUseMap = undefined
ghcjs_dom_html_input_element_set_incremental = undefined
htmlInputElementSetIncremental = undefined
ghcjs_dom_html_input_element_get_incremental = undefined
htmlInputElementGetIncremental = undefined
ghcjs_dom_html_input_element_set_autocorrect = undefined
htmlInputElementSetAutocorrect = undefined
ghcjs_dom_html_input_element_get_autocorrect = undefined
htmlInputElementGetAutocorrect = undefined
ghcjs_dom_html_input_element_set_autocapitalize = undefined
htmlInputElementSetAutocapitalize = undefined
ghcjs_dom_html_input_element_get_autocapitalize = undefined
htmlInputElementGetAutocapitalize = undefined
ghcjs_dom_html_input_element_set_capture = undefined
htmlInputElementSetCapture = undefined
ghcjs_dom_html_input_element_get_capture = undefined
htmlInputElementGetCapture = undefined
castToHTMLInputElement = undefined
gTypeHTMLInputElement = undefined
toHTMLInputElement = undefined
| mightybyte/reflex-dom-stubs | src/GHCJS/DOM/HTMLInputElement.hs | bsd-3-clause | 8,767 | 0 | 5 | 564 | 943 | 564 | 379 | -1 | -1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE KindSignatures #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
module Web.Spock.Api.Client (callEndpoint, callEndpoint') where
import qualified Data.Aeson as A
import qualified Data.ByteString.Lazy as BSL
import Data.HVect
import qualified Data.HVect as HV
import qualified Data.JSString as J
import qualified Data.JSString.Text as J
import qualified Data.Text.Encoding as T
import JavaScript.Web.XMLHttpRequest
import Web.Spock.Api
type Header = (J.JSString, J.JSString)
-- | Call an 'Endpoint' defined using the @Spock-api@ package passing extra headers
callEndpoint' ::
forall p i o.
(HasRep (MaybeToList i), HasRep p) =>
Endpoint p i o ->
[Header] ->
HVectElim p (HVectElim (MaybeToList i) (IO (Maybe o)))
callEndpoint' ep extraHeaders =
HV.curry $ \hv -> HV.curry (callEndpointCore' ep extraHeaders hv)
-- | Call an 'Endpoint' defined using the @Spock-api@ package
callEndpoint ::
forall p i o.
(HasRep (MaybeToList i), HasRep p) =>
Endpoint p i o ->
HVectElim p (HVectElim (MaybeToList i) (IO (Maybe o)))
callEndpoint ep = callEndpoint' ep []
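-- Usage sketch (comment only): given a hypothetical endpoint @getUser@ declared
-- with the Spock-api DSL, taking one 'Int' URL parameter, no request body and
-- returning a @User@, the call site is an ordinary curried function:
--
-- > fetchUser :: Int -> IO (Maybe User)
-- > fetchUser uid = callEndpoint getUser uid
--
-- 'callEndpoint'' behaves the same but takes the extra headers first, e.g.
-- @callEndpoint' getUser [("Authorization", token)] uid@.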
data EndpointCall p i o = EndpointCall
{ epc_point :: !(Endpoint p i o),
epc_headers :: ![Header],
epc_params :: !(HVect p),
epc_body :: !(HVect (MaybeToList i))
}
callEndpointCore' ::
forall p i o.
Endpoint p i o ->
[Header] ->
HVect p ->
HVect (MaybeToList i) ->
IO (Maybe o)
callEndpointCore' ep hdrs hv b = callEndpointCore (EndpointCall ep hdrs hv b)
callEndpointCore :: forall p i o. EndpointCall p i o -> IO (Maybe o)
callEndpointCore call =
case call of
EndpointCall (MethodPost Proxy path) hdrs params (body :&: HNil) ->
do
let rt = J.textToJSString $ renderRoute path params
bodyText = J.textToJSString $ T.decodeUtf8 $ BSL.toStrict $ A.encode body
req =
Request
{ reqMethod = POST,
reqURI = rt,
reqLogin = Nothing,
reqHeaders = (("Content-Type", "application/json;charset=UTF-8") : hdrs),
reqWithCredentials = False,
reqData = StringData bodyText
}
runJsonReq req
EndpointCall (MethodPut Proxy path) hdrs params (body :&: HNil) ->
do
let rt = J.textToJSString $ renderRoute path params
bodyText = J.textToJSString $ T.decodeUtf8 $ BSL.toStrict $ A.encode body
req =
Request
{ reqMethod = PUT,
reqURI = rt,
reqLogin = Nothing,
reqHeaders = (("Content-Type", "application/json;charset=UTF-8") : hdrs),
reqWithCredentials = False,
reqData = StringData bodyText
}
runJsonReq req
EndpointCall (MethodGet path) hdrs params HNil ->
do
let rt = J.textToJSString $ renderRoute path params
req =
Request
{ reqMethod = GET,
reqURI = rt,
reqLogin = Nothing,
reqHeaders = hdrs,
reqWithCredentials = False,
reqData = NoData
}
runJsonReq req
runJsonReq :: A.FromJSON o => Request -> IO (Maybe o)
runJsonReq req =
do
response <- xhrText req
case (status response, contents response) of
(200, Just txt) ->
do
let res = A.eitherDecodeStrict' (T.encodeUtf8 txt)
case res of
Left errMsg ->
do
putStrLn errMsg
pure Nothing
Right val ->
pure (Just val)
_ -> pure Nothing
| agrafix/Spock | Spock-api-ghcjs/src/Web/Spock/Api/Client.hs | bsd-3-clause | 3,862 | 2 | 18 | 1,210 | 1,054 | 570 | 484 | 112 | 3 |
--------------------------------------------------------------------------------
-- |
-- Module : Graphics.GL.IBM.MultimodeDrawArrays
-- Copyright : (c) Sven Panne 2019
-- License : BSD3
--
-- Maintainer : Sven Panne <[email protected]>
-- Stability : stable
-- Portability : portable
--
--------------------------------------------------------------------------------
module Graphics.GL.IBM.MultimodeDrawArrays (
-- * Extension Support
glGetIBMMultimodeDrawArrays,
gl_IBM_multimode_draw_arrays,
-- * Functions
glMultiModeDrawArraysIBM,
glMultiModeDrawElementsIBM
) where
import Graphics.GL.ExtensionPredicates
import Graphics.GL.Functions
| haskell-opengl/OpenGLRaw | src/Graphics/GL/IBM/MultimodeDrawArrays.hs | bsd-3-clause | 676 | 0 | 4 | 89 | 47 | 36 | 11 | 7 | 0 |
{-# LANGUAGE CPP #-}
{-# LANGUAGE UnicodeSyntax #-}
module Shake.It.C.Make
( make
, configure
, nmake
, vcshell
) where
import Control.Monad
import Shake.It.Core
import System.Environment
configure ∷ [String] → IO ()
configure α = rawSystem "configure" α >>= checkExitCode
make ∷ [String] → IO ()
make α = rawSystem "make" α >>= checkExitCode
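-- | A minimal usage sketch (hypothetical helper, not exported; the flags are
-- placeholders): run @configure@ followed by a parallel @make@.
buildExample ∷ IO ()
buildExample = do
  configure ["--prefix=/usr/local"]
  make ["-j2"]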
vcshell ∷ [String] → IO String
nmake ∷ [String] → IO ()
#if ( defined(mingw32_HOST_OS) || defined(__MINGW32__) )
vcshell [x] = do
common ← getEnv $ "VS" ++ x ++ "COMNTOOLS"
return $ common </> ".." </> ".."
</> "VC"
</> "vcvarsall.bat"
vcshell (x:xs) = do
vcx ← vcshell [x]
if vcx /= [] then return vcx
else vcshell xs
vcshell [] = return []
nmake α = rawSystem "nmake" α >>= checkExitCode
#else
vcshell _ = return []
nmake _ = return ()
#endif
| Heather/Shake.it.off | src/Shake/It/C/Make.hs | bsd-3-clause | 938 | 0 | 11 | 263 | 279 | 147 | 132 | 18 | 1 |
{-# LANGUAGE OverloadedStrings #-}
-------------------------------------------------------------------------------
-- |
-- Module : Generator.Primer.Modern
-- Copyright : (c) 2016 Michael Carpenter
-- License : BSD3
-- Maintainer : Michael Carpenter <[email protected]>
-- Stability : experimental
-- Portability : portable
--
-------------------------------------------------------------------------------
module Generator.Primer.Modern
( primeModernProtocol
) where
import Data.List
import Generator.Parser
import Generator.Primer.Common
import Generator.Types
primeModernProtocol :: ExtractedVersion
-> ExtractedModernProtocol
-> Either String ModernProtocol
primeModernProtocol ev ep = do
let v = mkProtocolVersion ev
let lstGettersAndBoundTos =
[ modernClientBoundHandshaking , modernServerBoundHandshaking
, modernClientBoundStatus , modernServerBoundStatus
, modernClientBoundLogin , modernServerBoundLogin
, modernClientBoundPlay , modernServerBoundPlay
]
let lstMaybeMetadata =
fmap
(\getter -> getMetadataSection . getter $ ep)
lstGettersAndBoundTos
let lstMaybeNames =
(fmap . fmap)
getNameMap
lstMaybeMetadata
let lstMaybeIds =
(fmap . fmap)
getIdMap
lstMaybeMetadata
let lstLstPackets =
fmap
(\getter -> getPacketSection . getter $ ep)
lstGettersAndBoundTos
let lstPacketSectionMetadata =
zip4
lstMaybeNames
lstMaybeIds
lstLstPackets
((concat . repeat) [ClientBound,ServerBound])
let maybePrimedProto =
fmap sequence $ sequence
$ fmap
(\(a,b,c,d) ->
mkPacketSection
<$> a -- names map
<*> b -- ids map
<*> return c -- packet list
<*> return d -- packet direction
)
lstPacketSectionMetadata
case maybePrimedProto of
Nothing -> Left "Error: Something went wrong!"
Just primedProto ->
case primedProto of
Nothing -> Left "Error: Packet section primer failure!"
Just primedSections -> do
Right $ ModernProtocol
v
(primedSections !! 0)
(primedSections !! 1)
(primedSections !! 2)
(primedSections !! 3)
(primedSections !! 4)
(primedSections !! 5)
(primedSections !! 6)
(primedSections !! 7)
where
mkPacketSection nMap idMap pktLst boundTo =
fmap (sortOn pId) $ sequence $
fmap
(\x -> mkPacket nMap idMap pktLst (ePacketName x) boundTo)
pktLst
| oldmanmike/hs-minecraft-protocol | generate/src/Generator/Primer/Modern.hs | bsd-3-clause | 2,953 | 0 | 21 | 1,054 | 520 | 274 | 246 | 70 | 3 |
{-# LANGUAGE DataKinds, PolyKinds, TypeFamilies, TypeOperators, OverloadedStrings #-}
module Database.Edis.Command.Hash where
import Database.Edis.Type
import Database.Edis.Helper
import Data.ByteString (ByteString)
import Data.Proxy (Proxy)
import Data.Serialize (Serialize, encode)
import Data.Type.Bool
import Database.Redis as Redis hiding (decode)
import GHC.TypeLits
--------------------------------------------------------------------------------
-- Hashes
--------------------------------------------------------------------------------
hdel :: (KnownSymbol k, KnownSymbol f, HashOrNX xs k)
=> Proxy k -> Proxy f
-> Edis xs (DelHash xs k f) (Either Reply Integer)
hdel key field = Edis $ Redis.hdel (encodeKey key) [encodeKey field]
hexists :: (KnownSymbol k, KnownSymbol f, HashOrNX xs k)
=> Proxy k -> Proxy f
-> Edis xs xs (Either Reply Bool)
hexists key field = Edis $ Redis.hexists (encodeKey key) (encodeKey field)
hget :: (KnownSymbol k, KnownSymbol f, Serialize x
, 'Just (StringOf x) ~ GetHash xs k f)
=> Proxy k -> Proxy f
-> Edis xs xs (Either Reply (Maybe x))
hget key field = Edis $ Redis.hget (encodeKey key) (encodeKey field) >>= decodeAsMaybe
hincrby :: (KnownSymbol k, KnownSymbol f, HashOrNX xs k)
=> Proxy k -> Proxy f -> Integer
-> Edis xs (SetHash xs k f Integer) (Either Reply Integer)
hincrby key field n = Edis $ Redis.hincrby (encodeKey key) (encodeKey field) n
hincrbyfloat :: (KnownSymbol k, KnownSymbol f, HashOrNX xs k)
=> Proxy k -> Proxy f -> Double
-> Edis xs (SetHash xs k f Double) (Either Reply Double)
hincrbyfloat key field n = Edis $ Redis.hincrbyfloat (encodeKey key) (encodeKey field) n
hkeys :: (KnownSymbol k, HashOrNX xs k)
=> Proxy k
-> Edis xs xs (Either Reply [ByteString])
hkeys key = Edis $ Redis.hkeys (encodeKey key)
hlen :: (KnownSymbol k, HashOrNX xs k)
=> Proxy k
-> Edis xs xs (Either Reply Integer)
hlen key = Edis $ Redis.hlen (encodeKey key)
hset :: (KnownSymbol k, KnownSymbol f, Serialize x, HashOrNX xs k)
=> Proxy k -> Proxy f -> x
-> Edis xs (SetHash xs k f (StringOf x)) (Either Reply Bool)
hset key field val = Edis $ Redis.hset (encodeKey key) (encodeKey field) (encode val)
hsetnx :: (KnownSymbol k, KnownSymbol f, Serialize x, HashOrNX xs k)
=> Proxy k -> Proxy f -> x
-> Edis xs (If (MemHash xs k f) xs (SetHash xs k f (StringOf x))) (Either Reply Bool)
hsetnx key field val = Edis $ Redis.hsetnx (encodeKey key) (encodeKey field) (encode val)
| banacorn/tredis | src/Database/Edis/Command/Hash.hs | mit | 2,619 | 0 | 15 | 579 | 1,028 | 519 | 509 | 47 | 1 |
-- https://www.codewars.com/kata/five-fundamental-monads
{-# LANGUAGE NoImplicitPrelude #-}
module Monads where
import Prelude hiding (Maybe(..), Monad)
class Monad m where
return :: a -> m a
(>>=) :: m a -> (a -> m b) -> m b
newtype Identity a = Identity { runIdentity :: a } deriving (Eq, Show)
data Maybe a = Nothing | Just a deriving (Eq, Show)
newtype State s a = State { runState :: s -> (a, s) }
newtype Reader r a = Reader { runReader :: r -> a }
newtype Writer w a = Writer { runWriter :: (a, w) }
instance Monad Identity where
return = Identity
Identity x >>= f = f x
instance Monad Maybe where
return = Just
Just x >>= f = f x
Nothing >>= _ = Nothing
instance Monad (State s) where
return x = State $ \s -> (x, s)
State h >>= f = State $ \s -> let (x, s') = h s; State h' = f x in h' s'
instance Monad (Reader r) where
return x = Reader $ const x
Reader h >>= f = Reader $ \r -> let Reader h' = f $ h r in h' r
instance Monoid w => Monad (Writer w) where
return x = Writer (x, mempty)
Writer (x, w) >>= f = let Writer (x', w') = f x in Writer (x', mappend w w')
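-- A quick sanity check of the instances above (kept as a comment: the methods
-- of Prelude's own Monad class are still in scope, so unqualified uses of
-- '>>=' and 'return' at the top level would clash with the class defined here):
--
-- > runWriter (Writer (1, "a") >>= \x -> Writer (x + 1, "b")) == (2, "ab")
-- > runState (State (\n -> (n, n + 1)) >>= \a -> return (a * 2)) 0 == (0, 1)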
| airtial/Codegames | codewars/five-fundamental-monads.hs | gpl-2.0 | 1,113 | 0 | 13 | 274 | 529 | 280 | 249 | 27 | 0 |
#!/usr/bin/env stack
{- stack runghc --verbosity info --package pandoc-types -}
import Text.Pandoc.JSON
main :: IO ()
main = toJSONFilter dropHtmlBlocks
dropHtmlBlocks :: Block -> Block
dropHtmlBlocks (RawBlock (Format "html") _) = Plain []
dropHtmlBlocks x = x
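-- Typical usage (assuming this script is executable and stack is on the PATH):
--
-- > pandoc --filter ./pandoc-drop-html-blocks.hs input.md -o output.html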
| mstksg/hledger | tools/pandoc-drop-html-blocks.hs | gpl-3.0 | 265 | 0 | 9 | 40 | 71 | 37 | 34 | 6 | 1 |
module Rasa.Internal.EventsSpec where
import Test.Hspec
spec :: Spec
spec = return ()
| samcal/rasa | rasa/test/Rasa/Internal/EventsSpec.hs | gpl-3.0 | 88 | 0 | 6 | 14 | 27 | 16 | 11 | 4 | 1 |
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- |
-- Module : Network.AWS.DynamoDB.Types.Product
-- Copyright : (c) 2013-2015 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
module Network.AWS.DynamoDB.Types.Product where
import Network.AWS.DynamoDB.Types.Sum
import Network.AWS.Prelude
-- | Represents an attribute for describing the key schema for the table and
-- indexes.
--
-- /See:/ 'attributeDefinition' smart constructor.
data AttributeDefinition = AttributeDefinition'
{ _adAttributeName :: !Text
, _adAttributeType :: !ScalarAttributeType
} deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Creates a value of 'AttributeDefinition' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'adAttributeName'
--
-- * 'adAttributeType'
attributeDefinition
:: Text -- ^ 'adAttributeName'
-> ScalarAttributeType -- ^ 'adAttributeType'
-> AttributeDefinition
attributeDefinition pAttributeName_ pAttributeType_ =
AttributeDefinition'
{ _adAttributeName = pAttributeName_
, _adAttributeType = pAttributeType_
}
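-- Example (comment only; assumes 'S' names the String member of
-- 'ScalarAttributeType' from "Network.AWS.DynamoDB.Types.Sum"):
--
-- > attributeDefinition "ArtistId" S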
-- | A name for the attribute.
adAttributeName :: Lens' AttributeDefinition Text
adAttributeName = lens _adAttributeName (\ s a -> s{_adAttributeName = a});
-- | The data type for the attribute.
adAttributeType :: Lens' AttributeDefinition ScalarAttributeType
adAttributeType = lens _adAttributeType (\ s a -> s{_adAttributeType = a});
instance FromJSON AttributeDefinition where
parseJSON
= withObject "AttributeDefinition"
(\ x ->
AttributeDefinition' <$>
(x .: "AttributeName") <*> (x .: "AttributeType"))
instance ToJSON AttributeDefinition where
toJSON AttributeDefinition'{..}
= object
(catMaybes
[Just ("AttributeName" .= _adAttributeName),
Just ("AttributeType" .= _adAttributeType)])
-- | Represents the data for an attribute. You can set one, and only one, of
-- the elements.
--
-- Each attribute in an item is a name-value pair. An attribute can be
-- single-valued or multi-valued set. For example, a book item can have
-- title and authors attributes. Each book has one title but can have many
-- authors. The multi-valued attribute is a set; duplicate values are not
-- allowed.
--
-- /See:/ 'attributeValue' smart constructor.
data AttributeValue = AttributeValue'
{ _avL :: !(Maybe [AttributeValue])
, _avNS :: !(Maybe [Text])
, _avM :: !(Maybe (Map Text AttributeValue))
, _avNULL :: !(Maybe Bool)
, _avN :: !(Maybe Text)
, _avBS :: !(Maybe [Base64])
, _avB :: !(Maybe Base64)
, _avSS :: !(Maybe [Text])
, _avS :: !(Maybe Text)
, _avBOOL :: !(Maybe Bool)
} deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Creates a value of 'AttributeValue' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'avL'
--
-- * 'avNS'
--
-- * 'avM'
--
-- * 'avNULL'
--
-- * 'avN'
--
-- * 'avBS'
--
-- * 'avB'
--
-- * 'avSS'
--
-- * 'avS'
--
-- * 'avBOOL'
attributeValue
:: AttributeValue
attributeValue =
AttributeValue'
{ _avL = Nothing
, _avNS = Nothing
, _avM = Nothing
, _avNULL = Nothing
, _avN = Nothing
, _avBS = Nothing
, _avB = Nothing
, _avSS = Nothing
, _avS = Nothing
, _avBOOL = Nothing
}
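-- Example (comment only; '&' and '?~' are the usual lens operators): build a
-- single-valued attribute by setting exactly one of the lenses below.
--
-- > attributeValue & avS ?~ "Amazon DynamoDB"  -- a String attribute
-- > attributeValue & avN ?~ "123"              -- a Number attribute (sent as text)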
-- | A List of attribute values.
avL :: Lens' AttributeValue [AttributeValue]
avL = lens _avL (\ s a -> s{_avL = a}) . _Default . _Coerce;
-- | A Number Set data type.
avNS :: Lens' AttributeValue [Text]
avNS = lens _avNS (\ s a -> s{_avNS = a}) . _Default . _Coerce;
-- | A Map of attribute values.
avM :: Lens' AttributeValue (HashMap Text AttributeValue)
avM = lens _avM (\ s a -> s{_avM = a}) . _Default . _Map;
-- | A Null data type.
avNULL :: Lens' AttributeValue (Maybe Bool)
avNULL = lens _avNULL (\ s a -> s{_avNULL = a});
-- | A Number data type.
avN :: Lens' AttributeValue (Maybe Text)
avN = lens _avN (\ s a -> s{_avN = a});
-- | A Binary Set data type.
avBS :: Lens' AttributeValue [ByteString]
avBS = lens _avBS (\ s a -> s{_avBS = a}) . _Default . _Coerce;
-- | A Binary data type.
--
-- /Note:/ This 'Lens' automatically encodes and decodes Base64 data,
-- despite what the AWS documentation might say.
-- The underlying isomorphism will encode to Base64 representation during
-- serialisation, and decode from Base64 representation during deserialisation.
-- This 'Lens' accepts and returns only raw unencoded data.
avB :: Lens' AttributeValue (Maybe ByteString)
avB = lens _avB (\ s a -> s{_avB = a}) . mapping _Base64;
-- | A String Set data type.
avSS :: Lens' AttributeValue [Text]
avSS = lens _avSS (\ s a -> s{_avSS = a}) . _Default . _Coerce;
-- | A String data type.
avS :: Lens' AttributeValue (Maybe Text)
avS = lens _avS (\ s a -> s{_avS = a});
-- | A Boolean data type.
avBOOL :: Lens' AttributeValue (Maybe Bool)
avBOOL = lens _avBOOL (\ s a -> s{_avBOOL = a});
instance FromJSON AttributeValue where
parseJSON
= withObject "AttributeValue"
(\ x ->
AttributeValue' <$>
(x .:? "L" .!= mempty) <*> (x .:? "NS" .!= mempty)
<*> (x .:? "M" .!= mempty)
<*> (x .:? "NULL")
<*> (x .:? "N")
<*> (x .:? "BS" .!= mempty)
<*> (x .:? "B")
<*> (x .:? "SS" .!= mempty)
<*> (x .:? "S")
<*> (x .:? "BOOL"))
instance ToJSON AttributeValue where
toJSON AttributeValue'{..}
= object
(catMaybes
[("L" .=) <$> _avL, ("NS" .=) <$> _avNS,
("M" .=) <$> _avM, ("NULL" .=) <$> _avNULL,
("N" .=) <$> _avN, ("BS" .=) <$> _avBS,
("B" .=) <$> _avB, ("SS" .=) <$> _avSS,
("S" .=) <$> _avS, ("BOOL" .=) <$> _avBOOL])
-- | For the /UpdateItem/ operation, represents the attributes to be
-- modified, the action to perform on each, and the new value for each.
--
-- You cannot use /UpdateItem/ to update any primary key attributes.
-- Instead, you will need to delete the item, and then use /PutItem/ to
-- create a new item with new attributes.
--
-- Attribute values cannot be null; string and binary type attributes must
-- have lengths greater than zero; and set type attributes must not be
-- empty. Requests with empty values will be rejected with a
-- /ValidationException/ exception.
--
-- /See:/ 'attributeValueUpdate' smart constructor.
data AttributeValueUpdate = AttributeValueUpdate'
{ _avuValue :: !(Maybe AttributeValue)
, _avuAction :: !(Maybe AttributeAction)
} deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Creates a value of 'AttributeValueUpdate' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'avuValue'
--
-- * 'avuAction'
attributeValueUpdate
:: AttributeValueUpdate
attributeValueUpdate =
AttributeValueUpdate'
{ _avuValue = Nothing
, _avuAction = Nothing
}
-- | Undocumented member.
avuValue :: Lens' AttributeValueUpdate (Maybe AttributeValue)
avuValue = lens _avuValue (\ s a -> s{_avuValue = a});
-- | Specifies how to perform the update. Valid values are 'PUT' (default),
-- 'DELETE', and 'ADD'. The behavior depends on whether the specified
-- primary key already exists in the table.
--
-- __If an item with the specified /Key/ is found in the table:__
--
-- - 'PUT' - Adds the specified attribute to the item. If the attribute
-- already exists, it is replaced by the new value.
--
-- - 'DELETE' - If no value is specified, the attribute and its value are
-- removed from the item. The data type of the specified value must
-- match the existing value\'s data type.
--
-- If a /set/ of values is specified, then those values are subtracted
-- from the old set. For example, if the attribute value was the set
-- '[a,b,c]' and the /DELETE/ action specified '[a,c]', then the final
-- attribute value would be '[b]'. Specifying an empty set is an error.
--
-- - 'ADD' - If the attribute does not already exist, then the attribute
-- and its values are added to the item. If the attribute does exist,
-- then the behavior of 'ADD' depends on the data type of the
-- attribute:
--
-- - If the existing attribute is a number, and if /Value/ is also a
-- number, then the /Value/ is mathematically added to the existing
-- attribute. If /Value/ is a negative number, then it is
-- subtracted from the existing attribute.
--
-- If you use 'ADD' to increment or decrement a number value for an
-- item that doesn\'t exist before the update, DynamoDB uses 0 as
-- the initial value.
--
-- In addition, if you use 'ADD' to update an existing item, and
-- intend to increment or decrement an attribute value which does
-- not yet exist, DynamoDB uses '0' as the initial value. For
-- example, suppose that the item you want to update does not yet
-- have an attribute named /itemcount/, but you decide to 'ADD' the
-- number '3' to this attribute anyway, even though it currently
-- does not exist. DynamoDB will create the /itemcount/ attribute,
-- set its initial value to '0', and finally add '3' to it. The
-- result will be a new /itemcount/ attribute in the item, with a
-- value of '3'.
--
-- - If the existing data type is a set, and if the /Value/ is also a
-- set, then the /Value/ is added to the existing set. (This is a
-- /set/ operation, not mathematical addition.) For example, if the
-- attribute value was the set '[1,2]', and the 'ADD' action
-- specified '[3]', then the final attribute value would be
-- '[1,2,3]'. An error occurs if an Add action is specified for a
-- set attribute and the attribute type specified does not match
-- the existing set type.
--
-- Both sets must have the same primitive data type. For example,
-- if the existing data type is a set of strings, the /Value/ must
-- also be a set of strings. The same holds true for number sets
-- and binary sets.
--
-- This action is only valid for an existing attribute whose data type
-- is number or is a set. Do not use 'ADD' for any other data types.
--
-- __If no item with the specified /Key/ is found:__
--
-- - 'PUT' - DynamoDB creates a new item with the specified primary key,
-- and then adds the attribute.
--
-- - 'DELETE' - Nothing happens; there is no attribute to delete.
--
-- - 'ADD' - DynamoDB creates an item with the supplied primary key and
-- number (or set of numbers) for the attribute value. The only data
-- types allowed are number and number set; no other data types can be
-- specified.
--
avuAction :: Lens' AttributeValueUpdate (Maybe AttributeAction)
avuAction = lens _avuAction (\ s a -> s{_avuAction = a});
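-- Example (comment only): replace an attribute's value with the number 3,
-- leaving the action at its default ('PUT'); to increment instead, also set
-- 'avuAction' to the 'ADD' member of 'AttributeAction'.
--
-- > attributeValueUpdate & avuValue ?~ (attributeValue & avN ?~ "3")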
instance ToJSON AttributeValueUpdate where
toJSON AttributeValueUpdate'{..}
= object
(catMaybes
[("Value" .=) <$> _avuValue,
("Action" .=) <$> _avuAction])
-- | Represents the amount of provisioned throughput capacity consumed on a
-- table or an index.
--
-- /See:/ 'capacity' smart constructor.
newtype Capacity = Capacity'
{ _cCapacityUnits :: Maybe Double
} deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Creates a value of 'Capacity' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'cCapacityUnits'
capacity
:: Capacity
capacity =
Capacity'
{ _cCapacityUnits = Nothing
}
-- | The total number of capacity units consumed on a table or an index.
cCapacityUnits :: Lens' Capacity (Maybe Double)
cCapacityUnits = lens _cCapacityUnits (\ s a -> s{_cCapacityUnits = a});
instance FromJSON Capacity where
parseJSON
= withObject "Capacity"
(\ x -> Capacity' <$> (x .:? "CapacityUnits"))
-- | Represents the selection criteria for a /Query/ or /Scan/ operation:
--
-- - For a /Query/ operation, /Condition/ is used for specifying the
-- /KeyConditions/ to use when querying a table or an index. For
-- /KeyConditions/, only the following comparison operators are
-- supported:
--
-- 'EQ | LE | LT | GE | GT | BEGINS_WITH | BETWEEN'
--
-- /Condition/ is also used in a /QueryFilter/, which evaluates the
-- query results and returns only the desired values.
--
-- - For a /Scan/ operation, /Condition/ is used in a /ScanFilter/, which
-- evaluates the scan results and returns only the desired values.
--
--
-- /See:/ 'condition' smart constructor.
data Condition = Condition'
{ _cAttributeValueList :: !(Maybe [AttributeValue])
, _cComparisonOperator :: !ComparisonOperator
} deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Creates a value of 'Condition' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'cAttributeValueList'
--
-- * 'cComparisonOperator'
condition
:: ComparisonOperator -- ^ 'cComparisonOperator'
-> Condition
condition pComparisonOperator_ =
Condition'
{ _cAttributeValueList = Nothing
, _cComparisonOperator = pComparisonOperator_
}
-- | One or more values to evaluate against the supplied attribute. The
-- number of values in the list depends on the /ComparisonOperator/ being
-- used.
--
-- For type Number, value comparisons are numeric.
--
-- String value comparisons for greater than, equals, or less than are
-- based on ASCII character code values. For example, 'a' is greater than
-- 'A', and 'a' is greater than 'B'. For a list of code values, see
-- <http://en.wikipedia.org/wiki/ASCII#ASCII_printable_characters>.
--
-- For Binary, DynamoDB treats each byte of the binary data as unsigned
-- when it compares binary values.
cAttributeValueList :: Lens' Condition [AttributeValue]
cAttributeValueList = lens _cAttributeValueList (\ s a -> s{_cAttributeValueList = a}) . _Default . _Coerce;
-- | A comparator for evaluating attributes. For example, equals, greater
-- than, less than, etc.
--
-- The following comparison operators are available:
--
-- 'EQ | NE | LE | LT | GE | GT | NOT_NULL | NULL | CONTAINS | NOT_CONTAINS | BEGINS_WITH | IN | BETWEEN'
--
-- The following are descriptions of each comparison operator.
--
-- - 'EQ' : Equal. 'EQ' is supported for all datatypes, including lists
-- and maps.
--
-- /AttributeValueList/ can contain only one /AttributeValue/ element
-- of type String, Number, Binary, String Set, Number Set, or Binary
-- Set. If an item contains an /AttributeValue/ element of a different
-- type than the one provided in the request, the value does not match.
-- For example, '{\"S\":\"6\"}' does not equal '{\"N\":\"6\"}'. Also,
-- '{\"N\":\"6\"}' does not equal '{\"NS\":[\"6\", \"2\", \"1\"]}'.
--
-- - 'NE' : Not equal. 'NE' is supported for all datatypes, including
-- lists and maps.
--
-- /AttributeValueList/ can contain only one /AttributeValue/ of type
-- String, Number, Binary, String Set, Number Set, or Binary Set. If an
-- item contains an /AttributeValue/ of a different type than the one
-- provided in the request, the value does not match. For example,
-- '{\"S\":\"6\"}' does not equal '{\"N\":\"6\"}'. Also,
-- '{\"N\":\"6\"}' does not equal '{\"NS\":[\"6\", \"2\", \"1\"]}'.
--
-- - 'LE' : Less than or equal.
--
-- /AttributeValueList/ can contain only one /AttributeValue/ element
-- of type String, Number, or Binary (not a set type). If an item
-- contains an /AttributeValue/ element of a different type than the
-- one provided in the request, the value does not match. For example,
-- '{\"S\":\"6\"}' does not equal '{\"N\":\"6\"}'. Also,
-- '{\"N\":\"6\"}' does not compare to
-- '{\"NS\":[\"6\", \"2\", \"1\"]}'.
--
-- - 'LT' : Less than.
--
-- /AttributeValueList/ can contain only one /AttributeValue/ of type
-- String, Number, or Binary (not a set type). If an item contains an
-- /AttributeValue/ element of a different type than the one provided
-- in the request, the value does not match. For example,
-- '{\"S\":\"6\"}' does not equal '{\"N\":\"6\"}'. Also,
-- '{\"N\":\"6\"}' does not compare to
-- '{\"NS\":[\"6\", \"2\", \"1\"]}'.
--
-- - 'GE' : Greater than or equal.
--
-- /AttributeValueList/ can contain only one /AttributeValue/ element
-- of type String, Number, or Binary (not a set type). If an item
-- contains an /AttributeValue/ element of a different type than the
-- one provided in the request, the value does not match. For example,
-- '{\"S\":\"6\"}' does not equal '{\"N\":\"6\"}'. Also,
-- '{\"N\":\"6\"}' does not compare to
-- '{\"NS\":[\"6\", \"2\", \"1\"]}'.
--
-- - 'GT' : Greater than.
--
-- /AttributeValueList/ can contain only one /AttributeValue/ element
-- of type String, Number, or Binary (not a set type). If an item
-- contains an /AttributeValue/ element of a different type than the
-- one provided in the request, the value does not match. For example,
-- '{\"S\":\"6\"}' does not equal '{\"N\":\"6\"}'. Also,
-- '{\"N\":\"6\"}' does not compare to
-- '{\"NS\":[\"6\", \"2\", \"1\"]}'.
--
-- - 'NOT_NULL' : The attribute exists. 'NOT_NULL' is supported for all
-- datatypes, including lists and maps.
--
-- This operator tests for the existence of an attribute, not its data
-- type. If the data type of attribute \"'a'\" is null, and you
-- evaluate it using 'NOT_NULL', the result is a Boolean /true/. This
-- result is because the attribute \"'a'\" exists; its data type is not
-- relevant to the 'NOT_NULL' comparison operator.
--
-- - 'NULL' : The attribute does not exist. 'NULL' is supported for all
-- datatypes, including lists and maps.
--
-- This operator tests for the nonexistence of an attribute, not its
-- data type. If the data type of attribute \"'a'\" is null, and you
-- evaluate it using 'NULL', the result is a Boolean /false/. This is
-- because the attribute \"'a'\" exists; its data type is not relevant
-- to the 'NULL' comparison operator.
--
-- - 'CONTAINS' : Checks for a subsequence, or value in a set.
--
-- /AttributeValueList/ can contain only one /AttributeValue/ element
-- of type String, Number, or Binary (not a set type). If the target
-- attribute of the comparison is of type String, then the operator
-- checks for a substring match. If the target attribute of the
-- comparison is of type Binary, then the operator looks for a
-- subsequence of the target that matches the input. If the target
-- attribute of the comparison is a set (\"'SS'\", \"'NS'\", or
-- \"'BS'\"), then the operator evaluates to true if it finds an exact
-- match with any member of the set.
--
-- CONTAINS is supported for lists: When evaluating \"'a CONTAINS b'\",
-- \"'a'\" can be a list; however, \"'b'\" cannot be a set, a map, or a
-- list.
--
-- - 'NOT_CONTAINS' : Checks for absence of a subsequence, or absence of
-- a value in a set.
--
-- /AttributeValueList/ can contain only one /AttributeValue/ element
-- of type String, Number, or Binary (not a set type). If the target
-- attribute of the comparison is a String, then the operator checks
-- for the absence of a substring match. If the target attribute of the
-- comparison is Binary, then the operator checks for the absence of a
-- subsequence of the target that matches the input. If the target
-- attribute of the comparison is a set (\"'SS'\", \"'NS'\", or
-- \"'BS'\"), then the operator evaluates to true if it /does not/ find
-- an exact match with any member of the set.
--
-- NOT_CONTAINS is supported for lists: When evaluating
-- \"'a NOT CONTAINS b'\", \"'a'\" can be a list; however, \"'b'\"
-- cannot be a set, a map, or a list.
--
-- - 'BEGINS_WITH' : Checks for a prefix.
--
-- /AttributeValueList/ can contain only one /AttributeValue/ of type
-- String or Binary (not a Number or a set type). The target attribute
-- of the comparison must be of type String or Binary (not a Number or
-- a set type).
--
-- - 'IN' : Checks for matching elements within two sets.
--
-- /AttributeValueList/ can contain one or more /AttributeValue/
-- elements of type String, Number, or Binary (not a set type). These
-- attributes are compared against an existing set type attribute of an
-- item. If any elements of the input set are present in the item
-- attribute, the expression evaluates to true.
--
-- - 'BETWEEN' : Greater than or equal to the first value, and less than
-- or equal to the second value.
--
-- /AttributeValueList/ must contain two /AttributeValue/ elements of
-- the same type, either String, Number, or Binary (not a set type). A
-- target attribute matches if the target value is greater than, or
-- equal to, the first element and less than, or equal to, the second
-- element. If an item contains an /AttributeValue/ element of a
-- different type than the one provided in the request, the value does
-- not match. For example, '{\"S\":\"6\"}' does not compare to
-- '{\"N\":\"6\"}'. Also, '{\"N\":\"6\"}' does not compare to
-- '{\"NS\":[\"6\", \"2\", \"1\"]}'
--
-- For usage examples of /AttributeValueList/ and /ComparisonOperator/, see
-- <http://docs.aws.amazon.com/amazondynamodb/latest/developerguide/LegacyConditionalParameters.html Legacy Conditional Parameters>
-- in the /Amazon DynamoDB Developer Guide/.
cComparisonOperator :: Lens' Condition ComparisonOperator
cComparisonOperator = lens _cComparisonOperator (\ s a -> s{_cComparisonOperator = a});
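-- Example (comment only): given a comparison operator @op@ of type
-- 'ComparisonOperator' (for instance its BETWEEN member), a numeric range
-- condition can be written as
--
-- > condition op & cAttributeValueList .~
-- >   [ attributeValue & avN ?~ "1", attributeValue & avN ?~ "100" ]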
instance ToJSON Condition where
toJSON Condition'{..}
= object
(catMaybes
[("AttributeValueList" .=) <$> _cAttributeValueList,
Just ("ComparisonOperator" .= _cComparisonOperator)])
-- | The capacity units consumed by an operation. The data returned includes
-- the total provisioned throughput consumed, along with statistics for the
-- table and any indexes involved in the operation. /ConsumedCapacity/ is
-- only returned if the request asked for it. For more information, see
-- <http://docs.aws.amazon.com/amazondynamodb/latest/developerguide/ProvisionedThroughputIntro.html Provisioned Throughput>
-- in the /Amazon DynamoDB Developer Guide/.
--
-- /See:/ 'consumedCapacity' smart constructor.
data ConsumedCapacity = ConsumedCapacity'
{ _ccGlobalSecondaryIndexes :: !(Maybe (Map Text Capacity))
, _ccCapacityUnits :: !(Maybe Double)
, _ccLocalSecondaryIndexes :: !(Maybe (Map Text Capacity))
, _ccTable :: !(Maybe Capacity)
, _ccTableName :: !(Maybe Text)
} deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Creates a value of 'ConsumedCapacity' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'ccGlobalSecondaryIndexes'
--
-- * 'ccCapacityUnits'
--
-- * 'ccLocalSecondaryIndexes'
--
-- * 'ccTable'
--
-- * 'ccTableName'
consumedCapacity
:: ConsumedCapacity
consumedCapacity =
ConsumedCapacity'
{ _ccGlobalSecondaryIndexes = Nothing
, _ccCapacityUnits = Nothing
, _ccLocalSecondaryIndexes = Nothing
, _ccTable = Nothing
, _ccTableName = Nothing
}
-- | The amount of throughput consumed on each global index affected by the
-- operation.
ccGlobalSecondaryIndexes :: Lens' ConsumedCapacity (HashMap Text Capacity)
ccGlobalSecondaryIndexes = lens _ccGlobalSecondaryIndexes (\ s a -> s{_ccGlobalSecondaryIndexes = a}) . _Default . _Map;
-- | The total number of capacity units consumed by the operation.
ccCapacityUnits :: Lens' ConsumedCapacity (Maybe Double)
ccCapacityUnits = lens _ccCapacityUnits (\ s a -> s{_ccCapacityUnits = a});
-- | The amount of throughput consumed on each local index affected by the
-- operation.
ccLocalSecondaryIndexes :: Lens' ConsumedCapacity (HashMap Text Capacity)
ccLocalSecondaryIndexes = lens _ccLocalSecondaryIndexes (\ s a -> s{_ccLocalSecondaryIndexes = a}) . _Default . _Map;
-- | The amount of throughput consumed on the table affected by the
-- operation.
ccTable :: Lens' ConsumedCapacity (Maybe Capacity)
ccTable = lens _ccTable (\ s a -> s{_ccTable = a});
-- | The name of the table that was affected by the operation.
ccTableName :: Lens' ConsumedCapacity (Maybe Text)
ccTableName = lens _ccTableName (\ s a -> s{_ccTableName = a});
instance FromJSON ConsumedCapacity where
parseJSON
= withObject "ConsumedCapacity"
(\ x ->
ConsumedCapacity' <$>
(x .:? "GlobalSecondaryIndexes" .!= mempty) <*>
(x .:? "CapacityUnits")
<*> (x .:? "LocalSecondaryIndexes" .!= mempty)
<*> (x .:? "Table")
<*> (x .:? "TableName"))
-- | Represents a new global secondary index to be added to an existing
-- table.
--
-- /See:/ 'createGlobalSecondaryIndexAction' smart constructor.
data CreateGlobalSecondaryIndexAction = CreateGlobalSecondaryIndexAction'
{ _cgsiaIndexName :: !Text
, _cgsiaKeySchema :: !(List1 KeySchemaElement)
, _cgsiaProjection :: !Projection
, _cgsiaProvisionedThroughput :: !ProvisionedThroughput
} deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Creates a value of 'CreateGlobalSecondaryIndexAction' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'cgsiaIndexName'
--
-- * 'cgsiaKeySchema'
--
-- * 'cgsiaProjection'
--
-- * 'cgsiaProvisionedThroughput'
createGlobalSecondaryIndexAction
:: Text -- ^ 'cgsiaIndexName'
-> NonEmpty KeySchemaElement -- ^ 'cgsiaKeySchema'
-> Projection -- ^ 'cgsiaProjection'
-> ProvisionedThroughput -- ^ 'cgsiaProvisionedThroughput'
-> CreateGlobalSecondaryIndexAction
createGlobalSecondaryIndexAction pIndexName_ pKeySchema_ pProjection_ pProvisionedThroughput_ =
CreateGlobalSecondaryIndexAction'
{ _cgsiaIndexName = pIndexName_
, _cgsiaKeySchema = _List1 # pKeySchema_
, _cgsiaProjection = pProjection_
, _cgsiaProvisionedThroughput = pProvisionedThroughput_
}
-- | The name of the global secondary index to be created.
cgsiaIndexName :: Lens' CreateGlobalSecondaryIndexAction Text
cgsiaIndexName = lens _cgsiaIndexName (\ s a -> s{_cgsiaIndexName = a});
-- | The key schema for the global secondary index.
cgsiaKeySchema :: Lens' CreateGlobalSecondaryIndexAction (NonEmpty KeySchemaElement)
cgsiaKeySchema = lens _cgsiaKeySchema (\ s a -> s{_cgsiaKeySchema = a}) . _List1;
-- | Undocumented member.
cgsiaProjection :: Lens' CreateGlobalSecondaryIndexAction Projection
cgsiaProjection = lens _cgsiaProjection (\ s a -> s{_cgsiaProjection = a});
-- | Undocumented member.
cgsiaProvisionedThroughput :: Lens' CreateGlobalSecondaryIndexAction ProvisionedThroughput
cgsiaProvisionedThroughput = lens _cgsiaProvisionedThroughput (\ s a -> s{_cgsiaProvisionedThroughput = a});
instance ToJSON CreateGlobalSecondaryIndexAction
where
toJSON CreateGlobalSecondaryIndexAction'{..}
= object
(catMaybes
[Just ("IndexName" .= _cgsiaIndexName),
Just ("KeySchema" .= _cgsiaKeySchema),
Just ("Projection" .= _cgsiaProjection),
Just
("ProvisionedThroughput" .=
_cgsiaProvisionedThroughput)])
-- | Represents a global secondary index to be deleted from an existing
-- table.
--
-- /See:/ 'deleteGlobalSecondaryIndexAction' smart constructor.
newtype DeleteGlobalSecondaryIndexAction = DeleteGlobalSecondaryIndexAction'
{ _dgsiaIndexName :: Text
} deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Creates a value of 'DeleteGlobalSecondaryIndexAction' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'dgsiaIndexName'
deleteGlobalSecondaryIndexAction
:: Text -- ^ 'dgsiaIndexName'
-> DeleteGlobalSecondaryIndexAction
deleteGlobalSecondaryIndexAction pIndexName_ =
DeleteGlobalSecondaryIndexAction'
{ _dgsiaIndexName = pIndexName_
}
-- | The name of the global secondary index to be deleted.
dgsiaIndexName :: Lens' DeleteGlobalSecondaryIndexAction Text
dgsiaIndexName = lens _dgsiaIndexName (\ s a -> s{_dgsiaIndexName = a});
instance ToJSON DeleteGlobalSecondaryIndexAction
where
toJSON DeleteGlobalSecondaryIndexAction'{..}
= object
(catMaybes [Just ("IndexName" .= _dgsiaIndexName)])
-- | Represents a request to perform a /DeleteItem/ operation on an item.
--
-- /See:/ 'deleteRequest' smart constructor.
newtype DeleteRequest = DeleteRequest'
{ _drKey :: Map Text AttributeValue
} deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Creates a value of 'DeleteRequest' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'drKey'
deleteRequest
:: DeleteRequest
deleteRequest =
DeleteRequest'
{ _drKey = mempty
}
-- | A map of attribute name to attribute values, representing the primary
-- key of the item to delete. All of the table\'s primary key attributes
-- must be specified, and their data types must match those of the table\'s
-- key schema.
drKey :: Lens' DeleteRequest (HashMap Text AttributeValue)
drKey = lens _drKey (\ s a -> s{_drKey = a}) . _Map;
instance FromJSON DeleteRequest where
parseJSON
= withObject "DeleteRequest"
(\ x -> DeleteRequest' <$> (x .:? "Key" .!= mempty))
instance ToJSON DeleteRequest where
toJSON DeleteRequest'{..}
= object (catMaybes [Just ("Key" .= _drKey)])
-- | Represents a condition to be compared with an attribute value. This
-- condition can be used with /DeleteItem/, /PutItem/ or /UpdateItem/
-- operations; if the comparison evaluates to true, the operation succeeds;
-- if not, the operation fails. You can use /ExpectedAttributeValue/ in one
-- of two different ways:
--
-- - Use /AttributeValueList/ to specify one or more values to compare
-- against an attribute. Use /ComparisonOperator/ to specify how you
-- want to perform the comparison. If the comparison evaluates to true,
-- then the conditional operation succeeds.
--
-- - Use /Value/ to specify a value that DynamoDB will compare against an
-- attribute. If the values match, then /ExpectedAttributeValue/
-- evaluates to true and the conditional operation succeeds.
-- Optionally, you can also set /Exists/ to false, indicating that you
-- /do not/ expect to find the attribute value in the table. In this
-- case, the conditional operation succeeds only if the comparison
-- evaluates to false.
--
-- /Value/ and /Exists/ are incompatible with /AttributeValueList/ and
-- /ComparisonOperator/. Note that if you use both sets of parameters at
-- once, DynamoDB will return a /ValidationException/ exception.
--
-- /See:/ 'expectedAttributeValue' smart constructor.
data ExpectedAttributeValue = ExpectedAttributeValue'
{ _eavAttributeValueList :: !(Maybe [AttributeValue])
, _eavExists :: !(Maybe Bool)
, _eavValue :: !(Maybe AttributeValue)
, _eavComparisonOperator :: !(Maybe ComparisonOperator)
} deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Creates a value of 'ExpectedAttributeValue' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'eavAttributeValueList'
--
-- * 'eavExists'
--
-- * 'eavValue'
--
-- * 'eavComparisonOperator'
expectedAttributeValue
:: ExpectedAttributeValue
expectedAttributeValue =
ExpectedAttributeValue'
{ _eavAttributeValueList = Nothing
, _eavExists = Nothing
, _eavValue = Nothing
, _eavComparisonOperator = Nothing
}
-- | One or more values to evaluate against the supplied attribute. The
-- number of values in the list depends on the /ComparisonOperator/ being
-- used.
--
-- For type Number, value comparisons are numeric.
--
-- String value comparisons for greater than, equals, or less than are
-- based on ASCII character code values. For example, 'a' is greater than
-- 'A', and 'a' is greater than 'B'. For a list of code values, see
-- <http://en.wikipedia.org/wiki/ASCII#ASCII_printable_characters>.
--
-- For Binary, DynamoDB treats each byte of the binary data as unsigned
-- when it compares binary values.
--
-- For information on specifying data types in JSON, see
-- <http://docs.aws.amazon.com/amazondynamodb/latest/developerguide/DataFormat.html JSON Data Format>
-- in the /Amazon DynamoDB Developer Guide/.
eavAttributeValueList :: Lens' ExpectedAttributeValue [AttributeValue]
eavAttributeValueList = lens _eavAttributeValueList (\ s a -> s{_eavAttributeValueList = a}) . _Default . _Coerce;
-- | Causes DynamoDB to evaluate the value before attempting a conditional
-- operation:
--
-- - If /Exists/ is 'true', DynamoDB will check to see if that attribute
-- value already exists in the table. If it is found, then the
-- operation succeeds. If it is not found, the operation fails with a
-- /ConditionalCheckFailedException/.
--
-- - If /Exists/ is 'false', DynamoDB assumes that the attribute value
-- does not exist in the table. If in fact the value does not exist,
-- then the assumption is valid and the operation succeeds. If the
-- value is found, despite the assumption that it does not exist, the
-- operation fails with a /ConditionalCheckFailedException/.
--
-- The default setting for /Exists/ is 'true'. If you supply a /Value/ all
-- by itself, DynamoDB assumes the attribute exists: You don\'t have to set
-- /Exists/ to 'true', because it is implied.
--
-- DynamoDB returns a /ValidationException/ if:
--
-- - /Exists/ is 'true' but there is no /Value/ to check. (You expect a
-- value to exist, but don\'t specify what that value is.)
--
-- - /Exists/ is 'false' but you also provide a /Value/. (You cannot
-- expect an attribute to have a value, while also expecting it not to
-- exist.)
--
eavExists :: Lens' ExpectedAttributeValue (Maybe Bool)
eavExists = lens _eavExists (\ s a -> s{_eavExists = a});
-- | Undocumented member.
eavValue :: Lens' ExpectedAttributeValue (Maybe AttributeValue)
eavValue = lens _eavValue (\ s a -> s{_eavValue = a});
-- | A comparator for evaluating attributes in the /AttributeValueList/. For
-- example, equals, greater than, less than, etc.
--
-- The following comparison operators are available:
--
-- 'EQ | NE | LE | LT | GE | GT | NOT_NULL | NULL | CONTAINS | NOT_CONTAINS | BEGINS_WITH | IN | BETWEEN'
--
-- The following are descriptions of each comparison operator.
--
-- - 'EQ' : Equal. 'EQ' is supported for all datatypes, including lists
-- and maps.
--
-- /AttributeValueList/ can contain only one /AttributeValue/ element
-- of type String, Number, Binary, String Set, Number Set, or Binary
-- Set. If an item contains an /AttributeValue/ element of a different
-- type than the one provided in the request, the value does not match.
-- For example, '{\"S\":\"6\"}' does not equal '{\"N\":\"6\"}'. Also,
-- '{\"N\":\"6\"}' does not equal '{\"NS\":[\"6\", \"2\", \"1\"]}'.
--
-- - 'NE' : Not equal. 'NE' is supported for all datatypes, including
-- lists and maps.
--
-- /AttributeValueList/ can contain only one /AttributeValue/ of type
-- String, Number, Binary, String Set, Number Set, or Binary Set. If an
-- item contains an /AttributeValue/ of a different type than the one
-- provided in the request, the value does not match. For example,
-- '{\"S\":\"6\"}' does not equal '{\"N\":\"6\"}'. Also,
-- '{\"N\":\"6\"}' does not equal '{\"NS\":[\"6\", \"2\", \"1\"]}'.
--
-- - 'LE' : Less than or equal.
--
-- /AttributeValueList/ can contain only one /AttributeValue/ element
-- of type String, Number, or Binary (not a set type). If an item
-- contains an /AttributeValue/ element of a different type than the
-- one provided in the request, the value does not match. For example,
-- '{\"S\":\"6\"}' does not equal '{\"N\":\"6\"}'. Also,
-- '{\"N\":\"6\"}' does not compare to
-- '{\"NS\":[\"6\", \"2\", \"1\"]}'.
--
-- - 'LT' : Less than.
--
-- /AttributeValueList/ can contain only one /AttributeValue/ of type
-- String, Number, or Binary (not a set type). If an item contains an
-- /AttributeValue/ element of a different type than the one provided
-- in the request, the value does not match. For example,
-- '{\"S\":\"6\"}' does not equal '{\"N\":\"6\"}'. Also,
-- '{\"N\":\"6\"}' does not compare to
-- '{\"NS\":[\"6\", \"2\", \"1\"]}'.
--
-- - 'GE' : Greater than or equal.
--
-- /AttributeValueList/ can contain only one /AttributeValue/ element
-- of type String, Number, or Binary (not a set type). If an item
-- contains an /AttributeValue/ element of a different type than the
-- one provided in the request, the value does not match. For example,
-- '{\"S\":\"6\"}' does not equal '{\"N\":\"6\"}'. Also,
-- '{\"N\":\"6\"}' does not compare to
-- '{\"NS\":[\"6\", \"2\", \"1\"]}'.
--
-- - 'GT' : Greater than.
--
-- /AttributeValueList/ can contain only one /AttributeValue/ element
-- of type String, Number, or Binary (not a set type). If an item
-- contains an /AttributeValue/ element of a different type than the
-- one provided in the request, the value does not match. For example,
-- '{\"S\":\"6\"}' does not equal '{\"N\":\"6\"}'. Also,
-- '{\"N\":\"6\"}' does not compare to
-- '{\"NS\":[\"6\", \"2\", \"1\"]}'.
--
-- - 'NOT_NULL' : The attribute exists. 'NOT_NULL' is supported for all
-- datatypes, including lists and maps.
--
-- This operator tests for the existence of an attribute, not its data
-- type. If the data type of attribute \"'a'\" is null, and you
-- evaluate it using 'NOT_NULL', the result is a Boolean /true/. This
-- result is because the attribute \"'a'\" exists; its data type is not
-- relevant to the 'NOT_NULL' comparison operator.
--
-- - 'NULL' : The attribute does not exist. 'NULL' is supported for all
-- datatypes, including lists and maps.
--
-- This operator tests for the nonexistence of an attribute, not its
-- data type. If the data type of attribute \"'a'\" is null, and you
-- evaluate it using 'NULL', the result is a Boolean /false/. This is
-- because the attribute \"'a'\" exists; its data type is not relevant
-- to the 'NULL' comparison operator.
--
-- - 'CONTAINS' : Checks for a subsequence, or value in a set.
--
-- /AttributeValueList/ can contain only one /AttributeValue/ element
-- of type String, Number, or Binary (not a set type). If the target
-- attribute of the comparison is of type String, then the operator
-- checks for a substring match. If the target attribute of the
-- comparison is of type Binary, then the operator looks for a
-- subsequence of the target that matches the input. If the target
-- attribute of the comparison is a set (\"'SS'\", \"'NS'\", or
-- \"'BS'\"), then the operator evaluates to true if it finds an exact
-- match with any member of the set.
--
-- CONTAINS is supported for lists: When evaluating \"'a CONTAINS b'\",
-- \"'a'\" can be a list; however, \"'b'\" cannot be a set, a map, or a
-- list.
--
-- - 'NOT_CONTAINS' : Checks for absence of a subsequence, or absence of
-- a value in a set.
--
-- /AttributeValueList/ can contain only one /AttributeValue/ element
-- of type String, Number, or Binary (not a set type). If the target
-- attribute of the comparison is a String, then the operator checks
-- for the absence of a substring match. If the target attribute of the
-- comparison is Binary, then the operator checks for the absence of a
-- subsequence of the target that matches the input. If the target
-- attribute of the comparison is a set (\"'SS'\", \"'NS'\", or
-- \"'BS'\"), then the operator evaluates to true if it /does not/ find
-- an exact match with any member of the set.
--
-- NOT_CONTAINS is supported for lists: When evaluating
-- \"'a NOT CONTAINS b'\", \"'a'\" can be a list; however, \"'b'\"
-- cannot be a set, a map, or a list.
--
-- - 'BEGINS_WITH' : Checks for a prefix.
--
-- /AttributeValueList/ can contain only one /AttributeValue/ of type
-- String or Binary (not a Number or a set type). The target attribute
-- of the comparison must be of type String or Binary (not a Number or
-- a set type).
--
-- - 'IN' : Checks for matching elements within two sets.
--
-- /AttributeValueList/ can contain one or more /AttributeValue/
-- elements of type String, Number, or Binary (not a set type). These
-- attributes are compared against an existing set type attribute of an
-- item. If any elements of the input set are present in the item
-- attribute, the expression evaluates to true.
--
-- - 'BETWEEN' : Greater than or equal to the first value, and less than
-- or equal to the second value.
--
-- /AttributeValueList/ must contain two /AttributeValue/ elements of
-- the same type, either String, Number, or Binary (not a set type). A
-- target attribute matches if the target value is greater than, or
-- equal to, the first element and less than, or equal to, the second
-- element. If an item contains an /AttributeValue/ element of a
-- different type than the one provided in the request, the value does
-- not match. For example, '{\"S\":\"6\"}' does not compare to
-- '{\"N\":\"6\"}'. Also, '{\"N\":\"6\"}' does not compare to
-- '{\"NS\":[\"6\", \"2\", \"1\"]}'
--
eavComparisonOperator :: Lens' ExpectedAttributeValue (Maybe ComparisonOperator)
eavComparisonOperator = lens _eavComparisonOperator (\ s a -> s{_eavComparisonOperator = a});
instance ToJSON ExpectedAttributeValue where
toJSON ExpectedAttributeValue'{..}
= object
(catMaybes
[("AttributeValueList" .=) <$>
_eavAttributeValueList,
("Exists" .=) <$> _eavExists,
("Value" .=) <$> _eavValue,
("ComparisonOperator" .=) <$>
_eavComparisonOperator])
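-- A minimal usage sketch (not part of the generated bindings) showing the two
-- forms described above. The lens operators ('&', '?~') are assumed to come
-- from "Control.Lens" (re-exported by the amazonka prelude), 'attributeValue'
-- and 'avS' are assumed from the AttributeValue section earlier in this
-- module, and string literals are taken as 'Text' via OverloadedStrings.
--
-- > -- Succeed only if attribute "Status" currently equals "pending".
-- > expectStatusPending :: ExpectedAttributeValue
-- > expectStatusPending =
-- >     expectedAttributeValue & eavValue ?~ (attributeValue & avS ?~ "pending")
-- >
-- > -- Succeed only if the attribute is absent (legacy Exists = false form).
-- > expectAbsent :: ExpectedAttributeValue
-- > expectAbsent = expectedAttributeValue & eavExists ?~ False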
-- | Represents the properties of a global secondary index.
--
-- /See:/ 'globalSecondaryIndex' smart constructor.
data GlobalSecondaryIndex = GlobalSecondaryIndex'
{ _gsiIndexName :: !Text
, _gsiKeySchema :: !(List1 KeySchemaElement)
, _gsiProjection :: !Projection
, _gsiProvisionedThroughput :: !ProvisionedThroughput
} deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Creates a value of 'GlobalSecondaryIndex' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'gsiIndexName'
--
-- * 'gsiKeySchema'
--
-- * 'gsiProjection'
--
-- * 'gsiProvisionedThroughput'
globalSecondaryIndex
:: Text -- ^ 'gsiIndexName'
-> NonEmpty KeySchemaElement -- ^ 'gsiKeySchema'
-> Projection -- ^ 'gsiProjection'
-> ProvisionedThroughput -- ^ 'gsiProvisionedThroughput'
-> GlobalSecondaryIndex
globalSecondaryIndex pIndexName_ pKeySchema_ pProjection_ pProvisionedThroughput_ =
GlobalSecondaryIndex'
{ _gsiIndexName = pIndexName_
, _gsiKeySchema = _List1 # pKeySchema_
, _gsiProjection = pProjection_
, _gsiProvisionedThroughput = pProvisionedThroughput_
}
-- | The name of the global secondary index. The name must be unique among
-- all other indexes on this table.
gsiIndexName :: Lens' GlobalSecondaryIndex Text
gsiIndexName = lens _gsiIndexName (\ s a -> s{_gsiIndexName = a});
-- | The complete key schema for a global secondary index, which consists of
-- one or more pairs of attribute names and key types ('HASH' or 'RANGE').
gsiKeySchema :: Lens' GlobalSecondaryIndex (NonEmpty KeySchemaElement)
gsiKeySchema = lens _gsiKeySchema (\ s a -> s{_gsiKeySchema = a}) . _List1;
-- | Undocumented member.
gsiProjection :: Lens' GlobalSecondaryIndex Projection
gsiProjection = lens _gsiProjection (\ s a -> s{_gsiProjection = a});
-- | Undocumented member.
gsiProvisionedThroughput :: Lens' GlobalSecondaryIndex ProvisionedThroughput
gsiProvisionedThroughput = lens _gsiProvisionedThroughput (\ s a -> s{_gsiProvisionedThroughput = a});
instance ToJSON GlobalSecondaryIndex where
toJSON GlobalSecondaryIndex'{..}
= object
(catMaybes
[Just ("IndexName" .= _gsiIndexName),
Just ("KeySchema" .= _gsiKeySchema),
Just ("Projection" .= _gsiProjection),
Just
("ProvisionedThroughput" .=
_gsiProvisionedThroughput)])
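-- A sketch of assembling a global secondary index from the smart constructors
-- in this module. The 'Hash' key type and 'PTAll' projection type constructor
-- names are assumptions about the accompanying enum definitions; (:|) comes
-- from "Data.List.NonEmpty" and the lens operators from "Control.Lens".
--
-- > import Data.List.NonEmpty (NonEmpty ((:|)))
-- >
-- > emailIndex :: GlobalSecondaryIndex
-- > emailIndex = globalSecondaryIndex
-- >     "EmailIndex"
-- >     (keySchemaElement "Email" Hash :| [])
-- >     (projection & pProjectionType ?~ PTAll)
-- >     (provisionedThroughput 5 5)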
-- | Represents the properties of a global secondary index.
--
-- /See:/ 'globalSecondaryIndexDescription' smart constructor.
data GlobalSecondaryIndexDescription = GlobalSecondaryIndexDescription'
{ _gsidBackfilling :: !(Maybe Bool)
, _gsidIndexSizeBytes :: !(Maybe Integer)
, _gsidIndexStatus :: !(Maybe IndexStatus)
, _gsidProvisionedThroughput :: !(Maybe ProvisionedThroughputDescription)
, _gsidIndexARN :: !(Maybe Text)
, _gsidKeySchema :: !(Maybe (List1 KeySchemaElement))
, _gsidProjection :: !(Maybe Projection)
, _gsidItemCount :: !(Maybe Integer)
, _gsidIndexName :: !(Maybe Text)
} deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Creates a value of 'GlobalSecondaryIndexDescription' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'gsidBackfilling'
--
-- * 'gsidIndexSizeBytes'
--
-- * 'gsidIndexStatus'
--
-- * 'gsidProvisionedThroughput'
--
-- * 'gsidIndexARN'
--
-- * 'gsidKeySchema'
--
-- * 'gsidProjection'
--
-- * 'gsidItemCount'
--
-- * 'gsidIndexName'
globalSecondaryIndexDescription
:: GlobalSecondaryIndexDescription
globalSecondaryIndexDescription =
GlobalSecondaryIndexDescription'
{ _gsidBackfilling = Nothing
, _gsidIndexSizeBytes = Nothing
, _gsidIndexStatus = Nothing
, _gsidProvisionedThroughput = Nothing
, _gsidIndexARN = Nothing
, _gsidKeySchema = Nothing
, _gsidProjection = Nothing
, _gsidItemCount = Nothing
, _gsidIndexName = Nothing
}
-- | Indicates whether the index is currently backfilling. /Backfilling/ is
-- the process of reading items from the table and determining whether they
-- can be added to the index. (Not all items will qualify: For example, a
-- hash key attribute cannot have any duplicates.) If an item can be added
-- to the index, DynamoDB will do so. After all items have been processed,
-- the backfilling operation is complete and /Backfilling/ is false.
--
-- For indexes that were created during a /CreateTable/ operation, the
-- /Backfilling/ attribute does not appear in the /DescribeTable/ output.
gsidBackfilling :: Lens' GlobalSecondaryIndexDescription (Maybe Bool)
gsidBackfilling = lens _gsidBackfilling (\ s a -> s{_gsidBackfilling = a});
-- | The total size of the specified index, in bytes. DynamoDB updates this
-- value approximately every six hours. Recent changes might not be
-- reflected in this value.
gsidIndexSizeBytes :: Lens' GlobalSecondaryIndexDescription (Maybe Integer)
gsidIndexSizeBytes = lens _gsidIndexSizeBytes (\ s a -> s{_gsidIndexSizeBytes = a});
-- | The current state of the global secondary index:
--
-- - /CREATING/ - The index is being created.
--
-- - /UPDATING/ - The index is being updated.
--
-- - /DELETING/ - The index is being deleted.
--
-- - /ACTIVE/ - The index is ready for use.
--
gsidIndexStatus :: Lens' GlobalSecondaryIndexDescription (Maybe IndexStatus)
gsidIndexStatus = lens _gsidIndexStatus (\ s a -> s{_gsidIndexStatus = a});
-- | Undocumented member.
gsidProvisionedThroughput :: Lens' GlobalSecondaryIndexDescription (Maybe ProvisionedThroughputDescription)
gsidProvisionedThroughput = lens _gsidProvisionedThroughput (\ s a -> s{_gsidProvisionedThroughput = a});
-- | The Amazon Resource Name (ARN) that uniquely identifies the index.
gsidIndexARN :: Lens' GlobalSecondaryIndexDescription (Maybe Text)
gsidIndexARN = lens _gsidIndexARN (\ s a -> s{_gsidIndexARN = a});
-- | The complete key schema for the global secondary index, consisting of
-- one or more pairs of attribute names and key types ('HASH' or 'RANGE').
gsidKeySchema :: Lens' GlobalSecondaryIndexDescription (Maybe (NonEmpty KeySchemaElement))
gsidKeySchema = lens _gsidKeySchema (\ s a -> s{_gsidKeySchema = a}) . mapping _List1;
-- | Undocumented member.
gsidProjection :: Lens' GlobalSecondaryIndexDescription (Maybe Projection)
gsidProjection = lens _gsidProjection (\ s a -> s{_gsidProjection = a});
-- | The number of items in the specified index. DynamoDB updates this value
-- approximately every six hours. Recent changes might not be reflected in
-- this value.
gsidItemCount :: Lens' GlobalSecondaryIndexDescription (Maybe Integer)
gsidItemCount = lens _gsidItemCount (\ s a -> s{_gsidItemCount = a});
-- | The name of the global secondary index.
gsidIndexName :: Lens' GlobalSecondaryIndexDescription (Maybe Text)
gsidIndexName = lens _gsidIndexName (\ s a -> s{_gsidIndexName = a});
instance FromJSON GlobalSecondaryIndexDescription
where
parseJSON
= withObject "GlobalSecondaryIndexDescription"
(\ x ->
GlobalSecondaryIndexDescription' <$>
(x .:? "Backfilling") <*> (x .:? "IndexSizeBytes")
<*> (x .:? "IndexStatus")
<*> (x .:? "ProvisionedThroughput")
<*> (x .:? "IndexArn")
<*> (x .:? "KeySchema")
<*> (x .:? "Projection")
<*> (x .:? "ItemCount")
<*> (x .:? "IndexName"))
-- | Represents one of the following:
--
-- - A new global secondary index to be added to an existing table.
--
-- - New provisioned throughput parameters for an existing global
-- secondary index.
--
-- - An existing global secondary index to be removed from an existing
-- table.
--
--
-- /See:/ 'globalSecondaryIndexUpdate' smart constructor.
data GlobalSecondaryIndexUpdate = GlobalSecondaryIndexUpdate'
{ _gsiuCreate :: !(Maybe CreateGlobalSecondaryIndexAction)
, _gsiuDelete :: !(Maybe DeleteGlobalSecondaryIndexAction)
, _gsiuUpdate :: !(Maybe UpdateGlobalSecondaryIndexAction)
} deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Creates a value of 'GlobalSecondaryIndexUpdate' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'gsiuCreate'
--
-- * 'gsiuDelete'
--
-- * 'gsiuUpdate'
globalSecondaryIndexUpdate
:: GlobalSecondaryIndexUpdate
globalSecondaryIndexUpdate =
GlobalSecondaryIndexUpdate'
{ _gsiuCreate = Nothing
, _gsiuDelete = Nothing
, _gsiuUpdate = Nothing
}
-- | The parameters required for creating a global secondary index on an
-- existing table:
--
-- - 'IndexName '
--
-- - 'KeySchema '
--
-- - 'AttributeDefinitions '
--
-- - 'Projection '
--
-- - 'ProvisionedThroughput '
--
gsiuCreate :: Lens' GlobalSecondaryIndexUpdate (Maybe CreateGlobalSecondaryIndexAction)
gsiuCreate = lens _gsiuCreate (\ s a -> s{_gsiuCreate = a});
-- | The name of an existing global secondary index to be removed.
gsiuDelete :: Lens' GlobalSecondaryIndexUpdate (Maybe DeleteGlobalSecondaryIndexAction)
gsiuDelete = lens _gsiuDelete (\ s a -> s{_gsiuDelete = a});
-- | The name of an existing global secondary index, along with new
-- provisioned throughput settings to be applied to that index.
gsiuUpdate :: Lens' GlobalSecondaryIndexUpdate (Maybe UpdateGlobalSecondaryIndexAction)
gsiuUpdate = lens _gsiuUpdate (\ s a -> s{_gsiuUpdate = a});
instance ToJSON GlobalSecondaryIndexUpdate where
toJSON GlobalSecondaryIndexUpdate'{..}
= object
(catMaybes
[("Create" .=) <$> _gsiuCreate,
("Delete" .=) <$> _gsiuDelete,
("Update" .=) <$> _gsiuUpdate])
-- | Information about item collections, if any, that were affected by the
-- operation. /ItemCollectionMetrics/ is only returned if the request asked
-- for it. If the table does not have any local secondary indexes, this
-- information is not returned in the response.
--
-- /See:/ 'itemCollectionMetrics' smart constructor.
data ItemCollectionMetrics = ItemCollectionMetrics'
{ _icmItemCollectionKey :: !(Maybe (Map Text AttributeValue))
, _icmSizeEstimateRangeGB :: !(Maybe [Double])
} deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Creates a value of 'ItemCollectionMetrics' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'icmItemCollectionKey'
--
-- * 'icmSizeEstimateRangeGB'
itemCollectionMetrics
:: ItemCollectionMetrics
itemCollectionMetrics =
ItemCollectionMetrics'
{ _icmItemCollectionKey = Nothing
, _icmSizeEstimateRangeGB = Nothing
}
-- | The hash key value of the item collection. This value is the same as the
-- hash key of the item.
icmItemCollectionKey :: Lens' ItemCollectionMetrics (HashMap Text AttributeValue)
icmItemCollectionKey = lens _icmItemCollectionKey (\ s a -> s{_icmItemCollectionKey = a}) . _Default . _Map;
-- | An estimate of item collection size, in gigabytes. This value is a
-- two-element array containing a lower bound and an upper bound for the
-- estimate. The estimate includes the size of all the items in the table,
-- plus the size of all attributes projected into all of the local
-- secondary indexes on that table. Use this estimate to measure whether a
-- local secondary index is approaching its size limit.
--
-- The estimate is subject to change over time; therefore, do not rely on
-- the precision or accuracy of the estimate.
icmSizeEstimateRangeGB :: Lens' ItemCollectionMetrics [Double]
icmSizeEstimateRangeGB = lens _icmSizeEstimateRangeGB (\ s a -> s{_icmSizeEstimateRangeGB = a}) . _Default . _Coerce;
instance FromJSON ItemCollectionMetrics where
parseJSON
= withObject "ItemCollectionMetrics"
(\ x ->
ItemCollectionMetrics' <$>
(x .:? "ItemCollectionKey" .!= mempty) <*>
(x .:? "SizeEstimateRangeGB" .!= mempty))
-- | Represents /a single element/ of a key schema. A key schema specifies
-- the attributes that make up the primary key of a table, or the key
-- attributes of an index.
--
-- A /KeySchemaElement/ represents exactly one attribute of the primary
-- key. For example, a hash type primary key would be represented by one
-- /KeySchemaElement/. A hash-and-range type primary key would require one
-- /KeySchemaElement/ for the hash attribute, and another
-- /KeySchemaElement/ for the range attribute.
--
-- /See:/ 'keySchemaElement' smart constructor.
data KeySchemaElement = KeySchemaElement'
{ _kseAttributeName :: !Text
, _kseKeyType :: !KeyType
} deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Creates a value of 'KeySchemaElement' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'kseAttributeName'
--
-- * 'kseKeyType'
keySchemaElement
:: Text -- ^ 'kseAttributeName'
-> KeyType -- ^ 'kseKeyType'
-> KeySchemaElement
keySchemaElement pAttributeName_ pKeyType_ =
KeySchemaElement'
{ _kseAttributeName = pAttributeName_
, _kseKeyType = pKeyType_
}
-- | The name of a key attribute.
kseAttributeName :: Lens' KeySchemaElement Text
kseAttributeName = lens _kseAttributeName (\ s a -> s{_kseAttributeName = a});
-- | The role that this key attribute will assume: 'HASH' or 'RANGE'.
kseKeyType :: Lens' KeySchemaElement KeyType
kseKeyType = lens _kseKeyType (\ s a -> s{_kseKeyType = a});
instance FromJSON KeySchemaElement where
parseJSON
= withObject "KeySchemaElement"
(\ x ->
KeySchemaElement' <$>
(x .: "AttributeName") <*> (x .: "KeyType"))
instance ToJSON KeySchemaElement where
toJSON KeySchemaElement'{..}
= object
(catMaybes
[Just ("AttributeName" .= _kseAttributeName),
Just ("KeyType" .= _kseKeyType)])
-- | Represents a set of primary keys and, for each key, the attributes to
-- retrieve from the table.
--
-- For each primary key, you must provide /all/ of the key attributes. For
-- example, with a hash type primary key, you only need to provide the hash
-- attribute. For a hash-and-range type primary key, you must provide
-- /both/ the hash attribute and the range attribute.
--
-- /See:/ 'keysAndAttributes' smart constructor.
data KeysAndAttributes = KeysAndAttributes'
{ _kaaProjectionExpression :: !(Maybe Text)
, _kaaAttributesToGet :: !(Maybe (List1 Text))
, _kaaExpressionAttributeNames :: !(Maybe (Map Text Text))
, _kaaConsistentRead :: !(Maybe Bool)
, _kaaKeys :: !(List1 (Map Text AttributeValue))
} deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Creates a value of 'KeysAndAttributes' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'kaaProjectionExpression'
--
-- * 'kaaAttributesToGet'
--
-- * 'kaaExpressionAttributeNames'
--
-- * 'kaaConsistentRead'
--
-- * 'kaaKeys'
keysAndAttributes
:: NonEmpty (HashMap Text AttributeValue) -- ^ 'kaaKeys'
-> KeysAndAttributes
keysAndAttributes pKeys_ =
KeysAndAttributes'
{ _kaaProjectionExpression = Nothing
, _kaaAttributesToGet = Nothing
, _kaaExpressionAttributeNames = Nothing
, _kaaConsistentRead = Nothing
, _kaaKeys = _List1 # pKeys_
}
-- | A string that identifies one or more attributes to retrieve from the
-- table. These attributes can include scalars, sets, or elements of a JSON
-- document. The attributes in the /ProjectionExpression/ must be separated
-- by commas.
--
-- If no attribute names are specified, then all attributes will be
-- returned. If any of the requested attributes are not found, they will
-- not appear in the result.
--
-- For more information, see
-- <http://docs.aws.amazon.com/amazondynamodb/latest/developerguide/Expressions.AccessingItemAttributes.html Accessing Item Attributes>
-- in the /Amazon DynamoDB Developer Guide/.
--
-- /ProjectionExpression/ replaces the legacy /AttributesToGet/ parameter.
kaaProjectionExpression :: Lens' KeysAndAttributes (Maybe Text)
kaaProjectionExpression = lens _kaaProjectionExpression (\ s a -> s{_kaaProjectionExpression = a});
-- | One or more attributes to retrieve from the table or index. If no
-- attribute names are specified then all attributes will be returned. If
-- any of the specified attributes are not found, they will not appear in
-- the result.
kaaAttributesToGet :: Lens' KeysAndAttributes (Maybe (NonEmpty Text))
kaaAttributesToGet = lens _kaaAttributesToGet (\ s a -> s{_kaaAttributesToGet = a}) . mapping _List1;
-- | One or more substitution tokens for attribute names in an expression.
-- The following are some use cases for using /ExpressionAttributeNames/:
--
-- - To access an attribute whose name conflicts with a DynamoDB reserved
-- word.
--
-- - To create a placeholder for repeating occurrences of an attribute
-- name in an expression.
--
-- - To prevent special characters in an attribute name from being
-- misinterpreted in an expression.
--
-- Use the __#__ character in an expression to dereference an attribute
-- name. For example, consider the following attribute name:
--
-- - 'Percentile'
--
-- The name of this attribute conflicts with a reserved word, so it cannot
-- be used directly in an expression. (For the complete list of reserved
-- words, see
-- <http://docs.aws.amazon.com/amazondynamodb/latest/developerguide/ReservedWords.html Reserved Words>
-- in the /Amazon DynamoDB Developer Guide/). To work around this, you
-- could specify the following for /ExpressionAttributeNames/:
--
-- - '{\"#P\":\"Percentile\"}'
--
-- You could then use this substitution in an expression, as in this
-- example:
--
-- - '#P = :val'
--
-- Tokens that begin with the __:__ character are /expression attribute
-- values/, which are placeholders for the actual value at runtime.
--
-- For more information on expression attribute names, see
-- <http://docs.aws.amazon.com/amazondynamodb/latest/developerguide/Expressions.AccessingItemAttributes.html Accessing Item Attributes>
-- in the /Amazon DynamoDB Developer Guide/.
kaaExpressionAttributeNames :: Lens' KeysAndAttributes (HashMap Text Text)
kaaExpressionAttributeNames = lens _kaaExpressionAttributeNames (\ s a -> s{_kaaExpressionAttributeNames = a}) . _Default . _Map;
-- | The consistency of a read operation. If set to 'true', then a strongly
-- consistent read is used; otherwise, an eventually consistent read is
-- used.
kaaConsistentRead :: Lens' KeysAndAttributes (Maybe Bool)
kaaConsistentRead = lens _kaaConsistentRead (\ s a -> s{_kaaConsistentRead = a});
-- | The primary key attribute values that define the items and the
-- attributes associated with the items.
kaaKeys :: Lens' KeysAndAttributes (NonEmpty (HashMap Text AttributeValue))
kaaKeys = lens _kaaKeys (\ s a -> s{_kaaKeys = a}) . _List1;
instance FromJSON KeysAndAttributes where
parseJSON
= withObject "KeysAndAttributes"
(\ x ->
KeysAndAttributes' <$>
(x .:? "ProjectionExpression") <*>
(x .:? "AttributesToGet")
<*> (x .:? "ExpressionAttributeNames" .!= mempty)
<*> (x .:? "ConsistentRead")
<*> (x .: "Keys"))
instance ToJSON KeysAndAttributes where
toJSON KeysAndAttributes'{..}
= object
(catMaybes
[("ProjectionExpression" .=) <$>
_kaaProjectionExpression,
("AttributesToGet" .=) <$> _kaaAttributesToGet,
("ExpressionAttributeNames" .=) <$>
_kaaExpressionAttributeNames,
("ConsistentRead" .=) <$> _kaaConsistentRead,
Just ("Keys" .= _kaaKeys)])
-- | Represents the properties of a local secondary index.
--
-- /See:/ 'localSecondaryIndex' smart constructor.
data LocalSecondaryIndex = LocalSecondaryIndex'
{ _lsiIndexName :: !Text
, _lsiKeySchema :: !(List1 KeySchemaElement)
, _lsiProjection :: !Projection
} deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Creates a value of 'LocalSecondaryIndex' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'lsiIndexName'
--
-- * 'lsiKeySchema'
--
-- * 'lsiProjection'
localSecondaryIndex
:: Text -- ^ 'lsiIndexName'
-> NonEmpty KeySchemaElement -- ^ 'lsiKeySchema'
-> Projection -- ^ 'lsiProjection'
-> LocalSecondaryIndex
localSecondaryIndex pIndexName_ pKeySchema_ pProjection_ =
LocalSecondaryIndex'
{ _lsiIndexName = pIndexName_
, _lsiKeySchema = _List1 # pKeySchema_
, _lsiProjection = pProjection_
}
-- | The name of the local secondary index. The name must be unique among all
-- other indexes on this table.
lsiIndexName :: Lens' LocalSecondaryIndex Text
lsiIndexName = lens _lsiIndexName (\ s a -> s{_lsiIndexName = a});
-- | The complete key schema for the local secondary index, consisting of one
-- or more pairs of attribute names and key types ('HASH' or 'RANGE').
lsiKeySchema :: Lens' LocalSecondaryIndex (NonEmpty KeySchemaElement)
lsiKeySchema = lens _lsiKeySchema (\ s a -> s{_lsiKeySchema = a}) . _List1;
-- | Undocumented member.
lsiProjection :: Lens' LocalSecondaryIndex Projection
lsiProjection = lens _lsiProjection (\ s a -> s{_lsiProjection = a});
instance ToJSON LocalSecondaryIndex where
toJSON LocalSecondaryIndex'{..}
= object
(catMaybes
[Just ("IndexName" .= _lsiIndexName),
Just ("KeySchema" .= _lsiKeySchema),
Just ("Projection" .= _lsiProjection)])
-- | Represents the properties of a local secondary index.
--
-- /See:/ 'localSecondaryIndexDescription' smart constructor.
data LocalSecondaryIndexDescription = LocalSecondaryIndexDescription'
{ _lsidIndexSizeBytes :: !(Maybe Integer)
, _lsidIndexARN :: !(Maybe Text)
, _lsidKeySchema :: !(Maybe (List1 KeySchemaElement))
, _lsidProjection :: !(Maybe Projection)
, _lsidItemCount :: !(Maybe Integer)
, _lsidIndexName :: !(Maybe Text)
} deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Creates a value of 'LocalSecondaryIndexDescription' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'lsidIndexSizeBytes'
--
-- * 'lsidIndexARN'
--
-- * 'lsidKeySchema'
--
-- * 'lsidProjection'
--
-- * 'lsidItemCount'
--
-- * 'lsidIndexName'
localSecondaryIndexDescription
:: LocalSecondaryIndexDescription
localSecondaryIndexDescription =
LocalSecondaryIndexDescription'
{ _lsidIndexSizeBytes = Nothing
, _lsidIndexARN = Nothing
, _lsidKeySchema = Nothing
, _lsidProjection = Nothing
, _lsidItemCount = Nothing
, _lsidIndexName = Nothing
}
-- | The total size of the specified index, in bytes. DynamoDB updates this
-- value approximately every six hours. Recent changes might not be
-- reflected in this value.
lsidIndexSizeBytes :: Lens' LocalSecondaryIndexDescription (Maybe Integer)
lsidIndexSizeBytes = lens _lsidIndexSizeBytes (\ s a -> s{_lsidIndexSizeBytes = a});
-- | The Amazon Resource Name (ARN) that uniquely identifies the index.
lsidIndexARN :: Lens' LocalSecondaryIndexDescription (Maybe Text)
lsidIndexARN = lens _lsidIndexARN (\ s a -> s{_lsidIndexARN = a});
-- | The complete index key schema, which consists of one or more pairs of
-- attribute names and key types ('HASH' or 'RANGE').
lsidKeySchema :: Lens' LocalSecondaryIndexDescription (Maybe (NonEmpty KeySchemaElement))
lsidKeySchema = lens _lsidKeySchema (\ s a -> s{_lsidKeySchema = a}) . mapping _List1;
-- | Undocumented member.
lsidProjection :: Lens' LocalSecondaryIndexDescription (Maybe Projection)
lsidProjection = lens _lsidProjection (\ s a -> s{_lsidProjection = a});
-- | The number of items in the specified index. DynamoDB updates this value
-- approximately every six hours. Recent changes might not be reflected in
-- this value.
lsidItemCount :: Lens' LocalSecondaryIndexDescription (Maybe Integer)
lsidItemCount = lens _lsidItemCount (\ s a -> s{_lsidItemCount = a});
-- | Represents the name of the local secondary index.
lsidIndexName :: Lens' LocalSecondaryIndexDescription (Maybe Text)
lsidIndexName = lens _lsidIndexName (\ s a -> s{_lsidIndexName = a});
instance FromJSON LocalSecondaryIndexDescription
where
parseJSON
= withObject "LocalSecondaryIndexDescription"
(\ x ->
LocalSecondaryIndexDescription' <$>
(x .:? "IndexSizeBytes") <*> (x .:? "IndexArn") <*>
(x .:? "KeySchema")
<*> (x .:? "Projection")
<*> (x .:? "ItemCount")
<*> (x .:? "IndexName"))
-- | Represents attributes that are copied (projected) from the table into an
-- index. These are in addition to the primary key attributes and index key
-- attributes, which are automatically projected.
--
-- /See:/ 'projection' smart constructor.
data Projection = Projection'
{ _pProjectionType :: !(Maybe ProjectionType)
, _pNonKeyAttributes :: !(Maybe (List1 Text))
} deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Creates a value of 'Projection' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'pProjectionType'
--
-- * 'pNonKeyAttributes'
projection
:: Projection
projection =
Projection'
{ _pProjectionType = Nothing
, _pNonKeyAttributes = Nothing
}
-- | The set of attributes that are projected into the index:
--
-- - 'KEYS_ONLY' - Only the index and primary keys are projected into the
-- index.
--
-- - 'INCLUDE' - Only the specified table attributes are projected into
-- the index. The list of projected attributes are in
-- /NonKeyAttributes/.
--
-- - 'ALL' - All of the table attributes are projected into the index.
--
pProjectionType :: Lens' Projection (Maybe ProjectionType)
pProjectionType = lens _pProjectionType (\ s a -> s{_pProjectionType = a});
-- | Represents the non-key attribute names which will be projected into the
-- index.
--
-- For local secondary indexes, the total count of /NonKeyAttributes/
-- summed across all of the local secondary indexes, must not exceed 20. If
-- you project the same attribute into two different indexes, this counts
-- as two distinct attributes when determining the total.
pNonKeyAttributes :: Lens' Projection (Maybe (NonEmpty Text))
pNonKeyAttributes = lens _pNonKeyAttributes (\ s a -> s{_pNonKeyAttributes = a}) . mapping _List1;
instance FromJSON Projection where
parseJSON
= withObject "Projection"
(\ x ->
Projection' <$>
(x .:? "ProjectionType") <*>
(x .:? "NonKeyAttributes"))
instance ToJSON Projection where
toJSON Projection'{..}
= object
(catMaybes
[("ProjectionType" .=) <$> _pProjectionType,
("NonKeyAttributes" .=) <$> _pNonKeyAttributes])
-- | Represents the provisioned throughput settings for a specified table or
-- index. The settings can be modified using the /UpdateTable/ operation.
--
-- For current minimum and maximum provisioned throughput values, see
-- <http://docs.aws.amazon.com/amazondynamodb/latest/developerguide/Limits.html Limits>
-- in the /Amazon DynamoDB Developer Guide/.
--
-- /See:/ 'provisionedThroughput' smart constructor.
data ProvisionedThroughput = ProvisionedThroughput'
{ _ptReadCapacityUnits :: !Nat
, _ptWriteCapacityUnits :: !Nat
} deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Creates a value of 'ProvisionedThroughput' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'ptReadCapacityUnits'
--
-- * 'ptWriteCapacityUnits'
provisionedThroughput
:: Natural -- ^ 'ptReadCapacityUnits'
-> Natural -- ^ 'ptWriteCapacityUnits'
-> ProvisionedThroughput
provisionedThroughput pReadCapacityUnits_ pWriteCapacityUnits_ =
ProvisionedThroughput'
{ _ptReadCapacityUnits = _Nat # pReadCapacityUnits_
, _ptWriteCapacityUnits = _Nat # pWriteCapacityUnits_
}
-- | The maximum number of strongly consistent reads consumed per second
-- before DynamoDB returns a /ThrottlingException/. For more information,
-- see
-- <http://docs.aws.amazon.com/amazondynamodb/latest/developerguide/WorkingWithTables.html#ProvisionedThroughput Specifying Read and Write Requirements>
-- in the /Amazon DynamoDB Developer Guide/.
ptReadCapacityUnits :: Lens' ProvisionedThroughput Natural
ptReadCapacityUnits = lens _ptReadCapacityUnits (\ s a -> s{_ptReadCapacityUnits = a}) . _Nat;
-- | The maximum number of writes consumed per second before DynamoDB returns
-- a /ThrottlingException/. For more information, see
-- <http://docs.aws.amazon.com/amazondynamodb/latest/developerguide/WorkingWithTables.html#ProvisionedThroughput Specifying Read and Write Requirements>
-- in the /Amazon DynamoDB Developer Guide/.
ptWriteCapacityUnits :: Lens' ProvisionedThroughput Natural
ptWriteCapacityUnits = lens _ptWriteCapacityUnits (\ s a -> s{_ptWriteCapacityUnits = a}) . _Nat;
instance ToJSON ProvisionedThroughput where
toJSON ProvisionedThroughput'{..}
= object
(catMaybes
[Just ("ReadCapacityUnits" .= _ptReadCapacityUnits),
Just
("WriteCapacityUnits" .= _ptWriteCapacityUnits)])
-- | Represents the provisioned throughput settings for the table, consisting
-- of read and write capacity units, along with data about increases and
-- decreases.
--
-- /See:/ 'provisionedThroughputDescription' smart constructor.
data ProvisionedThroughputDescription = ProvisionedThroughputDescription'
{ _ptdReadCapacityUnits :: !(Maybe Nat)
, _ptdLastDecreaseDateTime :: !(Maybe POSIX)
, _ptdWriteCapacityUnits :: !(Maybe Nat)
, _ptdNumberOfDecreasesToday :: !(Maybe Nat)
, _ptdLastIncreaseDateTime :: !(Maybe POSIX)
} deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Creates a value of 'ProvisionedThroughputDescription' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'ptdReadCapacityUnits'
--
-- * 'ptdLastDecreaseDateTime'
--
-- * 'ptdWriteCapacityUnits'
--
-- * 'ptdNumberOfDecreasesToday'
--
-- * 'ptdLastIncreaseDateTime'
provisionedThroughputDescription
:: ProvisionedThroughputDescription
provisionedThroughputDescription =
ProvisionedThroughputDescription'
{ _ptdReadCapacityUnits = Nothing
, _ptdLastDecreaseDateTime = Nothing
, _ptdWriteCapacityUnits = Nothing
, _ptdNumberOfDecreasesToday = Nothing
, _ptdLastIncreaseDateTime = Nothing
}
-- | The maximum number of strongly consistent reads consumed per second
-- before DynamoDB returns a /ThrottlingException/. Eventually consistent
-- reads require less effort than strongly consistent reads, so a setting
-- of 50 /ReadCapacityUnits/ per second provides 100 eventually consistent
-- /ReadCapacityUnits/ per second.
ptdReadCapacityUnits :: Lens' ProvisionedThroughputDescription (Maybe Natural)
ptdReadCapacityUnits = lens _ptdReadCapacityUnits (\ s a -> s{_ptdReadCapacityUnits = a}) . mapping _Nat;
-- | The date and time of the last provisioned throughput decrease for this
-- table.
ptdLastDecreaseDateTime :: Lens' ProvisionedThroughputDescription (Maybe UTCTime)
ptdLastDecreaseDateTime = lens _ptdLastDecreaseDateTime (\ s a -> s{_ptdLastDecreaseDateTime = a}) . mapping _Time;
-- | The maximum number of writes consumed per second before DynamoDB returns
-- a /ThrottlingException/.
ptdWriteCapacityUnits :: Lens' ProvisionedThroughputDescription (Maybe Natural)
ptdWriteCapacityUnits = lens _ptdWriteCapacityUnits (\ s a -> s{_ptdWriteCapacityUnits = a}) . mapping _Nat;
-- | The number of provisioned throughput decreases for this table during
-- this UTC calendar day. For current maximums on provisioned throughput
-- decreases, see
-- <http://docs.aws.amazon.com/amazondynamodb/latest/developerguide/Limits.html Limits>
-- in the /Amazon DynamoDB Developer Guide/.
ptdNumberOfDecreasesToday :: Lens' ProvisionedThroughputDescription (Maybe Natural)
ptdNumberOfDecreasesToday = lens _ptdNumberOfDecreasesToday (\ s a -> s{_ptdNumberOfDecreasesToday = a}) . mapping _Nat;
-- | The date and time of the last provisioned throughput increase for this
-- table.
ptdLastIncreaseDateTime :: Lens' ProvisionedThroughputDescription (Maybe UTCTime)
ptdLastIncreaseDateTime = lens _ptdLastIncreaseDateTime (\ s a -> s{_ptdLastIncreaseDateTime = a}) . mapping _Time;
instance FromJSON ProvisionedThroughputDescription
where
parseJSON
= withObject "ProvisionedThroughputDescription"
(\ x ->
ProvisionedThroughputDescription' <$>
(x .:? "ReadCapacityUnits") <*>
(x .:? "LastDecreaseDateTime")
<*> (x .:? "WriteCapacityUnits")
<*> (x .:? "NumberOfDecreasesToday")
<*> (x .:? "LastIncreaseDateTime"))
-- | Represents a request to perform a /PutItem/ operation on an item.
--
-- /See:/ 'putRequest' smart constructor.
newtype PutRequest = PutRequest'
{ _prItem :: Map Text AttributeValue
} deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Creates a value of 'PutRequest' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'prItem'
putRequest
:: PutRequest
putRequest =
PutRequest'
{ _prItem = mempty
}
-- | A map of attribute name to attribute values, representing the primary
-- key of an item to be processed by /PutItem/. All of the table\'s primary
-- key attributes must be specified, and their data types must match those
-- of the table\'s key schema. If any attributes are present in the item
-- which are part of an index key schema for the table, their types must
-- match the index key schema.
prItem :: Lens' PutRequest (HashMap Text AttributeValue)
prItem = lens _prItem (\ s a -> s{_prItem = a}) . _Map;
instance FromJSON PutRequest where
parseJSON
= withObject "PutRequest"
(\ x -> PutRequest' <$> (x .:? "Item" .!= mempty))
instance ToJSON PutRequest where
toJSON PutRequest'{..}
= object (catMaybes [Just ("Item" .= _prItem)])
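-- A sketch of a 'PutRequest' carrying a complete item. 'attributeValue', 'avS'
-- and 'avN' are assumed from the AttributeValue section earlier in this
-- module; HashMap construction and the lens operators are assumed imports.
--
-- > import qualified Data.HashMap.Strict as HashMap
-- >
-- > putAlice :: PutRequest
-- > putAlice = putRequest & prItem .~ HashMap.fromList
-- >     [ ("UserId", attributeValue & avS ?~ "alice")
-- >     , ("Score",  attributeValue & avN ?~ "42")
-- >     ]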
-- | Represents the DynamoDB Streams configuration for a table in DynamoDB.
--
-- /See:/ 'streamSpecification' smart constructor.
data StreamSpecification = StreamSpecification'
{ _ssStreamViewType :: !(Maybe StreamViewType)
, _ssStreamEnabled :: !(Maybe Bool)
} deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Creates a value of 'StreamSpecification' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'ssStreamViewType'
--
-- * 'ssStreamEnabled'
streamSpecification
:: StreamSpecification
streamSpecification =
StreamSpecification'
{ _ssStreamViewType = Nothing
, _ssStreamEnabled = Nothing
}
-- | The DynamoDB Streams settings for the table. These settings consist of:
--
-- - /StreamEnabled/ - Indicates whether DynamoDB Streams is enabled
-- (true) or disabled (false) on the table.
--
-- - /StreamViewType/ - When an item in the table is modified,
-- /StreamViewType/ determines what information is written to the
-- stream for this table. Valid values for /StreamViewType/ are:
--
-- - /KEYS_ONLY/ - Only the key attributes of the modified item are
-- written to the stream.
--
-- - /NEW_IMAGE/ - The entire item, as it appears after it was
-- modified, is written to the stream.
--
-- - /OLD_IMAGE/ - The entire item, as it appeared before it was
-- modified, is written to the stream.
--
-- - /NEW_AND_OLD_IMAGES/ - Both the new and the old item images of
-- the item are written to the stream.
--
ssStreamViewType :: Lens' StreamSpecification (Maybe StreamViewType)
ssStreamViewType = lens _ssStreamViewType (\ s a -> s{_ssStreamViewType = a});
-- | Indicates whether DynamoDB Streams is enabled (true) or disabled (false)
-- on the table.
ssStreamEnabled :: Lens' StreamSpecification (Maybe Bool)
ssStreamEnabled = lens _ssStreamEnabled (\ s a -> s{_ssStreamEnabled = a});
instance FromJSON StreamSpecification where
parseJSON
= withObject "StreamSpecification"
(\ x ->
StreamSpecification' <$>
(x .:? "StreamViewType") <*> (x .:? "StreamEnabled"))
instance ToJSON StreamSpecification where
toJSON StreamSpecification'{..}
= object
(catMaybes
[("StreamViewType" .=) <$> _ssStreamViewType,
("StreamEnabled" .=) <$> _ssStreamEnabled])
-- | Represents the properties of a table.
--
-- /See:/ 'tableDescription' smart constructor.
data TableDescription = TableDescription'
{ _tdTableSizeBytes :: !(Maybe Integer)
, _tdAttributeDefinitions :: !(Maybe [AttributeDefinition])
, _tdLatestStreamARN :: !(Maybe Text)
, _tdProvisionedThroughput :: !(Maybe ProvisionedThroughputDescription)
, _tdTableStatus :: !(Maybe TableStatus)
, _tdTableARN :: !(Maybe Text)
, _tdKeySchema :: !(Maybe (List1 KeySchemaElement))
, _tdGlobalSecondaryIndexes :: !(Maybe [GlobalSecondaryIndexDescription])
, _tdLatestStreamLabel :: !(Maybe Text)
, _tdLocalSecondaryIndexes :: !(Maybe [LocalSecondaryIndexDescription])
, _tdCreationDateTime :: !(Maybe POSIX)
, _tdItemCount :: !(Maybe Integer)
, _tdTableName :: !(Maybe Text)
, _tdStreamSpecification :: !(Maybe StreamSpecification)
} deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Creates a value of 'TableDescription' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'tdTableSizeBytes'
--
-- * 'tdAttributeDefinitions'
--
-- * 'tdLatestStreamARN'
--
-- * 'tdProvisionedThroughput'
--
-- * 'tdTableStatus'
--
-- * 'tdTableARN'
--
-- * 'tdKeySchema'
--
-- * 'tdGlobalSecondaryIndexes'
--
-- * 'tdLatestStreamLabel'
--
-- * 'tdLocalSecondaryIndexes'
--
-- * 'tdCreationDateTime'
--
-- * 'tdItemCount'
--
-- * 'tdTableName'
--
-- * 'tdStreamSpecification'
tableDescription
:: TableDescription
tableDescription =
TableDescription'
{ _tdTableSizeBytes = Nothing
, _tdAttributeDefinitions = Nothing
, _tdLatestStreamARN = Nothing
, _tdProvisionedThroughput = Nothing
, _tdTableStatus = Nothing
, _tdTableARN = Nothing
, _tdKeySchema = Nothing
, _tdGlobalSecondaryIndexes = Nothing
, _tdLatestStreamLabel = Nothing
, _tdLocalSecondaryIndexes = Nothing
, _tdCreationDateTime = Nothing
, _tdItemCount = Nothing
, _tdTableName = Nothing
, _tdStreamSpecification = Nothing
}
-- | The total size of the specified table, in bytes. DynamoDB updates this
-- value approximately every six hours. Recent changes might not be
-- reflected in this value.
tdTableSizeBytes :: Lens' TableDescription (Maybe Integer)
tdTableSizeBytes = lens _tdTableSizeBytes (\ s a -> s{_tdTableSizeBytes = a});
-- | An array of /AttributeDefinition/ objects. Each of these objects
-- describes one attribute in the table and index key schema.
--
-- Each /AttributeDefinition/ object in this array is composed of:
--
-- - /AttributeName/ - The name of the attribute.
--
-- - /AttributeType/ - The data type for the attribute.
--
tdAttributeDefinitions :: Lens' TableDescription [AttributeDefinition]
tdAttributeDefinitions = lens _tdAttributeDefinitions (\ s a -> s{_tdAttributeDefinitions = a}) . _Default . _Coerce;
-- | The Amazon Resource Name (ARN) that uniquely identifies the latest
-- stream for this table.
tdLatestStreamARN :: Lens' TableDescription (Maybe Text)
tdLatestStreamARN = lens _tdLatestStreamARN (\ s a -> s{_tdLatestStreamARN = a});
-- | The provisioned throughput settings for the table, consisting of read
-- and write capacity units, along with data about increases and decreases.
tdProvisionedThroughput :: Lens' TableDescription (Maybe ProvisionedThroughputDescription)
tdProvisionedThroughput = lens _tdProvisionedThroughput (\ s a -> s{_tdProvisionedThroughput = a});
-- | The current state of the table:
--
-- - /CREATING/ - The table is being created.
--
-- - /UPDATING/ - The table is being updated.
--
-- - /DELETING/ - The table is being deleted.
--
-- - /ACTIVE/ - The table is ready for use.
--
tdTableStatus :: Lens' TableDescription (Maybe TableStatus)
tdTableStatus = lens _tdTableStatus (\ s a -> s{_tdTableStatus = a});
-- | The Amazon Resource Name (ARN) that uniquely identifies the table.
tdTableARN :: Lens' TableDescription (Maybe Text)
tdTableARN = lens _tdTableARN (\ s a -> s{_tdTableARN = a});
-- | The primary key structure for the table. Each /KeySchemaElement/
-- consists of:
--
-- - /AttributeName/ - The name of the attribute.
--
-- - /KeyType/ - The key type for the attribute. Can be either 'HASH' or
-- 'RANGE'.
--
-- For more information about primary keys, see
-- <http://docs.aws.amazon.com/amazondynamodb/latest/developerguide/DataModel.html#DataModelPrimaryKey Primary Key>
-- in the /Amazon DynamoDB Developer Guide/.
tdKeySchema :: Lens' TableDescription (Maybe (NonEmpty KeySchemaElement))
tdKeySchema = lens _tdKeySchema (\ s a -> s{_tdKeySchema = a}) . mapping _List1;
-- | The global secondary indexes, if any, on the table. Each index is scoped
-- to a given hash key value. Each element is composed of:
--
-- - /Backfilling/ - If true, then the index is currently in the
-- backfilling phase. Backfilling occurs only when a new global
-- secondary index is added to the table; it is the process by which
-- DynamoDB populates the new index with data from the table. (This
-- attribute does not appear for indexes that were created during a
-- /CreateTable/ operation.)
--
-- - /IndexName/ - The name of the global secondary index.
--
-- - /IndexSizeBytes/ - The total size of the global secondary index, in
-- bytes. DynamoDB updates this value approximately every six hours.
-- Recent changes might not be reflected in this value.
--
-- - /IndexStatus/ - The current status of the global secondary index:
--
-- - /CREATING/ - The index is being created.
--
-- - /UPDATING/ - The index is being updated.
--
-- - /DELETING/ - The index is being deleted.
--
-- - /ACTIVE/ - The index is ready for use.
--
-- - /ItemCount/ - The number of items in the global secondary index.
-- DynamoDB updates this value approximately every six hours. Recent
-- changes might not be reflected in this value.
--
-- - /KeySchema/ - Specifies the complete index key schema. The attribute
-- names in the key schema must be between 1 and 255 characters
-- (inclusive). The key schema must begin with the same hash key
-- attribute as the table.
--
-- - /Projection/ - Specifies attributes that are copied (projected) from
-- the table into the index. These are in addition to the primary key
-- attributes and index key attributes, which are automatically
-- projected. Each attribute specification is composed of:
--
-- - /ProjectionType/ - One of the following:
--
-- - 'KEYS_ONLY' - Only the index and primary keys are projected
-- into the index.
--
-- - 'INCLUDE' - Only the specified table attributes are
-- projected into the index. The list of projected attributes
-- are in /NonKeyAttributes/.
--
-- - 'ALL' - All of the table attributes are projected into the
-- index.
--
-- - /NonKeyAttributes/ - A list of one or more non-key attribute
-- names that are projected into the secondary index. The total
-- count of attributes provided in /NonKeyAttributes/, summed
-- across all of the secondary indexes, must not exceed 20. If you
-- project the same attribute into two different indexes, this
-- counts as two distinct attributes when determining the total.
--
-- - /ProvisionedThroughput/ - The provisioned throughput settings for
-- the global secondary index, consisting of read and write capacity
-- units, along with data about increases and decreases.
--
-- If the table is in the 'DELETING' state, no information about indexes
-- will be returned.
tdGlobalSecondaryIndexes :: Lens' TableDescription [GlobalSecondaryIndexDescription]
tdGlobalSecondaryIndexes = lens _tdGlobalSecondaryIndexes (\ s a -> s{_tdGlobalSecondaryIndexes = a}) . _Default . _Coerce;
-- | A timestamp, in ISO 8601 format, for this stream.
--
-- Note that /LatestStreamLabel/ is not a unique identifier for the stream,
-- because it is possible that a stream from another table might have the
-- same timestamp. However, the combination of the following three elements
-- is guaranteed to be unique:
--
-- - the AWS customer ID.
--
-- - the table name.
--
-- - the /StreamLabel/.
--
tdLatestStreamLabel :: Lens' TableDescription (Maybe Text)
tdLatestStreamLabel = lens _tdLatestStreamLabel (\ s a -> s{_tdLatestStreamLabel = a});
-- | Represents one or more local secondary indexes on the table. Each index
-- is scoped to a given hash key value. Tables with one or more local
-- secondary indexes are subject to an item collection size limit, where
-- the amount of data within a given item collection cannot exceed 10 GB.
-- Each element is composed of:
--
-- - /IndexName/ - The name of the local secondary index.
--
-- - /KeySchema/ - Specifies the complete index key schema. The attribute
-- names in the key schema must be between 1 and 255 characters
-- (inclusive). The key schema must begin with the same hash key
-- attribute as the table.
--
-- - /Projection/ - Specifies attributes that are copied (projected) from
-- the table into the index. These are in addition to the primary key
-- attributes and index key attributes, which are automatically
-- projected. Each attribute specification is composed of:
--
-- - /ProjectionType/ - One of the following:
--
-- - 'KEYS_ONLY' - Only the index and primary keys are projected
-- into the index.
--
-- - 'INCLUDE' - Only the specified table attributes are
-- projected into the index. The list of projected attributes
-- are in /NonKeyAttributes/.
--
-- - 'ALL' - All of the table attributes are projected into the
-- index.
--
-- - /NonKeyAttributes/ - A list of one or more non-key attribute
-- names that are projected into the secondary index. The total
-- count of attributes provided in /NonKeyAttributes/, summed
-- across all of the secondary indexes, must not exceed 20. If you
-- project the same attribute into two different indexes, this
-- counts as two distinct attributes when determining the total.
--
-- - /IndexSizeBytes/ - Represents the total size of the index, in bytes.
-- DynamoDB updates this value approximately every six hours. Recent
-- changes might not be reflected in this value.
--
-- - /ItemCount/ - Represents the number of items in the index. DynamoDB
-- updates this value approximately every six hours. Recent changes
-- might not be reflected in this value.
--
-- If the table is in the 'DELETING' state, no information about indexes
-- will be returned.
tdLocalSecondaryIndexes :: Lens' TableDescription [LocalSecondaryIndexDescription]
tdLocalSecondaryIndexes = lens _tdLocalSecondaryIndexes (\ s a -> s{_tdLocalSecondaryIndexes = a}) . _Default . _Coerce;
-- | The date and time when the table was created, in
-- <http://www.epochconverter.com/ UNIX epoch time> format.
tdCreationDateTime :: Lens' TableDescription (Maybe UTCTime)
tdCreationDateTime = lens _tdCreationDateTime (\ s a -> s{_tdCreationDateTime = a}) . mapping _Time;
-- | The number of items in the specified table. DynamoDB updates this value
-- approximately every six hours. Recent changes might not be reflected in
-- this value.
tdItemCount :: Lens' TableDescription (Maybe Integer)
tdItemCount = lens _tdItemCount (\ s a -> s{_tdItemCount = a});
-- | The name of the table.
tdTableName :: Lens' TableDescription (Maybe Text)
tdTableName = lens _tdTableName (\ s a -> s{_tdTableName = a});
-- | The current DynamoDB Streams configuration for the table.
tdStreamSpecification :: Lens' TableDescription (Maybe StreamSpecification)
tdStreamSpecification = lens _tdStreamSpecification (\ s a -> s{_tdStreamSpecification = a});
instance FromJSON TableDescription where
parseJSON
= withObject "TableDescription"
(\ x ->
TableDescription' <$>
(x .:? "TableSizeBytes") <*>
(x .:? "AttributeDefinitions" .!= mempty)
<*> (x .:? "LatestStreamArn")
<*> (x .:? "ProvisionedThroughput")
<*> (x .:? "TableStatus")
<*> (x .:? "TableArn")
<*> (x .:? "KeySchema")
<*> (x .:? "GlobalSecondaryIndexes" .!= mempty)
<*> (x .:? "LatestStreamLabel")
<*> (x .:? "LocalSecondaryIndexes" .!= mempty)
<*> (x .:? "CreationDateTime")
<*> (x .:? "ItemCount")
<*> (x .:? "TableName")
<*> (x .:? "StreamSpecification"))
-- | Represents the new provisioned throughput settings to be applied to a
-- global secondary index.
--
-- /See:/ 'updateGlobalSecondaryIndexAction' smart constructor.
data UpdateGlobalSecondaryIndexAction = UpdateGlobalSecondaryIndexAction'
{ _ugsiaIndexName :: !Text
, _ugsiaProvisionedThroughput :: !ProvisionedThroughput
} deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Creates a value of 'UpdateGlobalSecondaryIndexAction' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'ugsiaIndexName'
--
-- * 'ugsiaProvisionedThroughput'
updateGlobalSecondaryIndexAction
:: Text -- ^ 'ugsiaIndexName'
-> ProvisionedThroughput -- ^ 'ugsiaProvisionedThroughput'
-> UpdateGlobalSecondaryIndexAction
updateGlobalSecondaryIndexAction pIndexName_ pProvisionedThroughput_ =
UpdateGlobalSecondaryIndexAction'
{ _ugsiaIndexName = pIndexName_
, _ugsiaProvisionedThroughput = pProvisionedThroughput_
}
-- | The name of the global secondary index to be updated.
ugsiaIndexName :: Lens' UpdateGlobalSecondaryIndexAction Text
ugsiaIndexName = lens _ugsiaIndexName (\ s a -> s{_ugsiaIndexName = a});
-- | Undocumented member.
ugsiaProvisionedThroughput :: Lens' UpdateGlobalSecondaryIndexAction ProvisionedThroughput
ugsiaProvisionedThroughput = lens _ugsiaProvisionedThroughput (\ s a -> s{_ugsiaProvisionedThroughput = a});
instance ToJSON UpdateGlobalSecondaryIndexAction
where
toJSON UpdateGlobalSecondaryIndexAction'{..}
= object
(catMaybes
[Just ("IndexName" .= _ugsiaIndexName),
Just
("ProvisionedThroughput" .=
_ugsiaProvisionedThroughput)])
-- | Represents an operation to perform - either /DeleteItem/ or /PutItem/.
-- You can only request one of these operations, not both, in a single
-- /WriteRequest/. If you do need to perform both of these operations, you
-- will need to provide two separate /WriteRequest/ objects.
--
-- /See:/ 'writeRequest' smart constructor.
data WriteRequest = WriteRequest'
{ _wrDeleteRequest :: !(Maybe DeleteRequest)
, _wrPutRequest :: !(Maybe PutRequest)
} deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Creates a value of 'WriteRequest' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'wrDeleteRequest'
--
-- * 'wrPutRequest'
writeRequest
:: WriteRequest
writeRequest =
WriteRequest'
{ _wrDeleteRequest = Nothing
, _wrPutRequest = Nothing
}
-- | A request to perform a /DeleteItem/ operation.
wrDeleteRequest :: Lens' WriteRequest (Maybe DeleteRequest)
wrDeleteRequest = lens _wrDeleteRequest (\ s a -> s{_wrDeleteRequest = a});
-- | A request to perform a /PutItem/ operation.
wrPutRequest :: Lens' WriteRequest (Maybe PutRequest)
wrPutRequest = lens _wrPutRequest (\ s a -> s{_wrPutRequest = a});
instance FromJSON WriteRequest where
parseJSON
= withObject "WriteRequest"
(\ x ->
WriteRequest' <$>
(x .:? "DeleteRequest") <*> (x .:? "PutRequest"))
instance ToJSON WriteRequest where
toJSON WriteRequest'{..}
= object
(catMaybes
[("DeleteRequest" .=) <$> _wrDeleteRequest,
("PutRequest" .=) <$> _wrPutRequest])
| fmapfmapfmap/amazonka | amazonka-dynamodb/gen/Network/AWS/DynamoDB/Types/Product.hs | mpl-2.0 | 97,603 | 0 | 24 | 20,741 | 11,539 | 6,980 | 4,559 | 1,021 | 1 |
{-# LANGUAGE RecordWildCards, ViewPatterns, TupleSections, PatternGuards #-}
module General.Log(
Log, logCreate, logNone, logAddMessage, logAddEntry,
Summary(..), logSummary,
) where
import Control.Concurrent.Extra
import Control.Applicative
import System.IO
import Data.Time.Calendar
import Data.Time.Clock
import Numeric.Extra
import Control.Monad.Extra
import qualified Data.Set as Set
import qualified Data.Map.Strict as Map
import qualified Data.ByteString.Lazy.Char8 as LBS
import Data.Monoid
import General.Util
import Data.Maybe
import Data.List
import Data.IORef
import Prelude
data Log = Log
{logOutput :: Maybe (Var Handle)
,logCurrent :: IORef (Map.Map Day SummaryI)
,logInteresting :: String -> Bool
}
showTime :: UTCTime -> String
showTime = showUTCTime "%Y-%m-%dT%H:%M:%S%Q"
logNone :: IO Log
logNone = do ref <- newIORef Map.empty; return $ Log Nothing ref (const False)
logCreate :: Either Handle FilePath -> (String -> Bool) -> IO Log
logCreate store interesting = do
(h, old) <- case store of
Left h -> return (h, Map.empty)
Right file -> do
mp <- withFile file ReadMode $ \h -> do
src <- LBS.hGetContents h
let xs = mapMaybe (parseLogLine interesting) $ LBS.lines $ src
return $! foldl' (\mp (k,v) -> Map.alter (Just . maybe v (<> v)) k mp) Map.empty xs
(,mp) <$> openFile file AppendMode
hSetBuffering h LineBuffering
var <- newVar h
ref <- newIORef old
return $ Log (Just var) ref interesting
logAddMessage :: Log -> String -> IO ()
logAddMessage Log{..} msg = do
time <- showTime <$> getCurrentTime
whenJust logOutput $ \var -> withVar var $ \h ->
hPutStrLn h $ time ++ " - " ++ msg
logAddEntry :: Log -> String -> String -> Double -> Maybe String -> IO ()
logAddEntry Log{..} user question taken err = do
time <- getCurrentTime
let add v = atomicModifyIORef logCurrent $ \mp -> (Map.alter (Just . maybe v (<> v)) (utctDay time) mp, ())
if logInteresting question then
add $ SummaryI (Set.singleton user) 1 taken (toAverage taken) (if isJust err then 1 else 0)
else if isJust err then
add mempty{iErrors=1}
else
return ()
whenJust logOutput $ \var -> withVar var $ \h ->
hPutStrLn h $ unwords $ [showTime time, user, showDP 3 taken, question] ++
maybeToList (fmap ((++) "ERROR: " . unwords . words) err)
-- Summary collapsed
data Summary = Summary
{summaryDate :: Day
,summaryUsers :: Int
,summaryUses :: Int
,summarySlowest :: Double
,summaryAverage :: Double
,summaryErrors :: Int
}
-- Summary accumulating
data SummaryI = SummaryI
{iUsers :: !(Set.Set String) -- number of distinct users
,iUses :: !Int -- number of uses
,iSlowest :: !Double -- slowest result
,iAverage :: !(Average Double) -- average result
,iErrors :: !Int -- number of errors
}
instance Monoid SummaryI where
mempty = SummaryI Set.empty 0 0 (toAverage 0) 0
mappend (SummaryI x1 x2 x3 x4 x5) (SummaryI y1 y2 y3 y4 y5) =
SummaryI (f x1 y1) (x2+y2) (max x3 y3) (x4 <> y4) (x5+y5)
-- more efficient union for the very common case of a single element
where f x y | Set.size x == 1 = Set.insert (head $ Set.toList x) y
| Set.size y == 1 = Set.insert (head $ Set.toList y) x
| otherwise = Set.union x y
summarize :: Day -> SummaryI -> Summary
summarize date SummaryI{..} = Summary date (Set.size iUsers) iUses iSlowest (fromAverage iAverage) iErrors
parseLogLine :: (String -> Bool) -> LBS.ByteString -> Maybe (Day, SummaryI)
parseLogLine interesting (LBS.words -> time:user:dur:query:err)
| user /= LBS.pack "-"
, Just [a, b, c] <- fmap (map fst) $ mapM LBS.readInt $ LBS.split '-' $ LBS.takeWhile (/= 'T') time
= Just (fromGregorian (fromIntegral a) b c, SummaryI
(if use then Set.singleton $ LBS.unpack user else Set.empty)
(if use then 1 else 0)
(if use then dur2 else 0)
(toAverage $ if use then dur2 else 0)
(if [LBS.pack "ERROR:"] `isPrefixOf` err then 1 else 0))
where use = interesting $ LBS.unpack query
dur2 = let s = LBS.unpack dur in fromMaybe 0 $
if '.' `elem` s then readMaybe s else (/ 1000) . intToDouble <$> readMaybe s
parseLogLine _ _ = Nothing
logSummary :: Log -> IO [Summary]
logSummary Log{..} = map (uncurry summarize) . Map.toAscList <$> readIORef logCurrent
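-- Illustrative usage sketch (the file name and query predicate are
-- arbitrary): create a file-backed log that treats queries containing
-- "map" as interesting, record a single entry, and read the per-day
-- summaries back.
--
-- > do lg <- logCreate (Right "hoogle.log") (isInfixOf "map")
-- >    logAddEntry lg "127.0.0.1" "map" 0.01 Nothing
-- >    logSummary lg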
| BartAdv/hoogle | src/General/Log.hs | bsd-3-clause | 4,539 | 0 | 26 | 1,133 | 1,679 | 877 | 802 | 107 | 7 |
{-# LANGUAGE BangPatterns #-}
-- A simple wc-like program using Data.Iteratee.
-- Demonstrates a few different ways of composing iteratees.
module Main where
import Prelude as P
import Data.Iteratee
import Data.Iteratee.Char as C
import qualified Data.Iteratee as I
import qualified Data.ByteString.Char8 as BC
import Data.Word
import Data.Char
import Data.ListLike as LL
import System.Environment
-- | An iteratee to calculate the number of characters in a stream.
-- Very basic, assumes ASCII, not particularly efficient.
numChars :: (Monad m, ListLike s el) => I.Iteratee s m Int
numChars = I.length
-- | An iteratee to calculate the number of words in a stream of Word8's.
-- This operates on a Word8 stream in order to use ByteStrings.
--
-- This function converts the stream of Word8s into a stream of words,
-- then counts the words with Data.Iteratee.length
-- This is the equivalent of "length . BC.words".
numWords :: Monad m => I.Iteratee BC.ByteString m Int
numWords = I.joinI $ enumWordsBS I.length
-- | Count the number of lines, in the same manner as numWords.
numLines :: Monad m => I.Iteratee BC.ByteString m Int
numLines = I.joinI $ enumLinesBS I.length
-- | A much more efficient numLines using the foldl' iteratee.
-- Rather than converting a stream, this simply counts newline characters.
numLines2 :: Monad m => I.Iteratee BC.ByteString m Int
numLines2 = I.foldl' step 0
where
step !acc el = if el == (fromIntegral $ ord '\n') then acc + 1 else acc
-- | Combine multiple iteratees into a single unit using "enumPair".
-- The iteratees combined with enumPair are run in parallel.
-- Any number of iteratees can be joined with multiple enumPair's.
twoIter :: Monad m => I.Iteratee BC.ByteString m (Int, Int)
twoIter = numLines2 `I.zip` numChars
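-- | An illustrative sketch: further counts can be nested with additional
-- 'I.zip's in the same way, using only the iteratees defined above.
threeIter :: Monad m => I.Iteratee BC.ByteString m (Int, (Int, Int))
threeIter = numLines2 `I.zip` (numWords `I.zip` numChars)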
main = do
f:_ <- getArgs
words <- fileDriverVBuf 65536 twoIter f
print words
| iteloo/tsuru-sample | iteratee-0.8.9.6/Examples/word.hs | bsd-3-clause | 1,862 | 0 | 11 | 324 | 346 | 194 | 152 | 26 | 2 |
{-# LANGUAGE ScopedTypeVariables #-}
module Main where
import GHC.Conc
import Control.Exception
-- Create trivial invariants using a single TVar
main = do
putStr "\nStarting\n"
x <- atomically ( newTVar 42 )
putStr "\nAdding trivially true invariant (no TVar access)\n"
atomically ( alwaysSucceeds ( return 1 ) )
putStr "\nAdding trivially true invariant (no TVar access)\n"
atomically ( always ( return True ) )
putStr "\nAdding a trivially true invariant (TVar access)\n"
atomically ( alwaysSucceeds ( readTVar x ) )
putStr "\nAdding an invariant that's false when attempted to be added\n"
Control.Exception.catch (atomically ( do writeTVar x 100
alwaysSucceeds ( do v <- readTVar x
if (v == 100) then throw (ErrorCall "URK") else return () )
writeTVar x 0 ) )
(\(e::SomeException) -> putStr ("Caught: " ++ (show e) ++ "\n"))
putStr "\nWriting to a TVar watched by a trivially true invariant\n"
atomically ( writeTVar x 17 )
putStr "\nAdding a second trivially true invariant (same TVar access)\n"
atomically ( alwaysSucceeds ( readTVar x ) )
putStr "\nWriting to a TVar watched by both trivially true invariants\n"
atomically ( writeTVar x 18 )
putStr "\nAdding a trivially false invariant (no TVar access)\n"
Control.Exception.catch (atomically ( alwaysSucceeds ( throw (ErrorCall "Exn raised in invariant") ) ) )
(\(e::SomeException) -> putStr ("Caught: " ++ (show e) ++ "\n"))
putStr "\nAdding a trivially false invariant (no TVar access)\n"
Control.Exception.catch (atomically ( always ( throw (ErrorCall "Exn raised in invariant") ) ) )
(\(e::SomeException) -> putStr ("Caught: " ++ (show e) ++ "\n"))
putStr "\nAdding a trivially false invariant (no TVar access)\n"
Control.Exception.catch (atomically ( always ( return False ) ) )
(\(e::SomeException) -> putStr ("Caught: " ++ (show e) ++ "\n"))
putStr "\nAdding a trivially false invariant (with TVar access)\n"
Control.Exception.catch (atomically (
alwaysSucceeds ( do t <- readTVar x
throw (ErrorCall "Exn raised in invariant") ) ) )
(\(e::SomeException) -> putStr ("Caught: " ++ (show e) ++ "\n"))
putStr "\nAdding a third invariant true if TVar != 42\n"
atomically ( alwaysSucceeds ( do t <- readTVar x
if (t == 42) then throw (ErrorCall "Exn raised in invariant") else return () ) )
putStr "\nViolating third invariant by setting TVar to 42\n"
Control.Exception.catch (atomically ( writeTVar x 42 ) )
(\(e::SomeException) -> putStr ("Caught: " ++ (show e) ++ "\n"))
putStr "\nChecking final TVar contents\n"
t <- atomically ( readTVar x )
putStr ("Final value = " ++ (show t) ++ "\n")
putStr "\nDone\n"
| gridaphobe/packages-stm | tests/stm060.hs | bsd-3-clause | 2,923 | 0 | 20 | 756 | 798 | 378 | 420 | 49 | 3 |
{-# LANGUAGE ParallelListComp, BangPatterns #-}
import Solver
import Graphics.Gloss
import System.Environment
import Data.Maybe
import qualified Data.Vector as V
import qualified Data.Vector.Unboxed as VU
main :: IO ()
main
= do args <- getArgs
mainWithArgs args
mainWithArgs :: [String] -> IO ()
mainWithArgs [solverName,depthStr]
 = let  -- The solver we're using to calculate the accelerations.
solver = fromMaybe (error $ unlines
[ "unknown solver " ++ show solverName
, "choose one of " ++ (show $ map fst solvers) ])
$ lookup solverName solvers
depth = read depthStr
in mainGloss depth solver 400
mainWithArgs [solverName] = mainWithArgs [solverName,"4"]
mainWithArgs _ = putStrLn "Usage: rotations <vector|vectorised> <depth>"
-- | Run the simulation in a gloss window.
mainGloss
:: Int -- ^ Depth
-> Solver -- ^ Fn to calculate accels of each point.
-> Int -- ^ Size of window.
-> IO ()
mainGloss depth solver windowSize
= let draw t
= let pts = solver depth (realToFrac t)
in Color white $ Pictures $ map drawPoint $ VU.toList pts
in animate
(InWindow "Silly" -- window name
(windowSize, windowSize) -- window size
(10, 10)) -- window position
black -- background color
draw -- fn to convert a world to a picture
pointSize = 4
drawPoint :: (Double, Double) -> Picture
drawPoint (x, y)
= Translate (realToFrac x * 50) (realToFrac y * 50)
$ ThickCircle (pointSize / 2) pointSize
| mainland/dph | dph-examples/examples/spectral/Rotation/MainGloss.hs | bsd-3-clause | 1,901 | 2 | 18 | 745 | 413 | 218 | 195 | 42 | 1 |
{-# LANGUAGE Haskell98, CPP, DeriveDataTypeable, ForeignFunctionInterface, TypeSynonymInstances #-}
{-# LINE 1 "dist/dist-sandbox-261cd265/build/Network/Socket.hs" #-}
{-# LINE 1 "Network/Socket.hsc" #-}
{-# LANGUAGE CPP, ScopedTypeVariables #-}
{-# LINE 2 "Network/Socket.hsc" #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
-----------------------------------------------------------------------------
-- |
-- Module : Network.Socket
-- Copyright : (c) The University of Glasgow 2001
-- License : BSD-style (see the file libraries/network/LICENSE)
--
-- Maintainer : [email protected]
-- Stability : provisional
-- Portability : portable
--
-- The "Network.Socket" module is for when you want full control over
-- sockets. Essentially the entire C socket API is exposed through
-- this module; in general the operations follow the behaviour of the C
-- functions of the same name (consult your favourite Unix networking book).
--
-- A higher level interface to networking operations is provided
-- through the module "Network".
--
-----------------------------------------------------------------------------
{-# LINE 24 "Network/Socket.hsc" #-}
-- In order to process this file, you need to have ccall defined.
module Network.Socket
(
-- * Types
Socket(..)
, Family(..)
, isSupportedFamily
, SocketType(..)
, isSupportedSocketType
, SockAddr(..)
, isSupportedSockAddr
, SocketStatus(..)
, HostAddress
, hostAddressToTuple
, tupleToHostAddress
{-# LINE 42 "Network/Socket.hsc" #-}
, HostAddress6
, hostAddress6ToTuple
, tupleToHostAddress6
, FlowInfo
, ScopeID
{-# LINE 48 "Network/Socket.hsc" #-}
, htonl
, ntohl
, ShutdownCmd(..)
, ProtocolNumber
, defaultProtocol
, PortNumber(..)
-- PortNumber is used non-abstractly in Network.BSD. ToDo: remove
-- this use and make the type abstract.
-- * Address operations
, HostName
, ServiceName
{-# LINE 63 "Network/Socket.hsc" #-}
, AddrInfo(..)
, AddrInfoFlag(..)
, addrInfoFlagImplemented
, defaultHints
, getAddrInfo
, NameInfoFlag(..)
, getNameInfo
{-# LINE 76 "Network/Socket.hsc" #-}
-- * Socket operations
, socket
{-# LINE 80 "Network/Socket.hsc" #-}
, socketPair
{-# LINE 82 "Network/Socket.hsc" #-}
, connect
, bind
, listen
, accept
, getPeerName
, getSocketName
{-# LINE 90 "Network/Socket.hsc" #-}
-- get the credentials of our domain socket peer.
, getPeerCred
{-# LINE 95 "Network/Socket.hsc" #-}
{-# LINE 96 "Network/Socket.hsc" #-}
, socketPort
, socketToHandle
-- ** Sending and receiving data
-- *** Sending and receiving with String
-- $sendrecv
, send
, sendTo
, recv
, recvFrom
, recvLen
-- *** Sending and receiving with a buffer
, sendBuf
, recvBuf
, sendBufTo
, recvBufFrom
-- ** Misc
, inet_addr
, inet_ntoa
, shutdown
, close
-- ** Predicates on sockets
, isConnected
, isBound
, isListening
, isReadable
, isWritable
-- * Socket options
, SocketOption(..)
, isSupportedSocketOption
, getSocketOption
, setSocketOption
-- * File descriptor transmission
{-# LINE 138 "Network/Socket.hsc" #-}
, sendFd
, recvFd
{-# LINE 142 "Network/Socket.hsc" #-}
-- * Special constants
, aNY_PORT
, iNADDR_ANY
{-# LINE 147 "Network/Socket.hsc" #-}
, iN6ADDR_ANY
{-# LINE 149 "Network/Socket.hsc" #-}
, sOMAXCONN
, sOL_SOCKET
{-# LINE 152 "Network/Socket.hsc" #-}
, sCM_RIGHTS
{-# LINE 154 "Network/Socket.hsc" #-}
, maxListenQueue
-- * Initialisation
, withSocketsDo
-- * Very low level operations
-- in case you ever want to get at the underlying file descriptor..
, fdSocket
, mkSocket
, setNonBlockIfNeeded
-- * Deprecated aliases
-- $deprecated-aliases
, bindSocket
, sClose
, sIsConnected
, sIsBound
, sIsListening
, sIsReadable
, sIsWritable
-- * Internal
-- | The following are exported ONLY for use in the BSD module and
-- should not be used anywhere else.
, packFamily
, unpackFamily
, packSocketType
) where
import Data.Bits
import Data.Functor
import Data.List (foldl')
import Data.Maybe (isJust)
import Data.Word (Word8, Word32)
import Foreign.Ptr (Ptr, castPtr, nullPtr)
import Foreign.Storable (Storable(..))
import Foreign.C.Error
import Foreign.C.String (CString, withCString, withCStringLen, peekCString, peekCStringLen)
import Foreign.C.Types (CUInt, CChar)
import Foreign.C.Types (CInt(..), CSize(..))
import Foreign.Marshal.Alloc ( alloca, allocaBytes )
import Foreign.Marshal.Array ( peekArray )
import Foreign.Marshal.Utils ( maybeWith, with )
import System.IO
import Control.Monad (liftM, when)
import Control.Concurrent.MVar
import Data.Typeable
import System.IO.Error
import GHC.Conc (threadWaitRead, threadWaitWrite)
import GHC.Conc (closeFdWith)
{-# LINE 217 "Network/Socket.hsc" #-}
{-# LINE 220 "Network/Socket.hsc" #-}
import qualified GHC.IO.Device
import GHC.IO.Handle.FD
import GHC.IO.Exception
import GHC.IO
import qualified System.Posix.Internals
import Network.Socket.Internal
import Network.Socket.Types
import Prelude -- Silence AMP warnings
-- | Either a host name e.g., @\"haskell.org\"@ or a numeric host
-- address string consisting of a dotted decimal IPv4 address or an
-- IPv6 address e.g., @\"192.168.0.1\"@.
type HostName = String
type ServiceName = String
-- ----------------------------------------------------------------------------
-- On Windows, our sockets are not put in non-blocking mode (non-blocking
-- is not supported for regular file descriptors on Windows, and it would
-- be a pain to support it only for sockets). So there are two cases:
--
-- - the threaded RTS uses safe calls for socket operations to get
-- non-blocking I/O, just like the rest of the I/O library
--
-- - with the non-threaded RTS, only some operations on sockets will be
-- non-blocking. Reads and writes go through the normal async I/O
-- system. accept() uses asyncDoProc so is non-blocking. A handful
-- of others (recvFrom, sendFd, recvFd) will block all threads - if this
-- is a problem, -threaded is the workaround.
--
-----------------------------------------------------------------------------
-- Socket types
{-# LINE 265 "Network/Socket.hsc" #-}
-- | Smart constructor for constructing a 'Socket'. It should only be
-- called once for every new file descriptor. The caller must make
-- sure that the socket is in non-blocking mode. See
-- 'setNonBlockIfNeeded'.
mkSocket :: CInt
-> Family
-> SocketType
-> ProtocolNumber
-> SocketStatus
-> IO Socket
mkSocket fd fam sType pNum stat = do
mStat <- newMVar stat
withSocketsDo $ return ()
return (MkSocket fd fam sType pNum mStat)
fdSocket :: Socket -> CInt
fdSocket (MkSocket fd _ _ _ _) = fd
-- | This is the default protocol for a given service.
defaultProtocol :: ProtocolNumber
defaultProtocol = 0
-----------------------------------------------------------------------------
-- SockAddr
instance Show SockAddr where
{-# LINE 294 "Network/Socket.hsc" #-}
showsPrec _ (SockAddrUnix str) = showString str
{-# LINE 296 "Network/Socket.hsc" #-}
showsPrec _ (SockAddrInet port ha)
= showString (unsafePerformIO (inet_ntoa ha))
. showString ":"
. shows port
{-# LINE 301 "Network/Socket.hsc" #-}
showsPrec _ addr@(SockAddrInet6 port _ _ _)
= showChar '['
. showString (unsafePerformIO $
fst `liftM` getNameInfo [NI_NUMERICHOST] True False addr >>=
maybe (fail "showsPrec: impossible internal error") return)
. showString "]:"
. shows port
{-# LINE 309 "Network/Socket.hsc" #-}
{-# LINE 310 "Network/Socket.hsc" #-}
showsPrec _ (SockAddrCan ifidx) = shows ifidx
{-# LINE 312 "Network/Socket.hsc" #-}
-----------------------------------------------------------------------------
-- Connection Functions
-- In the following connection and binding primitives, the names of
-- the equivalent C functions have been preserved where possible. It
-- should be noted that some of these names used in the C library,
-- \tr{bind} in particular, have a different meaning to many Haskell
-- programmers and have thus been renamed by appending the prefix
-- Socket.
-- | Create a new socket using the given address family, socket type
-- and protocol number. The address family is usually 'AF_INET',
-- 'AF_INET6', or 'AF_UNIX'. The socket type is usually 'Stream' or
-- 'Datagram'. The protocol number is usually 'defaultProtocol'.
-- If 'AF_INET6' is used and the socket type is 'Stream' or 'Datagram',
-- the 'IPv6Only' socket option is set to 0 so that both IPv4 and IPv6
-- can be handled with one socket.
--
-- >>> let hints = defaultHints { addrFlags = [AI_NUMERICHOST, AI_NUMERICSERV], addrSocketType = Stream }
-- >>> addr:_ <- getAddrInfo (Just hints) (Just "127.0.0.1") (Just "5000")
-- >>> sock@(MkSocket _ fam stype _ _) <- socket (addrFamily addr) (addrSocketType addr) (addrProtocol addr)
-- >>> fam
-- AF_INET
-- >>> stype
-- Stream
-- >>> bind sock (addrAddress addr)
-- >>> getSocketName sock
-- 127.0.0.1:5000
socket :: Family -- Family Name (usually AF_INET)
-> SocketType -- Socket Type (usually Stream)
-> ProtocolNumber -- Protocol Number (getProtocolByName to find value)
-> IO Socket -- Unconnected Socket
socket family stype protocol = do
c_stype <- packSocketTypeOrThrow "socket" stype
fd <- throwSocketErrorIfMinus1Retry "Network.Socket.socket" $
c_socket (packFamily family) c_stype protocol
setNonBlockIfNeeded fd
socket_status <- newMVar NotConnected
withSocketsDo $ return ()
let sock = MkSocket fd family stype protocol socket_status
{-# LINE 354 "Network/Socket.hsc" #-}
-- The default value of the IPv6Only option is platform specific,
-- so we explicitly set it to 0 to provide a common default.
{-# LINE 362 "Network/Socket.hsc" #-}
when (family == AF_INET6 && (stype == Stream || stype == Datagram)) $
setSocketOption sock IPv6Only 0 `onException` close sock
{-# LINE 365 "Network/Socket.hsc" #-}
{-# LINE 366 "Network/Socket.hsc" #-}
return sock
-- | Build a pair of connected socket objects using the given address
-- family, socket type, and protocol number. Address family, socket
-- type, and protocol number are as for the 'socket' function above.
-- Availability: Unix.
{-# LINE 373 "Network/Socket.hsc" #-}
socketPair :: Family -- Family Name (usually AF_INET or AF_INET6)
-> SocketType -- Socket Type (usually Stream)
-> ProtocolNumber -- Protocol Number
-> IO (Socket, Socket) -- unnamed and connected.
socketPair family stype protocol = do
allocaBytes (2 * sizeOf (1 :: CInt)) $ \ fdArr -> do
c_stype <- packSocketTypeOrThrow "socketPair" stype
_rc <- throwSocketErrorIfMinus1Retry "Network.Socket.socketpair" $
c_socketpair (packFamily family) c_stype protocol fdArr
[fd1,fd2] <- peekArray 2 fdArr
s1 <- mkNonBlockingSocket fd1
s2 <- mkNonBlockingSocket fd2
return (s1,s2)
where
mkNonBlockingSocket fd = do
setNonBlockIfNeeded fd
stat <- newMVar Connected
withSocketsDo $ return ()
return (MkSocket fd family stype protocol stat)
foreign import ccall unsafe "socketpair"
c_socketpair :: CInt -> CInt -> CInt -> Ptr CInt -> IO CInt
{-# LINE 396 "Network/Socket.hsc" #-}
-- | Set the socket to nonblocking, if applicable to this platform.
--
-- Depending on the platform this is required when using sockets from file
-- descriptors that are passed in through 'recvFd' or other means.
setNonBlockIfNeeded :: CInt -> IO ()
setNonBlockIfNeeded fd =
System.Posix.Internals.setNonBlockingFD fd True
-----------------------------------------------------------------------------
-- Binding a socket
-- | Bind the socket to an address. The socket must not already be
-- bound. The 'Family' passed to @bind@ must be the
-- same as that passed to 'socket'. If the special port number
-- 'aNY_PORT' is passed then the system assigns the next available
-- unused port.
bind :: Socket -- Unconnected Socket
-> SockAddr -- Address to Bind to
-> IO ()
bind (MkSocket s _family _stype _protocol socketStatus) addr = do
modifyMVar_ socketStatus $ \ status -> do
if status /= NotConnected
then
ioError $ userError $
"Network.Socket.bind: can't bind to socket with status " ++ show status
else do
withSockAddr addr $ \p_addr sz -> do
_status <- throwSocketErrorIfMinus1Retry "Network.Socket.bind" $
c_bind s p_addr (fromIntegral sz)
return Bound
-----------------------------------------------------------------------------
-- Connecting a socket
-- | Connect to a remote socket at address.
connect :: Socket -- Unconnected Socket
-> SockAddr -- Socket address stuff
-> IO ()
connect sock@(MkSocket s _family _stype _protocol socketStatus) addr = withSocketsDo $ do
modifyMVar_ socketStatus $ \currentStatus -> do
if currentStatus /= NotConnected && currentStatus /= Bound
then
ioError $ userError $
errLoc ++ ": can't connect to socket with status " ++ show currentStatus
else do
withSockAddr addr $ \p_addr sz -> do
let connectLoop = do
r <- c_connect s p_addr (fromIntegral sz)
if r == -1
then do
{-# LINE 449 "Network/Socket.hsc" #-}
err <- getErrno
case () of
_ | err == eINTR -> connectLoop
_ | err == eINPROGRESS -> connectBlocked
-- _ | err == eAGAIN -> connectBlocked
_otherwise -> throwSocketError errLoc
{-# LINE 458 "Network/Socket.hsc" #-}
else return ()
connectBlocked = do
threadWaitWrite (fromIntegral s)
err <- getSocketOption sock SoError
if (err == 0)
then return ()
else throwSocketErrorCode errLoc (fromIntegral err)
connectLoop
return Connected
where
errLoc = "Network.Socket.connect: " ++ show sock
-----------------------------------------------------------------------------
-- Listen
-- | Listen for connections made to the socket. The second argument
-- specifies the maximum number of queued connections and should be at
-- least 1; the maximum value is system-dependent (usually 5).
listen :: Socket -- Connected & Bound Socket
-> Int -- Queue Length
-> IO ()
listen (MkSocket s _family _stype _protocol socketStatus) backlog = do
modifyMVar_ socketStatus $ \ status -> do
if status /= Bound
then
ioError $ userError $
"Network.Socket.listen: can't listen on socket with status " ++ show status
else do
throwSocketErrorIfMinus1Retry_ "Network.Socket.listen" $
c_listen s (fromIntegral backlog)
return Listening
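-- An illustrative server sketch, assuming an 'AddrInfo' value @addr@
-- obtained from 'getAddrInfo' and a socket created as in the documentation
-- of 'socket' above:
--
-- >>> bind sock (addrAddress addr)
-- >>> listen sock 5
-- >>> (conn, peer) <- accept sock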
-----------------------------------------------------------------------------
-- Accept
--
-- A call to `accept' only returns when data is available on the given
-- socket, unless the socket has been set to non-blocking. It will
-- return a new socket which should be used to read the incoming data and
-- should then be closed. Using the socket returned by `accept' allows
-- incoming requests to be queued on the original socket.
-- | Accept a connection. The socket must be bound to an address and
-- listening for connections. The return value is a pair @(conn,
-- address)@ where @conn@ is a new socket object usable to send and
-- receive data on the connection, and @address@ is the address bound
-- to the socket on the other end of the connection.
accept :: Socket -- Queue Socket
-> IO (Socket, -- Readable Socket
SockAddr) -- Peer details
accept sock@(MkSocket s family stype protocol status) = do
currentStatus <- readMVar status
okay <- isAcceptable sock
if not okay
then
ioError $ userError $
"Network.Socket.accept: can't accept socket (" ++
show (family, stype, protocol) ++ ") with status " ++
show currentStatus
else do
let sz = sizeOfSockAddrByFamily family
allocaBytes sz $ \ sockaddr -> do
{-# LINE 537 "Network/Socket.hsc" #-}
with (fromIntegral sz) $ \ ptr_len -> do
{-# LINE 539 "Network/Socket.hsc" #-}
new_sock <- throwSocketErrorIfMinus1RetryMayBlock "Network.Socket.accept"
(threadWaitRead (fromIntegral s))
(c_accept4 s sockaddr ptr_len (2048))
{-# LINE 542 "Network/Socket.hsc" #-}
{-# LINE 547 "Network/Socket.hsc" #-}
{-# LINE 548 "Network/Socket.hsc" #-}
addr <- peekSockAddr sockaddr
new_status <- newMVar Connected
return ((MkSocket new_sock family stype protocol new_status), addr)
{-# LINE 562 "Network/Socket.hsc" #-}
-----------------------------------------------------------------------------
-- ** Sending and receiving data
-- $sendrecv
--
-- Do not use the @send@ and @recv@ functions defined in this section
-- in new code, as they incorrectly represent binary data as a Unicode
-- string. As a result, these functions are inefficient and may lead
-- to bugs in the program. Instead use the @send@ and @recv@
-- functions defined in the "Network.Socket.ByteString" module.
-----------------------------------------------------------------------------
-- sendTo & recvFrom
-- | Send data to the socket. The recipient can be specified
-- explicitly, so the socket need not be in a connected state.
-- Returns the number of bytes sent. Applications are responsible for
-- ensuring that all data has been sent.
--
-- NOTE: blocking on Windows unless you compile with -threaded (see
-- GHC ticket #1129)
{-# WARNING sendTo "Use sendTo defined in \"Network.Socket.ByteString\"" #-}
sendTo :: Socket -- (possibly) bound/connected Socket
-> String -- Data to send
-> SockAddr
-> IO Int -- Number of Bytes sent
sendTo sock xs addr = do
withCStringLen xs $ \(str, len) -> do
sendBufTo sock str len addr
-- | Send data to the socket. The recipient can be specified
-- explicitly, so the socket need not be in a connected state.
-- Returns the number of bytes sent. Applications are responsible for
-- ensuring that all data has been sent.
sendBufTo :: Socket -- (possibly) bound/connected Socket
-> Ptr a -> Int -- Data to send
-> SockAddr
-> IO Int -- Number of Bytes sent
sendBufTo sock@(MkSocket s _family _stype _protocol _status) ptr nbytes addr = do
withSockAddr addr $ \p_addr sz -> do
liftM fromIntegral $
throwSocketErrorWaitWrite sock "Network.Socket.sendTo" $
c_sendto s ptr (fromIntegral $ nbytes) 0{-flags-}
p_addr (fromIntegral sz)
-- | Receive data from the socket. The socket need not be in a
-- connected state. Returns @(bytes, nbytes, address)@ where @bytes@
-- is a @String@ of length @nbytes@ representing the data received and
-- @address@ is a 'SockAddr' representing the address of the sending
-- socket.
--
-- NOTE: blocking on Windows unless you compile with -threaded (see
-- GHC ticket #1129)
{-# WARNING recvFrom "Use recvFrom defined in \"Network.Socket.ByteString\"" #-}
recvFrom :: Socket -> Int -> IO (String, Int, SockAddr)
recvFrom sock nbytes =
allocaBytes nbytes $ \ptr -> do
(len, sockaddr) <- recvBufFrom sock ptr nbytes
str <- peekCStringLen (ptr, len)
return (str, len, sockaddr)
-- | Receive data from the socket, writing it into buffer instead of
-- creating a new string. The socket need not be in a connected
-- state. Returns @(nbytes, address)@ where @nbytes@ is the number of
-- bytes received and @address@ is a 'SockAddr' representing the
-- address of the sending socket.
--
-- NOTE: blocking on Windows unless you compile with -threaded (see
-- GHC ticket #1129)
recvBufFrom :: Socket -> Ptr a -> Int -> IO (Int, SockAddr)
recvBufFrom sock@(MkSocket s family _stype _protocol _status) ptr nbytes
| nbytes <= 0 = ioError (mkInvalidRecvArgError "Network.Socket.recvFrom")
| otherwise =
withNewSockAddr family $ \ptr_addr sz -> do
alloca $ \ptr_len -> do
poke ptr_len (fromIntegral sz)
len <- throwSocketErrorWaitRead sock "Network.Socket.recvFrom" $
c_recvfrom s ptr (fromIntegral nbytes) 0{-flags-}
ptr_addr ptr_len
let len' = fromIntegral len
if len' == 0
then ioError (mkEOFError "Network.Socket.recvFrom")
else do
flg <- isConnected sock
-- For at least one implementation (WinSock 2), recvfrom() ignores
-- filling in the sockaddr for connected TCP sockets. Cope with
-- this by using getPeerName instead.
sockaddr <-
if flg then
getPeerName sock
else
peekSockAddr ptr_addr
return (len', sockaddr)
-----------------------------------------------------------------------------
-- send & recv
-- | Send data to the socket. The socket must be connected to a remote
-- socket. Returns the number of bytes sent. Applications are
-- responsible for ensuring that all data has been sent.
--
-- Sending data to closed socket may lead to undefined behaviour.
{-# WARNING send "Use send defined in \"Network.Socket.ByteString\"" #-}
send :: Socket -- Bound/Connected Socket
-> String -- Data to send
-> IO Int -- Number of Bytes sent
send sock xs = withCStringLen xs $ \(str, len) ->
sendBuf sock (castPtr str) len
-- | Send data to the socket. The socket must be connected to a remote
-- socket. Returns the number of bytes sent. Applications are
-- responsible for ensuring that all data has been sent.
--
-- Sending data to closed socket may lead to undefined behaviour.
sendBuf :: Socket -- Bound/Connected Socket
-> Ptr Word8 -- Pointer to the data to send
-> Int -- Length of the buffer
-> IO Int -- Number of Bytes sent
sendBuf sock@(MkSocket s _family _stype _protocol _status) str len = do
liftM fromIntegral $
{-# LINE 695 "Network/Socket.hsc" #-}
throwSocketErrorWaitWrite sock "Network.Socket.sendBuf" $
c_send s str (fromIntegral len) 0{-flags-}
{-# LINE 698 "Network/Socket.hsc" #-}
-- | Receive data from the socket. The socket must be in a connected
-- state. This function may return fewer bytes than specified. If the
-- message is longer than the specified length, it may be discarded
-- depending on the type of socket. This function may block until a
-- message arrives.
--
-- Considering hardware and network realities, the maximum number of
-- bytes to receive should be a small power of 2, e.g., 4096.
--
-- For TCP sockets, a zero length return value means the peer has
-- closed its half side of the connection.
--
-- Receiving data from closed socket may lead to undefined behaviour.
{-# WARNING recv "Use recv defined in \"Network.Socket.ByteString\"" #-}
recv :: Socket -> Int -> IO String
recv sock l = fst <$> recvLen sock l
{-# WARNING recvLen "Use recvLen defined in \"Network.Socket.ByteString\"" #-}
recvLen :: Socket -> Int -> IO (String, Int)
recvLen sock nbytes =
allocaBytes nbytes $ \ptr -> do
len <- recvBuf sock ptr nbytes
s <- peekCStringLen (castPtr ptr,len)
return (s, len)
-- | Receive data from the socket. The socket must be in a connected
-- state. This function may return fewer bytes than specified. If the
-- message is longer than the specified length, it may be discarded
-- depending on the type of socket. This function may block until a
-- message arrives.
--
-- Considering hardware and network realities, the maximum number of
-- bytes to receive should be a small power of 2, e.g., 4096.
--
-- For TCP sockets, a zero length return value means the peer has
-- closed its half side of the connection.
--
-- Receiving data from closed socket may lead to undefined behaviour.
recvBuf :: Socket -> Ptr Word8 -> Int -> IO Int
recvBuf sock@(MkSocket s _family _stype _protocol _status) ptr nbytes
| nbytes <= 0 = ioError (mkInvalidRecvArgError "Network.Socket.recvBuf")
| otherwise = do
len <-
{-# LINE 749 "Network/Socket.hsc" #-}
throwSocketErrorWaitRead sock "Network.Socket.recvBuf" $
c_recv s (castPtr ptr) (fromIntegral nbytes) 0{-flags-}
{-# LINE 752 "Network/Socket.hsc" #-}
let len' = fromIntegral len
if len' == 0
then ioError (mkEOFError "Network.Socket.recvBuf")
else return len'
-- ---------------------------------------------------------------------------
-- socketPort
--
-- The port number the given socket is currently connected to can be
-- determined by calling $port$; this is generally only useful when bind
-- was given $aNY\_PORT$.
socketPort :: Socket -- Connected & Bound Socket
-> IO PortNumber -- Port Number of Socket
socketPort sock@(MkSocket _ AF_INET _ _ _) = do
(SockAddrInet port _) <- getSocketName sock
return port
{-# LINE 771 "Network/Socket.hsc" #-}
socketPort sock@(MkSocket _ AF_INET6 _ _ _) = do
(SockAddrInet6 port _ _ _) <- getSocketName sock
return port
{-# LINE 775 "Network/Socket.hsc" #-}
socketPort (MkSocket _ family _ _ _) =
ioError $ userError $
"Network.Socket.socketPort: address family '" ++ show family ++
"' not supported."
-- ---------------------------------------------------------------------------
-- getPeerName
-- Calling $getPeerName$ returns the address details of the machine,
-- other than the local one, which is connected to the socket. This is
-- used in programs such as FTP to determine where to send the
-- returning data. The corresponding call to get the details of the
-- local machine is $getSocketName$.
getPeerName :: Socket -> IO SockAddr
getPeerName (MkSocket s family _ _ _) = do
withNewSockAddr family $ \ptr sz -> do
with (fromIntegral sz) $ \int_star -> do
throwSocketErrorIfMinus1Retry_ "Network.Socket.getPeerName" $
c_getpeername s ptr int_star
_sz <- peek int_star
peekSockAddr ptr
getSocketName :: Socket -> IO SockAddr
getSocketName (MkSocket s family _ _ _) = do
withNewSockAddr family $ \ptr sz -> do
with (fromIntegral sz) $ \int_star -> do
throwSocketErrorIfMinus1Retry_ "Network.Socket.getSocketName" $
c_getsockname s ptr int_star
peekSockAddr ptr
-----------------------------------------------------------------------------
-- Socket Properties
-- | Socket options for use with 'setSocketOption' and 'getSocketOption'.
--
-- The existence of a constructor does not imply that the relevant option
-- is supported on your system: see 'isSupportedSocketOption'
data SocketOption
= Debug -- ^ SO_DEBUG
| ReuseAddr -- ^ SO_REUSEADDR
| Type -- ^ SO_TYPE
| SoError -- ^ SO_ERROR
| DontRoute -- ^ SO_DONTROUTE
| Broadcast -- ^ SO_BROADCAST
| SendBuffer -- ^ SO_SNDBUF
| RecvBuffer -- ^ SO_RCVBUF
| KeepAlive -- ^ SO_KEEPALIVE
| OOBInline -- ^ SO_OOBINLINE
| TimeToLive -- ^ IP_TTL
| MaxSegment -- ^ TCP_MAXSEG
| NoDelay -- ^ TCP_NODELAY
| Cork -- ^ TCP_CORK
| Linger -- ^ SO_LINGER
| ReusePort -- ^ SO_REUSEPORT
| RecvLowWater -- ^ SO_RCVLOWAT
| SendLowWater -- ^ SO_SNDLOWAT
| RecvTimeOut -- ^ SO_RCVTIMEO
| SendTimeOut -- ^ SO_SNDTIMEO
| UseLoopBack -- ^ SO_USELOOPBACK
| UserTimeout -- ^ TCP_USER_TIMEOUT
| IPv6Only -- ^ IPV6_V6ONLY
| CustomSockOpt (CInt, CInt)
deriving (Show, Typeable)
-- | Does the 'SocketOption' exist on this system?
isSupportedSocketOption :: SocketOption -> Bool
isSupportedSocketOption = isJust . packSocketOption
-- | For a socket option, return Just (level, value) where level is the
-- corresponding C option level constant (e.g. SOL_SOCKET) and value is
-- the option constant itself (e.g. SO_DEBUG)
-- If either constant does not exist, return Nothing.
packSocketOption :: SocketOption -> Maybe (CInt, CInt)
packSocketOption so =
-- The Just here is a hack to disable GHC's overlapping pattern detection:
-- the problem is if all constants are present, the fallback pattern is
-- redundant, but if they aren't then it isn't. Hence we introduce an
-- extra pattern (Nothing) that can't possibly happen, so that the
-- fallback is always (in principle) necessary.
-- I feel a little bad for including this, but such are the sacrifices we
-- make while working with CPP - excluding the fallback pattern correctly
-- would be a serious nuisance.
-- (NB: comments elsewhere in this file refer to this one)
case Just so of
{-# LINE 862 "Network/Socket.hsc" #-}
{-# LINE 863 "Network/Socket.hsc" #-}
Just Debug -> Just ((1), (1))
{-# LINE 864 "Network/Socket.hsc" #-}
{-# LINE 865 "Network/Socket.hsc" #-}
{-# LINE 866 "Network/Socket.hsc" #-}
Just ReuseAddr -> Just ((1), (2))
{-# LINE 867 "Network/Socket.hsc" #-}
{-# LINE 868 "Network/Socket.hsc" #-}
{-# LINE 869 "Network/Socket.hsc" #-}
Just Type -> Just ((1), (3))
{-# LINE 870 "Network/Socket.hsc" #-}
{-# LINE 871 "Network/Socket.hsc" #-}
{-# LINE 872 "Network/Socket.hsc" #-}
Just SoError -> Just ((1), (4))
{-# LINE 873 "Network/Socket.hsc" #-}
{-# LINE 874 "Network/Socket.hsc" #-}
{-# LINE 875 "Network/Socket.hsc" #-}
Just DontRoute -> Just ((1), (5))
{-# LINE 876 "Network/Socket.hsc" #-}
{-# LINE 877 "Network/Socket.hsc" #-}
{-# LINE 878 "Network/Socket.hsc" #-}
Just Broadcast -> Just ((1), (6))
{-# LINE 879 "Network/Socket.hsc" #-}
{-# LINE 880 "Network/Socket.hsc" #-}
{-# LINE 881 "Network/Socket.hsc" #-}
Just SendBuffer -> Just ((1), (7))
{-# LINE 882 "Network/Socket.hsc" #-}
{-# LINE 883 "Network/Socket.hsc" #-}
{-# LINE 884 "Network/Socket.hsc" #-}
Just RecvBuffer -> Just ((1), (8))
{-# LINE 885 "Network/Socket.hsc" #-}
{-# LINE 886 "Network/Socket.hsc" #-}
{-# LINE 887 "Network/Socket.hsc" #-}
Just KeepAlive -> Just ((1), (9))
{-# LINE 888 "Network/Socket.hsc" #-}
{-# LINE 889 "Network/Socket.hsc" #-}
{-# LINE 890 "Network/Socket.hsc" #-}
Just OOBInline -> Just ((1), (10))
{-# LINE 891 "Network/Socket.hsc" #-}
{-# LINE 892 "Network/Socket.hsc" #-}
{-# LINE 893 "Network/Socket.hsc" #-}
Just Linger -> Just ((1), (13))
{-# LINE 894 "Network/Socket.hsc" #-}
{-# LINE 895 "Network/Socket.hsc" #-}
{-# LINE 896 "Network/Socket.hsc" #-}
Just ReusePort -> Just ((1), (15))
{-# LINE 897 "Network/Socket.hsc" #-}
{-# LINE 898 "Network/Socket.hsc" #-}
{-# LINE 899 "Network/Socket.hsc" #-}
Just RecvLowWater -> Just ((1), (18))
{-# LINE 900 "Network/Socket.hsc" #-}
{-# LINE 901 "Network/Socket.hsc" #-}
{-# LINE 902 "Network/Socket.hsc" #-}
Just SendLowWater -> Just ((1), (19))
{-# LINE 903 "Network/Socket.hsc" #-}
{-# LINE 904 "Network/Socket.hsc" #-}
{-# LINE 905 "Network/Socket.hsc" #-}
Just RecvTimeOut -> Just ((1), (20))
{-# LINE 906 "Network/Socket.hsc" #-}
{-# LINE 907 "Network/Socket.hsc" #-}
{-# LINE 908 "Network/Socket.hsc" #-}
Just SendTimeOut -> Just ((1), (21))
{-# LINE 909 "Network/Socket.hsc" #-}
{-# LINE 910 "Network/Socket.hsc" #-}
{-# LINE 913 "Network/Socket.hsc" #-}
{-# LINE 914 "Network/Socket.hsc" #-}
{-# LINE 915 "Network/Socket.hsc" #-}
{-# LINE 916 "Network/Socket.hsc" #-}
Just TimeToLive -> Just ((0), (2))
{-# LINE 917 "Network/Socket.hsc" #-}
{-# LINE 918 "Network/Socket.hsc" #-}
{-# LINE 919 "Network/Socket.hsc" #-}
{-# LINE 920 "Network/Socket.hsc" #-}
{-# LINE 921 "Network/Socket.hsc" #-}
Just MaxSegment -> Just ((6), (2))
{-# LINE 922 "Network/Socket.hsc" #-}
{-# LINE 923 "Network/Socket.hsc" #-}
{-# LINE 924 "Network/Socket.hsc" #-}
Just NoDelay -> Just ((6), (1))
{-# LINE 925 "Network/Socket.hsc" #-}
{-# LINE 926 "Network/Socket.hsc" #-}
{-# LINE 927 "Network/Socket.hsc" #-}
Just UserTimeout -> Just ((6), (18))
{-# LINE 928 "Network/Socket.hsc" #-}
{-# LINE 929 "Network/Socket.hsc" #-}
{-# LINE 930 "Network/Socket.hsc" #-}
Just Cork -> Just ((6), (3))
{-# LINE 931 "Network/Socket.hsc" #-}
{-# LINE 932 "Network/Socket.hsc" #-}
{-# LINE 933 "Network/Socket.hsc" #-}
{-# LINE 934 "Network/Socket.hsc" #-}
{-# LINE 935 "Network/Socket.hsc" #-}
Just IPv6Only -> Just ((41), (26))
{-# LINE 936 "Network/Socket.hsc" #-}
{-# LINE 937 "Network/Socket.hsc" #-}
{-# LINE 938 "Network/Socket.hsc" #-}
Just (CustomSockOpt opt) -> Just opt
_ -> Nothing
-- | Return the option level and option value if they exist,
-- otherwise throw an error that begins "Network.Socket." ++ the String
-- parameter
packSocketOption' :: String -> SocketOption -> IO (CInt, CInt)
packSocketOption' caller so = maybe err return (packSocketOption so)
where
err = ioError . userError . concat $ ["Network.Socket.", caller,
": socket option ", show so, " unsupported on this system"]
-- | Set a socket option that expects an Int value.
-- There is currently no API to set e.g. the timeval socket options
setSocketOption :: Socket
-> SocketOption -- Option Name
-> Int -- Option Value
-> IO ()
setSocketOption (MkSocket s _ _ _ _) so v = do
(level, opt) <- packSocketOption' "setSocketOption" so
with (fromIntegral v) $ \ptr_v -> do
throwSocketErrorIfMinus1_ "Network.Socket.setSocketOption" $
c_setsockopt s level opt ptr_v
(fromIntegral (sizeOf (undefined :: CInt)))
return ()
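-- For example, to allow a listening port to be rebound quickly after a
-- restart (illustrative only):
--
-- > setSocketOption sock ReuseAddr 1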
-- | Get a socket option that gives an Int value.
-- There is currently no API to get e.g. the timeval socket options
getSocketOption :: Socket
-> SocketOption -- Option Name
-> IO Int -- Option Value
getSocketOption (MkSocket s _ _ _ _) so = do
(level, opt) <- packSocketOption' "getSocketOption" so
alloca $ \ptr_v ->
with (fromIntegral (sizeOf (undefined :: CInt))) $ \ptr_sz -> do
throwSocketErrorIfMinus1Retry_ "Network.Socket.getSocketOption" $
c_getsockopt s level opt ptr_v ptr_sz
fromIntegral `liftM` peek ptr_v
{-# LINE 980 "Network/Socket.hsc" #-}
-- | Returns the processID, userID and groupID of the socket's peer.
--
-- Only available on platforms that support SO_PEERCRED or GETPEEREID(3)
-- on domain sockets.
-- GETPEEREID(3) returns userID and groupID. processID is always 0.
getPeerCred :: Socket -> IO (CUInt, CUInt, CUInt)
getPeerCred sock = do
{-# LINE 988 "Network/Socket.hsc" #-}
let fd = fdSocket sock
let sz = (12)
{-# LINE 990 "Network/Socket.hsc" #-}
allocaBytes sz $ \ ptr_cr ->
with (fromIntegral sz) $ \ ptr_sz -> do
_ <- ($) throwSocketErrorIfMinus1Retry "Network.Socket.getPeerCred" $
c_getsockopt fd (1) (17) ptr_cr ptr_sz
{-# LINE 994 "Network/Socket.hsc" #-}
pid <- ((\hsc_ptr -> peekByteOff hsc_ptr 0)) ptr_cr
{-# LINE 995 "Network/Socket.hsc" #-}
uid <- ((\hsc_ptr -> peekByteOff hsc_ptr 4)) ptr_cr
{-# LINE 996 "Network/Socket.hsc" #-}
gid <- ((\hsc_ptr -> peekByteOff hsc_ptr 8)) ptr_cr
{-# LINE 997 "Network/Socket.hsc" #-}
return (pid, uid, gid)
{-# LINE 1002 "Network/Socket.hsc" #-}
{-# LINE 1017 "Network/Socket.hsc" #-}
{-# LINE 1018 "Network/Socket.hsc" #-}
{-# LINE 1024 "Network/Socket.hsc" #-}
-- sending/receiving ancillary socket data; low-level mechanism
-- for transmitting file descriptors, mainly.
sendFd :: Socket -> CInt -> IO ()
sendFd sock outfd = do
_ <- ($) throwSocketErrorWaitWrite sock "Network.Socket.sendFd" $
c_sendFd (fdSocket sock) outfd
  -- Note: If Winsock supported FD-passing, this would have been
-- incorrect (since socket FDs need to be closed via closesocket().)
closeFd outfd
-- | Receive a file descriptor over a domain socket. Note that the resulting
-- file descriptor may have to be put into non-blocking mode in order to be
-- used safely. See 'setNonBlockIfNeeded'.
recvFd :: Socket -> IO CInt
recvFd sock = do
theFd <- throwSocketErrorWaitRead sock "Network.Socket.recvFd" $
c_recvFd (fdSocket sock)
return theFd
foreign import ccall unsafe "sendFd" c_sendFd :: CInt -> CInt -> IO CInt
foreign import ccall unsafe "recvFd" c_recvFd :: CInt -> IO CInt
{-# LINE 1047 "Network/Socket.hsc" #-}
-- ---------------------------------------------------------------------------
-- Utility Functions
aNY_PORT :: PortNumber
aNY_PORT = 0
-- | The IPv4 wild card address.
iNADDR_ANY :: HostAddress
iNADDR_ANY = htonl (0)
{-# LINE 1058 "Network/Socket.hsc" #-}
-- | Converts from host byte order to network byte order.
foreign import ccall unsafe "htonl" htonl :: Word32 -> Word32
-- | Converts from network byte order to host byte order.
foreign import ccall unsafe "ntohl" ntohl :: Word32 -> Word32
{-# LINE 1065 "Network/Socket.hsc" #-}
-- | The IPv6 wild card address.
iN6ADDR_ANY :: HostAddress6
iN6ADDR_ANY = (0, 0, 0, 0)
{-# LINE 1070 "Network/Socket.hsc" #-}
sOMAXCONN :: Int
sOMAXCONN = 128
{-# LINE 1073 "Network/Socket.hsc" #-}
sOL_SOCKET :: Int
sOL_SOCKET = 1
{-# LINE 1076 "Network/Socket.hsc" #-}
{-# LINE 1078 "Network/Socket.hsc" #-}
sCM_RIGHTS :: Int
sCM_RIGHTS = 1
{-# LINE 1080 "Network/Socket.hsc" #-}
{-# LINE 1081 "Network/Socket.hsc" #-}
-- | This is the value of SOMAXCONN, typically 128.
-- 128 is good enough for normal network servers but
-- is too small for high performance servers.
maxListenQueue :: Int
maxListenQueue = sOMAXCONN
-- -----------------------------------------------------------------------------
data ShutdownCmd
= ShutdownReceive
| ShutdownSend
| ShutdownBoth
deriving Typeable
sdownCmdToInt :: ShutdownCmd -> CInt
sdownCmdToInt ShutdownReceive = 0
sdownCmdToInt ShutdownSend = 1
sdownCmdToInt ShutdownBoth = 2
-- | Shut down one or both halves of the connection, depending on the
-- second argument to the function. If the second argument is
-- 'ShutdownReceive', further receives are disallowed. If it is
-- 'ShutdownSend', further sends are disallowed. If it is
-- 'ShutdownBoth', further sends and receives are disallowed.
shutdown :: Socket -> ShutdownCmd -> IO ()
shutdown (MkSocket s _ _ _ _) stype = do
throwSocketErrorIfMinus1Retry_ "Network.Socket.shutdown" $
c_shutdown s (sdownCmdToInt stype)
return ()
-- -----------------------------------------------------------------------------
-- | Close the socket. Sending data to or receiving data from closed socket
-- may lead to undefined behaviour.
close :: Socket -> IO ()
close (MkSocket s _ _ _ socketStatus) = do
modifyMVar_ socketStatus $ \ status ->
case status of
ConvertedToHandle ->
ioError (userError ("close: converted to a Handle, use hClose instead"))
Closed ->
return status
_ -> closeFdWith (closeFd . fromIntegral) (fromIntegral s) >> return Closed
-- -----------------------------------------------------------------------------
-- | Determines whether 'close' has been used on the 'Socket'. This
-- does /not/ indicate any status about the socket beyond this. If the
-- socket has been closed remotely, this function can still return
-- 'True'.
isConnected :: Socket -> IO Bool
isConnected (MkSocket _ _ _ _ status) = do
value <- readMVar status
return (value == Connected)
-- -----------------------------------------------------------------------------
-- Socket Predicates
isBound :: Socket -> IO Bool
isBound (MkSocket _ _ _ _ status) = do
value <- readMVar status
return (value == Bound)
isListening :: Socket -> IO Bool
isListening (MkSocket _ _ _ _ status) = do
value <- readMVar status
return (value == Listening)
isReadable :: Socket -> IO Bool
isReadable (MkSocket _ _ _ _ status) = do
value <- readMVar status
return (value == Listening || value == Connected)
isWritable :: Socket -> IO Bool
isWritable = isReadable -- sort of.
isAcceptable :: Socket -> IO Bool
{-# LINE 1160 "Network/Socket.hsc" #-}
isAcceptable (MkSocket _ AF_UNIX x _ status)
| x == Stream || x == SeqPacket = do
value <- readMVar status
return (value == Connected || value == Bound || value == Listening)
isAcceptable (MkSocket _ AF_UNIX _ _ _) = return False
{-# LINE 1166 "Network/Socket.hsc" #-}
isAcceptable (MkSocket _ _ _ _ status) = do
value <- readMVar status
return (value == Connected || value == Listening)
-- -----------------------------------------------------------------------------
-- Internet address manipulation routines:
inet_addr :: String -> IO HostAddress
inet_addr ipstr = withSocketsDo $ do
withCString ipstr $ \str -> do
had <- c_inet_addr str
if had == -1
then ioError $ userError $
"Network.Socket.inet_addr: Malformed address: " ++ ipstr
else return had -- network byte order
inet_ntoa :: HostAddress -> IO String
inet_ntoa haddr = withSocketsDo $ do
pstr <- c_inet_ntoa haddr
peekCString pstr
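-- For example (illustrative only):
--
-- >>> inet_addr "127.0.0.1" >>= inet_ntoa
-- "127.0.0.1"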
-- | Turns a Socket into an 'Handle'. By default, the new handle is
-- unbuffered. Use 'System.IO.hSetBuffering' to change the buffering.
--
-- Note that since a 'Handle' is automatically closed by a finalizer
-- when it is no longer referenced, you should avoid doing any more
-- operations on the 'Socket' after calling 'socketToHandle'. To
-- close the 'Socket' after 'socketToHandle', call 'System.IO.hClose'
-- on the 'Handle'.
socketToHandle :: Socket -> IOMode -> IO Handle
socketToHandle s@(MkSocket fd _ _ _ socketStatus) mode = do
modifyMVar socketStatus $ \ status ->
if status == ConvertedToHandle
then ioError (userError ("socketToHandle: already a Handle"))
else do
h <- fdToHandle' (fromIntegral fd) (Just GHC.IO.Device.Stream) True (show s) mode True{-bin-}
hSetBuffering h NoBuffering
return (ConvertedToHandle, h)
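-- For example, to switch to line-based I/O on a connected socket
-- (illustrative only):
--
-- > h <- socketToHandle sock ReadWriteMode
-- > hSetBuffering h LineBuffering
-- > hPutStrLn h "hello"
-- > hClose h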
-- | Pack a list of values into a bitmask. The possible mappings from
-- value to bit-to-set are given as the first argument. We assume
-- that each value can cause exactly one bit to be set; unpackBits will
-- break if this property is not true.
packBits :: (Eq a, Num b, Bits b) => [(a, b)] -> [a] -> b
packBits mapping xs = foldl' pack 0 mapping
where pack acc (k, v) | k `elem` xs = acc .|. v
| otherwise = acc
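-- For example, with the values given in 'aiFlagMapping' below:
--
-- > packBits aiFlagMapping [AI_PASSIVE, AI_CANONNAME] == 1 .|. 2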
-- | Unpack a bitmask into a list of values.
unpackBits :: (Num b, Bits b) => [(a, b)] -> b -> [a]
-- Be permissive and ignore unknown bit values. At least on OS X,
-- getaddrinfo returns an ai_flags field with bits set that have no
-- entry in <netdb.h>.
unpackBits [] _ = []
unpackBits ((k,v):xs) r
| r .&. v /= 0 = k : unpackBits xs (r .&. complement v)
| otherwise = unpackBits xs r
-----------------------------------------------------------------------------
-- Address and service lookups
{-# LINE 1233 "Network/Socket.hsc" #-}
-- | Flags that control the querying behaviour of 'getAddrInfo'.
-- For more information, see <https://tools.ietf.org/html/rfc3493#page-25>
data AddrInfoFlag =
-- | The list of returned 'AddrInfo' values will
-- only contain IPv4 addresses if the local system has at least
-- one IPv4 interface configured, and likewise for IPv6.
-- (Only some platforms support this.)
AI_ADDRCONFIG
-- | If 'AI_ALL' is specified, return all matching IPv6 and
-- IPv4 addresses. Otherwise, this flag has no effect.
-- (Only some platforms support this.)
| AI_ALL
-- | The 'addrCanonName' field of the first returned
-- 'AddrInfo' will contain the "canonical name" of the host.
| AI_CANONNAME
-- | The 'HostName' argument /must/ be a numeric
-- address in string form, and network name lookups will not be
-- attempted.
| AI_NUMERICHOST
-- | The 'ServiceName' argument /must/ be a port
-- number in string form, and service name lookups will not be
-- attempted. (Only some platforms support this.)
| AI_NUMERICSERV
-- | If no 'HostName' value is provided, the network
-- address in each 'SockAddr'
-- will be left as a "wild card", i.e. as either 'iNADDR_ANY'
-- or 'iN6ADDR_ANY'. This is useful for server applications that
-- will accept connections from any client.
| AI_PASSIVE
-- | If an IPv6 lookup is performed, and no IPv6
-- addresses are found, IPv6-mapped IPv4 addresses will be
-- returned. (Only some platforms support this.)
| AI_V4MAPPED
deriving (Eq, Read, Show, Typeable)
aiFlagMapping :: [(AddrInfoFlag, CInt)]
aiFlagMapping =
[
{-# LINE 1274 "Network/Socket.hsc" #-}
(AI_ADDRCONFIG, 32),
{-# LINE 1275 "Network/Socket.hsc" #-}
{-# LINE 1278 "Network/Socket.hsc" #-}
{-# LINE 1279 "Network/Socket.hsc" #-}
(AI_ALL, 16),
{-# LINE 1280 "Network/Socket.hsc" #-}
{-# LINE 1283 "Network/Socket.hsc" #-}
(AI_CANONNAME, 2),
{-# LINE 1284 "Network/Socket.hsc" #-}
(AI_NUMERICHOST, 4),
{-# LINE 1285 "Network/Socket.hsc" #-}
{-# LINE 1286 "Network/Socket.hsc" #-}
(AI_NUMERICSERV, 1024),
{-# LINE 1287 "Network/Socket.hsc" #-}
{-# LINE 1290 "Network/Socket.hsc" #-}
(AI_PASSIVE, 1),
{-# LINE 1291 "Network/Socket.hsc" #-}
{-# LINE 1292 "Network/Socket.hsc" #-}
(AI_V4MAPPED, 8)
{-# LINE 1293 "Network/Socket.hsc" #-}
{-# LINE 1296 "Network/Socket.hsc" #-}
]
-- | Indicate whether the given 'AddrInfoFlag' will have any effect on
-- this system.
addrInfoFlagImplemented :: AddrInfoFlag -> Bool
addrInfoFlagImplemented f = packBits aiFlagMapping [f] /= 0
data AddrInfo =
AddrInfo {
addrFlags :: [AddrInfoFlag],
addrFamily :: Family,
addrSocketType :: SocketType,
addrProtocol :: ProtocolNumber,
addrAddress :: SockAddr,
addrCanonName :: Maybe String
}
deriving (Eq, Show, Typeable)
instance Storable AddrInfo where
sizeOf _ = 48
{-# LINE 1316 "Network/Socket.hsc" #-}
alignment _ = alignment (undefined :: CInt)
peek p = do
ai_flags <- ((\hsc_ptr -> peekByteOff hsc_ptr 0)) p
{-# LINE 1320 "Network/Socket.hsc" #-}
ai_family <- ((\hsc_ptr -> peekByteOff hsc_ptr 4)) p
{-# LINE 1321 "Network/Socket.hsc" #-}
ai_socktype <- ((\hsc_ptr -> peekByteOff hsc_ptr 8)) p
{-# LINE 1322 "Network/Socket.hsc" #-}
ai_protocol <- ((\hsc_ptr -> peekByteOff hsc_ptr 12)) p
{-# LINE 1323 "Network/Socket.hsc" #-}
ai_addr <- ((\hsc_ptr -> peekByteOff hsc_ptr 24)) p >>= peekSockAddr
{-# LINE 1324 "Network/Socket.hsc" #-}
ai_canonname_ptr <- ((\hsc_ptr -> peekByteOff hsc_ptr 32)) p
{-# LINE 1325 "Network/Socket.hsc" #-}
ai_canonname <- if ai_canonname_ptr == nullPtr
then return Nothing
else liftM Just $ peekCString ai_canonname_ptr
socktype <- unpackSocketType' "AddrInfo.peek" ai_socktype
return (AddrInfo
{
addrFlags = unpackBits aiFlagMapping ai_flags,
addrFamily = unpackFamily ai_family,
addrSocketType = socktype,
addrProtocol = ai_protocol,
addrAddress = ai_addr,
addrCanonName = ai_canonname
})
poke p (AddrInfo flags family socketType protocol _ _) = do
c_stype <- packSocketTypeOrThrow "AddrInfo.poke" socketType
((\hsc_ptr -> pokeByteOff hsc_ptr 0)) p (packBits aiFlagMapping flags)
{-# LINE 1345 "Network/Socket.hsc" #-}
((\hsc_ptr -> pokeByteOff hsc_ptr 4)) p (packFamily family)
{-# LINE 1346 "Network/Socket.hsc" #-}
((\hsc_ptr -> pokeByteOff hsc_ptr 8)) p c_stype
{-# LINE 1347 "Network/Socket.hsc" #-}
((\hsc_ptr -> pokeByteOff hsc_ptr 12)) p protocol
{-# LINE 1348 "Network/Socket.hsc" #-}
-- stuff below is probably not needed, but let's zero it for safety
((\hsc_ptr -> pokeByteOff hsc_ptr 16)) p (0::CSize)
{-# LINE 1352 "Network/Socket.hsc" #-}
((\hsc_ptr -> pokeByteOff hsc_ptr 24)) p nullPtr
{-# LINE 1353 "Network/Socket.hsc" #-}
((\hsc_ptr -> pokeByteOff hsc_ptr 32)) p nullPtr
{-# LINE 1354 "Network/Socket.hsc" #-}
((\hsc_ptr -> pokeByteOff hsc_ptr 40)) p nullPtr
{-# LINE 1355 "Network/Socket.hsc" #-}
-- | Flags that control the querying behaviour of 'getNameInfo'.
-- For more information, see <https://tools.ietf.org/html/rfc3493#page-30>
data NameInfoFlag =
-- | Resolve a datagram-based service name. This is
-- required only for the few protocols that have different port
-- numbers for their datagram-based versions than for their
-- stream-based versions.
NI_DGRAM
-- | If the hostname cannot be looked up, an IO error is thrown.
| NI_NAMEREQD
-- | If a host is local, return only the hostname part of the FQDN.
| NI_NOFQDN
-- | The name of the host is not looked up.
-- Instead, a numeric representation of the host's
-- address is returned. For an IPv4 address, this will be a
-- dotted-quad string. For IPv6, it will be colon-separated
-- hexadecimal.
| NI_NUMERICHOST
-- | The name of the service is not
-- looked up. Instead, a numeric representation of the
-- service is returned.
| NI_NUMERICSERV
deriving (Eq, Read, Show, Typeable)
niFlagMapping :: [(NameInfoFlag, CInt)]
niFlagMapping = [(NI_DGRAM, 16),
{-# LINE 1383 "Network/Socket.hsc" #-}
(NI_NAMEREQD, 8),
{-# LINE 1384 "Network/Socket.hsc" #-}
(NI_NOFQDN, 4),
{-# LINE 1385 "Network/Socket.hsc" #-}
(NI_NUMERICHOST, 1),
{-# LINE 1386 "Network/Socket.hsc" #-}
(NI_NUMERICSERV, 2)]
{-# LINE 1387 "Network/Socket.hsc" #-}
-- | Default hints for address lookup with 'getAddrInfo'. The values
-- of the 'addrAddress' and 'addrCanonName' fields are 'undefined',
-- and are never inspected by 'getAddrInfo'.
--
-- >>> addrFlags defaultHints
-- []
-- >>> addrFamily defaultHints
-- AF_UNSPEC
-- >>> addrSocketType defaultHints
-- NoSocketType
-- >>> addrProtocol defaultHints
-- 0
defaultHints :: AddrInfo
defaultHints = AddrInfo {
addrFlags = [],
addrFamily = AF_UNSPEC,
addrSocketType = NoSocketType,
addrProtocol = defaultProtocol,
addrAddress = undefined,
addrCanonName = undefined
}
-- | Resolve a host or service name to one or more addresses.
-- The 'AddrInfo' values that this function returns contain 'SockAddr'
-- values that you can pass directly to 'connect' or
-- 'bind'.
--
-- This function is protocol independent. It can return both IPv4 and
-- IPv6 address information.
--
-- The 'AddrInfo' argument specifies the preferred query behaviour,
-- socket options, or protocol. You can override these conveniently
-- using Haskell's record update syntax on 'defaultHints', for example
-- as follows:
--
-- >>> let hints = defaultHints { addrFlags = [AI_NUMERICHOST], addrSocketType = Stream }
--
-- You must provide a 'Just' value for at least one of the 'HostName'
-- or 'ServiceName' arguments. 'HostName' can be either a numeric
-- network address (dotted quad for IPv4, colon-separated hex for
-- IPv6) or a hostname. In the latter case, its addresses will be
-- looked up unless 'AI_NUMERICHOST' is specified as a hint. If you
-- do not provide a 'HostName' value /and/ do not set 'AI_PASSIVE' as
-- a hint, network addresses in the result will contain the address of
-- the loopback interface.
--
-- If the query fails, this function throws an IO exception instead of
-- returning an empty list. Otherwise, it returns a non-empty list
-- of 'AddrInfo' values.
--
-- There are several reasons why a query might result in several
-- values. For example, the queried-for host could be multihomed, or
-- the service might be available via several protocols.
--
-- Note: the order of arguments is slightly different to that defined
-- for @getaddrinfo@ in RFC 2553. The 'AddrInfo' parameter comes first
-- to make partial application easier.
--
-- >>> addr:_ <- getAddrInfo (Just hints) (Just "127.0.0.1") (Just "http")
-- >>> addrAddress addr
-- 127.0.0.1:80
getAddrInfo :: Maybe AddrInfo -- ^ preferred socket type or protocol
-> Maybe HostName -- ^ host name to look up
-> Maybe ServiceName -- ^ service name to look up
-> IO [AddrInfo] -- ^ resolved addresses, with "best" first
getAddrInfo hints node service = withSocketsDo $
maybeWith withCString node $ \c_node ->
maybeWith withCString service $ \c_service ->
maybeWith with filteredHints $ \c_hints ->
alloca $ \ptr_ptr_addrs -> do
ret <- c_getaddrinfo c_node c_service c_hints ptr_ptr_addrs
case ret of
0 -> do ptr_addrs <- peek ptr_ptr_addrs
ais <- followAddrInfo ptr_addrs
c_freeaddrinfo ptr_addrs
return ais
_ -> do err <- gai_strerror ret
ioError (ioeSetErrorString
(mkIOError NoSuchThing "Network.Socket.getAddrInfo" Nothing
Nothing) err)
-- Leaving out the service and using AI_NUMERICSERV causes a
-- segfault on OS X 10.8.2. This code removes AI_NUMERICSERV
-- (which has no effect) in that case.
where
{-# LINE 1480 "Network/Socket.hsc" #-}
filteredHints = hints
{-# LINE 1482 "Network/Socket.hsc" #-}
followAddrInfo :: Ptr AddrInfo -> IO [AddrInfo]
followAddrInfo ptr_ai | ptr_ai == nullPtr = return []
| otherwise = do
a <- peek ptr_ai
as <- ((\hsc_ptr -> peekByteOff hsc_ptr 40)) ptr_ai >>= followAddrInfo
{-# LINE 1489 "Network/Socket.hsc" #-}
return (a:as)
foreign import ccall safe "hsnet_getaddrinfo"
c_getaddrinfo :: CString -> CString -> Ptr AddrInfo -> Ptr (Ptr AddrInfo)
-> IO CInt
foreign import ccall safe "hsnet_freeaddrinfo"
c_freeaddrinfo :: Ptr AddrInfo -> IO ()
gai_strerror :: CInt -> IO String
{-# LINE 1501 "Network/Socket.hsc" #-}
gai_strerror n = c_gai_strerror n >>= peekCString
foreign import ccall safe "gai_strerror"
c_gai_strerror :: CInt -> IO CString
{-# LINE 1508 "Network/Socket.hsc" #-}
withCStringIf :: Bool -> Int -> (CSize -> CString -> IO a) -> IO a
withCStringIf False _ f = f 0 nullPtr
withCStringIf True n f = allocaBytes n (f (fromIntegral n))
-- | Resolve an address to a host or service name.
-- This function is protocol independent.
-- The list of 'NameInfoFlag' values controls query behaviour.
--
-- If a host or service's name cannot be looked up, then the numeric
-- form of the address or service will be returned.
--
-- If the query fails, this function throws an IO exception.
--
-- Example:
-- @
-- (hostName, _) <- getNameInfo [] True False myAddress
-- @
getNameInfo :: [NameInfoFlag] -- ^ flags to control lookup behaviour
-> Bool -- ^ whether to look up a hostname
-> Bool -- ^ whether to look up a service name
-> SockAddr -- ^ the address to look up
-> IO (Maybe HostName, Maybe ServiceName)
getNameInfo flags doHost doService addr = withSocketsDo $
withCStringIf doHost (1025) $ \c_hostlen c_host ->
{-# LINE 1535 "Network/Socket.hsc" #-}
withCStringIf doService (32) $ \c_servlen c_serv -> do
{-# LINE 1536 "Network/Socket.hsc" #-}
withSockAddr addr $ \ptr_addr sz -> do
ret <- c_getnameinfo ptr_addr (fromIntegral sz) c_host c_hostlen
c_serv c_servlen (packBits niFlagMapping flags)
case ret of
0 -> do
let peekIf doIf c_val = if doIf
then liftM Just $ peekCString c_val
else return Nothing
host <- peekIf doHost c_host
serv <- peekIf doService c_serv
return (host, serv)
_ -> do err <- gai_strerror ret
ioError (ioeSetErrorString
(mkIOError NoSuchThing "Network.Socket.getNameInfo" Nothing
Nothing) err)
foreign import ccall safe "hsnet_getnameinfo"
c_getnameinfo :: Ptr SockAddr -> CInt{-CSockLen???-} -> CString -> CSize -> CString
-> CSize -> CInt -> IO CInt
{-# LINE 1556 "Network/Socket.hsc" #-}
mkInvalidRecvArgError :: String -> IOError
mkInvalidRecvArgError loc = ioeSetErrorString (mkIOError
InvalidArgument
loc Nothing Nothing) "non-positive length"
mkEOFError :: String -> IOError
mkEOFError loc = ioeSetErrorString (mkIOError EOF loc Nothing Nothing) "end of file"
-- ---------------------------------------------------------------------------
-- foreign imports from the C library
foreign import ccall unsafe "hsnet_inet_ntoa"
c_inet_ntoa :: HostAddress -> IO (Ptr CChar)
foreign import ccall unsafe "inet_addr"
c_inet_addr :: Ptr CChar -> IO HostAddress
foreign import ccall unsafe "shutdown"
c_shutdown :: CInt -> CInt -> IO CInt
closeFd :: CInt -> IO ()
closeFd fd = throwSocketErrorIfMinus1_ "Network.Socket.close" $ c_close fd
{-# LINE 1581 "Network/Socket.hsc" #-}
foreign import ccall unsafe "close"
c_close :: CInt -> IO CInt
{-# LINE 1587 "Network/Socket.hsc" #-}
foreign import ccall unsafe "socket"
c_socket :: CInt -> CInt -> CInt -> IO CInt
foreign import ccall unsafe "bind"
c_bind :: CInt -> Ptr SockAddr -> CInt{-CSockLen???-} -> IO CInt
foreign import ccall unsafe "connect"
c_connect :: CInt -> Ptr SockAddr -> CInt{-CSockLen???-} -> IO CInt
{-# LINE 1595 "Network/Socket.hsc" #-}
foreign import ccall unsafe "accept4"
c_accept4 :: CInt -> Ptr SockAddr -> Ptr CInt{-CSockLen???-} -> CInt -> IO CInt
{-# LINE 1601 "Network/Socket.hsc" #-}
foreign import ccall unsafe "listen"
c_listen :: CInt -> CInt -> IO CInt
{-# LINE 1610 "Network/Socket.hsc" #-}
foreign import ccall unsafe "send"
c_send :: CInt -> Ptr a -> CSize -> CInt -> IO CInt
foreign import ccall unsafe "sendto"
c_sendto :: CInt -> Ptr a -> CSize -> CInt -> Ptr SockAddr -> CInt -> IO CInt
foreign import ccall unsafe "recv"
c_recv :: CInt -> Ptr CChar -> CSize -> CInt -> IO CInt
foreign import ccall unsafe "recvfrom"
c_recvfrom :: CInt -> Ptr a -> CSize -> CInt -> Ptr SockAddr -> Ptr CInt -> IO CInt
foreign import ccall unsafe "getpeername"
c_getpeername :: CInt -> Ptr SockAddr -> Ptr CInt -> IO CInt
foreign import ccall unsafe "getsockname"
c_getsockname :: CInt -> Ptr SockAddr -> Ptr CInt -> IO CInt
foreign import ccall unsafe "getsockopt"
c_getsockopt :: CInt -> CInt -> CInt -> Ptr CInt -> Ptr CInt -> IO CInt
foreign import ccall unsafe "setsockopt"
c_setsockopt :: CInt -> CInt -> CInt -> Ptr CInt -> CInt -> IO CInt
{-# LINE 1633 "Network/Socket.hsc" #-}
-- ---------------------------------------------------------------------------
-- * Deprecated aliases
-- $deprecated-aliases
--
-- These aliases are deprecated and should not be used in new code.
-- They will be removed in some future version of the package.
{-# DEPRECATED bindSocket "use 'bind'" #-}
-- | Deprecated alias for 'bind'.
bindSocket :: Socket -- Unconnected Socket
-> SockAddr -- Address to Bind to
-> IO ()
bindSocket = bind
{-# DEPRECATED sClose "use 'close'" #-}
-- | Deprecated alias for 'close'.
sClose :: Socket -> IO ()
sClose = close
{-# DEPRECATED sIsConnected "use 'isConnected'" #-}
-- | Deprecated alias for 'isConnected'.
sIsConnected :: Socket -> IO Bool
sIsConnected = isConnected
{-# DEPRECATED sIsBound "use 'isBound'" #-}
-- | Deprecated alias for 'isBound'.
sIsBound :: Socket -> IO Bool
sIsBound = isBound
{-# DEPRECATED sIsListening "use 'isListening'" #-}
-- | Deprecated alias for 'isListening'.
sIsListening :: Socket -> IO Bool
sIsListening = isListening
{-# DEPRECATED sIsReadable "use 'isReadable'" #-}
-- | Deprecated alias for 'isReadable'.
sIsReadable :: Socket -> IO Bool
sIsReadable = isReadable
{-# DEPRECATED sIsWritable "use 'isWritable'" #-}
-- | Deprecated alias for 'isWritable'.
sIsWritable :: Socket -> IO Bool
sIsWritable = isWritable
| phischu/fragnix | tests/packages/scotty/Network.Socket.hs | bsd-3-clause | 60,647 | 480 | 33 | 13,640 | 9,357 | 5,328 | 4,029 | -1 | -1 |
{-# LANGUAGE BangPatterns, DeriveDataTypeable, RecordWildCards #-}
-- |
-- Module : Criterion.Analysis
-- Copyright : (c) 2009-2014 Bryan O'Sullivan
--
-- License : BSD-style
-- Maintainer : [email protected]
-- Stability : experimental
-- Portability : GHC
--
-- Analysis code for benchmarks.
module Criterion.Analysis
(
Outliers(..)
, OutlierEffect(..)
, OutlierVariance(..)
, SampleAnalysis(..)
, analyseSample
, scale
, analyseMean
, countOutliers
, classifyOutliers
, noteOutliers
, outlierVariance
, resolveAccessors
, validateAccessors
, regress
) where
import Control.Arrow (second)
import Control.Monad (unless, when)
import Control.Monad.Reader (ask)
import Control.Monad.Trans
import Control.Monad.Trans.Except
import Criterion.IO.Printf (note, prolix)
import Criterion.Measurement (secs, threshold)
import Criterion.Monad (Criterion, getGen, getOverhead)
import Criterion.Types
import Data.Int (Int64)
import Data.Maybe (fromJust)
import Data.Monoid (Monoid(..))
import Statistics.Function (sort)
import Statistics.Quantile (weightedAvg)
import Statistics.Regression (bootstrapRegress, olsRegress)
import Statistics.Resampling (resample)
import Statistics.Sample (mean)
import Statistics.Sample.KernelDensity (kde)
import Statistics.Types (Estimator(..), Sample)
import System.Random.MWC (GenIO)
import qualified Data.List as List
import qualified Data.Map as Map
import qualified Data.Vector as V
import qualified Data.Vector.Generic as G
import qualified Data.Vector.Unboxed as U
import qualified Statistics.Resampling.Bootstrap as B
-- | Classify outliers in a data set, using the boxplot technique.
classifyOutliers :: Sample -> Outliers
classifyOutliers sa = U.foldl' ((. outlier) . mappend) mempty ssa
where outlier e = Outliers {
samplesSeen = 1
, lowSevere = if e <= loS && e < hiM then 1 else 0
, lowMild = if e > loS && e <= loM then 1 else 0
, highMild = if e >= hiM && e < hiS then 1 else 0
, highSevere = if e >= hiS && e > loM then 1 else 0
}
!loS = q1 - (iqr * 3)
!loM = q1 - (iqr * 1.5)
!hiM = q3 + (iqr * 1.5)
!hiS = q3 + (iqr * 3)
q1 = weightedAvg 1 4 ssa
q3 = weightedAvg 3 4 ssa
ssa = sort sa
iqr = q3 - q1
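-- As a concrete illustration of the thresholds above: with first and third
-- quartiles q1 = 10 and q3 = 20 (so iqr = 10), points at or below -20 or at
-- or above 50 are counted as severe outliers, while points in (-20, -5] or
-- [35, 50) are counted as mild ones (illustrative numbers only).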
-- | Compute the extent to which outliers in the sample data affect
-- the sample mean and standard deviation.
outlierVariance :: B.Estimate -- ^ Bootstrap estimate of sample mean.
-> B.Estimate -- ^ Bootstrap estimate of sample
-- standard deviation.
-> Double -- ^ Number of original iterations.
-> OutlierVariance
outlierVariance µ σ a = OutlierVariance effect desc varOutMin
where
( effect, desc ) | varOutMin < 0.01 = (Unaffected, "no")
| varOutMin < 0.1 = (Slight, "slight")
| varOutMin < 0.5 = (Moderate, "moderate")
| otherwise = (Severe, "severe")
varOutMin = (minBy varOut 1 (minBy cMax 0 µgMin)) / σb2
varOut c = (ac / a) * (σb2 - ac * σg2) where ac = a - c
σb = B.estPoint σ
µa = B.estPoint µ / a
µgMin = µa / 2
σg = min (µgMin / 4) (σb / sqrt a)
σg2 = σg * σg
σb2 = σb * σb
minBy f q r = min (f q) (f r)
cMax x = fromIntegral (floor (-2 * k0 / (k1 + sqrt det)) :: Int)
where
k1 = σb2 - a * σg2 + ad
k0 = -a * ad
ad = a * d
d = k * k where k = µa - x
det = k1 * k1 - 4 * σg2 * k0
-- | Count the total number of outliers in a sample.
countOutliers :: Outliers -> Int64
countOutliers (Outliers _ a b c d) = a + b + c + d
{-# INLINE countOutliers #-}
-- | Display the mean of a 'Sample', and characterise the outliers
-- present in the sample.
analyseMean :: Sample
-> Int -- ^ Number of iterations used to
-- compute the sample.
-> Criterion Double
analyseMean a iters = do
let µ = mean a
_ <- note "mean is %s (%d iterations)\n" (secs µ) iters
noteOutliers . classifyOutliers $ a
return µ
-- | Multiply the 'Estimate's in an analysis by the given value, using
-- 'B.scale'.
scale :: Double -- ^ Value to multiply by.
-> SampleAnalysis -> SampleAnalysis
scale f s@SampleAnalysis{..} = s {
anMean = B.scale f anMean
, anStdDev = B.scale f anStdDev
}
-- | Perform an analysis of a measurement.
analyseSample :: Int -- ^ Experiment number.
-> String -- ^ Experiment name.
-> V.Vector Measured -- ^ Sample data.
-> ExceptT String Criterion Report
analyseSample i name meas = do
Config{..} <- ask
overhead <- lift getOverhead
let ests = [Mean,StdDev]
-- The use of filter here throws away very-low-quality
-- measurements when bootstrapping the mean and standard
-- deviations. Without this, the numbers look nonsensical when
-- very brief actions are measured.
stime = measure (measTime . rescale) .
G.filter ((>= threshold) . measTime) . G.map fixTime .
G.tail $ meas
fixTime m = m { measTime = measTime m - overhead / 2 }
n = G.length meas
s = G.length stime
_ <- lift $ prolix "bootstrapping with %d of %d samples (%d%%)\n"
s n ((s * 100) `quot` n)
gen <- lift getGen
rs <- mapM (\(ps,r) -> regress gen ps r meas) $
((["iters"],"time"):regressions)
resamps <- liftIO $ resample gen ests resamples stime
let [estMean,estStdDev] = B.bootstrapBCA confInterval stime ests resamps
ov = outlierVariance estMean estStdDev (fromIntegral n)
an = SampleAnalysis {
anRegress = rs
, anOverhead = overhead
, anMean = estMean
, anStdDev = estStdDev
, anOutlierVar = ov
}
return Report {
reportNumber = i
, reportName = name
, reportKeys = measureKeys
, reportMeasured = meas
, reportAnalysis = an
, reportOutliers = classifyOutliers stime
, reportKDEs = [uncurry (KDE "time") (kde 128 stime)]
}
-- | Regress the given predictors against the responder.
--
-- Errors may be returned under various circumstances, such as invalid
-- names or lack of needed data.
--
-- See 'olsRegress' for details of the regression performed.
regress :: GenIO
-> [String] -- ^ Predictor names.
-> String -- ^ Responder name.
-> V.Vector Measured
-> ExceptT String Criterion Regression
regress gen predNames respName meas = do
when (G.null meas) $
throwE "no measurements"
accs <- ExceptT . return $ validateAccessors predNames respName
let unmeasured = [n | (n, Nothing) <- map (second ($ G.head meas)) accs]
unless (null unmeasured) $
throwE $ "no data available for " ++ renderNames unmeasured
let (r:ps) = map ((`measure` meas) . (fromJust .) . snd) accs
Config{..} <- ask
(coeffs,r2) <- liftIO $
bootstrapRegress gen resamples confInterval olsRegress ps r
return Regression {
regResponder = respName
, regCoeffs = Map.fromList (zip (predNames ++ ["y"]) (G.toList coeffs))
, regRSquare = r2
}
singleton :: [a] -> Bool
singleton [_] = True
singleton _ = False
-- | Given a list of accessor names (see 'measureKeys'), return either
-- a mapping from accessor name to function or an error message if
-- any names are wrong.
resolveAccessors :: [String]
-> Either String [(String, Measured -> Maybe Double)]
resolveAccessors names =
case unresolved of
[] -> Right [(n, a) | (n, Just (a,_)) <- accessors]
_ -> Left $ "unknown metric " ++ renderNames unresolved
where
unresolved = [n | (n, Nothing) <- accessors]
accessors = flip map names $ \n -> (n, Map.lookup n measureAccessors)
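-- For example, @resolveAccessors ["iters", "time"]@ should produce a 'Right'
-- mapping, since both names appear in 'measureAccessors' (they are the default
-- regression used above), whereas @resolveAccessors ["bogus"]@ produces
-- @Left "unknown metric \"bogus\""@ (illustrative).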
-- | Given predictor and responder names, do some basic validation,
-- then hand back the relevant accessors.
validateAccessors :: [String] -- ^ Predictor names.
-> String -- ^ Responder name.
-> Either String [(String, Measured -> Maybe Double)]
validateAccessors predNames respName = do
when (null predNames) $
Left "no predictors specified"
let names = respName:predNames
dups = map head . filter (not . singleton) .
List.group . List.sort $ names
unless (null dups) $
Left $ "duplicated metric " ++ renderNames dups
resolveAccessors names
renderNames :: [String] -> String
renderNames = List.intercalate ", " . map show
-- | Display a report of the 'Outliers' present in a 'Sample'.
noteOutliers :: Outliers -> Criterion ()
noteOutliers o = do
let frac n = (100::Double) * fromIntegral n / fromIntegral (samplesSeen o)
check :: Int64 -> Double -> String -> Criterion ()
check k t d = when (frac k > t) $
note " %d (%.1g%%) %s\n" k (frac k) d
outCount = countOutliers o
when (outCount > 0) $ do
_ <- note "found %d outliers among %d samples (%.1g%%)\n"
outCount (samplesSeen o) (frac outCount)
check (lowSevere o) 0 "low severe"
check (lowMild o) 1 "low mild"
check (highMild o) 1 "high mild"
check (highSevere o) 0 "high severe"
| paulolieuthier/criterion | Criterion/Analysis.hs | bsd-2-clause | 9,668 | 0 | 24 | 2,908 | 2,640 | 1,429 | 1,211 | 194 | 5 |
{-
Copyright (C) 2013-2014 John MacFarlane <[email protected]>
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
-}
{- |
Module : Text.Pandoc.Asciify
Copyright : Copyright (C) 2013-2014 John MacFarlane
License : GNU GPL, version 2 or above
Maintainer : John MacFarlane <[email protected]>
Stability : alpha
Portability : portable
Function to convert accented latin letters to their unaccented
ascii equivalents (used in constructing HTML identifiers).
-}
module Text.Pandoc.Asciify (toAsciiChar)
where
import qualified Data.Map as M
import Data.Char (isAscii)
toAsciiChar :: Char -> Maybe Char
toAsciiChar c | isAscii c = Just c
| otherwise = M.lookup c asciiMap
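-- A few illustrative cases ('\233' is the accented letter é; '\8364' is the
-- euro sign, which has no entry in the map below):
--
-- >>> toAsciiChar 'x'
-- Just 'x'
-- >>> toAsciiChar '\233'
-- Just 'e'
-- >>> toAsciiChar '\8364'
-- Nothing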
asciiMap :: M.Map Char Char
asciiMap = M.fromList
[('\192','A')
,('\193','A')
,('\194','A')
,('\195','A')
,('\196','A')
,('\197','A')
,('\199','C')
,('\200','E')
,('\201','E')
,('\202','E')
,('\203','E')
,('\204','I')
,('\205','I')
,('\206','I')
,('\207','I')
,('\209','N')
,('\210','O')
,('\211','O')
,('\212','O')
,('\213','O')
,('\214','O')
,('\217','U')
,('\218','U')
,('\219','U')
,('\220','U')
,('\221','Y')
,('\224','a')
,('\225','a')
,('\226','a')
,('\227','a')
,('\228','a')
,('\229','a')
,('\231','c')
,('\232','e')
,('\233','e')
,('\234','e')
,('\235','e')
,('\236','i')
,('\237','i')
,('\238','i')
,('\239','i')
,('\241','n')
,('\242','o')
,('\243','o')
,('\244','o')
,('\245','o')
,('\246','o')
,('\249','u')
,('\250','u')
,('\251','u')
,('\252','u')
,('\253','y')
,('\255','y')
,('\256','A')
,('\257','a')
,('\258','A')
,('\259','a')
,('\260','A')
,('\261','a')
,('\262','C')
,('\263','c')
,('\264','C')
,('\265','c')
,('\266','C')
,('\267','c')
,('\268','C')
,('\269','c')
,('\270','D')
,('\271','d')
,('\274','E')
,('\275','e')
,('\276','E')
,('\277','e')
,('\278','E')
,('\279','e')
,('\280','E')
,('\281','e')
,('\282','E')
,('\283','e')
,('\284','G')
,('\285','g')
,('\286','G')
,('\287','g')
,('\288','G')
,('\289','g')
,('\290','G')
,('\291','g')
,('\292','H')
,('\293','h')
,('\296','I')
,('\297','i')
,('\298','I')
,('\299','i')
,('\300','I')
,('\301','i')
,('\302','I')
,('\303','i')
,('\304','I')
,('\308','J')
,('\309','j')
,('\310','K')
,('\311','k')
,('\313','L')
,('\314','l')
,('\315','L')
,('\316','l')
,('\317','L')
,('\318','l')
,('\323','N')
,('\324','n')
,('\325','N')
,('\326','n')
,('\327','N')
,('\328','n')
,('\332','O')
,('\333','o')
,('\334','O')
,('\335','o')
,('\336','O')
,('\337','o')
,('\340','R')
,('\341','r')
,('\342','R')
,('\343','r')
,('\344','R')
,('\345','r')
,('\346','S')
,('\347','s')
,('\348','S')
,('\349','s')
,('\350','S')
,('\351','s')
,('\352','S')
,('\353','s')
,('\354','T')
,('\355','t')
,('\356','T')
,('\357','t')
,('\360','U')
,('\361','u')
,('\362','U')
,('\363','u')
,('\364','U')
,('\365','u')
,('\366','U')
,('\367','u')
,('\368','U')
,('\369','u')
,('\370','U')
,('\371','u')
,('\372','W')
,('\373','w')
,('\374','Y')
,('\375','y')
,('\376','Y')
,('\377','Z')
,('\378','z')
,('\379','Z')
,('\380','z')
,('\381','Z')
,('\382','z')
,('\416','O')
,('\417','o')
,('\431','U')
,('\432','u')
,('\461','A')
,('\462','a')
,('\463','I')
,('\464','i')
,('\465','O')
,('\466','o')
,('\467','U')
,('\468','u')
,('\486','G')
,('\487','g')
,('\488','K')
,('\489','k')
,('\490','O')
,('\491','o')
,('\496','j')
,('\500','G')
,('\501','g')
,('\504','N')
,('\505','n')
,('\512','A')
,('\513','a')
,('\514','A')
,('\515','a')
,('\516','E')
,('\517','e')
,('\518','E')
,('\519','e')
,('\520','I')
,('\521','i')
,('\522','I')
,('\523','i')
,('\524','O')
,('\525','o')
,('\526','O')
,('\527','o')
,('\528','R')
,('\529','r')
,('\530','R')
,('\531','r')
,('\532','U')
,('\533','u')
,('\534','U')
,('\535','u')
,('\536','S')
,('\537','s')
,('\538','T')
,('\539','t')
,('\542','H')
,('\543','h')
,('\550','A')
,('\551','a')
,('\552','E')
,('\553','e')
,('\558','O')
,('\559','o')
,('\562','Y')
,('\563','y')
,('\894',';')
,('\7680','A')
,('\7681','a')
,('\7682','B')
,('\7683','b')
,('\7684','B')
,('\7685','b')
,('\7686','B')
,('\7687','b')
,('\7690','D')
,('\7691','d')
,('\7692','D')
,('\7693','d')
,('\7694','D')
,('\7695','d')
,('\7696','D')
,('\7697','d')
,('\7698','D')
,('\7699','d')
,('\7704','E')
,('\7705','e')
,('\7706','E')
,('\7707','e')
,('\7710','F')
,('\7711','f')
,('\7712','G')
,('\7713','g')
,('\7714','H')
,('\7715','h')
,('\7716','H')
,('\7717','h')
,('\7718','H')
,('\7719','h')
,('\7720','H')
,('\7721','h')
,('\7722','H')
,('\7723','h')
,('\7724','I')
,('\7725','i')
,('\7728','K')
,('\7729','k')
,('\7730','K')
,('\7731','k')
,('\7732','K')
,('\7733','k')
,('\7734','L')
,('\7735','l')
,('\7738','L')
,('\7739','l')
,('\7740','L')
,('\7741','l')
,('\7742','M')
,('\7743','m')
,('\7744','M')
,('\7745','m')
,('\7746','M')
,('\7747','m')
,('\7748','N')
,('\7749','n')
,('\7750','N')
,('\7751','n')
,('\7752','N')
,('\7753','n')
,('\7754','N')
,('\7755','n')
,('\7764','P')
,('\7765','p')
,('\7766','P')
,('\7767','p')
,('\7768','R')
,('\7769','r')
,('\7770','R')
,('\7771','r')
,('\7774','R')
,('\7775','r')
,('\7776','S')
,('\7777','s')
,('\7778','S')
,('\7779','s')
,('\7786','T')
,('\7787','t')
,('\7788','T')
,('\7789','t')
,('\7790','T')
,('\7791','t')
,('\7792','T')
,('\7793','t')
,('\7794','U')
,('\7795','u')
,('\7796','U')
,('\7797','u')
,('\7798','U')
,('\7799','u')
,('\7804','V')
,('\7805','v')
,('\7806','V')
,('\7807','v')
,('\7808','W')
,('\7809','w')
,('\7810','W')
,('\7811','w')
,('\7812','W')
,('\7813','w')
,('\7814','W')
,('\7815','w')
,('\7816','W')
,('\7817','w')
,('\7818','X')
,('\7819','x')
,('\7820','X')
,('\7821','x')
,('\7822','Y')
,('\7823','y')
,('\7824','Z')
,('\7825','z')
,('\7826','Z')
,('\7827','z')
,('\7828','Z')
,('\7829','z')
,('\7830','h')
,('\7831','t')
,('\7832','w')
,('\7833','y')
,('\7840','A')
,('\7841','a')
,('\7842','A')
,('\7843','a')
,('\7864','E')
,('\7865','e')
,('\7866','E')
,('\7867','e')
,('\7868','E')
,('\7869','e')
,('\7880','I')
,('\7881','i')
,('\7882','I')
,('\7883','i')
,('\7884','O')
,('\7885','o')
,('\7886','O')
,('\7887','o')
,('\7908','U')
,('\7909','u')
,('\7910','U')
,('\7911','u')
,('\7922','Y')
,('\7923','y')
,('\7924','Y')
,('\7925','y')
,('\7926','Y')
,('\7927','y')
,('\7928','Y')
,('\7929','y')
,('\8175','`')
,('\8490','K')
,('\8800','=')
,('\8814','<')
,('\8815','>')
]
| peter-fogg/pardoc | src/Text/Pandoc/Asciify.hs | gpl-2.0 | 7,630 | 0 | 8 | 1,409 | 2,382 | 1,574 | 808 | 388 | 1 |
{-# LANGUAGE DeriveDataTypeable #-}
module Main
( main
) where
import Test.Tasty
import Test.Tasty.Options
import Data.Proxy
import Data.Typeable
import Distribution.Simple.Utils
import Distribution.Verbosity
import Distribution.Compat.Time
import qualified UnitTests.Distribution.Compat.CreatePipe
import qualified UnitTests.Distribution.Compat.ReadP
import qualified UnitTests.Distribution.Compat.Time
import qualified UnitTests.Distribution.Simple.Program.Internal
import qualified UnitTests.Distribution.Simple.Utils
import qualified UnitTests.Distribution.System
import qualified UnitTests.Distribution.Utils.NubList
import qualified UnitTests.Distribution.Version (versionTests)
tests :: Int -> TestTree
tests mtimeChangeCalibrated =
askOption $ \(OptionMtimeChangeDelay mtimeChangeProvided) ->
let mtimeChange = if mtimeChangeProvided /= 0
then mtimeChangeProvided
else mtimeChangeCalibrated
in
testGroup "Unit Tests" $
[ testGroup "Distribution.Compat.CreatePipe"
UnitTests.Distribution.Compat.CreatePipe.tests
, testGroup "Distribution.Compat.ReadP"
UnitTests.Distribution.Compat.ReadP.tests
, testGroup "Distribution.Compat.Time"
(UnitTests.Distribution.Compat.Time.tests mtimeChange)
, testGroup "Distribution.Simple.Program.Internal"
UnitTests.Distribution.Simple.Program.Internal.tests
, testGroup "Distribution.Simple.Utils"
UnitTests.Distribution.Simple.Utils.tests
, testGroup "Distribution.Utils.NubList"
UnitTests.Distribution.Utils.NubList.tests
, testGroup "Distribution.System"
UnitTests.Distribution.System.tests
, testGroup "Distribution.Version"
UnitTests.Distribution.Version.versionTests
]
extraOptions :: [OptionDescription]
extraOptions =
[ Option (Proxy :: Proxy OptionMtimeChangeDelay)
]
newtype OptionMtimeChangeDelay = OptionMtimeChangeDelay Int
deriving Typeable
instance IsOption OptionMtimeChangeDelay where
defaultValue = OptionMtimeChangeDelay 0
parseValue = fmap OptionMtimeChangeDelay . safeRead
optionName = return "mtime-change-delay"
optionHelp = return $ "How long to wait before attempting to detect"
++ "file modification, in microseconds"
main :: IO ()
main = do
(mtimeChange, mtimeChange') <- calibrateMtimeChangeDelay
let toMillis :: Int -> Double
toMillis x = fromIntegral x / 1000.0
notice normal $ "File modification time resolution calibration completed, "
++ "maximum delay observed: "
++ (show . toMillis $ mtimeChange ) ++ " ms. "
++ "Will be using delay of " ++ (show . toMillis $ mtimeChange')
++ " for test runs."
defaultMainWithIngredients
(includingOptions extraOptions : defaultIngredients)
(tests mtimeChange')
| thomie/cabal | Cabal/tests/UnitTests.hs | bsd-3-clause | 2,833 | 0 | 14 | 501 | 511 | 294 | 217 | 65 | 2 |
{-# LANGUAGE Trustworthy #-}
{-# LANGUAGE BangPatterns, NoImplicitPrelude #-}
-- Copyright (c) 2008, Ralf Hinze
-- All rights reserved.
--
-- Redistribution and use in source and binary forms, with or without
-- modification, are permitted provided that the following conditions
-- are met:
--
-- * Redistributions of source code must retain the above
-- copyright notice, this list of conditions and the following
-- disclaimer.
--
-- * Redistributions in binary form must reproduce the above
-- copyright notice, this list of conditions and the following
-- disclaimer in the documentation and/or other materials
-- provided with the distribution.
--
-- * The names of the contributors may not be used to endorse or
-- promote products derived from this software without specific
-- prior written permission.
--
-- THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-- "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-- LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
-- FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
-- COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
-- INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
-- (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
-- SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
-- HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
-- STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
-- ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
-- OF THE POSSIBILITY OF SUCH DAMAGE.
-- | A /priority search queue/ (henceforth /queue/) efficiently
-- supports the operations of both a search tree and a priority queue.
-- An 'Elem'ent is a product of a key, a priority, and a
-- value. Elements can be inserted, deleted, modified and queried in
-- logarithmic time, and the element with the least priority can be
-- retrieved in constant time. A queue can be built from a list of
-- elements, sorted by keys, in linear time.
--
-- This implementation is due to Ralf Hinze with some modifications by
-- Scott Dillard and Johan Tibell.
--
-- * Hinze, R., /A Simple Implementation Technique for Priority Search
-- Queues/, ICFP 2001, pp. 110-121
--
-- <http://citeseer.ist.psu.edu/hinze01simple.html>
module GHC.Event.PSQ
(
-- * Binding Type
Elem(..)
, Key
, Prio
-- * Priority Search Queue Type
, PSQ
-- * Query
, size
, null
, lookup
-- * Construction
, empty
, singleton
-- * Insertion
, insert
-- * Delete/Update
, delete
, adjust
-- * Conversion
, toList
, toAscList
, toDescList
, fromList
-- * Min
, findMin
, deleteMin
, minView
, atMost
) where
import GHC.Base hiding (empty)
import GHC.Num (Num(..))
import GHC.Show (Show(showsPrec))
import GHC.Event.Unique (Unique)
-- | @E k p@ binds the key @k@ with the priority @p@.
data Elem a = E
{ key :: {-# UNPACK #-} !Key
, prio :: {-# UNPACK #-} !Prio
, value :: a
} deriving (Eq, Show)
------------------------------------------------------------------------
-- | A mapping from keys @k@ to priorities @p@.
type Prio = Double
type Key = Unique
data PSQ a = Void
| Winner {-# UNPACK #-} !(Elem a)
!(LTree a)
{-# UNPACK #-} !Key -- max key
deriving (Eq, Show)
-- | /O(1)/ The number of elements in a queue.
size :: PSQ a -> Int
size Void = 0
size (Winner _ lt _) = 1 + size' lt
-- | /O(1)/ True if the queue is empty.
null :: PSQ a -> Bool
null Void = True
null (Winner _ _ _) = False
-- | /O(log n)/ The priority and value of a given key, or Nothing if
-- the key is not bound.
lookup :: Key -> PSQ a -> Maybe (Prio, a)
lookup k q = case tourView q of
Null -> Nothing
Single (E k' p v)
| k == k' -> Just (p, v)
| otherwise -> Nothing
tl `Play` tr
| k <= maxKey tl -> lookup k tl
| otherwise -> lookup k tr
------------------------------------------------------------------------
-- Construction
empty :: PSQ a
empty = Void
-- | /O(1)/ Build a queue with one element.
singleton :: Key -> Prio -> a -> PSQ a
singleton k p v = Winner (E k p v) Start k
------------------------------------------------------------------------
-- Insertion
-- | /O(log n)/ Insert a new key, priority and value in the queue. If
-- the key is already present in the queue, the associated priority
-- and value are replaced with the supplied priority and value.
insert :: Key -> Prio -> a -> PSQ a -> PSQ a
insert k p v q = case q of
Void -> singleton k p v
Winner (E k' p' v') Start _ -> case compare k k' of
LT -> singleton k p v `play` singleton k' p' v'
EQ -> singleton k p v
GT -> singleton k' p' v' `play` singleton k p v
Winner e (RLoser _ e' tl m tr) m'
| k <= m -> insert k p v (Winner e tl m) `play` (Winner e' tr m')
| otherwise -> (Winner e tl m) `play` insert k p v (Winner e' tr m')
Winner e (LLoser _ e' tl m tr) m'
| k <= m -> insert k p v (Winner e' tl m) `play` (Winner e tr m')
| otherwise -> (Winner e' tl m) `play` insert k p v (Winner e tr m')
------------------------------------------------------------------------
-- Delete/Update
-- | /O(log n)/ Delete a key and its priority and value from the
-- queue. When the key is not a member of the queue, the original
-- queue is returned.
delete :: Key -> PSQ a -> PSQ a
delete k q = case q of
Void -> empty
Winner (E k' p v) Start _
| k == k' -> empty
| otherwise -> singleton k' p v
Winner e (RLoser _ e' tl m tr) m'
| k <= m -> delete k (Winner e tl m) `play` (Winner e' tr m')
| otherwise -> (Winner e tl m) `play` delete k (Winner e' tr m')
Winner e (LLoser _ e' tl m tr) m'
| k <= m -> delete k (Winner e' tl m) `play` (Winner e tr m')
| otherwise -> (Winner e' tl m) `play` delete k (Winner e tr m')
-- | /O(log n)/ Update a priority at a specific key with the result
-- of the provided function. When the key is not a member of the
-- queue, the original queue is returned.
adjust :: (Prio -> Prio) -> Key -> PSQ a -> PSQ a
adjust f k q0 = go q0
where
go q = case q of
Void -> empty
Winner (E k' p v) Start _
| k == k' -> singleton k' (f p) v
| otherwise -> singleton k' p v
Winner e (RLoser _ e' tl m tr) m'
| k <= m -> go (Winner e tl m) `unsafePlay` (Winner e' tr m')
| otherwise -> (Winner e tl m) `unsafePlay` go (Winner e' tr m')
Winner e (LLoser _ e' tl m tr) m'
| k <= m -> go (Winner e' tl m) `unsafePlay` (Winner e tr m')
| otherwise -> (Winner e' tl m) `unsafePlay` go (Winner e tr m')
{-# INLINE adjust #-}
------------------------------------------------------------------------
-- Conversion
-- | /O(n*log n)/ Build a queue from a list of key/priority/value
-- tuples. If the list contains more than one priority and value for
-- the same key, the last priority and value for the key is retained.
fromList :: [Elem a] -> PSQ a
fromList = foldr (\(E k p v) q -> insert k p v q) empty
-- | /O(n)/ Convert to a list of key/priority/value tuples.
toList :: PSQ a -> [Elem a]
toList = toAscList
-- | /O(n)/ Convert to an ascending list.
toAscList :: PSQ a -> [Elem a]
toAscList q = seqToList (toAscLists q)
toAscLists :: PSQ a -> Sequ (Elem a)
toAscLists q = case tourView q of
Null -> emptySequ
Single e -> singleSequ e
tl `Play` tr -> toAscLists tl <> toAscLists tr
-- | /O(n)/ Convert to a descending list.
toDescList :: PSQ a -> [ Elem a ]
toDescList q = seqToList (toDescLists q)
toDescLists :: PSQ a -> Sequ (Elem a)
toDescLists q = case tourView q of
Null -> emptySequ
Single e -> singleSequ e
tl `Play` tr -> toDescLists tr <> toDescLists tl
------------------------------------------------------------------------
-- Min
-- | /O(1)/ The element with the lowest priority.
findMin :: PSQ a -> Maybe (Elem a)
findMin Void = Nothing
findMin (Winner e _ _) = Just e
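-- Illustrative sketch (here @k0@ and @k1@ stand for two distinct 'Unique'
-- keys obtained elsewhere; they are assumptions of the example, not values
-- defined in this module):
--
-- > let q = insert k1 0.5 "b" (singleton k0 1.0 "a")
-- > findMin q == Just (E k1 0.5 "b")   -- the element with the lowest priority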
-- | /O(log n)/ Delete the element with the lowest priority. Returns
-- an empty queue if the queue is empty.
deleteMin :: PSQ a -> PSQ a
deleteMin Void = Void
deleteMin (Winner _ t m) = secondBest t m
-- | /O(log n)/ Retrieve the binding with the least priority, and the
-- rest of the queue stripped of that binding.
minView :: PSQ a -> Maybe (Elem a, PSQ a)
minView Void = Nothing
minView (Winner e t m) = Just (e, secondBest t m)
secondBest :: LTree a -> Key -> PSQ a
secondBest Start _ = Void
secondBest (LLoser _ e tl m tr) m' = Winner e tl m `play` secondBest tr m'
secondBest (RLoser _ e tl m tr) m' = secondBest tl m `play` Winner e tr m'
-- | /O(r*(log n - log r))/ Return a list of elements ordered by
-- key whose priorities are at most @pt@.
atMost :: Prio -> PSQ a -> ([Elem a], PSQ a)
atMost pt q = let (sequ, q') = atMosts pt q
in (seqToList sequ, q')
atMosts :: Prio -> PSQ a -> (Sequ (Elem a), PSQ a)
atMosts !pt q = case q of
(Winner e _ _)
| prio e > pt -> (emptySequ, q)
Void -> (emptySequ, Void)
Winner e Start _ -> (singleSequ e, Void)
Winner e (RLoser _ e' tl m tr) m' ->
let (sequ, q') = atMosts pt (Winner e tl m)
(sequ', q'') = atMosts pt (Winner e' tr m')
in (sequ <> sequ', q' `play` q'')
Winner e (LLoser _ e' tl m tr) m' ->
let (sequ, q') = atMosts pt (Winner e' tl m)
(sequ', q'') = atMosts pt (Winner e tr m')
in (sequ <> sequ', q' `play` q'')
------------------------------------------------------------------------
-- Loser tree
type Size = Int
data LTree a = Start
| LLoser {-# UNPACK #-} !Size
{-# UNPACK #-} !(Elem a)
!(LTree a)
{-# UNPACK #-} !Key -- split key
!(LTree a)
| RLoser {-# UNPACK #-} !Size
{-# UNPACK #-} !(Elem a)
!(LTree a)
{-# UNPACK #-} !Key -- split key
!(LTree a)
deriving (Eq, Show)
size' :: LTree a -> Size
size' Start = 0
size' (LLoser s _ _ _ _) = s
size' (RLoser s _ _ _ _) = s
left, right :: LTree a -> LTree a
left Start = moduleError "left" "empty loser tree"
left (LLoser _ _ tl _ _ ) = tl
left (RLoser _ _ tl _ _ ) = tl
right Start = moduleError "right" "empty loser tree"
right (LLoser _ _ _ _ tr) = tr
right (RLoser _ _ _ _ tr) = tr
maxKey :: PSQ a -> Key
maxKey Void = moduleError "maxKey" "empty queue"
maxKey (Winner _ _ m) = m
lloser, rloser :: Key -> Prio -> a -> LTree a -> Key -> LTree a -> LTree a
lloser k p v tl m tr = LLoser (1 + size' tl + size' tr) (E k p v) tl m tr
rloser k p v tl m tr = RLoser (1 + size' tl + size' tr) (E k p v) tl m tr
------------------------------------------------------------------------
-- Balancing
-- | Balance factor
omega :: Int
omega = 4
lbalance, rbalance :: Key -> Prio -> a -> LTree a -> Key -> LTree a -> LTree a
lbalance k p v l m r
| size' l + size' r < 2 = lloser k p v l m r
| size' r > omega * size' l = lbalanceLeft k p v l m r
| size' l > omega * size' r = lbalanceRight k p v l m r
| otherwise = lloser k p v l m r
rbalance k p v l m r
| size' l + size' r < 2 = rloser k p v l m r
| size' r > omega * size' l = rbalanceLeft k p v l m r
| size' l > omega * size' r = rbalanceRight k p v l m r
| otherwise = rloser k p v l m r
lbalanceLeft :: Key -> Prio -> a -> LTree a -> Key -> LTree a -> LTree a
lbalanceLeft k p v l m r
| size' (left r) < size' (right r) = lsingleLeft k p v l m r
| otherwise = ldoubleLeft k p v l m r
lbalanceRight :: Key -> Prio -> a -> LTree a -> Key -> LTree a -> LTree a
lbalanceRight k p v l m r
| size' (left l) > size' (right l) = lsingleRight k p v l m r
| otherwise = ldoubleRight k p v l m r
rbalanceLeft :: Key -> Prio -> a -> LTree a -> Key -> LTree a -> LTree a
rbalanceLeft k p v l m r
| size' (left r) < size' (right r) = rsingleLeft k p v l m r
| otherwise = rdoubleLeft k p v l m r
rbalanceRight :: Key -> Prio -> a -> LTree a -> Key -> LTree a -> LTree a
rbalanceRight k p v l m r
| size' (left l) > size' (right l) = rsingleRight k p v l m r
| otherwise = rdoubleRight k p v l m r
lsingleLeft :: Key -> Prio -> a -> LTree a -> Key -> LTree a -> LTree a
lsingleLeft k1 p1 v1 t1 m1 (LLoser _ (E k2 p2 v2) t2 m2 t3)
| p1 <= p2 = lloser k1 p1 v1 (rloser k2 p2 v2 t1 m1 t2) m2 t3
| otherwise = lloser k2 p2 v2 (lloser k1 p1 v1 t1 m1 t2) m2 t3
lsingleLeft k1 p1 v1 t1 m1 (RLoser _ (E k2 p2 v2) t2 m2 t3) =
rloser k2 p2 v2 (lloser k1 p1 v1 t1 m1 t2) m2 t3
lsingleLeft _ _ _ _ _ _ = moduleError "lsingleLeft" "malformed tree"
rsingleLeft :: Key -> Prio -> a -> LTree a -> Key -> LTree a -> LTree a
rsingleLeft k1 p1 v1 t1 m1 (LLoser _ (E k2 p2 v2) t2 m2 t3) =
rloser k1 p1 v1 (rloser k2 p2 v2 t1 m1 t2) m2 t3
rsingleLeft k1 p1 v1 t1 m1 (RLoser _ (E k2 p2 v2) t2 m2 t3) =
rloser k2 p2 v2 (rloser k1 p1 v1 t1 m1 t2) m2 t3
rsingleLeft _ _ _ _ _ _ = moduleError "rsingleLeft" "malformed tree"
lsingleRight :: Key -> Prio -> a -> LTree a -> Key -> LTree a -> LTree a
lsingleRight k1 p1 v1 (LLoser _ (E k2 p2 v2) t1 m1 t2) m2 t3 =
lloser k2 p2 v2 t1 m1 (lloser k1 p1 v1 t2 m2 t3)
lsingleRight k1 p1 v1 (RLoser _ (E k2 p2 v2) t1 m1 t2) m2 t3 =
lloser k1 p1 v1 t1 m1 (lloser k2 p2 v2 t2 m2 t3)
lsingleRight _ _ _ _ _ _ = moduleError "lsingleRight" "malformed tree"
rsingleRight :: Key -> Prio -> a -> LTree a -> Key -> LTree a -> LTree a
rsingleRight k1 p1 v1 (LLoser _ (E k2 p2 v2) t1 m1 t2) m2 t3 =
lloser k2 p2 v2 t1 m1 (rloser k1 p1 v1 t2 m2 t3)
rsingleRight k1 p1 v1 (RLoser _ (E k2 p2 v2) t1 m1 t2) m2 t3
| p1 <= p2 = rloser k1 p1 v1 t1 m1 (lloser k2 p2 v2 t2 m2 t3)
| otherwise = rloser k2 p2 v2 t1 m1 (rloser k1 p1 v1 t2 m2 t3)
rsingleRight _ _ _ _ _ _ = moduleError "rsingleRight" "malformed tree"
ldoubleLeft :: Key -> Prio -> a -> LTree a -> Key -> LTree a -> LTree a
ldoubleLeft k1 p1 v1 t1 m1 (LLoser _ (E k2 p2 v2) t2 m2 t3) =
lsingleLeft k1 p1 v1 t1 m1 (lsingleRight k2 p2 v2 t2 m2 t3)
ldoubleLeft k1 p1 v1 t1 m1 (RLoser _ (E k2 p2 v2) t2 m2 t3) =
lsingleLeft k1 p1 v1 t1 m1 (rsingleRight k2 p2 v2 t2 m2 t3)
ldoubleLeft _ _ _ _ _ _ = moduleError "ldoubleLeft" "malformed tree"
ldoubleRight :: Key -> Prio -> a -> LTree a -> Key -> LTree a -> LTree a
ldoubleRight k1 p1 v1 (LLoser _ (E k2 p2 v2) t1 m1 t2) m2 t3 =
lsingleRight k1 p1 v1 (lsingleLeft k2 p2 v2 t1 m1 t2) m2 t3
ldoubleRight k1 p1 v1 (RLoser _ (E k2 p2 v2) t1 m1 t2) m2 t3 =
lsingleRight k1 p1 v1 (rsingleLeft k2 p2 v2 t1 m1 t2) m2 t3
ldoubleRight _ _ _ _ _ _ = moduleError "ldoubleRight" "malformed tree"
rdoubleLeft :: Key -> Prio -> a -> LTree a -> Key -> LTree a -> LTree a
rdoubleLeft k1 p1 v1 t1 m1 (LLoser _ (E k2 p2 v2) t2 m2 t3) =
rsingleLeft k1 p1 v1 t1 m1 (lsingleRight k2 p2 v2 t2 m2 t3)
rdoubleLeft k1 p1 v1 t1 m1 (RLoser _ (E k2 p2 v2) t2 m2 t3) =
rsingleLeft k1 p1 v1 t1 m1 (rsingleRight k2 p2 v2 t2 m2 t3)
rdoubleLeft _ _ _ _ _ _ = moduleError "rdoubleLeft" "malformed tree"
rdoubleRight :: Key -> Prio -> a -> LTree a -> Key -> LTree a -> LTree a
rdoubleRight k1 p1 v1 (LLoser _ (E k2 p2 v2) t1 m1 t2) m2 t3 =
rsingleRight k1 p1 v1 (lsingleLeft k2 p2 v2 t1 m1 t2) m2 t3
rdoubleRight k1 p1 v1 (RLoser _ (E k2 p2 v2) t1 m1 t2) m2 t3 =
rsingleRight k1 p1 v1 (rsingleLeft k2 p2 v2 t1 m1 t2) m2 t3
rdoubleRight _ _ _ _ _ _ = moduleError "rdoubleRight" "malformed tree"
-- | Takes two pennants and returns a new pennant that is the union of
-- the two with the precondition that the keys in the first tree are
-- strictly smaller than the keys in the second tree.
play :: PSQ a -> PSQ a -> PSQ a
Void `play` t' = t'
t `play` Void = t
Winner e@(E k p v) t m `play` Winner e'@(E k' p' v') t' m'
| p <= p' = Winner e (rbalance k' p' v' t m t') m'
| otherwise = Winner e' (lbalance k p v t m t') m'
{-# INLINE play #-}
-- | A version of 'play' that can be used if the shape of the tree has
-- not changed or if the tree is known to be balanced.
unsafePlay :: PSQ a -> PSQ a -> PSQ a
Void `unsafePlay` t' = t'
t `unsafePlay` Void = t
Winner e@(E k p v) t m `unsafePlay` Winner e'@(E k' p' v') t' m'
| p <= p' = Winner e (rloser k' p' v' t m t') m'
| otherwise = Winner e' (lloser k p v t m t') m'
{-# INLINE unsafePlay #-}
data TourView a = Null
| Single {-# UNPACK #-} !(Elem a)
| (PSQ a) `Play` (PSQ a)
tourView :: PSQ a -> TourView a
tourView Void = Null
tourView (Winner e Start _) = Single e
tourView (Winner e (RLoser _ e' tl m tr) m') =
Winner e tl m `Play` Winner e' tr m'
tourView (Winner e (LLoser _ e' tl m tr) m') =
Winner e' tl m `Play` Winner e tr m'
------------------------------------------------------------------------
-- Utility functions
moduleError :: String -> String -> a
moduleError fun msg = errorWithoutStackTrace ("GHC.Event.PSQ." ++ fun ++ ':' : ' ' : msg)
{-# NOINLINE moduleError #-}
------------------------------------------------------------------------
-- Hughes's efficient sequence type
newtype Sequ a = Sequ ([a] -> [a])
emptySequ :: Sequ a
emptySequ = Sequ (\as -> as)
singleSequ :: a -> Sequ a
singleSequ a = Sequ (\as -> a : as)
(<>) :: Sequ a -> Sequ a -> Sequ a
Sequ x1 <> Sequ x2 = Sequ (\as -> x1 (x2 as))
infixr 5 <>
seqToList :: Sequ a -> [a]
seqToList (Sequ x) = x []
-- | @since 4.3.1.0
instance Show a => Show (Sequ a) where
showsPrec d a = showsPrec d (seqToList a)
| snoyberg/ghc | libraries/base/GHC/Event/PSQ.hs | bsd-3-clause | 18,113 | 0 | 14 | 5,068 | 6,446 | 3,231 | 3,215 | 304 | 6 |
module AmortizedQueue where
import Prelude hiding (head)
data AbsQueue a = AQ { front :: [a]
, rear :: [a] }
{-@ data AbsQueue a = AQ { front :: [a]
, rear :: {v:[a] | size v <= size front} } @-}
{-@ die :: {v:String | false} -> a @-}
die :: String -> a
die x = error x
{-@ measure size @-}
size :: [a] -> Int
size [] = 0
size (x:xs) = 1 + size xs
{-@ measure qsize @-}
qsize :: AbsQueue a -> Int
qsize (AQ xs ys) = size xs + size ys
{-@ invariant {v:[a] | size v >= 0} @-}
{-@ invariant {v:AbsQueue a | qsize v >= 0} @-}
{-@ type NEList a = {v:[a] | size v > 0} @-}
{-@ type NEQueue a = {v:AbsQueue a | qsize v > 0} @-}
makeQueue :: [a] -> [a] -> AbsQueue a
makeQueue f b
| size b <= size f = AQ f b
| otherwise = AQ (f ++ reverse b) []
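-- For instance (illustrative): @makeQueue [1] [3,2]@ has a rear longer than
-- the front, so it is rebuilt as @AQ [1,2,3] []@, restoring the invariant
-- that the rear is never longer than the front.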
{- measure qhead @-}
{-@ qhead :: NEQueue a -> a @-}
qhead (AQ f _) = head f
qhead _ = die "never"
{- measure head @-}
{-@ head :: NEList a -> a @-}
head (x:_) = x
-- head _ = die "never"
{- toList :: q:AbsQueue a -> {v:[a] | 0 < len v => head v = front q} @-}
toList :: AbsQueue a -> [a]
toList = undefined
-- forall q :: AbsQueue a, xs:: [a], x::a.
-- not (isEmpty(q)) && asList(q) == xs =>
{-
import leon.lang._
import leon.annotation._
object AmortizedQueue {
sealed abstract class List
case class Cons(head : Int, tail : List) extends List
case object Nil extends List
sealed abstract class AbsQueue
case class Queue(front : List, rear : List) extends AbsQueue
def size(list : List) : Int = (list match {
case Nil => 0
case Cons(_, xs) => 1 + size(xs)
}) ensuring(_ >= 0)
def content(l: List) : Set[Int] = l match {
case Nil => Set.empty[Int]
case Cons(x, xs) => Set(x) ++ content(xs)
}
def asList(queue : AbsQueue) : List = queue match {
case Queue(front, rear) => concat(front, reverse(rear))
}
def concat(l1 : List, l2 : List) : List = (l1 match {
case Nil => l2
case Cons(x,xs) => Cons(x, concat(xs, l2))
}) ensuring (res => size(res) == size(l1) + size(l2) && content(res) == content(l1) ++ content(l2))
def isAmortized(queue : AbsQueue) : Boolean = queue match {
case Queue(front, rear) => size(front) >= size(rear)
}
def isEmpty(queue : AbsQueue) : Boolean = queue match {
case Queue(Nil, Nil) => true
case _ => false
}
def reverse(l : List) : List = (l match {
case Nil => Nil
case Cons(x, xs) => concat(reverse(xs), Cons(x, Nil))
}) ensuring (content(_) == content(l))
def amortizedQueue(front : List, rear : List) : AbsQueue = {
if (size(rear) <= size(front))
Queue(front, rear)
else
Queue(concat(front, reverse(rear)), Nil)
} ensuring(isAmortized(_))
def enqueue(queue : AbsQueue, elem : Int) : AbsQueue = (queue match {
case Queue(front, rear) => amortizedQueue(front, Cons(elem, rear))
}) ensuring(isAmortized(_))
def tail(queue : AbsQueue) : AbsQueue = {
require(isAmortized(queue) && !isEmpty(queue))
queue match {
case Queue(Cons(f, fs), rear) => amortizedQueue(fs, rear)
}
} ensuring (isAmortized(_))
def front(queue : AbsQueue) : Int = {
require(isAmortized(queue) && !isEmpty(queue))
queue match {
case Queue(Cons(f, _), _) => f
}
}
@induct
def propFront(queue : AbsQueue, list : List) : Boolean = {
require(!isEmpty(queue) && isAmortized(queue))
if (asList(queue) == list) {
list match {
case Cons(x, _) => front(queue) == x
}
} else
true
} holds
def enqueueAndFront(queue : AbsQueue, elem : Int) : Boolean = {
if (isEmpty(queue))
front(enqueue(queue, elem)) == elem
else
true
} holds
def enqueueDequeueThrice(queue : AbsQueue, e1 : Int, e2 : Int, e3 : Int) : Boolean = {
if (isEmpty(queue)) {
val q1 = enqueue(queue, e1)
val q2 = enqueue(q1, e2)
val q3 = enqueue(q2, e3)
val e1prime = front(q3)
val q4 = tail(q3)
val e2prime = front(q4)
val q5 = tail(q4)
val e3prime = front(q5)
e1 == e1prime && e2 == e2prime && e3 == e3prime
} else
true
} holds
}
-}
| abakst/liquidhaskell | tests/todo/AmortizedQueue.hs | bsd-3-clause | 4,228 | 0 | 9 | 1,207 | 304 | 165 | 139 | 20 | 1 |
{-# LANGUAGE CPP, TupleSections #-}
{-# OPTIONS_GHC -fno-cse #-}
-- -fno-cse is needed for GLOBAL_VAR's to behave properly
-----------------------------------------------------------------------------
--
-- Static flags
--
-- Static flags can only be set once, on the command-line. Inside GHC,
-- each static flag corresponds to a top-level value, usually of type Bool.
--
-- (c) The University of Glasgow 2005
--
-----------------------------------------------------------------------------
module StaticFlags (
-- entry point
parseStaticFlags,
staticFlags,
initStaticOpts,
discardStaticFlags,
-- Output style options
opt_PprStyle_Debug,
opt_NoDebugOutput,
-- optimisation opts
opt_NoStateHack,
opt_NoOptCoercion,
-- For the parser
addOpt, removeOpt, v_opt_C_ready,
-- For options autocompletion
flagsStatic, flagsStaticNames
) where
#include "HsVersions.h"
import CmdLineParser
import FastString
import SrcLoc
import Util
import Panic
import Control.Monad
import Data.IORef
import System.IO.Unsafe ( unsafePerformIO )
-----------------------------------------------------------------------------
-- Static flags
-- | Parses GHC's static flags from a list of command line arguments.
--
-- These flags are static in the sense that they can be set only once and they
-- are global, meaning that they affect every instance of GHC running;
-- multiple GHC threads will use the same flags.
--
-- This function must be called before any session is started, i.e., before
-- the first call to 'GHC.withGhc'.
--
-- Static flags are more of a hack and are static for more or less historical
-- reasons. In the long run, most static flags should eventually become
-- dynamic flags.
--
-- XXX: can we add an auto-generated list of static flags here?
--
parseStaticFlags :: [Located String] -> IO ([Located String], [Located String])
parseStaticFlags = parseStaticFlagsFull flagsStatic
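-- A rough usage sketch (the argument list is illustrative):
--
-- > (leftover, warns) <- parseStaticFlags (map noLoc ["-fno-state-hack", "-O2"])
--
-- Recognised static flags such as @-fno-state-hack@ are recorded in the
-- global flag list, while anything else (here @-O2@) comes back in
-- @leftover@ for the dynamic-flag parser to deal with.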
-- | Parse GHC's static flags as @parseStaticFlags@ does. However it also
-- takes a list of available static flags, such that certain flags can be
-- enabled or disabled through this argument.
parseStaticFlagsFull :: [Flag IO] -> [Located String]
-> IO ([Located String], [Located String])
parseStaticFlagsFull flagsAvailable args = do
ready <- readIORef v_opt_C_ready
when ready $ throwGhcExceptionIO (ProgramError "Too late for parseStaticFlags: call it before runGhc or runGhcT")
(leftover, errs, warns) <- processArgs flagsAvailable args
-- See Note [Handling errors when parsing commandline flags]
unless (null errs) $ throwGhcExceptionIO $
errorsToGhcException . map (("on the commandline", ) . unLoc) $ errs
-- see sanity code in staticOpts
writeIORef v_opt_C_ready True
return (leftover, warns)
-- holds the static opts while they're being collected, before
-- being unsafely read by unpacked_static_opts below.
GLOBAL_VAR(v_opt_C, [], [String])
GLOBAL_VAR(v_opt_C_ready, False, Bool)
staticFlags :: [String]
staticFlags = unsafePerformIO $ do
ready <- readIORef v_opt_C_ready
if (not ready)
then panic "Static flags have not been initialised!\n Please call GHC.parseStaticFlags early enough."
else readIORef v_opt_C
-- All the static flags should appear in this list. It describes how each
-- static flag should be processed. Two main purposes:
-- (a) if a command-line flag doesn't appear in the list, GHC can complain
-- (b) a command-line flag may remove, or add, other flags; e.g. the "-fno-X"
-- things
--
-- The common (PassFlag addOpt) action puts the static flag into the bunch of
-- things that are searched up by the top-level definitions like
-- opt_foo = lookUp (fsLit "-dfoo")
-- Note that ordering is important in the following list: any flag which
-- is a prefix flag (i.e. HasArg, Prefix, OptPrefix, AnySuffix) will override
-- flags further down the list with the same prefix.
-- see Note [Updating flag description in the User's Guide] in DynFlags
flagsStatic :: [Flag IO]
flagsStatic = [
------ Debugging ----------------------------------------------------
defFlag "dppr-debug" (PassFlag addOptEwM)
, defFlag "dno-debug-output" (PassFlag addOptEwM)
-- rest of the debugging flags are dynamic
------ Compiler flags -----------------------------------------------
-- All other "-fno-<blah>" options cancel out "-f<blah>" on the hsc cmdline
, defFlag "fno-"
(PrefixPred (\s -> isStaticFlag ("f"++s)) (\s -> removeOptEwM ("-f"++s)))
-- Pass all remaining "-f<blah>" options to hsc
, defFlag "f" (AnySuffixPred isStaticFlag addOptEwM)
]
isStaticFlag :: String -> Bool
isStaticFlag f = f `elem` flagsStaticNames
-- see Note [Updating flag description in the User's Guide] in DynFlags
flagsStaticNames :: [String]
flagsStaticNames = [
"fno-state-hack",
"fno-opt-coercion"
]
-- We specifically need to discard static flags for clients of the
-- GHC API, since they can't be safely reparsed or reinitialized. In general,
-- the existing flags do nothing other than control debugging and some low-level
-- optimizer phases, so for the most part this is OK.
--
-- See GHC issue #8276: http://ghc.haskell.org/trac/ghc/ticket/8276#comment:37
discardStaticFlags :: [String] -> [String]
discardStaticFlags = filter (\x -> x `notElem` flags)
where flags = [ "-fno-state-hack"
, "-fno-opt-coercion"
, "-dppr-debug"
, "-dno-debug-output"
]
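-- For example, @discardStaticFlags ["-O2", "-fno-state-hack"]@ keeps only
-- @["-O2"]@, dropping the static flag (illustrative).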
initStaticOpts :: IO ()
initStaticOpts = writeIORef v_opt_C_ready True
addOpt :: String -> IO ()
addOpt = consIORef v_opt_C
removeOpt :: String -> IO ()
removeOpt f = do
fs <- readIORef v_opt_C
writeIORef v_opt_C $! filter (/= f) fs
type StaticP = EwM IO
addOptEwM :: String -> StaticP ()
addOptEwM = liftEwM . addOpt
removeOptEwM :: String -> StaticP ()
removeOptEwM = liftEwM . removeOpt
packed_static_opts :: [FastString]
packed_static_opts = map mkFastString staticFlags
lookUp :: FastString -> Bool
lookUp sw = sw `elem` packed_static_opts
-- debugging options
-- see Note [Updating flag description in the User's Guide] in DynFlags
opt_PprStyle_Debug :: Bool
opt_PprStyle_Debug = lookUp (fsLit "-dppr-debug")
opt_NoDebugOutput :: Bool
opt_NoDebugOutput = lookUp (fsLit "-dno-debug-output")
opt_NoStateHack :: Bool
opt_NoStateHack = lookUp (fsLit "-fno-state-hack")
opt_NoOptCoercion :: Bool
opt_NoOptCoercion = lookUp (fsLit "-fno-opt-coercion")
{-
-- (lookup_str "foo") looks for the flag -foo=X or -fooX,
-- and returns the string X
lookup_str :: String -> Maybe String
lookup_str sw
= case firstJusts (map (stripPrefix sw) staticFlags) of
Just ('=' : str) -> Just str
Just str -> Just str
Nothing -> Nothing
lookup_def_int :: String -> Int -> Int
lookup_def_int sw def = case (lookup_str sw) of
Nothing -> def -- Use default
Just xx -> try_read sw xx
lookup_def_float :: String -> Float -> Float
lookup_def_float sw def = case (lookup_str sw) of
Nothing -> def -- Use default
Just xx -> try_read sw xx
try_read :: Read a => String -> String -> a
-- (try_read sw str) tries to read s; if it fails, it
-- bleats about flag sw
try_read sw str
= case reads str of
((x,_):_) -> x -- Be forgiving: ignore trailing goop, and alternative parses
[] -> throwGhcException (UsageError ("Malformed argument " ++ str ++ " for flag " ++ sw))
-- ToDo: hack alert. We should really parse the arguments
-- and announce errors in a more civilised way.
-}
| acowley/ghc | compiler/main/StaticFlags.hs | bsd-3-clause | 7,834 | 0 | 13 | 1,718 | 924 | 528 | 396 | -1 | -1 |
module Utilities (toBinary, fl) where
import Stream
import Data.Ratio
-- Convert from an Integer to its signed-digit representation
toBinary :: Integer -> Stream
toBinary 0 = [0]
toBinary x = toBinary t ++ [x `mod` 2]
where t = x `div` 2
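-- For example, toBinary 6 == [0,1,1,0]; the recursion bottoms out at
-- toBinary 0 = [0], so results carry a leading 0 digit (illustrative).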
fl :: Stream -> Stream
fl (x:xs) = (f x):xs
where f 0 = 1
f 1 = 0
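-- fl flips only the leading digit of a stream, e.g. fl [1,0,1] == [0,0,1]
-- and fl [0,1] == [1,1] (illustrative).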
| ryantm/ghc | testsuite/tests/concurrent/prog001/Utilities.hs | bsd-3-clause | 328 | 3 | 9 | 85 | 130 | 72 | 58 | 11 | 2 |
{-# LANGUAGE OverloadedStrings #-}
{-|
Module : Examples.AllPrimitives
Description : Predefined network: allPrimitives.
Copyright : (c) Sanne Woude 2015
Predefined network: allPrimitives.
-}
module Examples.AllPrimitives (allPrimitives) where
import Madl.Network
import Examples.TypesAndFunctions
import Utils.Text
-- | The allPrimitives network, with or without a deadlock.
allPrimitives :: Bool -> MadlNetwork
allPrimitives dl = mkNetwork (NSpec components channels ports) where
src0 = Source "src0" (if dl then reqAndRsp else rsp)
src1 = Source "src1" req
fork = Fork "fork"
merge0 = Merge "merge0"
queue0 = Queue "queue0" 2
queue1 = Queue "queue1" 2
switch = Switch "switch" [isRsp, isReq]
join = ControlJoin "join"
merge1 = Merge "merge1"
sink = Sink "sink"
src0_fork = "src0_fork"
fork_queue0 = "fork_queue0"
fork_merge0 = "fork_merge0"
src1_merge0 = "src1_merge0"
merge0_queue1 = "merge0_queue1"
queue0_join = "queue0_join"
queue1_switch = "queue1_switch"
switch_join = "switch_join"
join_merge1 = "join_merge1"
switch_merge1 = "switch_merge1"
merge1_sink = "merge1_sink"
src0_o = ("src0" , "src0_fork")
src1_o = ("src1" , "src1_merge0")
fork_i = ("src0_fork" , "fork")
fork_a = ("fork" , "fork_queue0")
fork_b = ("fork" , "fork_merge0")
merge0_a = ("fork_merge0" , "merge0")
merge0_b = ("src1_merge0" , "merge0")
merge0_o = ("merge0" , "merge0_queue1")
queue0_i = ("fork_queue0" , "queue0")
queue0_o = ("queue0" , "queue0_join")
queue1_i = ("merge0_queue1" , "queue1")
queue1_o = ("queue1" , "queue1_switch")
switch_i = ("queue1_switch" , "switch")
switch_a = ("switch" , "switch_join")
switch_b = ("switch" , "switch_merge1")
join_a = ("queue0_join" , "join")
join_b = ("switch_join" , "join")
join_o = ("join" , "join_merge1")
merge1_a = ("join_merge1" , "merge1")
merge1_b = ("switch_merge1" , "merge1")
merge1_o = ("merge1" , "merge1_sink")
sink_i = ("merge1_sink" , "sink")
components =
map C [src0, src1, fork, merge0, queue0, queue1, switch, join, merge1, sink]
channels =
map Channel [src0_fork, fork_queue0, fork_merge0, src1_merge0, merge0_queue1, queue0_join, queue1_switch, switch_join, join_merge1, switch_merge1, merge1_sink]
ports :: [(Text, Text)]
ports = [src0_o, src1_o, fork_i, fork_a, fork_b, merge0_a, merge0_b, merge0_o, queue0_i, queue0_o, queue1_i, queue1_o, switch_i, switch_a, switch_b, join_a, join_b, join_o, merge1_a, merge1_b, merge1_o, sink_i]
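-- Illustrative note: 'allPrimitives True' gives src0 the 'reqAndRsp' type
-- (presumably the deadlocking variant), while 'allPrimitives False' restricts
-- src0 to 'rsp' only; the rest of the topology is identical.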
| julienschmaltz/madl | examples/Examples/AllPrimitives.hs | mit | 2,842 | 0 | 9 | 767 | 633 | 386 | 247 | 56 | 2 |
-- Peano numbers
-- https://www.codewars.com/kata/5779b0f0ec883247b2000117
module Haskell.Codewars.Peano where
import Prelude hiding (even, odd, div, compare, Num, Int, Integer, Float, Double, Rational, Word)
data Peano = Zero | Succ Peano deriving (Eq, Show)
add, sub, mul, div :: Peano -> Peano -> Peano
add p1 Zero = p1
add Zero p2 = p2
add p1 (Succ p2) = add (Succ p1) p2
sub p1 Zero = p1
sub Zero _ = error "negative number"
sub (Succ p1) (Succ p2) = sub p1 p2
mul Zero _ = Zero
mul _ Zero = Zero
mul p1 (Succ Zero) = p1
mul (Succ Zero) p2 = p2
mul p1 (Succ p2) = add p1 (mul p1 p2)
div _ Zero = error "divide by 0"
div p1 (Succ Zero) = p1
div Zero _ = Zero
div p1 p2 | compare p1 p2 == LT = Zero
| otherwise = Succ (div (sub p1 p2) p2)
even, odd :: Peano -> Bool
even Zero = True
even (Succ p1) = not . even $ p1
odd = not . even
compare :: Peano -> Peano -> Ordering
compare Zero Zero = EQ
compare Zero (Succ _) = LT
compare (Succ _) Zero = GT
compare (Succ p1) (Succ p2) = compare p1 p2
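-- Illustrative examples (Succ (Succ Zero) encodes 2):
--
-- > add (Succ Zero) (Succ (Succ Zero)) == Succ (Succ (Succ Zero))
-- > div (Succ (Succ (Succ (Succ Zero)))) (Succ (Succ Zero)) == Succ (Succ Zero)
-- > even (Succ (Succ Zero)) == True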
| gafiatulin/codewars | src/5 kyu/Peano.hs | mit | 1,014 | 0 | 10 | 229 | 492 | 257 | 235 | 29 | 1 |
module Language.Rebeca.Fold.Erlang.Simulation where
import Control.Monad.Reader
import Control.Monad.State
import Language.Fold
import Language.Erlang.Builder
import Language.Erlang.Syntax
import qualified Language.Rebeca.Absrebeca as R
import Language.Rebeca.Algebra
import Language.Rebeca.Fold
import Language.Rebeca.Fold.Erlang.Refinement
simulationAlgebra = refinementAlgebra {
modelF = \envs rcs mai -> do
envs' <- sequence envs
setEnvVars envs'
rcs' <- sequence rcs
mai' <- mai
moduleName <- getModuleName
rtfactor <- getRtFactor
monitor <- getMonitor
let opts = [ ("monitor", tupleE [atomE "monitor", listE []])
, ("program", tupleE [atomE moduleName, atomE "main", listE [varE "Args"]])
, ("time_limit", numberE 1200)
, ("algorithm", tupleE [atomE "mce_alg_simulation", atomE "void"])
]
let sim = Function "simulate" [varP "Args"] (Apply (moduleE "mce" "start") [RecordCreate "mce_opts" (if monitor then opts else (drop 1 opts))])
return (Program (Module moduleName)
[Export ["main/1", "simulate/1"]]
[ Import "$MCERLANG_HOME/languages/erlang/src/include/state.hrl"
, Import "$MCERLANG_HOME/languages/erlang/src/include/process.hrl"
, Import "$MCERLANG_HOME/languages/erlang/src/include/node.hrl"
, Import "$MCERLANG_HOME/src/include/mce_opts.hrl" ]
[Define "RT_FACTOR" (num rtfactor)]
(concat rcs' ++ [mai', sim]))
, reactiveClassF = \id _ kr sv msi ms -> do
setKnownRebecs []
setStateVars []
id' <- id
kr' <- kr
setKnownRebecs kr'
sv' <- sv
setStateVars (map (\(_, id, _) -> id) sv')
msi' <- msi
ms' <- sequence ms
let initialsv = Assign (varP "StateVars") (Apply (moduleE "dict" "from_list") [listE (map (\(_, i, d) -> tupleE [atomE i, either atomE (\x -> x) d]) sv')])
initiallv = Assign (varP "LocalVars") (Apply (moduleE "dict" "new") [])
probeState = Apply (moduleE "mce_erl" "probe_state") [varE "InstanceName", varE "NewStateVars"]
recurs = Apply (atomE id') [varE "Env", varE "InstanceName", varE "KnownRebecs", varE "NewStateVars"]
return ([ Function id' [varP "Env", varP "InstanceName"] $
Receive [ Match (tupleP (map varP kr')) Nothing $
Apply (atomE id') [ varE "Env", varE "InstanceName"
, Apply (moduleE "dict" "from_list") [listE (map (\k -> tupleE [atomE k, varE k]) kr')]
]]
, Function id' [varP "Env", varP "InstanceName", varP "KnownRebecs"] $
Seq (Seq initialsv (Seq initiallv (Assign (tupleP [varP "NewStateVars", varP "_"]) (Receive [msi'])))) (Seq probeState recurs)
, Function id' [varP "Env", varP "InstanceName", varP "KnownRebecs", varP "StateVars"] $
Seq (Seq initiallv (Assign (tupleP [varP "NewStateVars", varP "_"]) (Receive ms'))) (Seq probeState recurs)
])
, msgSrvF = \id tps stms -> do
setLocalVars []
id' <- id
tps' <- sequence tps
stms' <- sequence stms
let patterns = tupleP [tupleP [varP "Sender", varP "TT", varP "DL"], atomP id', tupleP (map (varP . snd) tps')]
pred = InfixExp OpLOr (InfixExp OpEq (varE "DL") (atomE "inf")) (InfixExp OpLEq (Apply (moduleE "rebeca" "now") []) (varE "DL"))
probe = Apply (moduleE "mce_erl" "probe") [atomE "drop", atomE id']
return (Match patterns Nothing (Case pred [ Match (atomP "true") Nothing (formatReceive id' $ apply $ reverse stms')
, Match (atomP "false") Nothing (Seq probe (formatDrop id' retstm))]))
}
runSimulate :: R.Model -> ReaderT CompilerConf (State CompilerState) Program
runSimulate model = fold simulationAlgebra model
translateSimulation :: String -> Integer -> Bool -> R.Model -> Program
translateSimulation modelName rtfactor monitor model = evalState (runReaderT (runSimulate model) (initialConf {moduleName = modelName, rtfactor = rtfactor, monitor = monitor })) initialState
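-- Sketch of intended use (illustrative; the model value is assumed to come from
-- the Rebeca parser): 'translateSimulation "myModel" 1 True model' yields an
-- Erlang 'Program' whose module is named "myModel", with RT_FACTOR defined as 1
-- and the McErlang monitor option enabled.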
| arnihermann/timedreb2erl | src/Language/Rebeca/Fold/Erlang/Simulation.hs | mit | 4,296 | 0 | 28 | 1,187 | 1,454 | 739 | 715 | 68 | 2 |
-- | Options/Parsing
module Vaultaire.Collector.Nagios.Perfdata.Options where
import Vaultaire.Collector.Nagios.Perfdata.Types
import Options.Applicative
parseOptions :: IO NagiosOptions
parseOptions = execParser optionParser
-- | Parser which includes all help info
optionParser :: ParserInfo NagiosOptions
optionParser =
info (helper <*> collectorOptions)
(fullDesc <>
progDesc "Vaultaire collector for Nagios perfdata files, can run with mod_gearman" <>
header "vaultaire-collector-nagios - writes datapoints from Nagios perfdata files to Vaultaire. Can run in daemon mode using the gearman protocol"
)
-- | The parser for all options for nagios-perfdata
collectorOptions :: Parser NagiosOptions
collectorOptions = NagiosOptions
<$> switch
(long "normalise-metrics"
<> short 's'
<> help "Normalise metrics to base SI units")
<*> switch
(long "gearman"
<> short 'g'
<> help "Run in gearman mode")
<*> strOption
(long "gearman-host"
<> short 'h'
<> value "localhost"
<> metavar "GEARMANHOST"
<> help "Hostname of Gearman server.")
<*> strOption
(long "gearman-port"
<> short 'p'
<> value "4730"
<> metavar "GEARMANPORT"
<> help "Port number Gearman server is listening on.")
<*> strOption
(long "function-name"
<> short 'f'
<> value "check_results"
<> metavar "FUNCTION-NAME"
<> help "Name of function to register with Gearman server.")
<*> strOption
(long "key-file"
<> short 'k'
<> value ""
<> metavar "KEY-FILE"
<> help "File from which to read AES key to decrypt check results. If unspecified, results are assumed to be in cleartext.")
<*> switch
(long "telemetry"
<> short 't'
<> help "Run telemetry")
<*> strOption
(long "telemetry-host"
<> value "127.0.0.1"
<> metavar "TELEMETRYHOST"
<> help "Host to send telemetry data to")
<*> strOption
(long "telemetry-port"
<> value "9447"
<> metavar "TELEMETRYPORT"
<> help "Port to use for telemetry.")
| anchor/vaultaire-collector-nagios | lib/Vaultaire/Collector/Nagios/Perfdata/Options.hs | mit | 2,266 | 0 | 18 | 674 | 409 | 193 | 216 | 59 | 1 |
{-# LANGUAGE PatternSynonyms, ForeignFunctionInterface, JavaScriptFFI #-}
module GHCJS.DOM.JSFFI.Generated.WebKitCSSViewportRule
(js_getStyle, getStyle, WebKitCSSViewportRule,
castToWebKitCSSViewportRule, gTypeWebKitCSSViewportRule)
where
import Prelude ((.), (==), (>>=), return, IO, Int, Float, Double, Bool(..), Maybe, maybe, fromIntegral, round, fmap, Show, Read, Eq, Ord)
import Data.Typeable (Typeable)
import GHCJS.Types (JSRef(..), JSString, castRef)
import GHCJS.Foreign (jsNull)
import GHCJS.Foreign.Callback (syncCallback, asyncCallback, syncCallback1, asyncCallback1, syncCallback2, asyncCallback2, OnBlocked(..))
import GHCJS.Marshal (ToJSRef(..), FromJSRef(..))
import GHCJS.Marshal.Pure (PToJSRef(..), PFromJSRef(..))
import Control.Monad.IO.Class (MonadIO(..))
import Data.Int (Int64)
import Data.Word (Word, Word64)
import GHCJS.DOM.Types
import Control.Applicative ((<$>))
import GHCJS.DOM.EventTargetClosures (EventName, unsafeEventName)
import GHCJS.DOM.Enums
foreign import javascript unsafe "$1[\"style\"]" js_getStyle ::
JSRef WebKitCSSViewportRule -> IO (JSRef CSSStyleDeclaration)
-- | <https://developer.mozilla.org/en-US/docs/Web/API/WebKitCSSViewportRule.style Mozilla WebKitCSSViewportRule.style documentation>
getStyle ::
(MonadIO m) =>
WebKitCSSViewportRule -> m (Maybe CSSStyleDeclaration)
getStyle self
= liftIO
((js_getStyle (unWebKitCSSViewportRule self)) >>= fromJSRef) | plow-technologies/ghcjs-dom | src/GHCJS/DOM/JSFFI/Generated/WebKitCSSViewportRule.hs | mit | 1,467 | 6 | 11 | 182 | 377 | 238 | 139 | 26 | 1 |
{-# htermination transpose :: [[a]] -> [[a]] #-}
import List
| ComputationWithBoundedResources/ara-inference | doc/tpdb_trs/Haskell/full_haskell/List_transpose_1.hs | mit | 61 | 0 | 3 | 10 | 5 | 3 | 2 | 1 | 0 |
{-# LANGUAGE LambdaCase #-}
module Language.Bison.Summary.Parser
( parseRules
) where
import Language.Bison.Summary.Parser.Lexer
import Language.Bison.Summary.Syntax
import Text.Parsec
import Text.Parsec.Prim
import Text.Parsec.Pos
import Text.Parsec.Error
import Control.Applicative hiding ((<|>), optional)
import Control.Monad
import qualified Data.Map as Map
type Parser = Parsec [Loc Token] ()
parseRules :: SourceName -> String -> Either ParseError [Rule]
parseRules src stream = postproc <$> parseAs rules src stream
postproc :: [Rule] -> [Rule]
postproc rules = [ Rule name rhss'
| (name, rhss) <- Map.toList rulesMap
, let rhss' = map (map resolve) rhss
]
where
rulesMap = Map.unionsWith (++)
[ Map.singleton name rhss
| Rule name rhss <- rules
]
isTerminal s = RuleName s `Map.notMember` rulesMap
resolve (Nonterminal (RuleName name)) | isTerminal name = Terminal name
resolve element = element
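-- Illustrative example (assuming the lexer yields Id/CharLit tokens as above):
-- a summary line such as
--
-- > expr: expr '+' expr | term
--
-- parses to a rule for "expr" with two alternatives; if "term" never appears on
-- a left-hand side, 'postproc' rewrites references to it into 'Terminal "term"'.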
parseAs :: Parser a -> SourceName -> String -> Either ParseError a
parseAs p src stream = case scanner stream of
Left err -> Left $ newErrorMessage
(Message $ unwords ["lexer failed:", err])
(newPos src 0 0)
Right tokens -> runParser p () src tokens
rules :: Parser [Rule]
rules = skipMany eor >> rule `endBy` many1 eor
rule :: Parser Rule
rule = Rule <$> name <* colon <*> (alt `sepBy` bar) <?> "grammar rule"
alt :: Parser [Element]
alt = (directive "empty" *> pure []) <|>
(many1 element) <?>
"grammar alternative"
element :: Parser Element
element = (Nonterminal <$> name) <|>
(Lit <$> charLit) <?>
"grammar element"
name :: Parser Name
name = tok $ \case
Id s -> return $ RuleName s
Meta s -> return $ MetaName s
_ -> Nothing
charLit :: Parser Char
charLit = tok $ \case
CharLit c -> return c
_ -> Nothing
colon :: Parser ()
colon = tok $ \case
Colon -> return ()
_ -> Nothing
bar :: Parser ()
bar = tok $ \case
Bar -> return ()
_ -> Nothing
eor :: Parser ()
eor = tok $ \case
EOR -> return ()
_ -> Nothing
directive :: String -> Parser ()
directive s = tok $ \case
Directive s' -> guard (s == s')
_ -> Nothing
tok :: (Show t) => (t -> Maybe a) -> Parsec [Loc t] s a
tok f = do
src <- sourceName <$> getPosition
let fromLoc (SrcLoc line col) = newPos src line col
token (show . unLoc) (fromLoc . getLoc) (f . unLoc)
| gergoerdi/bison-parser | src/Language/Bison/Summary/Parser.hs | mit | 2,509 | 0 | 13 | 673 | 948 | 490 | 458 | 73 | 3 |
import Text.ParserCombinators.Parsec
import qualified Data.Map as M
import Text.Parsec.Error (messageString, errorMessages)
import Data.Text (pack, unpack, strip)
import Data.Monoid
type TextLine = String
type Name = String
data ActionType = FoulCommit
| GenericAction String
| ActionType Player
deriving (Show)
data Event = Time String
| Action String ActionType
| Unknown String
| Error [String]
deriving (Show)
data Player = Player { madeShot :: Int, missedShot :: Int, defRebound :: Int, offRebound :: Int , foulCommit :: Int, made3ptShot :: Int, missed3ptShot :: Int, steal :: Int, turnover :: Int, madeLayup :: Int, missedLayup :: Int, foulAgainst :: Int}
deriving (Show)
type Players = M.Map Name Player
defaultPlayer = Player{madeShot=0, missedShot=0, defRebound=0, offRebound=0, foulCommit=0, made3ptShot=0, missed3ptShot=0, steal=0, turnover=0, madeLayup=0, missedLayup=0, foulAgainst=0}
defensiveReboundPlayer = defaultPlayer {defRebound=1}
offensiveReboundPlayer = defaultPlayer {offRebound=1}
madeShotPlayer = defaultPlayer {madeShot=1}
missedShotPlayer = defaultPlayer {missedShot=1}
foulCommitPlayer = defaultPlayer {foulCommit=1}
foulAgainstPlayer = defaultPlayer {foulAgainst=1}
made3ptShotPlayer = defaultPlayer {made3ptShot=1}
missed3ptShotPlayer = defaultPlayer {missed3ptShot=1}
stealPlayer = defaultPlayer {steal=1}
turnoverPlayer = defaultPlayer {turnover=1}
playerAppend :: Player -> Player -> Player
playerAppend (Player a1 a2 a3 a4 a5 a6 a7 a8 a9 a10 a11 a12) (Player b1 b2 b3 b4 b5 b6 b7 b8 b9 b10 b11 b12) = Player (a1+b1) (a2+b2) (a3+b3) (a4+b4) (a5+b5) (a6+b6) (a7+b7) (a8 + b8) (a9+b9) (a10+b10) (a11+b11) (a12+b12)
instance Monoid Player where
mempty = defaultPlayer
mappend = (playerAppend)
-- s <- readFile "/home/smu/code/fun-with-haskell/testfile.txt"
-- parse parseLines "bl" s
main = do
s <- readFile "/home/smu/code/fun-with-haskell/testfile.txt"
let res = parse parseLines "test" s
print $ (filter (knownEvent) (extractResult res))
mainGame = do
s <- readFile "/home/smu/code/fun-with-haskell/testfile.txt"
let res = parse parseLines "test" s
print $ playGame M.empty $ extractResult res
playGame :: Players -> [Event] -> Players
playGame p [] = p
playGame p g = foldl updatePlayer p g
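-- Illustrative example:
--
-- > playGame M.empty [Action "Jan" (ActionType stealPlayer)]
--
-- yields a map in which "Jan" is credited with exactly one steal.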
updatePlayer :: Players -> Event -> Players
updatePlayer p (Action name (ActionType player)) = M.insertWith (mappend) name player p
updatePlayer p _ = p
knownEvent :: Event -> Bool
knownEvent (Action _ (ActionType _)) = False
knownEvent _ = True
extractResult :: Either ParseError [Event] -> [Event]
extractResult (Right x) = x
extractResult (Left y) = [Error (map (messageString) (errorMessages y))]
trimSpaces :: String -> String
trimSpaces x = unpack $ strip $ pack x
parseLines :: Parser [Event]
parseLines = many parseLine
parseLine :: Parser Event
parseLine = do
s <- try parseAction <|> try parseTime <|> parseUnknown
newline
return s
parseTime :: Parser Event
parseTime = do
s <- many (noneOf ".")
string ". min"
return $ Time s
parseAction :: Parser Event
parseAction = do
spaces
name <- many (noneOf "-")
char '-'
spaces
action <- try parseMissed3pt <|> try parseFoul <|> try parseRebound <|> try parseSteal <|> try parseTurnover <|> parseGenericAction --try parseMissed3pt <|> try parseMade3pt <|> parseGenericAction
return (Action (trimSpaces name) action)
parseFoul :: Parser ActionType
parseFoul = try parseFoulAgainst <|> parseFoulCommit
parseFoulCommit :: Parser ActionType
parseFoulCommit = do
string "foul"
s <- many (noneOf "\n")
return $ ActionType foulCommitPlayer
parseSubstitution :: Parser String
parseSubstitution = do
s <- many (oneOf "ABCDEFGHIJKLMNOPQRSTUVWXYZ -")
s1 <- many (noneOf " ")
string " replaces "
s2 <- many (noneOf "\n")
return s
parseFoulAgainst :: Parser ActionType
parseFoulAgainst = do
string "foul against"
s <- many (noneOf "\n")
return $ ActionType foulAgainstPlayer
parseSteal :: Parser ActionType
parseSteal = do
string "steal"
s <- many (noneOf "\n")
return $ ActionType stealPlayer
parseTurnover :: Parser ActionType
parseTurnover = do
string "turnover"
  s <- many (noneOf "\n")
return $ ActionType turnoverPlayer
parseGenericAction :: Parser ActionType
parseGenericAction = do
s <- many (noneOf "\n")
return $ GenericAction s
parseRebound :: Parser ActionType
parseRebound = do
s <- try parseDefRebound <|> try parseOffRebound
return s
parseDefRebound :: Parser ActionType
parseDefRebound = do
string "defensive rebound"
s <- many (noneOf "\n")
return $ ActionType defensiveReboundPlayer
parseOffRebound :: Parser ActionType
parseOffRebound = do
string "offensive rebound"
s <- many (noneOf "\n")
return $ ActionType offensiveReboundPlayer
parseMissed3pt :: Parser ActionType
parseMissed3pt = do
string "missed 3-pointer"
s <- many (noneOf "\n")
return $ ActionType missed3ptShotPlayer
parseMade3pt :: Parser ActionType
parseMade3pt = do
string "made 3-pointer"
return $ ActionType made3ptShotPlayer
parseUnknown :: Parser Event
parseUnknown = do
s <- many (noneOf "\n")
return $ Unknown s
| ddccffvv/fun-with-haskell | belgie_parser.hs | mit | 5,711 | 0 | 13 | 1,432 | 1,754 | 900 | 854 | 137 | 1 |
module Main where
import System.Environment
import WoofParse
import WoofToBC
import System.IO (hGetContents, stdin)
main :: IO ()
main = do s <- hGetContents stdin
          let ast = readAST s
          case ast of
            ASTNull errMsg -> putStr errMsg
            _              -> emitBC ast
| aemoncannon/woof | compiler/woof_main.hs | mit | 295 | 0 | 12 | 104 | 88 | 43 | 45 | 11 | 2 |
{-# LANGUAGE OverloadedStrings #-}
{-|
Controls host muting in Datadog.
-}
module Network.Datadog.Host
( muteHost
, unmuteHost
) where
import Control.Monad (void)
import Data.Aeson
import Data.Text (Text, unpack)
import Data.Time.Clock
import Data.Time.Clock.POSIX
import Network.HTTP.Types
import Network.Datadog.Internal
muteHost :: Environment -> Text -> Maybe UTCTime -> Bool -> IO ()
-- ^ Do not allow alerts to trigger on a specific host
muteHost env hostname mtime override =
let path = "host/" ++ unpack hostname ++ "/mute"
q = [("override", "true") | override]
body = object $
prependMaybe (\a -> "end" .= (ceiling (utcTimeToPOSIXSeconds a) :: Integer)) mtime $
prependBool override ("override" .= True)
["hostname" .= hostname]
in void $ datadogHttp env path q POST $ Just $ encode body
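-- Example (illustrative; 'env' is assumed to be an already constructed 'Environment'):
--
-- > muteHost env "web-01.example.com" Nothing True
--
-- mutes the host with no end time, overriding any existing mute.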
unmuteHost :: Environment -> Text -> IO ()
-- ^ Allow alerts to trigger on a specific host
unmuteHost env hostname =
let path = "host/" ++ unpack hostname ++ "/unmute"
body = object ["hostname" .= hostname]
in void $ datadogHttp env path [] POST $ Just $ encode body
| iand675/datadog | src/Network/Datadog/Host.hs | mit | 1,139 | 0 | 18 | 244 | 336 | 180 | 156 | 25 | 1 |
{-# LANGUAGE NoImplicitPrelude, DeriveFunctor #-}
module IHaskell.Flags (
IHaskellMode(..),
Argument(..),
Args(..),
LhsStyle(..),
NotebookFormat(..),
lhsStyleBird,
parseFlags,
help,
) where
import IHaskellPrelude
import qualified Data.Text as T
import qualified Data.Text.Lazy as LT
import qualified Data.ByteString as BS
import qualified Data.ByteString.Lazy as LBS
import qualified Data.ByteString.Char8 as CBS
import System.Console.CmdArgs.Explicit
import System.Console.CmdArgs.Text
import Data.List (findIndex)
import IHaskell.Types
-- Command line arguments to IHaskell. A set of arguments is annotated with the mode being invoked.
data Args = Args IHaskellMode [Argument]
deriving Show
data Argument = ConfFile String -- ^ A file with commands to load at startup.
| OverwriteFiles -- ^ Present when output should overwrite existing files.
| GhcLibDir String -- ^ Where to find the GHC libraries.
| KernelDebug -- ^ Spew debugging output from the kernel.
| Help -- ^ Display help text.
| ConvertFrom String
| ConvertTo String
| ConvertFromFormat NotebookFormat
| ConvertToFormat NotebookFormat
| ConvertLhsStyle (LhsStyle String)
| KernelspecInstallPrefix String
deriving (Eq, Show)
data LhsStyle string =
LhsStyle
{ lhsCodePrefix :: string -- ^ @>@
, lhsOutputPrefix :: string -- ^ @<<@
, lhsBeginCode :: string -- ^ @\\begin{code}@
, lhsEndCode :: string -- ^ @\\end{code}@
, lhsBeginOutput :: string -- ^ @\\begin{verbatim}@
, lhsEndOutput :: string -- ^ @\\end{verbatim}@
}
deriving (Eq, Functor, Show)
data NotebookFormat = LhsMarkdown
| IpynbFile
deriving (Eq, Show)
-- Which mode IHaskell is being invoked in.
data IHaskellMode = ShowHelp String
| InstallKernelSpec
| ConvertLhs
| Kernel (Maybe String)
deriving (Eq, Show)
-- | Given a list of command-line arguments, return the IHaskell mode and arguments to process.
parseFlags :: [String] -> Either String Args
parseFlags flags =
let modeIndex = findIndex (`elem` modeFlags) flags
in case modeIndex of
Nothing ->
-- Treat no mode as 'console'.
if "--help" `elem` flags
then Left $ showText (Wrap 100) $ helpText [] HelpFormatAll ihaskellArgs
else process ihaskellArgs flags
Just 0 -> process ihaskellArgs flags
Just idx ->
-- If mode not first, move it to be first.
let (start, first:end) = splitAt idx flags
in process ihaskellArgs $ first : start ++ end
where
modeFlags = concatMap modeNames allModes
allModes :: [Mode Args]
allModes = [installKernelSpec, kernel, convert]
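-- For example (illustrative):
--
-- > parseFlags ["convert", "--input", "notebook.ipynb"]
--
-- should select the convert mode and record a 'ConvertFrom "notebook.ipynb"' argument.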
-- | Get help text for a given IHaskell mode.
help :: IHaskellMode -> String
help mode = showText (Wrap 100) $ helpText [] HelpFormatAll $ chooseMode mode
where
chooseMode InstallKernelSpec = installKernelSpec
chooseMode (Kernel _) = kernel
chooseMode ConvertLhs = convert
ghcLibFlag :: Flag Args
ghcLibFlag = flagReq ["ghclib", "l"] (store GhcLibDir) "<path>" "Library directory for GHC."
kernelDebugFlag :: Flag Args
kernelDebugFlag = flagNone ["debug"] addDebug "Print debugging output from the kernel."
where
addDebug (Args mode prev) = Args mode (KernelDebug : prev)
confFlag :: Flag Args
confFlag = flagReq ["conf", "c"] (store ConfFile) "<rc.hs>"
"File with commands to execute at start; replaces ~/.ihaskell/rc.hs."
installPrefixFlag :: Flag Args
installPrefixFlag = flagReq ["prefix"] (store KernelspecInstallPrefix) "<install-dir>"
"Installation prefix for kernelspec (see Jupyter's --prefix option)"
helpFlag = flagHelpSimple (add Help)
add flag (Args mode flags) = Args mode $ flag : flags
store :: (String -> Argument) -> String -> Args -> Either String Args
store constructor str (Args mode prev) = Right $ Args mode $ constructor str : prev
installKernelSpec :: Mode Args
installKernelSpec =
mode "install" (Args InstallKernelSpec []) "Install the Jupyter kernelspec." noArgs
[ghcLibFlag, kernelDebugFlag, confFlag, installPrefixFlag, helpFlag]
kernel :: Mode Args
kernel = mode "kernel" (Args (Kernel Nothing) []) "Invoke the IHaskell kernel." kernelArg
[ghcLibFlag, kernelDebugFlag, confFlag]
where
kernelArg = flagArg update "<json-kernel-file>"
update filename (Args _ flags) = Right $ Args (Kernel $ Just filename) flags
convert :: Mode Args
convert = mode "convert" (Args ConvertLhs []) description unnamedArg convertFlags
where
description = "Convert between Literate Haskell (*.lhs) and Ipython notebooks (*.ipynb)."
convertFlags = [ flagReq ["input", "i"] (store ConvertFrom) "<file>" "File to read."
, flagReq ["output", "o"] (store ConvertTo) "<file>" "File to write."
, flagReq ["from", "f"] (storeFormat ConvertFromFormat) "lhs|ipynb"
"Format of the file to read."
, flagReq ["to", "t"] (storeFormat ConvertToFormat) "lhs|ipynb"
"Format of the file to write."
, flagNone ["force"] consForce "Overwrite existing files with output."
, flagReq ["style", "s"] storeLhs "bird|tex"
"Type of markup used for the literate haskell file"
, flagNone ["bird"] (consStyle lhsStyleBird) "Literate haskell uses >"
, flagNone ["tex"] (consStyle lhsStyleTex) "Literate haskell uses \\begin{code}"
, helpFlag
]
consForce (Args mode prev) = Args mode (OverwriteFiles : prev)
unnamedArg = Arg (store ConvertFrom) "<file>" False
consStyle style (Args mode prev) = Args mode (ConvertLhsStyle style : prev)
storeFormat constructor str (Args mode prev) =
case T.toLower (T.pack str) of
"lhs" -> Right $ Args mode $ constructor LhsMarkdown : prev
"ipynb" -> Right $ Args mode $ constructor IpynbFile : prev
_ -> Left $ "Unknown format requested: " ++ str
storeLhs str previousArgs =
case T.toLower (T.pack str) of
"bird" -> success lhsStyleBird
"tex" -> success lhsStyleTex
_ -> Left $ "Unknown lhs style: " ++ str
where
success lhsStyle = Right $ consStyle lhsStyle previousArgs
lhsStyleBird, lhsStyleTex :: LhsStyle String
lhsStyleBird = LhsStyle "> " "\n<< " "" "" "" ""
lhsStyleTex = LhsStyle "" "" "\\begin{code}" "\\end{code}" "\\begin{verbatim}" "\\end{verbatim}"
ihaskellArgs :: Mode Args
ihaskellArgs =
let descr = "Haskell for Interactive Computing."
helpStr = showText (Wrap 100) $ helpText [] HelpFormatAll ihaskellArgs
onlyHelp = [flagHelpSimple (add Help)]
noMode = mode "IHaskell" (Args (ShowHelp helpStr) []) descr noArgs onlyHelp
in noMode { modeGroupModes = toGroup allModes }
where
add flag (Args mode flags) = Args mode $ flag : flags
noArgs = flagArg unexpected ""
where
unexpected a = error $ "Unexpected argument: " ++ a
| beni55/IHaskell | src/IHaskell/Flags.hs | mit | 7,280 | 0 | 15 | 1,907 | 1,727 | 929 | 798 | 137 | 5 |
module Ocram.Names where
import Text.Printf (printf)
-- TODO: split this file into submodules
ecPrefix :: String
ecPrefix = "ec_"
-- |The name of the attribute that marks blocking function declaratations
blockingAttr :: String
blockingAttr = "tc_block"
-- |The name of the attribute that marks thread start function definitions
startAttr :: String
startAttr = "tc_thread"
-- |The naming scheme for control flow labels
identDesugar :: Int -> String
identDesugar = printf "%sdesugar_%d" ecPrefix
-- |The naming of a new variable for desugaring of switch statements
switchVar :: Int -> String
switchVar = printf "%sswitch_%d" ecPrefix
-- |The naming scheme for unique variable names
varUnique :: String -> Int -> String
varUnique = printf "%sunique_%s_%d" ecPrefix
-- |The name of a temporary variable for critical calls
varCrit :: Int -> String
varCrit = printf "%scrit_%d" ecPrefix
-- |The name of a new variable for boolean short-circuiting
varBool :: Int -> String
varBool = printf "%sbool_%d" ecPrefix
-- |The type alias for a T-stack frame
tframe :: String -> String
tframe = printf "%stframe_%s_t" ecPrefix
-- |The name of the variable inside a t-stack frame that stores the result of the called function
resVar :: String
resVar = printf "%sresult" ecPrefix
-- |The name of the variable inside a t-stack frame that stores the continuation information
contVar :: String
contVar = printf "%scont" ecPrefix
-- |The name of the union variable that holds all nested frames
frameUnion :: String
frameUnion = printf "%sframes" ecPrefix
-- |The name of the variables that hold the t-stack of a thread
tstackVar :: String -> String
tstackVar = printf "%ststack_%s" ecPrefix
-- |The type alias for a E-stack frame
eframe :: String -> String
eframe = printf "%seframe_%s_t" ecPrefix
-- |The name of the variable that holds the e-stack of a thread
estackVar :: String
estackVar = printf "%sestack" ecPrefix
-- |The name of the thread execution function
tfunction :: Int -> String
tfunction = printf "%sthread_%d" ecPrefix
-- |The name of a continuation label
contLbl :: String -> String
contLbl = printf "%scontlbl_%s" ecPrefix
-- |The name of a function static variable
varStatic :: String -> String -> String
varStatic = printf "%sstatic_%s_%s" ecPrefix
-- |How to mangle labels with function names
mangleFun :: String -> String -> String
mangleFun x fun = x ++ "_" ++ fun
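-- Illustrative example combining the helpers above:
--
-- > mangleFun (contLbl "start") "run" == "ec_contlbl_start_run"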
| copton/ocram | ocram/src/Ocram/Names.hs | gpl-2.0 | 2,394 | 0 | 6 | 406 | 373 | 209 | 164 | 40 | 1 |
module Main where
--------------------------------------------------------------------------------
import Math.REPL.Evaluator.Base (evalTest)
import Model.Arithmetic
--------------------------------------------------------------------------------
import Control.Applicative ((<$>))
import Control.Monad
import System.Exit (exitFailure, exitSuccess)
import Test.QuickCheck
--------------------------------------------------------------------------------
newtype ExprString = ExprString { unwrapExprString :: String }
instance Show ExprString where
show (ExprString s) = s
instance Arbitrary ExprString where
arbitrary = ExprString <$> genExprString
--------------------------------------------------------------------------------
operators :: String
operators = "+-*/^"
genOp :: Gen Char
genOp = elements operators
genArg :: Gen String
genArg = show . abs <$> (arbitrary :: Gen Double)
genExprString :: Gen String
genExprString = liftM concat .
liftM2 (:) (listOf (elements " ")) .
liftM2 (:) genArg .
liftM2 (:) (listOf (elements " ")) .
listOf . liftM2 (:) genOp
. liftM2 (++) (listOf (elements " ")) $ genArg
--------------------------------------------------------------------------------
prop_model :: ExprString -> Bool
prop_model xs = calcResult == modelResult
where expr = unwrapExprString xs
calcResult = evalTest expr
modelResult = modelEval expr
--------------------------------------------------------------------------------
main :: IO ()
main = do
status <- quickCheckWithResult stdArgs { maxSuccess = 2500 } prop_model
case status of
Success{} -> exitSuccess
_ -> exitFailure
--------------------------------------------------------------------------------
| sumitsahrawat/calculator | tests/Arithmetic.hs | gpl-2.0 | 1,904 | 0 | 16 | 405 | 391 | 215 | 176 | 36 | 2 |
module AnnotatingTypeChecker where
import AbsMini
import PrintMini
import ErrM
-- from TypeChecker by changing () to Program,Stm,etc
typecheck :: Program -> Err Program
typecheck (Prog stms) = do (stms',_) <- checkStms emptyEnv stms ; return (Prog stms')
checkStms :: Env -> [Stm] -> Err ([Stm],Env)
checkStms env [] = return ([],env)
checkStms env (st:stms) = do
(st',env') <- checkStm env st -- a bit more like the interpreter!
(stms',env'') <- checkStms env' stms
return (st':stms', env'')
checkStm :: Env -> Stm -> Err (Stm,Env)
checkStm env s =
case s of
SDecl t x -> do env' <- addVar env x t ; return (s,env')
SAss x e -> do t <- lookupVar env x
e' <- checkExp env e t
return (SAss x e', env)
SBlock stms -> do (stms',_) <- checkStms (addScope env) stms
return (SBlock stms',env) -- discard env updates
SPrint e -> do (e',_) <- inferExp env e
return (SPrint e', env)
checkExp :: Env -> Exp -> Type -> Err Exp
checkExp env e t =
do (e',t') <- inferExp env e
if t' /= t
then fail (printTree e ++ " has type " ++ printTree t'
++ " expected " ++ printTree t)
else return e'
inferExp :: Env -> Exp -> Err (Exp,Type)
inferExp env e =
case e of
EVar x -> do t <- lookupVar env x ; return (ETyped t e, t)
EInt _ -> return (ETyped TInt e,TInt)
EDouble _ -> return (ETyped TDouble e, TDouble)
EAdd e1 e2 -> do (e1',t1) <- inferExp env e1
(e2',t2) <- inferExp env e2
if t1 == t2
then return (ETyped t1 (EAdd e1' e2'), t1)
else fail (printTree e1 ++ " has type " ++ printTree t1
++ " but " ++ printTree e2
++ " has type " ++ printTree t2)
type Env = [[(Ident, Type)]]
emptyEnv :: Env
emptyEnv = [[]]
addVar :: Env -> Ident -> Type -> Err Env
addVar (scope:rest) x t =
case lookup x scope of
Nothing -> return (((x,t):scope):rest)
Just _ -> fail ("Variable " ++ printTree x ++ " already declared.")
lookupVar :: Env -> Ident -> Err Type
lookupVar [] x = fail $ "Unknown variable " ++ printTree x ++ "."
lookupVar (scope:rest) x = case lookup x scope of
Nothing -> lookupVar rest x
Just t -> return t
addScope :: Env -> Env
addScope env = []:env
| izimbra/PLT2014 | mini/haskell/AnnotatingTypeChecker.hs | gpl-2.0 | 2,614 | 0 | 19 | 975 | 1,008 | 504 | 504 | 58 | 5 |
{-# LANGUAGE CPP #-}
{-# OPTIONS_GHC -F -pgmF htfpp #-}
module TestTmp where
import Test.Framework
#include "solution.hs"
test = htfMain htf_thisModulesTests
| NorfairKing/project-euler | build/haskell/test-boilerplate.hs | gpl-2.0 | 169 | 0 | 5 | 32 | 20 | 13 | 7 | 5 | 1 |
{-# LANGUAGE OverloadedStrings #-}
module Main (main) where
import qualified SDL
import qualified SDL.Image
import qualified Common as C
import Control.Monad.IO.Class (MonadIO)
import Control.Monad.Extra (whileM)
main :: IO ()
main = C.withSDL $ do
C.setHintQuality
C.withWindow "Lesson 07" (640, 480) $ \w ->
C.withRenderer w $ \r -> do
t <- SDL.Image.loadTexture r "./assets/texture.png"
whileM $
C.isContinue <$> SDL.pollEvent
>>= C.conditionallyRun (draw r t)
SDL.destroyTexture t
draw :: (MonadIO m) => SDL.Renderer -> SDL.Texture -> m ()
draw r t = do
SDL.clear r
SDL.copy r t Nothing Nothing
SDL.present r
| palf/haskellSDL2Examples | examples/lesson07/src/Lesson07.hs | gpl-2.0 | 671 | 0 | 17 | 149 | 240 | 125 | 115 | 22 | 1 |
--- apfelmus 2009. This code is hereby released into public domain.
module RandomList where
import System.Random
import Control.Monad.Random
import Control.Monad
type R a = Rand StdGen a
-- List returning elements in random order
type RandomList a = R [a]
print_rl :: RandomList [Integer] -> IO ()
print_rl rl = do
g <- getStdGen
let xs = evalRand rl g
putStrLn (show xs)
extract :: RandomList a -> [a]
extract rxs = do
      let g = mkStdGen 1000
      evalRand rxs g
empty :: RandomList a
empty = return []
singleton :: a -> RandomList a
singleton x = return [x]
-- Fair merge of random lists
merge :: RandomList a -> RandomList a -> RandomList a
merge rxs rys = do
xs <- rxs
ys <- rys
merge' (length xs, xs) (length ys, ys)
where
merge' (0 , []) (_ , ys) = return ys
merge' (_ , xs) (0 , []) = return xs
merge' (nx, x:xs) (ny, y:ys) = do
k <- getRandomR (1,nx+ny) -- selection weighted by size
if k <= nx
then (x:) `liftM` ((nx-1, xs) `merge'` (ny, y:ys))
else (y:) `liftM` ((nx, x:xs) `merge'` (ny-1, ys))
-- Generate a random permutation in O(n log n)
permute :: [a] -> RandomList a
permute = fromList
where
fromList [] = empty
fromList [x] = singleton x
fromList xs = (fromList l) `merge` (fromList r)
where (l,r) = splitAt (length xs `div` 2) xs
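-- Illustrative use: 'extract (permute [1..5])' evaluates with the fixed seed
-- from 'extract' and returns some permutation of [1..5]; 'merge' keeps the
-- shuffle fair by choosing each side with probability proportional to its length.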
----------------------------------------------------------------------------
| adhalanay/Examen | src/RandomList.hs | gpl-3.0 | 1,497 | 0 | 14 | 416 | 573 | 309 | 264 | 37 | 4 |
{-# LANGUAGE OverloadedStrings #-}
module WebSpec (spec) where
import Control.Monad (void)
import Test.Hspec
import Test.Hspec.Fn
import Common
import Users.Model
spec :: Spec
spec = fnTests $ do
describe "/login" $ do
it "should login the user" $ do
let newUser = NewUser "new" "[email protected]" "pass"
void $ eval (\ctxt -> createUser ctxt newUser)
post "/login" [("username", "new")
,("password", "pass")]
>>= should300To "/users/new"
it "shouldn't login the user if the password is wrong" $ do
let newUser = NewUser "new" "[email protected]" "pass"
void $ eval (\ctxt -> createUser ctxt newUser)
post "/login" [("username", "new")
,("password", "passbad")]
>>= shouldNot300
it "shouldn't login the user if the username is wrong" $ do
let newUser = NewUser "new" "[email protected]" "pass"
void $ eval (\ctxt -> createUser ctxt newUser)
post "/login" [("username", "newblah")
,("password", "pass")]
>>= shouldNot300
| emhoracek/smooch | app/tests/WebSpec.hs | gpl-3.0 | 1,109 | 0 | 18 | 338 | 303 | 155 | 148 | 28 | 1 |
{-# LANGUAGE TemplateHaskell #-}
module Vdv.FilterOperator where
import ClassyPrelude
import Control.Lens(makePrisms)
data FilterOperator = FilterOperatorEq
| FilterOperatorLike
| FilterOperatorLe
| FilterOperatorGe
| FilterOperatorDateLe
| FilterOperatorDateGe
| FilterOperatorDateEq
deriving(Show,Eq)
$(makePrisms ''FilterOperator)
| pmiddend/vdvanalyze | src/Vdv/FilterOperator.hs | gpl-3.0 | 477 | 0 | 8 | 173 | 68 | 40 | 28 | 13 | 0 |
module Ennel.Text.Interpreter where
import Ennel.Linguistics
import Ennel.Dictionary.Data hiding (phrase)
import Data.JustParse
import Data.JustParse.Char
import Data.JustParse.Combinator hiding (branch)
import Control.Applicative ((<$>), (<*>), (<*), (*>), pure)
import Control.Monad
import Data.Monoid
import Control.Applicative.Automaton
import Data.Traversable hiding (sequence)
interpret :: TemplateTable -> Interpreter
interpret table cat = case interpretations of
[] -> mempty
_ -> choice_ interpretations
where
interpretations = do
(c, Template bs t as s) <- table
is' <- maybe mempty return (c `fitInto` cat)
return $ do
sb <- sequence . map (binding table is') $ bs
phrase t
sa <- sequence . map (binding table is') $ as
return $ foldr substitute s (sb ++ sa)
interpret' :: TemplateTable -> LexicalCategory -> QueueAutomaton Word SemanticTree
interpret' table cat = step *> branch (toPriorityTree interpretations)
where
interpretations = do
(c, Template bs t as s) <- table
is' <- maybe mempty return (c `fitInto` cat)
let sb = sequenceA . map (binding' table is') $ bs
let p = phrase' t
let sa = sequenceA . map (binding' table is') $ as
return $ (\sb' p' sa' -> foldr substitute s (sb' ++ sa')) <$> sb <*> p <*> sa
phrase :: Phrase -> Parser Phrase Phrase
phrase = sequence . map token
phrase' :: Phrase -> QueueAutomaton Word Phrase
phrase' = sequenceA . map (accept . (==))
binding :: TemplateTable -> [Inflection] -> Binding -> Parser Phrase (String, SemanticTree)
binding table is' (Bind sel v, cat) = do
    -- Peek at the selector phrase only when a selector is actually given.
    if null sel then return [] else lookAhead (phrase (words sel))
    s <- interpret table cat
    return (v, s)
binding' :: TemplateTable -> [Inflection] -> Binding -> QueueAutomaton Word (String, SemanticTree)
binding' table is' (Bind sel v, cat) = (,) <$> pure v <*> interpret' table cat
substitute :: (String, SemanticTree) -> SemanticTree -> SemanticTree
substitute (x', SemanticTree x xs) (SemanticTree y ys) = if x' == y
then SemanticTree x xs
else SemanticTree y (map (substitute (x', SemanticTree x xs)) ys)
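-- Illustrative example:
--
-- > substitute ("x", SemanticTree "a" []) (SemanticTree "x" [])
--
-- replaces the node labelled "x" and yields 'SemanticTree "a" []'.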
| trehansiddharth/ennel | Ennel/Text/Interpreter.hs | gpl-3.0 | 2,143 | 4 | 17 | 436 | 834 | 436 | 398 | 47 | 2 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE RecordWildCards #-}
import Control.Monad
import Control.Monad.Trans (liftIO)
import Control.Monad.Trans.Either (EitherT(..))
import Control.Monad.Trans.Maybe
-- import Data.Attoparsec.Lazy
import qualified Data.Aeson.Generic as G
import qualified Data.ByteString.Lazy.Char8 as LB
import Data.Foldable (foldrM)
import Data.Maybe
import System.Environment
import System.IO
--
import HEP.Storage.WebDAV.CURL
import HEP.Storage.WebDAV.Type
-- import HEP.Storage.WebDAV.Util
import HEP.Util.Either
--
import HEP.Physics.Analysis.ATLAS.Common
import HEP.Physics.Analysis.ATLAS.SUSY.SUSY_0L2to6JMET_8TeV
import HEP.Physics.Analysis.Common.XSecNTotNum
import HEP.Util.Work
--
import Util
import Debug.Trace
m_neutralino :: Double
m_neutralino = 10 -- 500
datalst :: [ (Double,Double) ]
datalst = [ (g,q) | g <- [2500], q <- [200,300..3000] ]
{-
datalst :: [ (Double,Double) ]
datalst = [ (g,q) | g <- [m_neutralino+100,m_neutralino+200..3000], q <- [m_neutralino+100,m_neutralino+200..3000] ]
-}
datalst1of2 :: [ (Double,Double) ]
datalst1of2 = [ (g,q) | g <- [m_neutralino+100,m_neutralino+200..1500], q <- [m_neutralino+100,m_neutralino+200..3000] ]
datalst2of2 :: [ (Double,Double) ]
datalst2of2 = [ (g,q) | g <- [1600,1700..3000], q <- [m_neutralino+100,m_neutralino+200..3000] ]
checkFiles :: DataFileClass -> String -> IO (Either String ())
checkFiles c procname = do
rs <- forM datalst (\s -> (doJob (checkFileExistInDAV c) . createRdirBName procname) s
>>= return . maybe (show s) (const []) . head)
let missinglst = filter (not.null) rs
nmiss = length missinglst
mapM_ (\x -> putStrLn (" , " ++ x)) missinglst
if null missinglst then return (Right ()) else return (Left (show nmiss ++ " files are missing"))
createRdirBName procname (mg,mq) =
let rdir = "montecarlo/admproject/SimplifiedSUSY/8TeV/scan_" ++ procname
basename = "SimplifiedSUSYMN" ++ show m_neutralino ++ "MG"++show mg++ "MSQ" ++ show mq ++ "_" ++ procname ++ "_LHC8ATLAS_NoMatch_NoCut_AntiKT0.4_NoTau_Set"
in (rdir,basename)
dirset = [ "2sg_4j2n"
, "sqsg_3j2n"
, "2sq_2j2n"
]
atlas_20_3_fbinv_at_8_TeV :: WebDAVConfig -> WebDAVRemoteDir -> String
-> IO (Maybe (CrossSectionAndCount,[(JESParam,HistEType)],[(EType,Double)],Double))
atlas_20_3_fbinv_at_8_TeV wdavcfg wdavrdir bname = do
let fp1 = bname ++ "_ATLAS8TeV0L2to6JBkgTest.json"
fp2 = bname ++ "_total_count.json"
runMaybeT $ do
(_,mr1) <- MaybeT . boolToMaybeM (doesFileExistInDAV wdavcfg wdavrdir fp1)
. downloadFile True wdavcfg wdavrdir $ fp1
r1 <- liftM LB.pack (MaybeT . return $ mr1)
(result :: [(JESParam, HistEType)]) <- MaybeT . return $ G.decode r1
(_,mr2) <- MaybeT . boolToMaybeM (doesFileExistInDAV wdavcfg wdavrdir fp2)
. downloadFile True wdavcfg wdavrdir $ fp2
r2 <- liftM LB.pack (MaybeT . return $ mr2)
(xsec :: CrossSectionAndCount) <- MaybeT . return $ G.decode r2
let weight = crossSectionInPb xsec * 20300 / fromIntegral (numberOfEvent xsec)
hist = map (\(x,y) -> (x,fromIntegral y * weight)) ((snd . head) result )
let getratio (x,y) = do y' <- lookup x limitOfNBSM
return (y/ y')
maxf (x,y) acc = do r <- getratio (x,y)
return (max acc r)
maxratio <- MaybeT . return $ foldrM maxf 0 hist
return (xsec, result, hist, maxratio)
getResult f (rdir,basename) = do
let nlst = [1]
work f "config1.txt" rdir basename nlst
mainAnalysis = do
outh <- openFile ("simplifiedsusy" ++ show m_neutralino ++ "_sqsg_8TeV_0lep_temp.dat") WriteMode
mapM_ (\(mg,msq,r) -> hPutStrLn outh (show mg ++ ", " ++ show msq ++ ", " ++ show r))
=<< forM datalst ( \(x,y) -> do
r <- runEitherT $ do
let analysis x = getResult atlas_20_3_fbinv_at_8_TeV . createRdirBName x
simplify = fmap head . fmap catMaybes . EitherT
takeXSec (x,_,_,_) = crossSectionInPb x * 20300
takeHist (_,_,h,_) = h
t_2sg <- (simplify . analysis "2sg_4j2n") (x,y)
t_sqsg <- (simplify . analysis "sqsg_3j2n") (x,y)
t_2sq <- (simplify . analysis "2sq_2j2n") (x,y)
let h_2sg = takeHist t_2sg
h_sqsg = takeHist t_sqsg
h_2sq = takeHist t_2sq
x_2sg = takeXSec t_2sg
x_sqsg = takeXSec t_sqsg
x_2sq = takeXSec t_2sq
totalsr = mkTotalSR [h_2sg, h_sqsg, h_2sq]
totalpr = x_2sg + x_sqsg + x_2sq
totalsrpr = multiplyScalar (1.0/totalpr) totalsr
r_ratio = getRFromSR totalsr
-- trace (show (x,y)) $ return (x :: Double, y :: Double, r_ratio)
trace (show (x,y,h_2sg)) $ return (x :: Double, y :: Double, totalsrpr)
case r of
Left err -> error err
Right result -> return result
)
hClose outh
mainCheck = do
r <- runEitherT $ mapM_ (EitherT . checkFiles ChanCount) (take 1 dirset)
print r
mainCount str = do
r <- runEitherT (countEvent str)
case r of
Left err -> putStrLn err
Right _ -> return ()
main = do
args <- getArgs
case args !! 0 of
"count" -> case args !! 1 of
"2sg" -> mainCount "2sg_4j2n"
"sqsg" -> mainCount "sqsg_3j2n"
"2sq" -> mainCount "2sq_2j2n"
"check" -> mainCheck
"analysis" -> mainAnalysis
countEvent :: String -> EitherT String IO ()
countEvent str = do
EitherT (checkFiles RawData str)
liftIO $ putStrLn "Proceed 1 or 2 ? (1/2/others)"
c <- liftIO $ getChar
if c == '1'
then liftIO $ forM_ datalst1of2 (getCount.createRdirBName str)
else if c == '2'
then liftIO $ forM_ datalst2of2 (getCount.createRdirBName str)
else return ()
getCount (rdir,basename) = do
let nlst = [1]
r1 <- work (\wdavcfg wdavrdir nm -> getXSecNCount XSecLHE wdavcfg wdavrdir nm >>= getJSONFileAndUpload wdavcfg wdavrdir nm)
"config1.txt"
rdir
basename
nlst
print r1
r2 <- work
(atlas_8TeV_0L2to6J_bkgtest ([0],[0]))
"config1.txt"
rdir
basename
nlst
print r2
| wavewave/lhc-analysis-collection | analysis/SimplifiedSUSY_sqsg_ATLAS0L2to6JMET_8TeV.hs | gpl-3.0 | 6,551 | 0 | 22 | 1,803 | 2,120 | 1,105 | 1,015 | 140 | 5 |
module DrawingColors (
ColorStyle(..)
, colorScheme
, colorOnBlackScheme
, whiteOnBlackScheme
, randomColorScheme
) where
import Diagrams.Prelude hiding ((&), (#))
{-# ANN module "HLint: ignore Unnecessary hiding" #-}
-- COLO(U)RS --
colorScheme :: ColorStyle Double
colorScheme = colorOnBlackScheme
data ColorStyle a = ColorStyle {
backgroundC :: Colour a,
lineC :: Colour a,
textBoxTextC :: Colour a,
textBoxC :: Colour a,
apply0C :: Colour a,
apply1C :: Colour a,
boolC :: Colour a,
lamArgResC :: Colour a,
regionPerimC :: Colour a,
caseRhsC :: Colour a,
patternC :: Colour a,
patternTextC :: Colour a,
bindTextBoxC :: Colour a,
bindTextBoxTextC :: Colour a,
edgeListC :: [Colour a],
nestingC :: [Colour a]
}
colorOnBlackScheme :: (Floating a, Ord a) => ColorStyle a
colorOnBlackScheme = ColorStyle {
backgroundC = black,
lineC = white,
--lineC = lightgray,
textBoxTextC = white,
textBoxC = white,
apply0C = red,
apply1C = cyan,
boolC = orange,
lamArgResC = lightSlightlyPurpleBlue,
regionPerimC = lime,
caseRhsC = slightlyGreenYellow,
patternC = lightMagenta,
patternTextC = cyan,
bindTextBoxC = reddishOrange,
bindTextBoxTextC = lightGreen,
edgeListC = [white, lime, reddishOrange, lightPurple, yellow, lightBlue],
nestingC = cycle [red, reddishOrange, yellow]
}
where
slightlyGreenYellow = sRGB24 212 255 0
lightMagenta = sRGB24 255 94 255
lightSlightlyPurpleBlue = sRGB24 109 87 255
reddishOrange = sRGB24 255 119 0
--lightBlue = sRGB24 126 127 255
lightBlue = sRGB24 35 156 255
lightPurple = sRGB24 208 137 255
lightGreen = sRGB24 180 255 145
whiteOnBlackScheme :: (Floating a, Ord a) => ColorStyle a
whiteOnBlackScheme = ColorStyle {
backgroundC = black,
lineC = white,
textBoxTextC = white,
textBoxC = white,
apply0C = white,
apply1C = white,
boolC = white,
lamArgResC = white,
regionPerimC = white,
caseRhsC = white,
patternC = white,
patternTextC = white,
bindTextBoxC = white,
bindTextBoxTextC = white,
edgeListC = [white],
nestingC = repeat white
}
-- Use this to test that all of the colors use the colorScheme
randomColorScheme :: (Floating a, Ord a) => ColorStyle a
randomColorScheme = ColorStyle {
backgroundC = darkorchid,
lineC = yellow,
textBoxTextC = blue,
textBoxC = magenta,
apply0C = orange,
apply1C = green,
boolC = lightpink,
lamArgResC = red,
regionPerimC = cyan,
caseRhsC = red,
patternC = olive,
patternTextC = coral,
bindTextBoxC = maroon,
bindTextBoxTextC = lime,
edgeListC = [wheat],
nestingC = cycle [red, yellow, purple, pink, lightblue, magenta]
}
| rgleichman/glance | app/DrawingColors.hs | gpl-3.0 | 2,666 | 0 | 10 | 556 | 745 | 452 | 293 | 88 | 1 |
module MicChan (
SoundData(..)
, Mic(..)
, openMic
, closeMic
, readSoundData
) where
import Control.Monad ( void
, forever
)
import Control.Monad.Loops ( whileJust
)
import Control.Concurrent ( ThreadId
, forkIO
, killThread
)
import Control.Concurrent.STM ( atomically
, STM
)
import Control.Concurrent.STM.TChan ( TChan
, newTChanIO
, writeTChan
, tryReadTChan
)
import Sound.Pulse.Simple ( Simple
, Direction(..)
, SampleSpec(..)
, SampleFormat(..)
, Endian(..)
, simpleNew
, simpleFree
, simpleRead
)
newtype SoundData = SD { samples :: [Double] }
deriving Show
data Mic = Mic { chan :: TChan SoundData
, threadId :: ThreadId
, pulseSource :: Simple }
openMic :: IO Mic
openMic = do
s <- simpleNew Nothing "sound-level-indicator" Record Nothing
"Displaying sound level from default mic."
(SampleSpec (F32 LittleEndian) 44100 1) Nothing Nothing
sndChan <- newTChanIO
tid <- forkIO $ forever $ handleMic s sndChan
return $ Mic { chan = sndChan
, threadId = tid
, pulseSource = s
}
closeMic :: Mic -> IO ()
closeMic m = let tid = threadId m
s = pulseSource m
in killThread tid >>
simpleFree s
handleMic :: Simple -> TChan SoundData -> IO ()
handleMic s sndChan = do
sd <- simpleRead s samplesChunkSize
void . atomically $ writeTChan sndChan $ SD sd
where
samplesChunkSize = 1000
instance Monoid SoundData where
mempty = SD []
mappend (SD d1) (SD d2) = SD $ mappend d1 d2
readSoundData :: Mic -> STM SoundData
readSoundData m = do
dta <- whileJust (tryReadTChan $ chan m) return
return $ mconcat dta
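-- Sketch of intended use (illustrative; assumes a default PulseAudio source):
--
-- > do m  <- openMic
-- >    sd <- atomically (readSoundData m)
-- >    print (length (samples sd))
-- >    closeMic m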
| martinvlk/sound-level-indicator | src/MicChan.hs | gpl-3.0 | 2,486 | 0 | 12 | 1,238 | 534 | 292 | 242 | 58 | 1 |
module Main where
import Test.HLex
import Test.TDOP
main :: IO ()
main = undefined
| cbowdon/TDOP | test.hs | gpl-3.0 | 85 | 0 | 6 | 16 | 29 | 17 | 12 | 5 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.Mirror.Accounts.Insert
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Inserts a new account for a user
--
-- /See:/ <https://developers.google.com/glass Google Mirror API Reference> for @mirror.accounts.insert@.
module Network.Google.Resource.Mirror.Accounts.Insert
(
-- * REST Resource
AccountsInsertResource
-- * Creating a Request
, accountsInsert
, AccountsInsert
-- * Request Lenses
, aiAccountName
, aiPayload
, aiUserToken
, aiAccountType
) where
import Network.Google.Mirror.Types
import Network.Google.Prelude
-- | A resource alias for @mirror.accounts.insert@ method which the
-- 'AccountsInsert' request conforms to.
type AccountsInsertResource =
"mirror" :>
"v1" :>
"accounts" :>
Capture "userToken" Text :>
Capture "accountType" Text :>
Capture "accountName" Text :>
QueryParam "alt" AltJSON :>
ReqBody '[JSON] Account :> Post '[JSON] Account
-- | Inserts a new account for a user
--
-- /See:/ 'accountsInsert' smart constructor.
data AccountsInsert =
AccountsInsert'
{ _aiAccountName :: !Text
, _aiPayload :: !Account
, _aiUserToken :: !Text
, _aiAccountType :: !Text
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'AccountsInsert' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'aiAccountName'
--
-- * 'aiPayload'
--
-- * 'aiUserToken'
--
-- * 'aiAccountType'
accountsInsert
:: Text -- ^ 'aiAccountName'
-> Account -- ^ 'aiPayload'
-> Text -- ^ 'aiUserToken'
-> Text -- ^ 'aiAccountType'
-> AccountsInsert
accountsInsert pAiAccountName_ pAiPayload_ pAiUserToken_ pAiAccountType_ =
AccountsInsert'
{ _aiAccountName = pAiAccountName_
, _aiPayload = pAiPayload_
, _aiUserToken = pAiUserToken_
, _aiAccountType = pAiAccountType_
}
-- | The name of the account to be passed to the Android Account Manager.
aiAccountName :: Lens' AccountsInsert Text
aiAccountName
= lens _aiAccountName
(\ s a -> s{_aiAccountName = a})
-- | Multipart request metadata.
aiPayload :: Lens' AccountsInsert Account
aiPayload
= lens _aiPayload (\ s a -> s{_aiPayload = a})
-- | The ID for the user.
aiUserToken :: Lens' AccountsInsert Text
aiUserToken
= lens _aiUserToken (\ s a -> s{_aiUserToken = a})
-- | Account type to be passed to Android Account Manager.
aiAccountType :: Lens' AccountsInsert Text
aiAccountType
= lens _aiAccountType
(\ s a -> s{_aiAccountType = a})
instance GoogleRequest AccountsInsert where
type Rs AccountsInsert = Account
type Scopes AccountsInsert = '[]
requestClient AccountsInsert'{..}
= go _aiUserToken _aiAccountType _aiAccountName
(Just AltJSON)
_aiPayload
mirrorService
where go
= buildClient (Proxy :: Proxy AccountsInsertResource)
mempty
| brendanhay/gogol | gogol-mirror/gen/Network/Google/Resource/Mirror/Accounts/Insert.hs | mpl-2.0 | 3,745 | 0 | 15 | 883 | 531 | 315 | 216 | 84 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.AlertCenter.Alerts.List
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Lists the alerts.
--
-- /See:/ <https://developers.google.com/admin-sdk/alertcenter/ Google Workspace Alert Center API Reference> for @alertcenter.alerts.list@.
module Network.Google.Resource.AlertCenter.Alerts.List
(
-- * REST Resource
AlertsListResource
-- * Creating a Request
, alertsList
, AlertsList
-- * Request Lenses
, alXgafv
, alUploadProtocol
, alOrderBy
, alAccessToken
, alUploadType
, alCustomerId
, alFilter
, alPageToken
, alPageSize
, alCallback
) where
import Network.Google.AlertCenter.Types
import Network.Google.Prelude
-- | A resource alias for @alertcenter.alerts.list@ method which the
-- 'AlertsList' request conforms to.
type AlertsListResource =
"v1beta1" :>
"alerts" :>
QueryParam "$.xgafv" Xgafv :>
QueryParam "upload_protocol" Text :>
QueryParam "orderBy" Text :>
QueryParam "access_token" Text :>
QueryParam "uploadType" Text :>
QueryParam "customerId" Text :>
QueryParam "filter" Text :>
QueryParam "pageToken" Text :>
QueryParam "pageSize" (Textual Int32) :>
QueryParam "callback" Text :>
QueryParam "alt" AltJSON :>
Get '[JSON] ListAlertsResponse
-- | Lists the alerts.
--
-- /See:/ 'alertsList' smart constructor.
data AlertsList =
AlertsList'
{ _alXgafv :: !(Maybe Xgafv)
, _alUploadProtocol :: !(Maybe Text)
, _alOrderBy :: !(Maybe Text)
, _alAccessToken :: !(Maybe Text)
, _alUploadType :: !(Maybe Text)
, _alCustomerId :: !(Maybe Text)
, _alFilter :: !(Maybe Text)
, _alPageToken :: !(Maybe Text)
, _alPageSize :: !(Maybe (Textual Int32))
, _alCallback :: !(Maybe Text)
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'AlertsList' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'alXgafv'
--
-- * 'alUploadProtocol'
--
-- * 'alOrderBy'
--
-- * 'alAccessToken'
--
-- * 'alUploadType'
--
-- * 'alCustomerId'
--
-- * 'alFilter'
--
-- * 'alPageToken'
--
-- * 'alPageSize'
--
-- * 'alCallback'
alertsList
:: AlertsList
alertsList =
AlertsList'
{ _alXgafv = Nothing
, _alUploadProtocol = Nothing
, _alOrderBy = Nothing
, _alAccessToken = Nothing
, _alUploadType = Nothing
, _alCustomerId = Nothing
, _alFilter = Nothing
, _alPageToken = Nothing
, _alPageSize = Nothing
, _alCallback = Nothing
}
-- | V1 error format.
alXgafv :: Lens' AlertsList (Maybe Xgafv)
alXgafv = lens _alXgafv (\ s a -> s{_alXgafv = a})
-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
alUploadProtocol :: Lens' AlertsList (Maybe Text)
alUploadProtocol
= lens _alUploadProtocol
(\ s a -> s{_alUploadProtocol = a})
-- | Optional. The sort order of the list results. If not specified results
-- may be returned in arbitrary order. You can sort the results in
-- descending order based on the creation timestamp using
-- \`order_by=\"create_time desc\"\`. Currently, supported sorting are
-- \`create_time asc\`, \`create_time desc\`, \`update_time desc\`
alOrderBy :: Lens' AlertsList (Maybe Text)
alOrderBy
= lens _alOrderBy (\ s a -> s{_alOrderBy = a})
-- | OAuth access token.
alAccessToken :: Lens' AlertsList (Maybe Text)
alAccessToken
= lens _alAccessToken
(\ s a -> s{_alAccessToken = a})
-- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
alUploadType :: Lens' AlertsList (Maybe Text)
alUploadType
= lens _alUploadType (\ s a -> s{_alUploadType = a})
-- | Optional. The unique identifier of the Google Workspace organization
-- account of the customer the alerts are associated with. Inferred from
-- the caller identity if not provided.
alCustomerId :: Lens' AlertsList (Maybe Text)
alCustomerId
= lens _alCustomerId (\ s a -> s{_alCustomerId = a})
-- | Optional. A query string for filtering alert results. For more details,
-- see [Query
-- filters](https:\/\/developers.google.com\/admin-sdk\/alertcenter\/guides\/query-filters)
-- and [Supported query filter
-- fields](https:\/\/developers.google.com\/admin-sdk\/alertcenter\/reference\/filter-fields#alerts.list).
alFilter :: Lens' AlertsList (Maybe Text)
alFilter = lens _alFilter (\ s a -> s{_alFilter = a})
-- | Optional. A token identifying a page of results the server should
-- return. If empty, a new iteration is started. To continue an iteration,
-- pass in the value from the previous ListAlertsResponse\'s
-- next_page_token field.
alPageToken :: Lens' AlertsList (Maybe Text)
alPageToken
= lens _alPageToken (\ s a -> s{_alPageToken = a})
-- | Optional. The requested page size. Server may return fewer items than
-- requested. If unspecified, server picks an appropriate default.
alPageSize :: Lens' AlertsList (Maybe Int32)
alPageSize
= lens _alPageSize (\ s a -> s{_alPageSize = a}) .
mapping _Coerce
-- | JSONP
alCallback :: Lens' AlertsList (Maybe Text)
alCallback
= lens _alCallback (\ s a -> s{_alCallback = a})
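-- A minimal usage sketch (added for illustration; not part of the generated
-- module). It assumes the lens combinators ('&', '?~') and OverloadedStrings
-- that gogol's generated code normally has in scope; the customer id below is
-- purely hypothetical.
exampleAlertsList :: AlertsList
exampleAlertsList =
    alertsList
      & alCustomerId ?~ "C0123abcd"
      & alPageSize ?~ 50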
instance GoogleRequest AlertsList where
type Rs AlertsList = ListAlertsResponse
type Scopes AlertsList =
'["https://www.googleapis.com/auth/apps.alerts"]
requestClient AlertsList'{..}
= go _alXgafv _alUploadProtocol _alOrderBy
_alAccessToken
_alUploadType
_alCustomerId
_alFilter
_alPageToken
_alPageSize
_alCallback
(Just AltJSON)
alertCenterService
where go
= buildClient (Proxy :: Proxy AlertsListResource)
mempty
| brendanhay/gogol | gogol-alertcenter/gen/Network/Google/Resource/AlertCenter/Alerts/List.hs | mpl-2.0 | 6,631 | 0 | 20 | 1,574 | 1,055 | 612 | 443 | 140 | 1 |
{-# LANGUAGE OverloadedStrings #-}
module Controller.VolumeState
( postVolumeState
, deleteVolumeState
) where
import Data.Maybe (fromMaybe)
import Network.HTTP.Types (noContent204)
import qualified Web.Route.Invertible as R
import qualified Data.Aeson as Aeson
import Model.Id
import Model.Permission
import Model.Volume
import Model.VolumeState
import HTTP.Path.Parser
import HTTP.Form.Deform
import Action.Route
import Action
import Controller.Form
import Controller.Paths
import Controller.Volume
data CreateOrUpdateVolumeStateRequest = CreateOrUpdateVolumeStateRequest Aeson.Value Bool
postVolumeState :: ActionRoute (Id Volume, VolumeStateKey)
postVolumeState = action PUT (pathJSON >/> pathId </> "state" >/> R.parameter) $ \(vi, k) -> withAuth $ do
_ <- getVolume PermissionEDIT vi
s <- runForm Nothing $ do
j <- deform
p <- "public" .:> fromMaybe False <$> deformOptional deform
let _ = CreateOrUpdateVolumeStateRequest j p
return VolumeState
{ stateVolumeId = vi
, volumeStateKey = k
, volumeStatePublic = p
, volumeStateValue = j
}
changeVolumeState s
return $ emptyResponse noContent204 []
deleteVolumeState :: ActionRoute (Id Volume, VolumeStateKey)
deleteVolumeState = action DELETE (pathJSON >/> pathId </> "state" >/> R.parameter) $ \(vi, k) -> withAuth $ do
_ <- getVolume PermissionEDIT vi
r <- DeleteVolumeStateResponse <$> removeVolumeState vi k
return $ okResponse [] $ (Aeson.encode . wasDeleted) r
newtype DeleteVolumeStateResponse = DeleteVolumeStateResponse { wasDeleted :: Bool }
| databrary/databrary | src/Controller/VolumeState.hs | agpl-3.0 | 1,578 | 0 | 17 | 261 | 437 | 235 | 202 | 40 | 1 |
module Main where
import Control.Monad
import Data.Maybe
import Test.QuickCheck
import Text.HTML.TagSoup
import Text.HTML.ValueCalc
import System.Exit (exitFailure)
-- running test suite
main :: IO ()
main =
do quickCheck threeArrowTest
r1 <- quickCheckResult prop_getDiff
r2 <- quickCheckResult prop_isUnderValued
r3 <- quickCheckResult prop_whiteSpacesDropped
r4 <- quickCheckResult prop_dropEmpty
r6 <- quickCheckResult prop_isUnderValued
r7 <- wrap genFromDolSign prop_fromDolSign
r8 <- wrap genFromMilDol prop_fromMilDol
r9 <- wrap genToMilSek prop_toMilSek
forM_ [r1, r2, r3, r4, r6, r7, r8, r9] qcToTest
where wrap f p = quickCheckResult $ forAll f p
-- Property...
threeArrowTest :: Int -> Property
threeArrowTest i = (i /= 23) ==> i == i
{-
   For a Result, throw exitFailure if it's not successful,
otherwise return unit
-}
qcToTest :: Result -> IO ()
qcToTest (Success _ _ _) = return ()
qcToTest _ = exitFailure
-- Telling QuickCheck how to generate a Company.
instance Arbitrary Company where
arbitrary = liftM3 (Company "Random Company") randInt randInt randInt
where randInt = elements [-9999..9999] :: Gen Integer
{-
Generators for specific kinds of strings needed
    for different tests. Some of the functions under test are only
    intended to work on the kind of input strings gathered by scraping.
    "$ " is prepended to those strings by the properties themselves
    rather than being generated here, as an optimization.
    In each generator, 'okay' filters the input space to discard
    meaningless tests.
-}
genFromDolSign :: Gen String
genFromDolSign = suchThat (listOf $ elements "0123456789") okay
where okay str = length str > 0
-- okay filters the input space to discard meaningless tests
genFromMilDol :: Gen String
genFromMilDol = suchThat (listOf $ elements "0123456789.,") okay
where okay str = (beforeDot str) /= "" &&
(length $ filter (=='.') str) <= 1 &&
str /= "" &&
head str /= '.' &&
head str /= ','
-- okay filters the input space to discard meaningless tests
genToMilSek :: Gen String
genToMilSek = suchThat (listOf $ elements "0123456789") okay
where okay str = (str /= "") && noBeginningZero str
genFromCommanotation :: Gen String
genFromCommanotation = suchThat (listOf $ elements "0123456789,") okay
where okay str = str /= "" &&
head str /= ',' &&
elem ',' str &&
last str /= ',' &&
noBeginningZero str
-- Testing the calculations module::getDiff
prop_getDiff :: Company -> Bool
prop_getDiff c = (ta-tl)-mc == getDiff c
where ta = totalAssets c
tl = totalLiabilities c
mc = marketCap c
{- Testing the calculations module::isUnderValued
See if the ta-tl is larger than the marketcap and compare it
to the isUnderValued function from the calculations module
-}
prop_isUnderValued :: Company -> Bool
prop_isUnderValued c = ((ta - tl) > mc) == isUnderValued c
where ta = totalAssets c
tl = totalLiabilities c
mc = marketCap c
{- In case it's a TagText we want to make sure the contained text no
   longer has any whitespace; for any other Tag (one that's not a
   TagText) nothing should be changed. 'ws' lists the characters
   treated as whitespace.
-}
prop_whiteSpacesDropped :: String -> Bool
prop_whiteSpacesDropped "" = True
prop_whiteSpacesDropped x =
let ws = [' ','\t','\n','\v','\f','\r','\160']
in dropWhitespace (TagText x) == TagText [c | c <- x, not $ elem c ws]
-- dropEmpty :: [Tag String] -> [Tag String]
-- I only want to keep tags whose text is non-empty.
prop_dropEmpty :: [String] -> Bool
prop_dropEmpty [] = True
prop_dropEmpty inp =
dropEmpty tagged == [x | x <- tagged, x /= (TagText "")]
where tagged = map (\x -> TagText x) inp
-- when fromDolSign has been run, there should be no ',' in the result
-- if a lonely $ is sent in, then the result should be 0
prop_fromDolSign :: String -> Bool
prop_fromDolSign "$" = (show $ fromDolSign "$") == "0"
prop_fromDolSign str = (not $ elem ',' parsed) && (take 2 parsed) /= "$ "
where parsed = show $ fromDolSign ("$ 0" ++ str)
-- Optimization: the property prepends "$ 0" so the parsed input always has that leading prefix.
beforeDot :: String -> String
beforeDot str = dropWhile (=='0') $ takeWhile (/='.') $ filter (/=',') str
-- this returns True when there are no 0's in the beginning of the input str
noBeginningZero :: String -> Bool
noBeginningZero str = (length $ takeWhile (=='0') str) == 0
-- no commas, no dot, right amount of 0's
prop_fromMilDol :: String -> Bool
prop_fromMilDol "" = True
prop_fromMilDol "0" = True
prop_fromMilDol str =
fromMilDol str == (read (beforeDot str)::Integer) * 1000000
-- just verify that the result is the input with "000000" appended (i.e. one million times larger)
prop_toMilSek :: String -> Bool
prop_toMilSek str = str ++ "000000" == show (fromJust $ toMilSek str)
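-- Added illustration (not part of the original test suite): the generators can
-- be inspected interactively; 'sample' from Test.QuickCheck prints a handful of
-- randomly generated values.
inspectGenerators :: IO ()
inspectGenerators = do
  sample genFromMilDol
  sample genToMilSek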
| ksallberg/valuecalc | Tests/QCTests.hs | unlicense | 4,987 | 0 | 18 | 1,250 | 1,188 | 609 | 579 | 83 | 1 |
module Utils where
import qualified Data.Set as S (Set, singleton, union, map, toList, fromList)
import Control.Monad (liftM2)
import RuleRTypes
ss :: a -> S.Set (S.Set a)
ss = S.singleton . S.singleton
cp :: (Eq a, Ord a) => S.Set (S.Set a) -> S.Set (S.Set a) -> S.Set (S.Set a)
cp a b = S.fromList . map S.fromList $ [x ++ y | x <- a', y <- b']
where a' = map S.toList . S.toList $ a
b' = map S.toList . S.toList $ b
l2f :: Literal -> Either String Formula
l2f (LiRu (Complex f)) = Right f
l2f (LiRu e) = Left $ "This: '" ++ show e ++ "' Literal cannot be converted to a Formula."
l2f (LiOb f) = Right f
q2rnr :: (Ord a) => Q -> (Either String Formula -> S.Set Literal -> a) -> S.Set a
q2rnr q lc = S.fromList [lc x x' | x' <- S.toList q, let x = foldl1 (liftM2 Conj) (map l2f . S.toList $ x')]
rn2suliru :: [S.Set RuleName] -> S.Set (S.Set Literal)
rn2suliru = S.singleton . foldl1 S.union . map (S.map LiRu)
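-- Tiny example (added for illustration; not in the original module): 'cp'
-- unions every set from the left collection with every set from the right
-- one, so pairing the singletons {{1}} and {{2}} yields {{1,2}}.
cpExample :: S.Set (S.Set Int)
cpExample = cp (ss 1) (ss 2)   -- fromList [fromList [1,2]]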
| rispoli/nesy-hs | Utils.hs | unlicense | 962 | 0 | 15 | 236 | 502 | 255 | 247 | 18 | 1 |
module Git.Sanity.Internal where
import Data.Machine
-- TODO Contribute to machine?
slide :: Process a (a, a)
slide = f Nothing where
f Nothing = await (\x -> f $ Just x)
f (Just x) = await (\y -> machine $ Yield (x, y) $ f $ Just y)
await f = machine $ Await f Refl stopped
machine = MachineT . return
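-- Usage sketch (added for illustration; not part of the original module):
-- 'slide' pairs each element with its successor.
slideExample :: [(Int, Int)]
slideExample = run (source [1, 2, 3, 4] ~> slide)
-- == [(1,2),(2,3),(3,4)]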
| aloiscochard/git-sanity | src/Git/Sanity/Internal.hs | apache-2.0 | 316 | 0 | 14 | 75 | 142 | 75 | 67 | 8 | 2 |
{- | A BSON document is a JSON-like object with a standard binary encoding defined at bsonspec.org. This implements version 1.0 of that spec.
Use the GHC language extension /OverloadedStrings/ to automatically convert String literals to UString (UTF8) -}
{-# LANGUAGE OverloadedStrings, TypeSynonymInstances, FlexibleInstances, DeriveDataTypeable, RankNTypes, OverlappingInstances, IncoherentInstances, ScopedTypeVariables, ForeignFunctionInterface, BangPatterns, CPP #-}
module Data.Bson (
-- * UTF-8 String
module Data.UString,
-- * Document
Document, look, lookup, valueAt, at, include, exclude, merge,
-- * Field
Field(..), (=:), (=?),
Label,
-- * Value
Value(..), Val(..), fval, cast, typed, typeOfVal,
-- * Special Bson value types
Binary(..), Function(..), UUID(..), MD5(..), UserDefined(..),
Regex(..), Javascript(..), Symbol(..), MongoStamp(..), MinMaxKey(..),
-- ** ObjectId
ObjectId(..), timestamp, genObjectId
#ifdef TEST
, composite
, roundTo
#endif
) where
import Prelude hiding (lookup)
import Control.Applicative ((<$>), (<*>))
import Data.Typeable hiding (cast)
import Data.Int
import Data.Word
import Data.UString (UString, u, unpack) -- plus Show and IsString instances
import Data.Time.Clock (UTCTime)
import Data.Time.Clock.POSIX
import Data.Time.Format () -- for Show and Read instances of UTCTime
import Data.List (find, findIndex)
import Data.Bits (shift, (.|.))
import qualified Data.ByteString as BS (ByteString, unpack, take)
import qualified Data.ByteString.Char8 as BSC (pack)
import qualified Crypto.Hash.MD5 as MD5 (hash)
import Numeric (readHex, showHex)
import Network.BSD (getHostName)
import System.IO.Unsafe (unsafePerformIO)
import Data.IORef
import Data.Maybe (maybeToList, mapMaybe)
import Control.Monad.Identity
import qualified Text.ParserCombinators.ReadP as R
import qualified Text.ParserCombinators.ReadPrec as R (lift, readS_to_Prec)
import Text.Read (Read(..))
getProcessID :: IO Int
-- ^ Get the current process id.
getProcessID = c_getpid
foreign import ccall unsafe "getpid"
c_getpid :: IO Int
roundTo :: (RealFrac a) => a -> a -> a
-- ^ Round second number to nearest multiple of first number. Eg: roundTo (1/1000) 0.12345 = 0.123
roundTo mult n = fromIntegral (round (n / mult)) * mult
showHexLen :: (Integral n) => Int -> n -> ShowS
-- ^ showHex of n padded with leading zeros if necessary to fill d digits
showHexLen d n = showString (replicate (d - sigDigits n) '0') . showHex n where
sigDigits 0 = 1
sigDigits n' = truncate (logBase 16 $ fromIntegral n') + 1
-- * Document
type Document = [Field]
-- ^ A BSON document is a list of 'Field's
look :: (Monad m) => Label -> Document -> m Value
-- ^ Value of field in document, or fail (Nothing) if field not found
look k doc = maybe notFound (return . value) (find ((k ==) . label) doc) where
notFound = fail $ "expected " ++ show k ++ " in " ++ show doc
lookup :: (Val v, Monad m) => Label -> Document -> m v
-- ^ Lookup value of field in document and cast to expected type. Fail (Nothing) if field not found or value not of expected type.
lookup k doc = cast =<< look k doc
valueAt :: Label -> Document -> Value
-- ^ Value of field in document. Error if missing.
valueAt k = runIdentity . look k
at :: forall v. (Val v) => Label -> Document -> v
-- ^ Typed value of field in document. Error if missing or wrong type.
at k doc = maybe err id (lookup k doc) where
err = error $ "expected (" ++ show k ++ " :: " ++ show (typeOf (undefined :: v)) ++ ") in " ++ show doc
include :: [Label] -> Document -> Document
-- ^ Only include fields of document in label list
include keys doc = mapMaybe (\k -> find ((k ==) . label) doc) keys
exclude :: [Label] -> Document -> Document
-- ^ Exclude fields from document in label list
exclude keys doc = filter (\(k := _) -> notElem k keys) doc
merge :: Document -> Document -> Document
-- ^ Merge documents with preference given to first one when both have the same label. I.e. for every (k := v) in first argument, if k exists in second argument then replace its value with v, otherwise add (k := v) to second argument.
merge es doc = foldl f doc es where
f doc (k := v) = case findIndex ((k ==) . label) doc of
Nothing -> doc ++ [k := v]
Just i -> let (x, _ : y) = splitAt i doc in x ++ [k := v] ++ y
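-- A small usage sketch (added for illustration; not part of the original
-- module): build a document with '=:' (defined below) and read typed values
-- back with 'at' and 'lookup'.
exampleDoc :: Document
exampleDoc = ["name" =: u "bson", "version" =: (1 :: Int)]

exampleVersion :: Int
exampleVersion = at "version" exampleDoc         -- 1

exampleMissing :: Maybe Int
exampleMissing = lookup "missing" exampleDoc     -- Nothing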
-- * Field
infix 0 :=, =:, =?
data Field = (:=) {label :: !Label, value :: Value} deriving (Typeable, Eq)
-- ^ A BSON field is a named value, where the name (label) is a string and the value is a BSON 'Value'
(=:) :: (Val v) => Label -> v -> Field
-- ^ Field with given label and typed value
k =: v = k := val v
(=?) :: (Val a) => Label -> Maybe a -> Document
-- ^ If Just value then return one field document, otherwise return empty document
k =? ma = maybeToList (fmap (k =:) ma)
instance Show Field where
showsPrec d (k := v) = showParen (d > 0) $ showString (' ' : unpack k) . showString ": " . showsPrec 1 v
type Label = UString
-- ^ The name of a BSON field
-- * Value
-- | A BSON value is one of the following types of values
data Value =
Float Double |
String UString |
Doc Document |
Array [Value] |
Bin Binary |
Fun Function |
Uuid UUID |
Md5 MD5 |
UserDef UserDefined |
ObjId ObjectId |
Bool Bool |
UTC UTCTime |
Null |
RegEx Regex |
JavaScr Javascript |
Sym Symbol |
Int32 Int32 |
Int64 Int64 |
Stamp MongoStamp |
MinMax MinMaxKey
deriving (Typeable, Eq)
instance Show Value where
showsPrec d v = fval (showsPrec d) v
fval :: (forall a . (Val a) => a -> b) -> Value -> b
-- ^ Apply generic function to typed value
fval f v = case v of
Float x -> f x
String x -> f x
Doc x -> f x
Array x -> f x
Bin x -> f x
Fun x -> f x
Uuid x -> f x
Md5 x -> f x
UserDef x -> f x
ObjId x -> f x
Bool x -> f x
UTC x -> f x
Null -> f (Nothing :: Maybe Value)
RegEx x -> f x
JavaScr x -> f x
Sym x -> f x
Int32 x -> f x
Int64 x -> f x
Stamp x -> f x
MinMax x -> f x
-- * Value conversion
cast :: forall m a. (Val a, Monad m) => Value -> m a
-- ^ Convert Value to expected type, or fail (Nothing) if not of that type
cast v = maybe notType return (cast' v) where
notType = fail $ "expected " ++ show (typeOf (undefined :: a)) ++ ": " ++ show v
typed :: (Val a) => Value -> a
-- ^ Convert Value to expected type. Error if not that type.
typed = runIdentity . cast
typeOfVal :: Value -> TypeRep
-- ^ Type of typed value
typeOfVal = fval typeOf
-- ** conversion class
-- | Haskell types of this class correspond to BSON value types
class (Typeable a, Show a, Eq a) => Val a where
val :: a -> Value
cast' :: Value -> Maybe a
instance Val Double where
val = Float
cast' (Float x) = Just x
cast' (Int32 x) = Just (fromIntegral x)
cast' (Int64 x) = Just (fromIntegral x)
cast' _ = Nothing
instance Val Float where
val = Float . realToFrac
cast' (Float x) = Just (realToFrac x)
cast' (Int32 x) = Just (fromIntegral x)
cast' (Int64 x) = Just (fromIntegral x)
cast' _ = Nothing
instance Val UString where
val = String
cast' (String x) = Just x
cast' (Sym (Symbol x)) = Just x
cast' _ = Nothing
instance Val String where
val = String . u
cast' (String x) = Just (unpack x)
cast' (Sym (Symbol x)) = Just (unpack x)
cast' _ = Nothing
instance Val Document where
val = Doc
cast' (Doc x) = Just x
cast' _ = Nothing
instance Val [Value] where
val = Array
cast' (Array x) = Just x
cast' _ = Nothing
instance (Val a) => Val [a] where
val = Array . map val
cast' (Array x) = mapM cast x
cast' _ = Nothing
instance Val Binary where
val = Bin
cast' (Bin x) = Just x
cast' _ = Nothing
instance Val Function where
val = Fun
cast' (Fun x) = Just x
cast' _ = Nothing
instance Val UUID where
val = Uuid
cast' (Uuid x) = Just x
cast' _ = Nothing
instance Val MD5 where
val = Md5
cast' (Md5 x) = Just x
cast' _ = Nothing
instance Val UserDefined where
val = UserDef
cast' (UserDef x) = Just x
cast' _ = Nothing
instance Val ObjectId where
val = ObjId
cast' (ObjId x) = Just x
cast' _ = Nothing
instance Val Bool where
val = Bool
cast' (Bool x) = Just x
cast' _ = Nothing
instance Val UTCTime where
val = UTC . posixSecondsToUTCTime . roundTo (1/1000) . utcTimeToPOSIXSeconds
cast' (UTC x) = Just x
cast' _ = Nothing
instance Val POSIXTime where
val = UTC . posixSecondsToUTCTime . roundTo (1/1000)
cast' (UTC x) = Just (utcTimeToPOSIXSeconds x)
cast' _ = Nothing
instance Val (Maybe Value) where
val Nothing = Null
val (Just v) = v
cast' Null = Just Nothing
cast' v = Just (Just v)
instance (Val a) => Val (Maybe a) where
val Nothing = Null
val (Just a) = val a
cast' Null = Just Nothing
cast' v = fmap Just (cast' v)
instance Val Regex where
val = RegEx
cast' (RegEx x) = Just x
cast' _ = Nothing
instance Val Javascript where
val = JavaScr
cast' (JavaScr x) = Just x
cast' _ = Nothing
instance Val Symbol where
val = Sym
cast' (Sym x) = Just x
cast' (String x) = Just (Symbol x)
cast' _ = Nothing
instance Val Int32 where
val = Int32
cast' (Int32 x) = Just x
cast' (Int64 x) = fitInt x
cast' (Float x) = Just (round x)
cast' _ = Nothing
instance Val Int64 where
val = Int64
cast' (Int64 x) = Just x
cast' (Int32 x) = Just (fromIntegral x)
cast' (Float x) = Just (round x)
cast' _ = Nothing
instance Val Int where
val n = maybe (Int64 $ fromIntegral n) Int32 (fitInt n)
cast' (Int32 x) = Just (fromIntegral x)
cast' (Int64 x) = Just (fromEnum x)
cast' (Float x) = Just (round x)
cast' _ = Nothing
instance Val Integer where
val n = maybe (maybe err Int64 $ fitInt n) Int32 (fitInt n) where
err = error $ show n ++ " is too large for Bson Int Value"
cast' (Int32 x) = Just (fromIntegral x)
cast' (Int64 x) = Just (fromIntegral x)
cast' (Float x) = Just (round x)
cast' _ = Nothing
instance Val MongoStamp where
val = Stamp
cast' (Stamp x) = Just x
cast' _ = Nothing
instance Val MinMaxKey where
val = MinMax
cast' (MinMax x) = Just x
cast' _ = Nothing
fitInt :: forall n m. (Integral n, Integral m, Bounded m) => n -> Maybe m
-- ^ If number fits in type m then cast to m, otherwise Nothing
fitInt n = if fromIntegral (minBound :: m) <= n && n <= fromIntegral (maxBound :: m)
then Just (fromIntegral n)
else Nothing
-- * Haskell types corresponding to special Bson value types
-- ** Binary types
newtype Binary = Binary BS.ByteString deriving (Typeable, Show, Read, Eq)
newtype Function = Function BS.ByteString deriving (Typeable, Show, Read, Eq)
newtype UUID = UUID BS.ByteString deriving (Typeable, Show, Read, Eq)
newtype MD5 = MD5 BS.ByteString deriving (Typeable, Show, Read, Eq)
newtype UserDefined = UserDefined BS.ByteString deriving (Typeable, Show, Read, Eq)
-- ** Regex
data Regex = Regex UString UString deriving (Typeable, Show, Read, Eq)
-- ^ The first string is the regex pattern, the second is the regex options string. Options are identified by characters, which must be listed in alphabetical order. Valid options are *i* for case insensitive matching, *m* for multiline matching, *x* for verbose mode, *l* to make \\w, \\W, etc. locale dependent, *s* for dotall mode (\".\" matches everything), and *u* to make \\w, \\W, etc. match unicode.
-- ** Javascript
data Javascript = Javascript Document UString deriving (Typeable, Show, Eq)
-- ^ Javascript code with possibly empty environment mapping variables to values that the code may reference
-- ** Symbol
newtype Symbol = Symbol UString deriving (Typeable, Show, Read, Eq)
-- ** MongoStamp
newtype MongoStamp = MongoStamp Int64 deriving (Typeable, Show, Read, Eq)
-- ** MinMax
data MinMaxKey = MinKey | MaxKey deriving (Typeable, Show, Read, Eq)
-- ** ObjectId
data ObjectId = Oid Word32 Word64 deriving (Typeable, Eq, Ord)
-- ^ A BSON ObjectID is a 12-byte value consisting of a 4-byte timestamp (seconds since epoch), a 3-byte machine id, a 2-byte process id, and a 3-byte counter. Note that the timestamp and counter fields must be stored big endian unlike the rest of BSON. This is because they are compared byte-by-byte and we want to ensure a mostly increasing order.
instance Show ObjectId where
showsPrec _ (Oid x y) = showHexLen 8 x . showHexLen 16 y
instance Read ObjectId where
readPrec = do
[(x, "")] <- readHex <$> R.lift (R.count 8 R.get)
y <- R.readS_to_Prec $ const readHex
return (Oid x y)
timestamp :: ObjectId -> UTCTime
-- ^ Time when objectId was created
timestamp (Oid time _) = posixSecondsToUTCTime (fromIntegral time)
genObjectId :: IO ObjectId
-- ^ Create a fresh ObjectId
genObjectId = do
time <- truncate <$> getPOSIXTime
pid <- fromIntegral <$> getProcessID
inc <- nextCount
return $ Oid time (composite machineId pid inc)
where
machineId :: Word24
machineId = unsafePerformIO (makeWord24 . BS.unpack . BS.take 3 . MD5.hash . BSC.pack <$> getHostName)
{-# NOINLINE machineId #-}
counter :: IORef Word24
counter = unsafePerformIO (newIORef 0)
{-# NOINLINE counter #-}
nextCount :: IO Word24
nextCount = atomicModifyIORef counter $ \n -> (wrap24 (n + 1), n)
composite :: Word24 -> Word16 -> Word24 -> Word64
composite mid pid inc = fromIntegral mid `shift` 40 .|. fromIntegral pid `shift` 24 .|. fromIntegral inc
type Word24 = Word32
-- ^ low 3 bytes only, high byte must be zero
wrap24 :: Word24 -> Word24
wrap24 n = n `mod` 0x1000000
makeWord24 :: [Word8] -> Word24
-- ^ Put last 3 bytes into a Word24. Expected to be called on very short list
makeWord24 = foldl (\a b -> a `shift` 8 .|. fromIntegral b) 0
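-- Usage sketch (added for illustration; not part of the original module):
-- generate a fresh ObjectId and recover the time it was created.
exampleObjectId :: IO ()
exampleObjectId = do
    oid <- genObjectId
    putStrLn $ "oid: " ++ show oid ++ ", created: " ++ show (timestamp oid)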
{- Authors: Tony Hannan <[email protected]>
Copyright 2010 10gen Inc.
Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at: http://www.apache.org/licenses/LICENSE-2.0. Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. -}
| mongodb/bson-haskell | Data/Bson.hs | apache-2.0 | 14,020 | 108 | 15 | 2,854 | 4,611 | 2,434 | 2,177 | 301 | 20 |
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE UndecidableInstances #-}
module Tersus.DataTypes.TError where
import Control.Exception.Base
import Data.Text
import Data.Typeable
import Prelude
import Tersus.DataTypes.TypeSynonyms
import Yesod.Handler
import GHC.Generics (Generic)
import Data.Aeson (FromJSON, ToJSON, decode, encode)
data TError =
--General errors
TheImpossibleHappened Text -- if we catch these types of errors, we ought to fix them in the code
-- Database errors
| RedisTError Text
-- From requests
| MissingParameter Text Text -- parameter name, parameter description
| TUserIdNotFound UserId
| TUserNicknameNotFound Username
| TAppIdNotFound ApplicationIdentifier
| TFileIdNotFound FileId
| TFilePathNotFound Path deriving (Eq,Show,Typeable,Generic)
instance ToJSON TError
instance Exception TError
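-- Added illustration (not part of the original module): with the generic
-- 'ToJSON' instance above, errors can be serialised directly to JSON.
exampleErrorJSON :: String
exampleErrorJSON = show (encode (MissingParameter (pack "user") (pack "the user identifier")))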
| kmels/tersus | Tersus/DataTypes/TError.hs | bsd-2-clause | 867 | 0 | 6 | 147 | 152 | 92 | 60 | 22 | 0 |
{-# LANGUAGE BangPatterns, PatternGuards #-}
module Data.BTree
( -- * Types
BTree
-- * Creation
, empty
, singleton
, fromList
-- * Queries
, size
, lookup
, minimumKey
-- * Insertion
, insert
, insertWith
-- * Debugging
, showBTree
) where
import Prelude hiding (lookup)
-- import qualified Prelude as P
import Data.List (foldl')
import Data.BTree.Array.Search
import Data.BTree.Internal
import qualified Data.BTree.Array as A
import qualified Data.BTree.Array.Util as A
-- | Create an empty 'BTree'
empty :: Ord k => BTree k v
empty = Leaf 0 A.empty A.empty
{-# INLINE empty #-}
-- | Create a 'BTree' holding a single element
singleton :: Ord k => k -> v -> BTree k v
singleton k v = Leaf 1 (A.singleton k) (A.singleton v)
{-# INLINE singleton #-}
-- | Create a 'BTree' from an associative list
fromList :: Ord k => [(k, v)] -> BTree k v
fromList = foldl' insert' empty
where
insert' t (k, v) = insert k v t
-- | Find the number of values in the 'BTree'
size :: BTree k v -> Int
size (Leaf s _ _) = s
size (Node _ tv _ _ ) = tv
-- | Find an element in the 'BTree'
lookup :: Ord k => k -> BTree k v -> Maybe v
lookup k = lookup'
where
lookup' (Leaf s ks vs) = fmap (A.unsafeIndex vs) (search s k ks)
lookup' (Node s _ ks cs) = searchWith found notFound s k ks
where
found i = lookup' (A.unsafeIndex cs (i + 1))
{-# INLINE found #-}
notFound i = lookup' (A.unsafeIndex cs i)
{-# INLINE notFound #-}
{-# INLINE lookup #-}
-- | Find the minimum key in a 'BTree'
minimumKey :: Ord k => BTree k v -> k
minimumKey (Leaf _ ks _) = A.unsafeIndex ks 0
minimumKey (Node _ _ _ cs) = minimumKey (A.unsafeIndex cs 0)
-- | Signals for insertion
data Insert k v = Ok !(BTree k v)
| Split !(BTree k v) !(BTree k v)
-- | Insert an element into the 'BTree'
insert :: Ord k => k -> v -> BTree k v -> BTree k v
insert = insertWith const
{-# INLINE insert #-}
-- | Insert an element into the 'BTree'
insertWith :: Ord k => (v -> v -> v) -> k -> v -> BTree k v -> BTree k v
insertWith f k v btree =
-- Insertion in the root
case insert' btree of
Ok btree' -> btree'
-- Split the root
Split l r ->
let !s = 1
!tv = size l + size r
!ks = A.singleton (minimumKey r)
!cs = A.pair l r
in Node s tv ks cs
where
-- Insertion in a leaf node
insert' (Leaf s ks vs) = searchWith found notFound s k ks
where
-- Overwrite the value
found i =
let ov = A.unsafeIndex vs i -- Do not force ov!
in Ok $ Leaf s ks (A.unsafePut s i (f ov v) vs)
-- Insert the value
notFound i
-- We have enough place, so just insert it
| s + 1 <= maxNodeSize = Ok $ Leaf
(s + 1) (A.unsafeInsert s i k ks) (A.unsafeInsert s i v vs)
-- We need to split this leaf and insert left
| i < s' =
let lks = A.unsafeInsertIn 0 s' i k ks
lvs = A.unsafeInsertIn 0 s' i v vs
rks = A.unsafeCopyRange s' s' ks
rvs = A.unsafeCopyRange s' s' vs
l = Leaf (s' + 1) lks lvs
r = Leaf s' rks rvs
in Split l r
-- We need to split this leaf and insert right
| otherwise =
let lks = A.unsafeCopyRange 0 s' ks
lvs = A.unsafeCopyRange 0 s' vs
rks = A.unsafeInsertIn s' s' (i - s') k ks
rvs = A.unsafeInsertIn s' s' (i - s') v vs
l = Leaf s' lks lvs
r = Leaf (s' + 1) rks rvs
in Split l r
where
s' = s `div` 2
-- Insertion in a parent node
insert' (Node s tv ks cs) = searchWith found notFound s k ks
where
      -- Found: we continue in the right child. We also know the size
-- cannot change (since no new key is added).
found i = case insert' (A.unsafeIndex cs (i + 1)) of
Ok c' -> Ok $ Node s tv ks (A.unsafePut (s + 1) (i + 1) c' cs)
_ -> error "Data.BTree.insert: internal error!"
-- Not found: left child. Now, it is possible that we have to split our
-- node in order to balance the tree
-- TODO: update size!
notFound i = case insert' (A.unsafeIndex cs i) of
Ok c' -> Ok $ Node s tv ks (A.unsafePut (s + 1) i c' cs)
Split l r
-- We're still good
| s + 1 <= maxNodeSize ->
let -- Key to copy
!k' = minimumKey r
!ks' = A.unsafeInsert s i k' ks
!cs' = A.unsafePutPair (s + 1) i l r cs
in Ok $ Node (s + 1) tv ks' cs'
-- We need to split this node. This should not happen often.
-- TODO: This implementation can be written using at least one
-- less copy.
| otherwise ->
let -- Create a "too large" node
!k' = minimumKey r
!ks' = A.unsafeInsert s i k' ks
!cs' = A.unsafePutPair (s + 1) i l r cs
-- Currently: number of keys: s + 1, and s + 2 children
-- s + 1 is odd, so we can drop the key in the middle
!s' = s `div` 2
-- Select the left part
!lks = A.unsafeCopyRange 0 s' ks'
!lcs = A.unsafeCopyRange 0 (s' + 1) cs'
l' = Node s' tv lks lcs
-- Select the right part
!rks = A.unsafeCopyRange (s' + 1) s' ks'
!rcs = A.unsafeCopyRange (s' + 1) (s' + 1) cs'
r' = Node s' tv rks rcs
in Split l' r'
{-# INLINE insertWith #-}
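-- A small usage sketch (added for illustration; not part of the original
-- module): build a tree from an association list, insert another key and
-- look one up.
exampleTree :: BTree Int String
exampleTree = insert 4 "four" (fromList [(1, "one"), (2, "two"), (3, "three")])

exampleLookup :: Maybe String
exampleLookup = lookup 2 exampleTree   -- Just "two"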
| jaspervdj/b-tree | src/Data/BTree.hs | bsd-3-clause | 6,073 | 3 | 17 | 2,443 | 1,748 | 890 | 858 | 115 | 5 |
module HM.Normal where
import HM.Types
import HM.Monad
import Data.IORef
import Data.List( (\\) )
import Debug.Trace
import Text.PrettyPrint.HughesPJ
typecheck :: Term -> Tc Sigma
typecheck e = do { ty <- inferSigma e
; zonkType ty }
data Expected a = Infer (IORef a) | Check a
checkRho :: Term -> Rho -> Tc ()
checkRho expr ty = tcRho expr (Check ty)
inferRho :: Term -> Tc Rho
inferRho expr
= do { ref <- newTcRef (error "inferRho: empty result")
; tcRho expr (Infer ref)
; readTcRef ref }
tcRho :: Term -> Expected Rho -> Tc ()
tcRho (Lit _) exp_ty = instSigma intType exp_ty
tcRho (Var v) exp_ty
= do { v_sigma <- lookupVar v
; instSigma v_sigma exp_ty
}
tcRho (App fun arg) exp_ty
= do { fun_ty <- inferRho fun
; (arg_ty, res_ty) <- unifyFun fun_ty
; checkSigma arg arg_ty
; instSigma res_ty exp_ty
}
tcRho (Lam var body) (Check exp_ty)
= do { (var_ty, body_ty) <- unifyFun exp_ty
; extendVarEnv var var_ty (checkRho body body_ty)
}
tcRho (Lam var body) (Infer ref)
= do { var_ty <- newTyVarTy
; body_ty <- extendVarEnv var var_ty (inferRho body)
; writeTcRef ref (var_ty --> body_ty)
}
tcRho (Let var rhs body) exp_ty
= do { var_ty <- inferSigma rhs
; traceM $ ("tcRho Let var_ty = " ++ show var_ty)
; extendVarEnv var var_ty (tcRho body exp_ty)
}
tcRho (Ann body ann_ty) exp_ty
= do { checkSigma body ann_ty
; instSigma ann_ty exp_ty
}
tcRho (ALam var var_ty body) exp_ty = fail "annotated lambda is not supported in hindley milner"
inferSigma :: Term -> Tc Sigma
inferSigma e
= do { exp_ty <- inferRho e
; traceM $ ("inferRho result = " ++ show exp_ty)
; env_tys <- getEnvTypes
; env_tvs <- getMetaTyVars env_tys
; res_tvs <- getMetaTyVars [exp_ty]
; let forall_tvs = res_tvs \\ env_tvs
; quantify forall_tvs exp_ty }
checkSigma :: Term -> Sigma -> Tc ()
checkSigma expr sigma
= do { (skol_tvs, rho) <- shallowskol sigma
; checkRho expr rho
; env_tys <- getEnvTypes
; esc_tvs <- getFreeTyVars (sigma : env_tys)
; let bad_tvs = filter (`elem` esc_tvs) skol_tvs
; check (null bad_tvs) (text "Type not polymorphic enough")
}
instSigma :: Sigma -> Expected Rho -> Tc ()
instSigma t1 (Check t2) = unify t1 t2
instSigma t1 (Infer r) = do { t1' <- instantiate t1
; writeTcRef r t1'
}
| MnO2/hindley-milner | src/HM/Normal.hs | bsd-3-clause | 2,533 | 0 | 11 | 746 | 915 | 459 | 456 | 64 | 1 |
import Data.Function
import Data.List
import Data.Monoid
import qualified Data.Set as S
import Data.Foldable.Strict
import qualified Data.Map.Strict as M
import Control.Lens
import MinIR.CorpusStats as CS
import MinIR.TermIndex as TI
import MinIR.OrderedIndex as OI
import MinIR.UnorderedIndex as UI
import MinIR.SequentialDependence as SD
documents :: [(Int, [Int])]
documents =
[ (100, [1,2,3,4])
, (101, [1,2,3,4])
, (102, [1,3])
, (103, [1,3,5])
] ++ map (\i->(i,[10..20])) [200..30000]
main = do
let idx = foldMap' (uncurry TI.fromTerms) documents
oidx = foldMap' (uncurry $ OI.fromTerms 8) documents
cstats = foldMap' (\(doc,terms)->CS.fromDocument doc (length terms)) documents
Prelude.mapM_ (print . sortBy (flip compare `on` snd) . TI.termScore 0.1 cstats idx) [1..5]
print $ take 10 $ sortBy (flip compare `on` snd)
$ M.toList $ OI.termsScore 2 0.1 cstats oidx [1..8]
| bgamari/minir | Test.hs | bsd-3-clause | 940 | 0 | 15 | 184 | 402 | 234 | 168 | 26 | 1 |