| code (string, lengths 5 to 1.03M) | repo_name (string, lengths 5 to 90) | path (string, lengths 4 to 158) | license (string, 15 classes) | size (int64, 5 to 1.03M) | n_ast_errors (int64, 0 to 53.9k) | ast_max_depth (int64, 2 to 4.17k) | n_whitespaces (int64, 0 to 365k) | n_ast_nodes (int64, 3 to 317k) | n_ast_terminals (int64, 1 to 171k) | n_ast_nonterminals (int64, 1 to 146k) | loc (int64, -1 to 37.3k) | cycloplexity (int64, -1 to 1.31k) |
---|---|---|---|---|---|---|---|---|---|---|---|---|
import Data.Tree (Tree(..))
import Data.Char (ord)
mmap :: (a -> b) -> Maybe a -> Maybe b
mmap f (Just x) = Just $ f x
mmap _ Nothing = Nothing
tmap :: (a -> b) -> Tree a -> Tree b
tmap f (Node x sf) = Node (f x) $ map (tmap f) sf
toCode :: Functor f => f Char -> f Int
toCode = fmap ord
newtype Fun a = Fun { fun :: Integer -> a }
instance Functor Fun where
fmap f (Fun g) = Fun $ \x -> f $ g x
brandBag :: Integer -> Integer
brandBag = \t -> 50000 * (100 + t) `div` 100
iWantFourBag :: Integer -> Integer
iWantFourBag = fmap (* 4) brandBag
| YoshikuniJujo/funpaala | samples/30_functor/functor1.hs | bsd-3-clause | 550 | 0 | 9 | 134 | 299 | 157 | 142 | 16 | 1 |
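The functor sample above maps over Maybe, Tree, and a function wrapped in the Fun newtype. Below is a minimal usage sketch, not part of the original file; it assumes the definitions from functor1.hs are in scope (i.e. it would be appended to that module):
main :: IO ()
main = do
  print $ mmap (+ 1) (Just 2)                 -- Just 3
  print $ rootLabel $ tmap (* 2) (Node 3 [])  -- 6
  print $ toCode (Just 'a')                   -- Just 97
  print $ fun (fmap show (Fun (* 3))) 7       -- "21", via the Fun instance above
  print $ iWantFourBag 10                     -- 220000, via the (->) Integer functor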
-- |
module Libtorrent.SessionSpec (spec) where
import Test.Hspec
import Libtorrent.Session
spec :: Spec
spec = do
describe "Session" $ do
it "create new session" $ do
newSession >>= (`shouldSatisfy` const True)
| eryx67/haskell-libtorrent | test/Libtorrent/SessionSpec.hs | bsd-3-clause | 227 | 0 | 15 | 46 | 68 | 37 | 31 | 8 | 1 |
{-# LANGUAGE BangPatterns #-}
module Y2017.Day03 (answer1, answer2) where
import Control.Monad.State.Strict
import qualified Data.HashMap.Strict as Map
import Control.Monad.Loops
answer1, answer2 :: IO ()
answer1 = print $ dist $ coord input
answer2 = print $ evalState
(iterateUntil (>= input) walk)
(1, Map.singleton (0, 0) 1)
coord :: Int -> (Int, Int)
coord n =
let k = findClosestOddSqrt n
k' = k `div` 2
a = n - k * k
(q, r) = a `quotRem` (k+1)
(x, y) | a == 0 = (k', -k')
| q == 0 = (k'+1, r - k' - 1)
| q == 1 = (-r + k' + 1, k'+1)
| q == 2 = (-k'-1, - r + k' + 1)
| q == 3 = (r - k' - 1, -k' - 1)
| otherwise = error "non exhaustive case"
in (x, y)
dist (a, b) = abs a + abs b
-- find the closest odd number 2k+1 such that (2k+1)^2 < n <= (2k+3)^2
findClosestOddSqrt :: Int -> Int
findClosestOddSqrt n =
let k = truncate (sqrt $ fromIntegral n) in if odd k then k else k-1
walk :: State (Int, Map.HashMap (Int, Int) Int) Int
walk = do
(n, vs) <- get
let (x, y) = coord n
let neighbors = [(x-a, y-b) | a <- [-1,0,1], b <- [-1,0,1]]
let v = sum $ map (\k -> Map.lookupDefault 0 k vs) neighbors
let n' = n + 1
let !vs' = Map.insert (x,y) v vs
put (n', vs')
pure v
input = 265149
| geekingfrog/advent-of-code | src/Y2017/Day03.hs | bsd-3-clause | 1,359 | 0 | 15 | 432 | 684 | 365 | 319 | 38 | 2 |
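For reference, the spiral-coordinate logic above reproduces the well-known Day 3 distances: square 1 is 0 steps from the center, square 12 is 3, square 23 is 2, and square 1024 is 31. A minimal spot check, not part of the original file; it assumes `coord` and `dist` are in scope, which would require exporting them from Y2017.Day03:
spotChecks :: Bool
spotChecks = and
  [ dist (coord 1)    == 0
  , dist (coord 12)   == 3
  , dist (coord 23)   == 2
  , dist (coord 1024) == 31
  ]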
-- | Data constructors (Abstract Syntax) and pretty printer for strict While.
module Language.LoopGotoWhile.While.StrictAS
( Stat (..)
, Op (..)
, Program
, Const
, Index
, prettyPrint
) where
import Data.List (intercalate)
type Program = Stat
type Index = Integer
type Const = Integer
data Op = Plus
| Minus
deriving Eq
data Stat
= Assign Index Index Op Const
| While Index Stat
| Seq [Stat]
deriving Eq
-- | Return a standard string representation of a strict While AST.
prettyPrint :: Program -> String
prettyPrint = prettyPrint' 0
where prettyPrint' indentSize (Assign i j op c) =
indent indentSize ++ "x" ++ show i ++ " := " ++
"x" ++ show j ++ " " ++ show op ++ " " ++ show c
prettyPrint' indentSize (While i stat) =
indent indentSize ++ "WHILE x" ++ show i ++ " != 0 DO\n" ++
prettyPrint' (indentSize + tabSize) stat ++ "\n" ++
indent indentSize ++ "END"
prettyPrint' indentSize (Seq stats) =
intercalate ";\n" . map (prettyPrint' indentSize) $ stats
indent size = replicate size ' '
tabSize = 2
instance Show Stat where
show = prettyPrint
instance Show Op where
show Plus = "+"
show Minus = "-"
| eugenkiss/loopgotowhile | src/Language/LoopGotoWhile/While/StrictAS.hs | bsd-3-clause | 1,322 | 0 | 16 | 421 | 362 | 193 | 169 | 37 | 3 |
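A small usage sketch for the pretty printer above, not part of the original module; it assumes the exported `Stat`, `Op`, and `prettyPrint`:
demoWhile :: IO ()
demoWhile = putStrLn $ prettyPrint $
  While 0 (Seq [ Assign 0 0 Plus 1
               , Assign 1 1 Minus 2
               ])
-- Renders as:
-- WHILE x0 != 0 DO
--   x0 := x0 + 1;
--   x1 := x1 - 2
-- END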
-- | Demonstrates the ASCII import
module Main where
import EFA.IO.ASCIIImport (modelicaASCIIImport)
import EFA.Signal.Record (SignalRecord)
main :: IO ()
main = do
rec <- modelicaASCIIImport "test.asc"
print (rec :: SignalRecord [] Double)
| energyflowanalysis/efa-2.1 | demo/asciiImport/Main.hs | bsd-3-clause | 248 | 1 | 11 | 38 | 73 | 39 | 34 | 7 | 1 |
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE OverloadedStrings #-}
{-# OPTIONS_GHC -fno-warn-missing-fields #-}
{-# OPTIONS_GHC -fno-warn-missing-signatures #-}
{-# OPTIONS_GHC -fno-warn-name-shadowing #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
{-# OPTIONS_GHC -fno-warn-unused-matches #-}
-----------------------------------------------------------------
-- Autogenerated by Thrift Compiler (0.9.3) --
-- --
-- DO NOT EDIT UNLESS YOU ARE SURE YOU KNOW WHAT YOU ARE DOING --
-----------------------------------------------------------------
module Hadoopfs_Consts where
import Prelude (($), (.), (>>=), (==), (++))
import qualified Prelude as P
import qualified Control.Exception as X
import qualified Control.Monad as M ( liftM, ap, when )
import Data.Functor ( (<$>) )
import qualified Data.ByteString.Lazy as LBS
import qualified Data.Hashable as H
import qualified Data.Int as I
import qualified Data.Maybe as M (catMaybes)
import qualified Data.Text.Lazy.Encoding as E ( decodeUtf8, encodeUtf8 )
import qualified Data.Text.Lazy as LT
import qualified GHC.Generics as G (Generic)
import qualified Data.Typeable as TY ( Typeable )
import qualified Data.HashMap.Strict as Map
import qualified Data.HashSet as Set
import qualified Data.Vector as Vector
import qualified Test.QuickCheck.Arbitrary as QC ( Arbitrary(..) )
import qualified Test.QuickCheck as QC ( elements )
import qualified Thrift as T
import qualified Thrift.Types as T
import qualified Thrift.Arbitraries as T
import Hadoopfs_Types
| michaxm/haskell-hdfs-thrift-client | src-gen-thrift/Hadoopfs_Consts.hs | bsd-3-clause | 1,627 | 0 | 6 | 276 | 264 | 197 | 67 | 31 | 0 |
{-# LANGUAGE NamedFieldPuns #-}
module Yesod.CoreBot.Bliki.Resources.Data where
import Yesod.CoreBot.Bliki.Prelude
import Yesod.CoreBot.Bliki.Resources.Base
import Yesod.CoreBot.Bliki.Cache.UpdateHTML
import Yesod.CoreBot.Bliki.Config
import Yesod.CoreBot.Bliki.DB
import Yesod.CoreBot.Bliki.Store
import Control.Monad.Reader hiding ( lift )
import Control.Monad.State.Strict hiding ( lift )
import qualified Data.FileStore as FileStore
import Data.Map ( Map )
import qualified Data.Map as Map
import qualified Data.Text as T
import Data.Time.Clock
import Data.Time.Clock.POSIX
import System.Directory ( createDirectory
, doesDirectoryExist
, removeDirectoryRecursive
)
-- XXX: should be a RWST
type DataM master a = StateT DB ( StateT Store (ReaderT ( Config master ) IO) ) a
process_revisions :: Yesod master => ( Config master, Store ) -> IORef DB -> [ Revision ] -> IO ()
process_revisions ( config, store ) db_ref rs = do
db <- readIORef db_ref
let db_mod = execStateT (apply_revisions rs) db
db' <- runReaderT ( evalStateT db_mod store ) config
writeIORef db_ref db'
apply_revisions :: Yesod master => [ Revision ] -> DataM master ()
apply_revisions [] = do
return ()
apply_revisions [ r ] = do
let new_updates = revision_to_updates r
apply_updates new_updates
modify $ \db -> db { raw_history = r : raw_history db }
return ()
apply_revisions (r : r_prev : rs) = do
apply_revisions ( r_prev : rs )
let new_updates = revision_to_updates r
apply_updates new_updates
modify $ \db -> db { raw_history = r : raw_history db }
return ()
apply_updates :: Yesod master => [ DataUpdate ] -> DataM master ()
apply_updates [] = do
return ()
apply_updates ( u : us ) = do
apply_updates us
modify $ \db -> db { update_log = u : update_log db }
f u
where
f ( Wibble _ ) = do
return ()
f ( Tweet _ _ ) = do
return ()
f ( BlogAdded update_rev_ID blog_str ) = do
b <- add_bloggable $ UpdateBloggable blog_str update_rev_ID
build_blog_HTML ( prev_bloggable b ) update_rev_ID blog_str
f ( EntryAdded update_rev_ID update_entry_path ) = do
revs <- gets latest_revisions
let revs' = Map.insert update_entry_path update_rev_ID revs
modify $ \db -> db { latest_revisions = revs' }
b <- add_bloggable $ WikiBloggable update_entry_path update_rev_ID
lift $ build_node_HTML (prev_bloggable b) update_rev_ID update_entry_path
f ( EntryChanged update_rev_ID update_entry_path ) = do
revs <- gets latest_revisions
let revs' = Map.insert update_entry_path update_rev_ID revs
modify $ \db -> db { latest_revisions = revs' }
bs <- gets bloggables
b <- case bs of
( WikiBloggable blog_entry view_rev prev_bloggable : rest_bs )
| blog_entry == update_entry_path -> do
let b' = WikiBloggable blog_entry update_rev_ID prev_bloggable
modify $ \db -> db { bloggables = b' : rest_bs }
return b'
_ -> do
add_bloggable $ WikiBloggable update_entry_path update_rev_ID
lift $ build_node_HTML ( prev_bloggable b ) update_rev_ID update_entry_path
add_bloggable :: ( Maybe Bloggable -> Bloggable ) -> DataM master Bloggable
add_bloggable fb = do
bs <- gets bloggables
let b = case bs of
[] -> fb Nothing
b_prev : _ -> fb $ Just b_prev
modify $ \db -> db { bloggables = b : bs }
return b
-- XXX: Should be event driven but that'd be harder
update_thread :: Yesod master => ( Config master, Store ) -> IORef DB -> IO ()
update_thread ( config, store ) db_ref = do
-- XXX: Lower bound to FileStore.history is not exclusive
let inc_a_bit = addUTCTime (fromInteger 1)
prev_time_ref <- newIORef =<< return . inc_a_bit =<< head_time db_ref
let update_thread_ = do
prev_time <- readIORef prev_time_ref
putStrLn $ "probing for changes since " ++ show prev_time
rs <- FileStore.history ( filestore store )
[]
( TimeRange (Just prev_time)
Nothing
)
case null rs of
True -> return ()
False -> do
putStrLn "found updates"
process_revisions ( config, store ) db_ref rs
writeIORef prev_time_ref =<< return . inc_a_bit
=<< head_time db_ref
-- delay before probing for updates again
threadDelay $ 1000000 * probe_period config
forever update_thread_
mk_data :: Yesod master => Config master -> IO ( Data_ master )
mk_data config = do
-- clear memoization store
should_clear_memo_store <- doesDirectoryExist $ cache_dir config
when should_clear_memo_store $ removeDirectoryRecursive $ cache_dir config
createDirectory $ cache_dir config
-- build internal DB state
let filestore = FileStore.gitFileStore $ store_dir config
store = Store { filestore = filestore }
empty_db = DB [] [] Map.empty []
initial_history <- FileStore.history filestore [] (TimeRange Nothing Nothing)
let db_0_build = execStateT (apply_revisions initial_history) empty_db
db_ref <- newIORef =<< runReaderT (evalStateT db_0_build store) config
-- XXX: Only because store is not a pure value but a reference
the_ID <- forkIO $ update_thread ( config, store ) db_ref
return Data { config = config
, store = store
, update_thread_ID = the_ID
, db_ref = db_ref
}
node_HTML_content :: Yesod master => Data_ master -> DB -> FilePath -> Content
node_HTML_content src_data db node_path =
let Just rev_ID = Map.lookup node_path ( latest_revisions db )
out_path = node_HTML_path (config src_data) rev_ID node_path
in ContentFile out_path Nothing
blog_HTML_content :: Yesod master => Data_ master -> RevisionId -> Content
blog_HTML_content src_data rev_ID =
let out_path = blog_HTML_path (config src_data) rev_ID
in ContentFile out_path Nothing
getBlogR :: Yesod master => RevisionId -> GHandler ( Data_ master ) master [(ContentType, Content)]
getBlogR rev_ID = do
src_data <- getYesodSub
let out_HTML_content = blog_HTML_content src_data rev_ID
return [ ( typeHtml, out_HTML_content )
]
getLatestR :: Yesod master => GHandler ( Data_ master ) master [(ContentType, Content)]
getLatestR = do
src_data <- getYesodSub
db <- liftIO $ readIORef $ db_ref src_data
let latest = head $ bloggables db
case latest of
UpdateBloggable blog_str source_rev _ -> do
let out_HTML = blog_HTML_content src_data source_rev
return [ ( typeHtml, out_HTML )
, ( typePlain, toContent blog_str )
]
WikiBloggable blog_entry _ _ -> do
let markdown_path = node_markdown_path (config src_data) blog_entry
let out_HTML = node_HTML_content src_data db blog_entry
return [ ( typeHtml , out_HTML )
, ( typePlain, ContentFile markdown_path Nothing )
]
getUpdateLogR :: Yesod master => GHandler ( Data_ master ) master RepJson
getUpdateLogR = do
jsonToRepJson $ toJSON ()
getEntryRevR :: Yesod master
=> RevisionId
-> [ Text ]
-> GHandler ( Data_ master ) master [(ContentType, Content)]
getEntryRevR rev_ID entry_path_texts = do
src_data <- getYesodSub
let ( first_path : rest_paths ) = map T.unpack entry_path_texts
node_path = foldl (</>) first_path rest_paths
let p = node_HTML_path (config src_data) rev_ID node_path
let markdown_path = node_markdown_path (config src_data) node_path
return [ ( typeHtml , ContentFile p Nothing )
, ( typePlain, ContentFile markdown_path Nothing )
]
getEntryLatestR :: Yesod master => [ Text ] -> GHandler ( Data_ master ) master [(ContentType, Content)]
getEntryLatestR entry_path_texts = do
let ( first_path : rest_paths ) = map T.unpack entry_path_texts
node_path = foldl (</>) first_path rest_paths
src_data <- getYesodSub
db <- liftIO $ readIORef $ db_ref src_data
let x = Map.lookup node_path ( latest_revisions db )
case x of
Nothing -> do
liftIO $ putStrLn $ "no node at path " ++ show node_path
fail $ "no node at path " ++ show node_path
Just rev_ID -> do
liftIO $ putStrLn $ "latest rev of " ++ show node_path ++ " is " ++ show rev_ID
getEntryRevR rev_ID entry_path_texts
mkYesodSubDispatch "Data_ master" [] [parseRoutes|
/latest LatestR GET
/ UpdateLogR GET
/entry/*Texts EntryLatestR GET
/blog/#RevisionId BlogR GET
/rev/#RevisionId/*Texts EntryRevR GET
|]
| coreyoconnor/corebot-bliki | src/Yesod/CoreBot/Bliki/Resources/Data.hs | bsd-3-clause | 9,362 | 0 | 20 | 2,866 | 2,523 | 1,240 | 1,283 | -1 | -1 |
module Idris.REPLParser(parseCmd) where
import System.FilePath ((</>))
import Idris.Parser
import Idris.AbsSyntax
import Core.TT
import Text.ParserCombinators.Parsec
import Text.ParserCombinators.Parsec.Expr
import Text.ParserCombinators.Parsec.Language
import qualified Text.ParserCombinators.Parsec.Token as PTok
import Debug.Trace
import Data.List
import Data.List.Split(splitOn)
parseCmd i = runParser pCmd i "(input)"
cmd :: [String] -> IParser ()
cmd xs = do lchar ':'; docmd (sortBy (\x y -> compare (length y) (length x)) xs)
where docmd [] = fail "No such command"
docmd (x:xs) = try (discard (symbol x)) <|> docmd xs
pCmd :: IParser Command
pCmd = do spaces; try (do cmd ["q", "quit"]; eof; return Quit)
<|> try (do cmd ["h", "?", "help"]; eof; return Help)
<|> try (do cmd ["r", "reload"]; eof; return Reload)
<|> try (do cmd ["m", "module"]; f <- identifier; eof;
return (ModImport (toPath f)))
<|> try (do cmd ["e", "edit"]; eof; return Edit)
<|> try (do cmd ["exec", "execute"]; eof; return Execute)
<|> try (do cmd ["ttshell"]; eof; return TTShell)
<|> try (do cmd ["c", "compile"]; f <- identifier; eof; return (Compile ViaC f))
<|> try (do cmd ["jc", "newcompile"]; f <- identifier; eof; return (Compile ViaJava f))
<|> try (do cmd ["js", "javascript"]; f <- identifier; eof; return (Compile ViaJavaScript f))
<|> try (do cmd ["m", "metavars"]; eof; return Metavars)
<|> try (do cmd ["proofs"]; eof; return Proofs)
<|> try (do cmd ["p", "prove"]; n <- pName; eof; return (Prove n))
<|> try (do cmd ["a", "addproof"]; do n <- option Nothing (do x <- pName;
return (Just x))
eof; return (AddProof n))
<|> try (do cmd ["rmproof"]; n <- pName; eof; return (RmProof n))
<|> try (do cmd ["showproof"]; n <- pName; eof; return (ShowProof n))
<|> try (do cmd ["log"]; i <- natural; eof; return (LogLvl (fromIntegral i)))
<|> try (do cmd ["l", "load"]; f <- getInput; return (Load f))
<|> try (do cmd ["cd"]; f <- getInput; return (ChangeDirectory f))
<|> try (do cmd ["spec"]; whiteSpace; t <- pFullExpr defaultSyntax; return (Spec t))
<|> try (do cmd ["hnf"]; whiteSpace; t <- pFullExpr defaultSyntax; return (HNF t))
<|> try (do cmd ["doc"]; n <- pfName; eof; return (DocStr n))
<|> try (do cmd ["d", "def"]; many1 (char ' ') ; n <- pfName; eof; return (Defn n))
<|> try (do cmd ["total"]; do n <- pfName; eof; return (TotCheck n))
<|> try (do cmd ["t", "type"]; do whiteSpace; t <- pFullExpr defaultSyntax; return (Check t))
<|> try (do cmd ["u", "universes"]; eof; return Universes)
<|> try (do cmd ["di", "dbginfo"]; n <- pfName; eof; return (DebugInfo n))
<|> try (do cmd ["i", "info"]; n <- pfName; eof; return (Info n))
<|> try (do cmd ["miss", "missing"]; n <- pfName; eof; return (Missing n))
<|> try (do cmd ["dynamic"]; eof; return ListDynamic)
<|> try (do cmd ["dynamic"]; l <- getInput; return (DynamicLink l))
<|> try (do cmd ["set"]; o <-pOption; return (SetOpt o))
<|> try (do cmd ["unset"]; o <-pOption; return (UnsetOpt o))
<|> try (do cmd ["s", "search"]; whiteSpace; t <- pFullExpr defaultSyntax; return (Search t))
<|> try (do cmd ["x"]; whiteSpace; t <- pFullExpr defaultSyntax; return (ExecVal t))
<|> try (do cmd ["patt"]; whiteSpace; t <- pFullExpr defaultSyntax; return (Pattelab t))
<|> do whiteSpace; do eof; return NOP
<|> do t <- pFullExpr defaultSyntax; return (Eval t)
where toPath n = foldl1' (</>) $ splitOn "." n
pOption :: IParser Opt
pOption = do discard (symbol "errorcontext"); return ErrContext
<|> do discard (symbol "showimplicits"); return ShowImpl
| christiaanb/Idris-dev | src/Idris/REPLParser.hs | bsd-3-clause | 4,220 | 0 | 49 | 1,280 | 1,944 | 947 | 997 | 63 | 2 |
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE NamedFieldPuns #-}
{-# LANGUAGE PostfixOperators #-}
module Language.Epilog.IR.Procedure
( irProcedure
) where
--------------------------------------------------------------------------------
import Language.Epilog.AST.Procedure
import Language.Epilog.Common
import Language.Epilog.IR.Instruction
import Language.Epilog.IR.Monad
import Language.Epilog.IR.TAC hiding (TAC (Var))
import qualified Language.Epilog.IR.TAC as TAC (TAC (FloatVar, RefVar, Var))
import Language.Epilog.Position hiding (Position (Epilog))
import Language.Epilog.SymbolTable
import Language.Epilog.Type (Atom (..), Type (..), voidT)
--------------------------------------------------------------------------------
import Control.Lens (use, (.=), (<~))
--------------------------------------------------------------------------------
irProcedure :: Procedure -> IRMonad ()
irProcedure Procedure { procName, procPos, procType = _ :-> retType
, procParams, procDef, procStackSize } =
case procDef of
Nothing -> liftIO . putStrLn $ "Epilog native procedure `" <> procName <> "`"
Just (iblock, scope) -> do
enterScope
g <- use global
let smbs = (\(Right x) -> x) . goDownFirst . insertST scope . focus $ g
symbols .= smbs
retLabel <~ Just <$> newLabel ("return_" <> procName)
newLabel ("proc_" <> procName) >>= (#)
comment $ "Procedure at " <> showP procPos
addTAC $ Prolog procStackSize
forM_ procParams $
\Parameter { parName, parOffset, parSize, parRef, parType } -> do
parName' <- insertVar parName
addTAC $ (case parRef of
True -> TAC.RefVar
False -> case parType of
Basic {atom = EpFloat} -> TAC.FloatVar
_ -> TAC.Var) parName' (parOffset + 12) parSize
irIBlock iblock
use currentBlock >>= \case
Nothing -> pure ()
Just _ -> do
unless (retType == voidT) $ do
addTAC . Answer $ case retType of
Basic { atom } -> case atom of
EpInteger -> C (IC 0)
EpBoolean -> C (BC False)
EpFloat -> C (FC 0.0)
EpCharacter -> C (CC 0)
t -> internal $ "bad return type " <> show t
Pointer {} -> C (IC 0)
t -> internal $ "bad return type " <> show t
use retLabel >>= \case
Nothing -> internal "nowhere to return"
Just lbl -> terminate $ Br lbl
use retLabel >>= \case
Nothing -> internal "no return label"
Just lbl -> (lbl #)
comment $ "Epilog for procedure " <> procName
addTAC $ Epilog procStackSize
terminate Return
retLabel .= Nothing
closeModule procName
exitScope
irProcedure _ = pure ()
| adgalad/Epilog | src/Haskell/Language/Epilog/IR/Procedure.hs | bsd-3-clause | 3,029 | 1 | 32 | 979 | 812 | 423 | 389 | 64 | 13 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE NamedFieldPuns #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE PatternSynonyms #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE StrictData #-}
{-# LANGUAGE TupleSections #-}
{-# LANGUAGE TypeApplications #-}
-- | Entrypoint for symbolic analysis of Pact programs -- for checking
-- that functions obey properties and maintain invariants.
module Pact.Analyze.Check
( verifyModule
, renderVerifiedModule
, verifyCheck
, describeCheckFailure
, describeCheckResult
, describeParseFailure
, falsifyingModel
, showModel
, hasVerificationError
, CheckFailure(..)
, CheckFailureNoLoc(..)
, CheckSuccess(..)
, CheckResult
, ScopeError(..)
, ModuleChecks(..)
, SmtFailure(..)
, ParseFailure
, VerificationFailure(..)
, RenderedOutput(..)
-- Exported just for inclusion in haddocks:
, verifyFunctionProperty
, verifyFunctionInvariants
) where
import Control.Exception as E
import Control.Lens hiding (DefName)
import Control.Monad
import Control.Monad.Except
import Control.Monad.Morph (generalize, hoist)
import Control.Monad.Reader (runReaderT)
import Control.Monad.State.Strict (evalStateT)
import Data.Bifunctor (first)
import Data.Either (partitionEithers)
import Data.Foldable (foldl')
import qualified Data.HashMap.Strict as HM
import Data.List (isPrefixOf,nub)
import qualified Data.List as List
import Data.Map.Strict (Map)
import qualified Data.Map.Strict as Map
import Data.Maybe (mapMaybe)
import Data.SBV (Symbolic)
import qualified Data.SBV as SBV
import qualified Data.SBV.Control as SBV
import qualified Data.SBV.Internals as SBVI
import Data.Set (Set)
import qualified Data.Set as Set
import Data.Text (Text)
import qualified Data.Text as T
import Data.Traversable (for)
import Prelude hiding (exp)
import Pact.Typechecker (typecheckTopLevel)
import Pact.Types.Lang (pattern ColonExp, pattern CommaExp,
Def (..), DefType (..), Info,
dFunType, dMeta, mModel,
renderInfo, tDef, tInfo, tMeta,
_aName, _dMeta, _mModel, _tDef)
import Pact.Types.PactError
import Pact.Types.Pretty (renderCompactText)
import Pact.Types.Runtime (Exp, ModuleData (..), ModuleName,
Ref, Ref' (Ref),
Term (TConst, TDef, TSchema, TTable),
asString, getInfo, mdModule,
mdRefMap, tShow)
import qualified Pact.Types.Runtime as Pact
import Pact.Types.Term (DefName (..),
dDefType, moduleDefMeta,
moduleDefName, _Ref, _gGovernance)
import Pact.Types.Type (ftArgs, _ftArgs)
import Pact.Types.Typecheck (AST, Fun (FDefun, _fArgs, _fBody, _fInfo),
Named, Node,
TopLevel (TopConst, TopFun, TopTable),
UserType (..), Schema (_schFields, _schName),
runTC, tcFailures, toplevelInfo, DynEnv, mkTcState, renderTcFailure)
import qualified Pact.Types.Typecheck as TC
import Pact.Analyze.Alloc (runAlloc)
import Pact.Analyze.Errors
import Pact.Analyze.Eval hiding (invariants)
import Pact.Analyze.Model (allocArgs, allocModelTags,
allocStepChoices, saturateModel,
showModel)
import Pact.Analyze.Parse hiding (tableEnv)
import Pact.Analyze.Translate
import Pact.Analyze.Types
import Pact.Analyze.Util
newtype VerificationWarnings = VerificationWarnings [Text]
deriving (Eq, Show)
describeVerificationWarnings :: VerificationWarnings -> [Text]
describeVerificationWarnings (VerificationWarnings dups) = case dups of
[] -> []
_ -> ["Duplicated property definitions for " <>
T.intercalate ", " dups]
data CheckSuccess
= SatisfiedProperty (Model 'Concrete)
| ProvedTheorem
deriving (Eq, Show)
type ParseFailure = (Exp Info, String)
data SmtFailure
= Invalid (Model 'Concrete)
| Unsatisfiable
| Unknown SBV.SMTReasonUnknown
| SortMismatch String
| UnexpectedFailure SBV.SBVException
deriving Show
instance Eq SmtFailure where
Invalid m1 == Invalid m2 = m1 == m2
Unsatisfiable == Unsatisfiable = True
-- SMTReasonUnknown and SBVException don't provide instances of Eq, so we
-- always return 'False' in these cases.
_ == _ = False
data CheckFailureNoLoc
= NotAFunction Text
| TypecheckFailure (Set TC.Failure)
| TranslateFailure' TranslateFailureNoLoc
| AnalyzeFailure' AnalyzeFailureNoLoc
| SmtFailure SmtFailure
| QueryFailure SmtFailure
| VacuousProperty SmtFailure
deriving (Eq, Show)
data CheckFailure = CheckFailure
{ _checkFailureParsed :: Info
, _checkFailure :: CheckFailureNoLoc
} deriving (Eq, Show)
type CheckResult = Either CheckFailure CheckSuccess
data ScopeError
= ScopeParseFailure ParseFailure
| NotInScope Text
| ScopeInvalidDirectRef
deriving (Eq, Show)
describeScopeError :: ScopeError -> Text
describeScopeError = \case
ScopeParseFailure pf ->
describeParseFailure pf
NotInScope name ->
"Variable not in scope: " <> name
ScopeInvalidDirectRef ->
"Invalid Direct reference given to scope checker instead of Ref."
data ModuleChecks = ModuleChecks
{ propertyChecks :: HM.HashMap Text [CheckResult]
, stepChecks :: HM.HashMap (Text, Int) [CheckResult]
, invariantChecks :: HM.HashMap Text (TableMap [CheckResult])
, moduleWarnings :: VerificationWarnings
} deriving (Eq, Show)
-- | Does this 'ModuleChecks' have either a property or invariant failure?
-- Warnings don't count.
hasVerificationError :: ModuleChecks -> Bool
hasVerificationError (ModuleChecks propChecks stepChecks invChecks _)
= let errs = toListOf (traverse . traverse . _Left) propChecks ++
toListOf (traverse . traverse . _Left) stepChecks ++
toListOf (traverse . traverse . traverse . _Left) invChecks
in not (null errs)
data CheckEnv = CheckEnv
{ _tables :: [Table]
, _consts :: HM.HashMap Text EProp
, _propDefs :: HM.HashMap Text (DefinedProperty (Exp Info))
, _moduleData :: ModuleData Ref
, _caps :: [Capability]
, _moduleGov :: Governance
, _dynEnv :: DynEnv
}
-- | Essential data used to check a function (where function could actually be
-- a defun, defpact, or step).
--
-- Note: We don't include props, the function name, or its check type. Why? We
-- use this structure for invariant checks, which don't check any props. We
-- also use it for checking pact steps which are a different check type and
-- borrow the name of their enclosing pact.
data FunData = FunData
Info -- Location info (for error messages)
[Named Node] -- Arguments
[AST Node] -- Body
mkFunInfo :: Fun Node -> FunData
mkFunInfo = \case
FDefun{_fInfo, _fArgs, _fBody} -> FunData _fInfo _fArgs _fBody
_ -> error "invariant violation: mkFunInfo called on non-function"
data VerificationFailure
= ModuleParseFailure ParseFailure
| ModuleCheckFailure CheckFailure
| TypeTranslationFailure Text (Pact.Type TC.UserType)
| InvalidDirectReference
| ModuleSpecInSchemaPosition Pact.ModuleName
| ExpectedConcreteModule
| FailedConstTranslation String
| SchemalessTable Info
| ScopeErrors [ScopeError]
| NonDefTerm (Pact.Term (Ref' (Pact.Term Pact.Name)))
deriving Show
_describeCheckSuccess :: CheckSuccess -> Text
_describeCheckSuccess = \case
SatisfiedProperty model -> "Property satisfied with model:\n"
<> fst (showModel model)
ProvedTheorem -> "Property proven valid"
describeParseFailure :: ParseFailure -> Text
describeParseFailure (exp, info)
= T.pack (renderInfo (getInfo exp))
<> ": could not parse " <> renderCompactText exp <> ": " <> T.pack info
describeSmtFailure :: SmtFailure -> Text
describeSmtFailure = \case
Invalid model ->
let (desc,header) = showModel model
in case header of
Just h -> "Invalidating model found in " <> h <> "\n" <> desc
Nothing -> "Invalidating model found\n" <> desc
Unsatisfiable -> "This property is unsatisfiable"
Unknown reason -> "The solver returned 'unknown':\n" <> tShow reason
SortMismatch msg -> T.unlines
[ "The solver returned a sort mismatch:"
, T.pack msg
, "This may be the result of a bug in z3 versions 4.8.0 and earlier."
, "Specifically, before commit a37d05d54b9ca10d4c613a4bb3a980f1bb0c1c4a."
]
UnexpectedFailure smtE -> T.pack $ show smtE
describeUnknownFailure :: SBV.SMTReasonUnknown -> Text
describeUnknownFailure = \case
SBV.UnknownMemOut -> "SMT solver out of memory"
SBV.UnknownTimeOut -> "SMT solver timeout"
r -> "SMT solver error: " <> tShow r
describeQueryFailure :: SmtFailure -> Text
describeQueryFailure = \case
Invalid model -> "Invalid model failure:\n" <> fst (showModel model)
Unknown reason -> describeUnknownFailure reason
err@SortMismatch{} -> "(QueryFailure): " <> describeSmtFailure err
Unsatisfiable -> "Unsatisfiable query failure: please report this as a bug"
UnexpectedFailure smtE -> T.pack $ show smtE
describeVacuousProperty :: SmtFailure -> Text
describeVacuousProperty = \case
Invalid _ -> "Unexpected solver response during vacuous property check: please report this as a bug"
Unknown reason -> describeUnknownFailure reason
err@SortMismatch{} -> "Vacuous property check: " <> describeSmtFailure err
Unsatisfiable -> "Vacuous property encountered! There is no way for a transaction to succeed if this function is called from the top-level. Because all `property` expressions in Pact assume transaction success, in this case it would be possible to validate *any* `property`, even e.g. `false`."
UnexpectedFailure smtE -> T.pack $ show smtE
describeCheckFailure :: CheckFailure -> [RenderedOutput]
describeCheckFailure (CheckFailure info failure) = case failure of
TypecheckFailure fails -> map renderTcFailure $ Set.toList fails
NotAFunction name -> fatal $ "No function named " <> name
TranslateFailure' err -> pure $ setInf $ describeTranslateFailureNoLoc err
AnalyzeFailure' err -> fatal $ describeAnalyzeFailureNoLoc err
SmtFailure err -> fatal $ describeSmtFailure err
QueryFailure err -> fatal $ describeQueryFailure err
VacuousProperty err -> fatal $ describeVacuousProperty err
where
fatal = pure . setInf . renderFatal
setInf = set roInfo info
describeCheckResult :: CheckResult -> [RenderedOutput]
describeCheckResult = either describeCheckFailure (const [])
falsifyingModel :: CheckFailure -> Maybe (Model 'Concrete)
falsifyingModel (CheckFailure _ (SmtFailure (Invalid m))) = Just m
falsifyingModel _ = Nothing
translateToCheckFailure :: TranslateFailure -> CheckFailure
translateToCheckFailure (TranslateFailure info err)
= CheckFailure info (TranslateFailure' err)
translateToVerificationFailure :: TranslateFailure -> VerificationFailure
translateToVerificationFailure = ModuleCheckFailure . translateToCheckFailure
analyzeToCheckFailure :: AnalyzeFailure -> CheckFailure
analyzeToCheckFailure (AnalyzeFailure info err)
= CheckFailure info (AnalyzeFailure' err)
smtToCheckFailure :: Info -> SmtFailure -> CheckFailure
smtToCheckFailure info = CheckFailure info . SmtFailure
smtToQueryFailure :: Info -> SmtFailure -> CheckFailure
smtToQueryFailure info = CheckFailure info . QueryFailure
smtToVacuousProperty :: Info -> SmtFailure -> CheckFailure
smtToVacuousProperty info = CheckFailure info . VacuousProperty
resultQuery
:: Goal
-> Model 'Symbolic
-> ExceptT SmtFailure SBV.Query CheckSuccess
resultQuery goal model0 = do
satResult <- lift SBV.checkSat
case goal of
Validation ->
case satResult of
SBV.Sat -> throwError . Invalid =<< lift (saturateModel model0)
SBV.DSat _ -> throwError . Invalid =<< lift (saturateModel model0)
SBV.Unsat -> pure ProvedTheorem
SBV.Unk -> throwError . mkUnknown =<< lift SBV.getUnknownReason
Satisfaction ->
case satResult of
SBV.Sat -> SatisfiedProperty <$> lift (saturateModel model0)
SBV.DSat _ -> SatisfiedProperty <$> lift (saturateModel model0)
SBV.Unsat -> throwError Unsatisfiable
SBV.Unk -> throwError . mkUnknown =<< lift SBV.getUnknownReason
where mkUnknown = \case
SBV.UnknownOther explanation
| "Sort mismatch" `isPrefixOf` explanation
-> SortMismatch explanation
other -> Unknown other
-- -- Assumes sat mode. It might be a decent idea for us to introduce an indexed
-- -- type to denote which things assume certain modes.
-- checkConstraintVacuity :: ExceptT SmtFailure SBV.Query ()
-- checkConstraintVacuity = do
-- prePropRes <- lift $ SBV.checkSat
-- case prePropRes of
-- SBV.Sat -> pure ()
-- SBV.Unsat -> throwError VacuousConstraints
-- SBV.Unk -> throwError . Unknown =<< lift SBV.getUnknownReason
-- SBV also provides 'inNewAssertionStack', but in 'Query'
inNewAssertionStack
:: ExceptT a SBV.Query b
-> ExceptT a SBV.Query b
inNewAssertionStack act = do
push
result <- act `catchError` \e -> pop *> throwError e
pop
pure result
where
push = lift $ SBV.push 1
pop = lift $ SBV.pop 1
-- Produces args for analysis from model args
analysisArgs :: Map VarId (Located (Unmunged, TVal)) -> Map VarId AVal
analysisArgs = fmap (view (located._2._2))
-- | Solver timeout in millis. A timeout is usually a bad sign,
-- but larger values can always be tried here.
timeout :: Integer
timeout = 1000 -- one second
-- | Check that all invariants hold for a function (this is actually used for
-- defun, defpact, and step)
verifyFunctionInvariants
:: CheckEnv
-> FunData
-> Text
-> CheckableType
-> IO (Either CheckFailure (TableMap [CheckResult]))
verifyFunctionInvariants (CheckEnv tables _consts _pDefs moduleData caps gov _de)
(FunData funInfo pactArgs body) funName checkType = runExceptT $ do
let modName = moduleDefName $ _mdModule moduleData
-- ignoring warnings here as they will be collected in 'verifyFunctionProperty'
(args, stepChoices, tm, graph, _) <- hoist generalize $
withExcept translateToCheckFailure $ runTranslation modName funName
funInfo caps pactArgs body checkType
let invsMap = TableMap $ Map.fromList $
tables <&> \Table { _tableName, _tableInvariants } ->
( TableName (T.unpack _tableName)
, fmap (const ()) <$> _tableInvariants
)
-- Check to see if there are any invariants in this module. If there aren't
-- we can skip these checks.
case invsMap ^.. traverse . traverse of
[] -> pure $ invsMap & traverse .~ []
_ -> ExceptT $ catchingExceptions $ runSymbolic $ runExceptT $ do
lift $ SBV.setTimeOut timeout
modelArgs' <- lift $ runAlloc $ allocArgs args
stepChoices' <- lift $ runAlloc $ allocStepChoices stepChoices
tags <- lift $ runAlloc $ allocModelTags modelArgs'
(Located funInfo tm) graph
let rootPath = _egRootPath graph
resultsTable <- withExceptT analyzeToCheckFailure $
runInvariantAnalysis modName gov tables caps
(analysisArgs modelArgs') stepChoices' tm rootPath tags funInfo
-- Iterate through each invariant in a single query so we can reuse our
-- assertion stack.
ExceptT $ fmap Right $
SBV.query $
for2 resultsTable $ \(Located info
(AnalysisResult querySucceeds _ prop ksProvs)) -> do
let model = Model modelArgs' tags ksProvs graph
_ <- runExceptT $ inNewAssertionStack $ do
void $ lift $ SBV.constrain $ sNot $ successBool querySucceeds
withExceptT (smtToQueryFailure info) $
resultQuery Validation model
queryResult <- runExceptT $ inNewAssertionStack $ do
void $ lift $ SBV.constrain $ sNot prop
resultQuery Validation model
-- Either SmtFailure CheckSuccess -> CheckResult
pure $ case queryResult of
Left smtFailure -> Left $
CheckFailure info (SmtFailure smtFailure)
Right pass -> Right pass
where
config :: SBV.SMTConfig
config = SBV.z3 { SBVI.allowQuantifiedQueries = True }
-- Discharges impure 'SBVException's from sbv.
catchingExceptions
:: IO (Either CheckFailure b)
-> IO (Either CheckFailure b)
catchingExceptions act = act `E.catch` \(e :: SBV.SBVException) ->
pure $ Left $ CheckFailure funInfo $ SmtFailure $ UnexpectedFailure e
runSymbolic :: Symbolic a -> IO a
runSymbolic = SBV.runSMTWith config
-- | Check that a specific property holds for a function (this is actually used
-- for defun, defpact, and step)
verifyFunctionProperty
:: CheckEnv
-> FunData
-> Text
-> CheckableType
-> Located Check
-> IO [Either CheckFailure CheckSuccess]
verifyFunctionProperty (CheckEnv tables _consts _propDefs moduleData caps gov _de)
(FunData funInfo pactArgs body) funName checkType
(Located propInfo check) = fmap sequence' $ runExceptT $ do
let modName = moduleDefName (_mdModule moduleData)
(args, stepChoices, tm, graph, warnings) <- hoist generalize $
withExcept translateToCheckFailure $
runTranslation modName funName funInfo caps pactArgs body checkType
-- Set up the model and our query
let setupSmtProblem = do
lift $ SBV.setTimeOut timeout
modelArgs' <- lift $ runAlloc $ allocArgs args
stepChoices' <- lift $ runAlloc $ allocStepChoices stepChoices
tags <- lift $ runAlloc $ allocModelTags modelArgs'
(Located funInfo tm) graph
let rootPath = _egRootPath graph
ar@(AnalysisResult _querySucceeds _txSuccess _prop ksProvs)
<- withExceptT analyzeToCheckFailure $
runPropertyAnalysis modName gov check tables caps
(analysisArgs modelArgs') stepChoices' tm rootPath tags funInfo
let model = Model modelArgs' tags ksProvs graph
pure (ar, model)
-- First we check whether the query definitely succeeds. Queries don't
-- succeed if the (pure) property throws an error (e.g. division by 0 or
-- indexing to an invalid array position). If the query fails we bail.
_ <- ExceptT $ catchingExceptions $ runSymbolicSat $ runExceptT $ do
(AnalysisResult querySucceeds _txSucc _ _, model) <- setupSmtProblem
void $ lift $ SBV.output $ SBV.sNot $ successBool querySucceeds
hoist SBV.query $ do
withExceptT (smtToQueryFailure propInfo) $
resultQuery Validation model
-- If the end user is checking for the validity of a proposition while
-- assuming transaction success, throw an error if it is not possible for
-- the transaction to succeed.
--
-- Unfortunately we need a completely separate `query` here. We cannot
-- combine this with the query-success check by using `inNewAssertionStack`
-- because this breaks support for quantifiers in properties. e.g.:
--
-- (property
-- (forall (i:integer)
-- (when (and (>= i 0) (< i 5))
-- (= (at i result) a))))
--
case check of
PropertyHolds _ ->
void $ ExceptT $ catchingExceptions $ runSymbolicSat $ runExceptT $ do
(AnalysisResult _ txSuccess _ _, model) <- setupSmtProblem
void $ lift $ SBV.output txSuccess
hoist SBV.query $ do
withExceptT (smtToVacuousProperty propInfo) $
resultQuery Satisfaction model
_ ->
pure ()
ExceptT $ catchingExceptions $ runSymbolicGoal $ runExceptT $ do
(AnalysisResult _ _txSucc prop _, model) <- setupSmtProblem
void $ lift $ SBV.output prop
r <- hoist SBV.query $ do
withExceptT (smtToCheckFailure propInfo) $
resultQuery goal model
-- note here that it is important that the main result is first
-- for 'verifyCheck'/unit tests
return $ (Right r : map (Left . translateToCheckFailure) warnings)
where
goal :: Goal
goal = checkGoal check
sequence' (Left a) = [Left a]
sequence' (Right rs) = rs
config :: SBV.SMTConfig
config = SBV.z3 { SBVI.allowQuantifiedQueries = True }
-- Discharges impure 'SBVException's from sbv.
catchingExceptions
:: IO (Either CheckFailure b)
-> IO (Either CheckFailure b)
catchingExceptions act = act `E.catch` \(e :: SBV.SBVException) ->
pure $ Left $ smtToCheckFailure propInfo $ UnexpectedFailure e
-- Run a 'Symbolic' in sat mode
runSymbolicSat :: Symbolic a -> IO a
runSymbolicSat = SBV.runSMTWith config
-- Run a 'Symbolic' in the mode corresponding to our goal
runSymbolicGoal :: Symbolic a -> IO a
runSymbolicGoal = fmap fst
. SBVI.runSymbolic (SBVI.SMTMode SBVI.QueryExternal SBVI.ISetup
(goal == Satisfaction) config)
-- | Get the set of tables in the specified modules.
moduleTables
:: HM.HashMap ModuleName (ModuleData Ref)
-- ^ all loaded modules
-> ModuleRefs
-- ^ the refs of the module we're verifying
-> HM.HashMap Text EProp
-- ^ constants in the module
-> ExceptT VerificationFailure IO [Table]
moduleTables modules modRefs consts = do
-- All tables defined in this module, and imported by it. We're going to look
-- through these for their schemas, which we'll look through for invariants.
let tables = flip mapMaybe (modules ^@.. traversed . mdRefMap . itraversed) $
\case
(name, Ref (table@TTable {})) -> Just (name, table)
_ -> Nothing
schemas = modRefs ^. defschemas
for tables $ \(tabName, tab) -> do
(TopTable _info _name tableTy _meta, _tcState)
<- lift $ runTC 0 False $ typecheckTopLevel (Ref tab)
case tableTy of
Pact.TyUser (TC.UTModSpec (TC.ModSpec mn)) -> throwError $ ModuleSpecInSchemaPosition mn
Pact.TyUser schema@(TC.UTSchema (TC.Schema{_schName,_schFields})) -> do
VarEnv vidStart invEnv vidTys <- hoist generalize $
mkInvariantEnv schema
let schemaName = asString _schName
mkInvariant :: Exp Info -> Either String (Invariant 'TyBool)
mkInvariant = expToInvariant vidStart invEnv vidTys consts SBool
invariants <- case schemas ^? ix schemaName._Ref.tMeta.mModel of
-- no model = no invariants
Nothing -> pure []
Just model -> case normalizeListLit model of
Nothing -> throwError $ ModuleParseFailure
-- reconstruct an `Exp Info` for this list
( Pact.EList $ Pact.ListExp model Pact.Brackets $
schemas ^?! ix schemaName._Ref.tInfo
, "malformed list (inconsistent use of comma separators?)"
)
Just model' -> withExceptT ModuleParseFailure $ liftEither $ do
exps <- collectInvariants model'
for exps $ \meta ->
case mkInvariant meta of
Left err -> Left (meta, err)
Right good -> Right (Located (getInfo meta) good)
pure $ Table tabName schema invariants
-- If we don't have a user type, the type should be `TyAny` (`*`),
-- meaning the table has no schema. Refuse to verify the module.
_ -> throwError $ SchemalessTable $
HM.fromList tables ^?! ix tabName.tInfo
-- | Get the set of capabilities in this module. This is done by typechecking
-- every capability ref and converting to a 'Capability'.
moduleCapabilities
:: DynEnv -> [ModuleData Ref] -> ExceptT VerificationFailure IO [Capability]
moduleCapabilities de mds = fmap concat $ forM mds $ \md -> do
toplevels <- withExceptT ModuleCheckFailure $
traverse (ExceptT . typecheck de) (defcapRefs md)
hoist generalize $ traverse mkCap toplevels
where
defcapRefs md = toListOf
(mdRefMap.traverse.filtered
(\ref -> ref ^? _Ref.tDef.dDefType == Just Defcap))
md
mkCap :: TopLevel Node -> Except VerificationFailure Capability
mkCap toplevel = do
eSchema <- mkESchema <$> traverse (translateArgTy "argument") pactArgs
pure $ case eSchema of
ESchema schema -> Capability schema capName
where
(capName, pactArgs) = case toplevel of
TopFun FDefun{_fName,_fType,_fModule} _ ->
(mkCapName _fModule _fName, _ftArgs _fType)
_ ->
error "invariant violation: defcap toplevel must be a defun"
translateArgTy
:: Text
-> Pact.Arg UserType
-> Except VerificationFailure (Text, EType)
translateArgTy errNoun (Pact.Arg name ty _info) =
case maybeTranslateType ty of
Just ety -> pure (name, ety)
Nothing -> throwError $
TypeTranslationFailure ("couldn't translate " <> errNoun <> " type") ty
data PropertyScope
= Everywhere
-- ^ This property applies to the whole module
| Excluding (Set Text)
-- ^ This property applies to all but the named functions (and pacts)
| Including (Set Text)
-- ^ This property applies to only the named functions (and pacts)
deriving Show
data ModuleCheck = ModuleCheck
{ _moduleCheckType :: Prop 'TyBool -> Check
-- ^ The style of check to use ('PropertyHolds', 'SucceedsWhen', or
-- 'FailsWhen')
, _checkPropertyBody :: Exp Info
-- ^ The body of the property to check
, _moduleCheckScope :: PropertyScope
-- ^ Where does this property apply?
}
-- Does this (module-scoped) property apply to this function?
applicableCheck :: DefName -> ModuleCheck -> Bool
applicableCheck (DefName funName) (ModuleCheck _ _ propScope) =
case propScope of
Everywhere -> True
Excluding names -> funName `Set.notMember` names
Including names -> funName `Set.member` names
-- List literals are valid either with no commas or with commas interspersed
-- between each element. Remove all commas.
normalizeListLit :: [Exp i] -> Maybe [Exp i]
normalizeListLit lits = case lits of
_ : CommaExp : _ -> removeCommas lits
_ ->
let isComma = \case { CommaExp -> True; _ -> False }
in case List.find isComma lits of
Nothing -> Just lits
Just _comma -> Nothing
where
-- invariant: the input is the previous input *after a comma*, meaning the
-- empty list is not valid
removeCommas :: [Exp i] -> Maybe [Exp i]
removeCommas = \case
x : CommaExp : xs -> (x:) <$> removeCommas xs
[CommaExp] -> Nothing
[x] -> Just [x]
_ -> Nothing
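-- For illustration (this comment block is not part of the original source):
-- with `a` and `b` standing for any non-comma expressions,
--   normalizeListLit [a, CommaExp, b] == Just [a, b]
--   normalizeListLit [a, b]           == Just [a, b]
--   normalizeListLit [a, b, CommaExp] == Nothing    (inconsistent comma use)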
-- | Parse a property definition or property like
--
-- * '(defproperty foo (> 1 0))'
-- * '(defproperty foo (a:integer b:integer) (> a b))'
-- * '(property foo)'
parseModuleModelDecl
:: [Exp Info]
-> Either ParseFailure
[Either (Text, DefinedProperty (Exp Info)) ModuleCheck]
parseModuleModelDecl exps = traverse parseDecl exps where
parseDecl exp@(ParenList (EAtom' "defproperty" : rest)) = case rest of
[ EAtom' propname, ParenList args, body ] -> do
args' <- parseBindings (curry Right) args & _Left %~ (exp,)
pure $ Left (propname, DefinedProperty args' body)
[ EAtom' propname, body ] ->
pure $ Left (propname, DefinedProperty [] body)
_ -> Left (exp, "Invalid property definition")
parseDecl exp = do
(propTy, body, propScope) <- parsePropertyExp exp
pure $ Right $ ModuleCheck propTy body propScope
parseNames :: Exp Info -> Either ParseFailure (Set Text)
parseNames exp@(SquareList names) = case normalizeListLit names of
Just names' -> fmap Set.fromList $ traverse parseName names'
Nothing -> Left (exp, "expected a list of names")
parseNames exp = Left (exp, "expected a list of names")
parseName :: Exp Info -> Either ParseFailure Text
parseName (EAtom' name) = Right name
parseName exp = Left (exp, "expected a bare word name")
getPropTy = \case
"property" -> Just PropertyHolds
"succeeds-when" -> Just SucceedsWhen
"fails-when" -> Just FailsWhen
_ -> Nothing
parsePropertyExp
:: Exp Info
-> Either ParseFailure (Prop 'TyBool -> Check, Exp Info, PropertyScope)
parsePropertyExp exp = case exp of
ParenList (EAtom' propTyName : rest)
| Just propTy <- getPropTy propTyName
-> case rest of
[ exp' ]
-> pure (propTy, exp', Everywhere)
[ exp', BraceList [ EStrLiteral' "except", ColonExp, names ] ]
-> (propTy, exp',) . Excluding <$> parseNames names
[ exp', BraceList [ EStrLiteral' "only", ColonExp, names ] ]
-> (propTy, exp',) . Including <$> parseNames names
_ -> Left (exp, "malformed property definition")
_ -> Left (exp, "expected a set of property / defproperty")
-- | Organize the module's refs by type
moduleRefs :: ModuleData Ref -> ModuleRefs
moduleRefs (ModuleData _ refMap) = foldl' f noRefs (HM.toList refMap)
where
f accum (name, ref) = case ref of
Ref (TDef (Def{_dDefType, _dDefBody}) _) ->
case _dDefType of
Defun -> accum & defuns . at name ?~ ref
Defpact -> accum & defpacts . at name ?~ ref
Defcap -> accum
Ref TConst{} ->
accum & defconsts . at name ?~ ref
Ref TSchema{} ->
accum & defschemas . at name ?~ ref
_ ->
accum
noRefs = ModuleRefs HM.empty HM.empty HM.empty HM.empty
-- | Module-level property definitions and declarations
data ModelDecl = ModelDecl
{ _moduleDefProperties :: HM.HashMap Text (DefinedProperty (Exp Info))
, _moduleProperties :: [ModuleCheck]
}
-- | Get the model defined in this module
moduleModelDecl :: ModuleData Ref -> Either ParseFailure ModelDecl
moduleModelDecl ModuleData{..} = do
lst <- parseModuleModelDecl $ Pact._mModel $ moduleDefMeta _mdModule
let (propList, checkList) = partitionEithers lst
pure $ ModelDecl (HM.fromList propList) checkList
-- | The environment for variables at the beginning of execution
data VarEnv = VarEnv
{ _vidStart :: VarId
-- ^ The first 'VarId' the function can issue.
, _nameVids :: Map Text VarId
-- ^ A 'VarId' for each argument to the function. For steps these are the
-- variables bound by the enclosing @defpact@.
, _vidTys :: Map VarId EType
-- ^ The type of each variable in scope.
}
-- | Given a schema, returns an environment of canonical assignment of var ids
-- to each column, and an environment of types. The canonical ordering is
-- determined by the lexicographic order of variable names. Also see
-- 'varIdColumns'.
mkInvariantEnv :: UserType -> Except VerificationFailure VarEnv
mkInvariantEnv (TC.UTModSpec (TC.ModSpec mn)) = throwError $ ModuleSpecInSchemaPosition mn
mkInvariantEnv (TC.UTSchema TC.Schema{_schFields}) = do
tys <- Map.fromList . map (first (env Map.!)) <$>
traverse (translateArgTy "schema field's") _schFields
pure $ VarEnv vidStart env tys
where
-- Order variables lexicographically over their names when assigning
-- variable IDs.
env :: Map Text VarId
env = Map.fromList $ flip zip [0..] $ List.sort $ map Pact._aName _schFields
vidStart :: VarId
vidStart = VarId $ Map.size env
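-- For illustration (this comment is not part of the original source): with
-- example field names "balance" and "amount", the lexicographic assignment
-- above yields "amount" -> VarId 0, "balance" -> VarId 1, and vidStart = VarId 2.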
-- | Make an environment (binding result and args) from a function type.
makeFunctionEnv
:: Pact.FunType TC.UserType -> Except VerificationFailure VarEnv
makeFunctionEnv (Pact.FunType argTys resultTy) = do
let -- We use VID 0 for the result, the one for each argument variable.
-- Finally, we start the VID generator in the translation environment at
-- the next VID. envVidStart is the first VID that will be issued.
--
-- TODO: Ideally we wouldn't have any ad-hoc VID generation, but we're
-- not there yet.
envVidStart = VarId (length argTys + 1)
vids = [1..(envVidStart - 1)]
-- TODO(joel): this relies on generating the same unique ids as
-- @checkFunction@. We need to more carefully enforce this is true!
argTys' <- for argTys $ \(Pact.Arg name ty _info) ->
case maybeTranslateType ty of
Just ety -> pure (Unmunged name, ety)
Nothing -> throwError $
TypeTranslationFailure "couldn't translate argument type" ty
resultBinding <- case maybeTranslateType resultTy of
Just ety -> pure $ Binding 0 (Unmunged "result") (Munged "result") ety
Nothing -> throwError $
TypeTranslationFailure "couldn't translate result type" resultTy
-- NOTE: At the moment, we leave all variables except for the toplevel args
-- under analysis as the original munged/SSA'd variable names. And result,
-- which we introduce. We also rely on this assumption in Translate's
-- mkTranslateEnv.
let env :: [Binding]
env = resultBinding :
(zip vids argTys' <&> \(vid, (Unmunged nm, ty))
-> Binding vid (Unmunged nm) (Munged nm) ty)
nameVids :: Map Text VarId
nameVids = Map.fromList $ env <&> \(Binding vid (Unmunged nm) _ _)
-> (nm, vid)
vidTys :: Map VarId EType
vidTys = Map.fromList $ fmap (\(Binding vid _ _ ty) -> (vid, ty)) env
pure $ VarEnv envVidStart nameVids vidTys
mkTableEnv :: [Table] -> TableMap (ColumnMap EType)
mkTableEnv tables = TableMap $ Map.fromList $ foldr go [] tables
where
go Table { _tableName, _tableType } acc = case _tableType of
TC.UTModSpec{} -> acc
TC.UTSchema schema ->
let fields = _schFields schema
colMap = ColumnMap $ Map.fromList $ flip mapMaybe fields $
\(Pact.Arg argName ty _) ->
(ColumnName (T.unpack argName),) <$> maybeTranslateType ty
in (TableName (T.unpack _tableName), colMap):acc
-- | Get the set of checks for a step.
stepCheck
:: [Table]
-- ^ All tables defined in this module and imported by it
-> HM.HashMap Text EProp
-- ^ Constants defined in this module
-> HM.HashMap Text (DefinedProperty (Exp Info))
-- ^ Properties defined in this module
-> Pact.FunType TC.UserType
-- ^ The type of the pact this step is part of (we extract argument types
-- from this)
-> [Exp Info]
-- ^ The model
-> Except VerificationFailure (Either ParseFailure [Located Check])
stepCheck tables consts propDefs funTy model = do
VarEnv envVidStart nameVids vidTys <- makeFunctionEnv funTy
let getCheck = expToCheck (mkTableEnv tables) envVidStart nameVids vidTys
consts propDefs
checks <- withExcept ModuleParseFailure $ liftEither $ do
exps <- collectProperties model
for exps $ \(propTy, meta) -> case getCheck propTy meta of
Left err -> Left (meta, err)
Right good -> Right (Located (getInfo meta) good)
pure $ Right checks
-- | Get the set of checks for a function.
moduleFunCheck
:: [Table]
-- ^ All tables defined in this module and imported by it
-> [ModuleCheck]
-- ^ The set of properties that apply to all functions in the module
-> HM.HashMap Text EProp
-- ^ Constants defined in this module
-> HM.HashMap Text (DefinedProperty (Exp Info))
-- ^ Properties defined in this module
-> Pact.Term (Ref' (Pact.Term Pact.Name))
-- ^ The term under analysis
-> Pact.FunType TC.UserType
-- ^ The type of the term under analysis
-> Except VerificationFailure (Either ParseFailure [Located Check])
moduleFunCheck tables modCheckExps consts propDefs defTerm funTy = do
VarEnv envVidStart nameVids vidTys <- makeFunctionEnv funTy
checks <- case defTerm of
TDef def info ->
let model = _mModel (_dMeta def)
in case normalizeListLit model of
Nothing -> throwError $ ModuleParseFailure
-- reconstruct an `Exp Info` for this list
( Pact.EList (Pact.ListExp model Pact.Brackets info)
, "malformed list (inconsistent use of comma separators?)"
)
Just model' -> withExcept ModuleParseFailure $ liftEither $ do
exps <- collectProperties model'
let funName = _dDefName (_tDef defTerm)
applicableModuleChecks =
filter (applicableCheck funName) modCheckExps
<&> \(ModuleCheck ty prop _scope) -> (ty, prop)
for (applicableModuleChecks <> exps) $ \(propTy, meta) ->
case expToCheck (mkTableEnv tables) envVidStart nameVids vidTys
consts propDefs propTy meta of
Left err -> Left (meta, err)
Right good -> Right (Located (getInfo meta) good)
_ ->
throwError $ NonDefTerm defTerm
pure $ Right checks
-- | Remove the "property", "succeeds-when", or "fails-when" application from
-- every exp.
collectProperties
:: [Exp Info] -> Either ParseFailure [(Prop 'TyBool -> Check, Exp Info)]
collectProperties multiExp = for multiExp $ \case
ParenList [EAtom' "property", v] -> Right (PropertyHolds, v)
ParenList [EAtom' "succeeds-when", v] -> Right (SucceedsWhen, v)
ParenList [EAtom' "fails-when", v] -> Right (FailsWhen, v)
exp -> Left (exp, "expected an application of \"property\", \"succeeds-when\", or \"fails-when\"")
-- | Remove the "invariant" application from every exp
collectInvariants :: [Exp Info] -> Either ParseFailure [Exp Info]
collectInvariants multiExp = for multiExp $ \case
ParenList [EAtom' "invariant", v] -> Right v
exp -> Left (exp, "expected an application of \"invariant\"")
-- | Typecheck a 'Ref'. This is used to extract an @'AST' 'Node'@, which is
-- translated to either a term or property.
typecheck :: DynEnv -> Ref -> IO (Either CheckFailure (TopLevel Node))
typecheck de ref = do
(toplevel, tcState) <- TC.runTCState (mkTcState 0 False de) $ typecheckTopLevel ref
let failures = tcState ^. tcFailures
info = toplevelInfo toplevel
pure $ if Set.null failures
then Right toplevel
else Left $ CheckFailure info $ TypecheckFailure failures
-- | Extract constants by typechecking and translating to properties.
getConsts
:: DynEnv
-> HM.HashMap Text Ref
-> ExceptT VerificationFailure IO (HM.HashMap Text EProp)
getConsts de defconstRefs = do
(consts :: HM.HashMap Text (AST Node)) <- ifoldrM
(\name ref accum -> do
maybeConst <- lift $ typecheck de ref
case maybeConst of
Left checkFailure -> throwError $ ModuleCheckFailure checkFailure
Right (TopConst _info _qualifiedName _type val _doc)
-> pure $ accum & at name ?~ val
Right _
-> error "invariant failure: anything but a const is unexpected here"
)
HM.empty
defconstRefs
let constToProp :: ETerm -> Except VerificationFailure EProp
constToProp tm = case constantToProp tm of
Right prop -> pure prop
Left msg -> throwError $ FailedConstTranslation msg
translateNodeNoGraph'
= withExceptT translateToVerificationFailure . translateNodeNoGraph
hoist generalize $
traverseOf each (constToProp <=< translateNodeNoGraph') consts
-- | Get the set of property check results for steps. Note that we just check
-- properties of individual steps here. Invariants are checked at the
-- defpact level.
getStepChecks
:: CheckEnv
-> HM.HashMap Text Ref
-> ExceptT VerificationFailure IO (HM.HashMap (Text, Int) [CheckResult])
getStepChecks env@(CheckEnv tables consts propDefs _ _ _ de) defpactRefs = do
(steps :: HM.HashMap (Text, Int)
((AST Node, [Named Node], Info), Pact.FunType TC.UserType))
<- ifoldrM
(\name ref accum -> do
maybeDef <- lift $ typecheck de ref
case maybeDef of
Left checkFailure -> throwError $ ModuleCheckFailure checkFailure
Right (TopFun (FDefun info _ _ Defpact funType args steps _) _meta)
-> pure $ ifoldl
(\i stepAccum step ->
stepAccum & at (name, i) ?~ ((step, args, info), funType))
accum
steps
Right _ -> error
"invariant failure: anything but a function is unexpected here"
)
HM.empty
defpactRefs
(stepChecks :: HM.HashMap (Text, Int)
((AST Node, [Named Node], Info), Either ParseFailure [Located Check]))
<- hoist generalize $ for steps $ \((step, args, info), pactType) ->
case step of
TC.Step _ _ exec _ _ model -> ((exec,args,info),) <$>
stepCheck tables consts propDefs pactType model
_ -> error
"invariant violation: anything but a step is unexpected in stepChecks"
stepChecks' <- case traverse sequence stepChecks of
Left errs -> throwError $ ModuleParseFailure errs
Right stepChecks' -> pure stepChecks'
lift $ fmap (fmap (nub . concat)) $ ifor stepChecks' $
\(name, _stepNum) ((node, args, info), checks) -> for checks $
verifyFunctionProperty env (FunData info args [node]) name CheckPactStep
-- | Get the set of property and invariant check results for functions (defun
-- and defpact)
getFunChecks
:: CheckEnv
-> HM.HashMap Text Ref
-> ExceptT VerificationFailure IO
( HM.HashMap Text [CheckResult]
, HM.HashMap Text (TableMap [CheckResult])
)
getFunChecks env@(CheckEnv tables consts propDefs moduleData _cs _g de) refs = do
ModelDecl _ checkExps <-
withExceptT ModuleParseFailure $ liftEither $
moduleModelDecl moduleData
(funTypes :: HM.HashMap Text
(Ref, TopLevel Node, Pact.FunType TC.UserType, CheckableType))
<- ifoldrM
(\name ref accum -> do
maybeFun <- lift $ typecheck de ref
case maybeFun of
Left checkFailure -> throwError $ ModuleCheckFailure checkFailure
Right topfun@(TopFun (FDefun _ _ _ defType funType _ _ _) _)
-> let checkType = case defType of
Defpact -> CheckDefpact
Defun -> CheckDefun
_ -> error
"invariant violation: only defpact / defun are allowed"
in pure $ accum & at name ?~ (ref, topfun, funType, checkType)
Right _ -> error
"invariant failure: anything but a function is unexpected here"
)
HM.empty
refs
(funChecks
:: HM.HashMap Text
((TopLevel Node, CheckableType), Either ParseFailure [Located Check]))
<- hoist generalize $ for funTypes $ \case
(Pact.Direct _, _, _, _) -> throwError InvalidDirectReference
(Pact.Ref defn, toplevel, userTy, checkType) -> ((toplevel,checkType),)
<$> moduleFunCheck tables checkExps consts propDefs defn userTy
-- check for parse failures in any of the checks
funChecks' <- case traverse sequence funChecks of
Left errs -> throwError $ ModuleParseFailure errs
Right funChecks' -> pure funChecks'
let invariantCheckable :: HM.HashMap Text (TopLevel Node, CheckableType)
invariantCheckable = fst <$> funChecks'
invariantChecks <- ifor invariantCheckable $ \name (toplevel, checkType) ->
case toplevel of
TopFun fun _ -> withExceptT ModuleCheckFailure $ ExceptT $
verifyFunctionInvariants env (mkFunInfo fun) name checkType
_ -> error "invariant violation: anything but a TopFun is unexpected in \
\invariantCheckable"
funChecks'' <- lift $ ifor funChecks' $ \name ((toplevel, checkType), checks)
-> case toplevel of
TopFun fun _ -> for checks $
verifyFunctionProperty env (mkFunInfo fun) name checkType
_ -> error
"invariant violation: anything but a TopFun is unexpected in funChecks"
pure (nub . concat <$> funChecks'', invariantChecks)
-- | Check that every property variable is in scope.
scopeCheckInterface
:: Set Text
-- ^ A set of table, definition and property names in scope
-> HM.HashMap Text Ref
-- ^ The set of refs to check
-> [ScopeError]
scopeCheckInterface globalNames refs = refs <&&> \case
Pact.Direct _ -> [ScopeInvalidDirectRef]
Pact.Ref defn -> case defn ^? tDef . dMeta . mModel of
Nothing -> []
Just model -> case normalizeListLit model of
Nothing ->
[ ScopeParseFailure
-- reconstruct an `Exp Info` for this list
( Pact.EList (Pact.ListExp model Pact.Brackets (defn ^. tInfo))
, "malformed list (inconsistent use of comma separators?)"
)
]
Just model' -> case collectProperties model' of
Left err -> [ScopeParseFailure err]
Right exps -> exps <&&> \(_propTy, meta) -> do
let args = fmap _aName $ defn ^. tDef . dFunType . ftArgs
nameEnv = Map.fromList $ ("result", 0) : zip args [1..]
genStart = fromIntegral $ length nameEnv
case evalStateT (runReaderT (expToPreProp meta) nameEnv) genStart of
Left err -> [ScopeParseFailure (meta, err)]
Right preTypedBody -> prePropGlobals preTypedBody <&&>
\globalName ->
if globalName `Set.notMember` globalNames
then [NotInScope globalName]
else []
where
(<&&>) :: Foldable t => t a -> (a -> [ScopeError]) -> [ScopeError]
(<&&>) = flip foldMap
moduleGovernance :: ModuleData Ref -> ExceptT VerificationFailure IO Governance
moduleGovernance moduleData = case _mdModule moduleData of
Pact.MDModule (Pact.Module {_mGovernance}) ->
case _gGovernance _mGovernance of
Left (Pact.KeySetName rn) ->
pure $ KsGovernance $ RegistryName rn
Right (Def {_dDefName=Pact.DefName dn,_dModule}) ->
pure $ CapGovernance $ mkCapName _dModule dn
Pact.MDInterface _ ->
throwError ExpectedConcreteModule
-- | Verifies properties on all functions, and that each function maintains all
-- invariants.
verifyModule
:: DynEnv
-> HM.HashMap ModuleName (ModuleData Ref) -- ^ all loaded modules
-> ModuleData Ref -- ^ the module we're verifying
-> IO (Either VerificationFailure ModuleChecks)
verifyModule de modules moduleData@(ModuleData modDef allRefs) = runExceptT $ do
let modRefs = moduleRefs moduleData
consts <- getConsts de $ modRefs ^. defconsts
tables <- moduleTables modules modRefs consts
let -- HM.unions is biased towards the start of the list. This module should
-- shadow the others. Note that load / shadow order of imported modules
-- is undefined and in particular not the same as their import order.
allModules = moduleData : HM.elems modules
allModuleModelDecls <- for allModules $ \modul ->
case moduleModelDecl modul of
Left err -> throwError $ ModuleParseFailure err
Right modelDecl -> pure modelDecl
let allModulePropDefs = fmap _moduleDefProperties allModuleModelDecls
-- how many times have these names been defined across all in-scope
-- modules
allModulePropNameDuplicates =
HM.keys
$ HM.filter (> (1 :: Int))
$ foldl (\acc k -> acc & at k %~ (Just . maybe 0 succ)) HM.empty
$ concatMap HM.keys allModulePropDefs
warnings = VerificationWarnings allModulePropNameDuplicates
propDefs :: HM.HashMap Text (DefinedProperty (Exp Info))
propDefs = HM.unions allModulePropDefs
case modDef of
-- If we're passed an interface there is no implementation to check so we
-- just check that every property variable referenced is in scope
Pact.MDInterface{} ->
let success = ModuleChecks HM.empty HM.empty HM.empty warnings
globalNames = Set.unions $ fmap Set.fromList
[ fmap _tableName tables
, HM.keys propDefs
, HM.keys allRefs
]
scopeErrors = scopeCheckInterface globalNames allRefs
in if length scopeErrors > 0
then throwError $ ScopeErrors scopeErrors
else pure success
-- If we're passed a module we actually check properties
Pact.MDModule{} -> do
let defunRefs, defpactRefs :: HM.HashMap Text Ref
ModuleRefs defunRefs defpactRefs _ _ = modRefs
caps <- moduleCapabilities de allModules
gov <- moduleGovernance moduleData
let checkEnv = CheckEnv tables consts propDefs moduleData caps gov de
-- Note that invariants are only checked at the defpact level, not in
-- individual steps.
(funChecks, invariantChecks)
<- getFunChecks checkEnv $ defunRefs <> defpactRefs
stepChecks <- getStepChecks checkEnv defpactRefs
pure $ ModuleChecks funChecks stepChecks invariantChecks warnings
-- | Produce errors/warnings from result.
renderVerifiedModule :: Either VerificationFailure ModuleChecks -> [RenderedOutput]
renderVerifiedModule = \case
Left (ModuleParseFailure failure) ->
[renderFatal $ describeParseFailure failure]
Left (ModuleCheckFailure checkFailure) -> describeCheckFailure checkFailure
Left (TypeTranslationFailure msg ty) ->
[renderFatal $ msg <> ": " <> tShow ty]
Left InvalidDirectReference ->
[renderFatal $ "Invalid Direct reference given to typechecker instead of Ref"]
Left ExpectedConcreteModule ->
[renderFatal $ "Expected concrete module but encountered an interface"]
Left (NonDefTerm term) ->
[renderFatal $ "Expected TDef Term but encountered: " <> tShow term]
Left (FailedConstTranslation msg) ->
[renderFatal $ T.pack msg]
Left (SchemalessTable info) ->
[renderFatal $ T.pack (renderInfo info) <>
"Verification requires all tables to have schemas"
]
Left (ModuleSpecInSchemaPosition mn) ->
[renderFatal $ "Found modref spec in schema position: " <> renderCompactText mn
]
Left (ScopeErrors errs) ->
renderFatal <$> ("Scope checking errors" : fmap describeScopeError errs)
Right (ModuleChecks propResults stepResults invariantResults warnings) ->
let propResults' = toListOf (traverse.each) propResults
stepResults' = toListOf (traverse.each) stepResults
invariantResults' = toListOf (traverse.traverse.each) invariantResults
allResults = propResults' <> stepResults' <> invariantResults'
in (concatMap describeCheckResult allResults) <>
(renderWarn <$> describeVerificationWarnings warnings)
-- | Verifies a one-off 'Check' for a function.
verifyCheck
:: DynEnv
-> ModuleData Ref -- ^ the module we're verifying
-> Text -- ^ the name of the function
-> Check -- ^ the check we're running
-> CheckableType
-> ExceptT VerificationFailure IO [CheckResult]
verifyCheck de moduleData funName check checkType = do
let info = dummyInfo
moduleName = moduleDefName $ moduleData ^. mdModule
modules = HM.fromList [(moduleName, moduleData)]
moduleFun :: ModuleData Ref -> Text -> Maybe Ref
moduleFun ModuleData{..} name = name `HM.lookup` _mdRefMap
modRefs = moduleRefs moduleData
caps <- moduleCapabilities de [moduleData]
consts <- getConsts de $ modRefs ^. defconsts
tables <- moduleTables modules modRefs consts
gov <- moduleGovernance moduleData
let checkEnv = CheckEnv tables HM.empty HM.empty moduleData caps gov de
case moduleFun moduleData funName of
Just funRef -> do
toplevel <- lift $ typecheck de funRef
case toplevel of
Left checkFailure -> throwError $ ModuleCheckFailure checkFailure
Right (TopFun fun _) -> ExceptT $ fmap Right $
verifyFunctionProperty checkEnv (mkFunInfo fun) funName checkType $
Located info check
Right _
-> error "invariant violation: verifyCheck called on non-function"
Nothing -> pure [Left $ CheckFailure info $ NotAFunction funName]
|
kadena-io/pact
|
src-tool/Pact/Analyze/Check.hs
|
bsd-3-clause
| 52,872 | 0 | 35 | 13,674 | 12,657 | 6,474 | 6,183 | -1 | -1 |
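A minimal standalone sketch of the error-collection pattern behind `(<&&>) = flip foldMap` in `scopeCheckInterface` above: each element is mapped to a (possibly empty) list of errors and the lists are concatenated, so an empty result means nothing was out of scope. All names below are hypothetical and only illustrate the pattern.

(<&&>) :: Foldable t => t a -> (a -> [String]) -> [String]
(<&&>) = flip foldMap

-- Hypothetical check: report names longer than three characters.
checkNames :: [String] -> [String]
checkNames names = names <&&> \n ->
  if length n > 3 then ["name too long: " ++ n] else []

main :: IO ()
main = print (checkNames ["ok", "toolong"])  -- prints ["name too long: toolong"]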
module Distributed.Gossip.IO (
runGossip,
waitGossip,
getMembers,
stopGossip,
Gossip
) where
import Distributed.Gossip.Data
import Distributed.Config
import Distributed.Network
import Network
import System.IO -- Handles, etc
import System.Time -- getClockTime
import System.Random -- randomRIO
import Control.Concurrent -- Mvars
import Data.List (nub, stripPrefix) -- nub, stripPrefix
import Data.String.Utils -- strip
import Control.Concurrent.Timer --repeatedTimer
import Control.Concurrent.Suspend.Lifted -- sDelay
import Control.Exception -- try
import Data.Map as Map
import Control.Monad (when, replicateM)
import Control.Arrow ((&&&))
-- | The Gossip data structure.
data Gossip = Gossip { listener :: ThreadId,
updater :: TimerIO,
flusher :: TimerIO,
log :: (MVar (Maybe Handle)),
alive :: (MVar Bool),
memberMap :: (MVar (Map ID Node))}
-- | A function that's run periodically. It updates our node (if we're in the membership map),
-- | marks the failed nodes, and sends out gossip.
-- | It also tries to join the designated contact node if there is one in the case that:
-- | 1) Our node isn't in the membership map
-- | 2) The contact's node isn't in the membership map
doUpdate :: Maybe ID -> Time -> MVar ID -> String -> MVar (Maybe Handle) -> MVar (Map ID Node) -> IO()
doUpdate contact tFail myIdMVar v6interface logMVar mvarMap = do
(TOD now _) <- getClockTime
id@(ID myHost myPort myTime) <- readMVar myIdMVar
let idInterface = ID (myHost ++ v6interface) myPort myTime
-- Heartbeat our own list
when (myTime /= 0) (modifyMVar mvarMap (\x -> return (updateNode x id now, ())))
-- Mark dead nodes
modifyMVar mvarMap (\x -> return (markFailed x now tFail, ()))
memberMap <- readMVar mvarMap
case contact of
Nothing -> return ()
Just contactId -> do
case findLiveAnytime memberMap contactId of
Nothing -> sendJoin contactId idInterface $ portFromWord myPort
Just node -> return ()
case findLiveAnytime memberMap id of
Nothing -> sendJoin contactId idInterface $ portFromWord myPort
Just node -> return ()
id <- readMVar myIdMVar
sendGossip memberMap id v6interface
-- Log Membership
writeLog logMVar $ "Members: " ++ show now ++ " | " ++ show (prettyMemberList memberMap)
-- | Multicast our membership map in gossip style. We only broadcast to nodes that aren't us,
-- | and we only send nodes marked alive.
sendGossip :: Map ID Node -> ID -> String -> IO()
sendGossip members myId@(ID myHost myPort myTime) v6interface = do
let myV6Id = ID (myHost ++ v6interface) myPort myTime
otherMembers = Map.delete myId members
notDead = filterDead members
hosts = Prelude.map (host &&& port) $ keys otherMembers
count = length hosts
numToSend = ceiling $ fromIntegral count / 2
chosenHosts <- replicateM numToSend $ randomRIO (0, count - 1)
mapM_ (\y -> let host = fst $ hosts !! y
port = portFromWord $ snd $ hosts !! y in
forkIO $ trySend host port myV6Id $ show notDead) $ nub chosenHosts
return ()
-- | Send a join message to the contact
sendJoin :: ID -> ID -> PortID -> IO ()
sendJoin (ID host port _) myId (PortNumber myPort) = trySend host (portFromWord port) myId $ "Join " ++ show myPort
-- | Begin listening for gossip. On unhandled exception, set the MVar so someone who cares can
-- | learn about the failure.
-- | (This should probably be implemented better. I'd like to put an exception in the MVar.)
listenForGossip :: MVar Bool -> MVar ID -> String -> MVar (Maybe Handle) -> MVar (Map ID Node) -> IO ()
listenForGossip alive myIDMVar v6interface logMVar memberMVar = do
(ID host port _) <- readMVar myIDMVar
sock <- listenSocket (host ++ v6interface) (portFromWord port)
writeLog logMVar $ "Listening on " ++ host ++ v6interface ++ ":" ++ show port
try $ waitForConnect sock myIDMVar logMVar memberMVar :: IO (Either SomeException ())
writeLog logMVar $ "Ending listening. Closing port."
sClose sock
putMVar alive False
-- | Wait for and accept a connection, then fork the handler.
waitForConnect :: Socket -> MVar ID -> MVar (Maybe Handle) -> MVar (Map ID Node) -> IO ()
waitForConnect sock myIDMVar logMVar mvarMap = do
connection <- acceptSocket sock
forkIO $ handleConnection connection myIDMVar logMVar mvarMap
waitForConnect sock myIDMVar logMVar mvarMap
-- | Handle a connection.
-- | The message is either a join, in which case we add the node to the membership map,
-- | or it's a membership map, and we merge our map with the received one.
-- | We also update our id MVar with the latest node representing us in the map.
handleConnection :: (Handle, HostName, PortNumber) -> MVar ID -> MVar (Maybe Handle) -> MVar (Map ID Node) -> IO ()
handleConnection (handle, host, port) myIDMVar logMVar memberMVar = do
message <- hGetContents handle
(TOD time _) <- getClockTime
let strippedmsg = strip message
case stripPrefix "Join " strippedmsg of
Just portStr ->
modifyMVar memberMVar (\x -> return (updateNode x (ID host (fromIntegral $ read portStr) time) time, ()))
Nothing ->
case reads strippedmsg of
[(nodemap,"")] -> do
modifyMVar memberMVar (\x -> return (gossipMerge x nodemap time, ()))
memberMap <- readMVar memberMVar
myId <- readMVar myIDMVar
let myNode = findLiveAnytime memberMap myId
case myNode of
Nothing -> return ()
Just (Node host port time _ _ _) -> modifyMVar myIDMVar (\x -> return (ID host port time, ()))
_ -> writeLog logMVar $ "Failed to parse message from " ++ host ++ ":" ++ show port
hClose handle
-- | A function that accepts a config file path, reads the configuration, and begins gossiping
-- | based on that configuration. It returns a 'Gossip' handle bundling the listener's
-- | ThreadId, the periodic update and log-flush timers, the log handle, the MVar that
-- | will be filled if the listener stops for some reason, and the membership map.
-- | TODO: If the listener stops, halt other portions of the gossip protocol
runGossip :: FilePath -> IO Gossip
runGossip filePath = do
config <- getConfigOrFail filePath
let contactPort = configGetValue config "gossip" "contactport"
contactIP = configGetValue config "gossip" "contactip"
ipv6Interface = case configGetValue config "gossip" "ipv6_interface" of
Nothing -> ""
Just interface -> '%':interface
bindIP = configGetCrucial config "gossip" "bindip"
bindPort = configGetCrucial config "gossip" "bindport"
tFail = configGetCrucial config "gossip" "tfail"
tGossip = configGetCrucial config "gossip" "tgossip"
logFile = configGetValue config "gossip" "log_file"
tflush = case configGetValue config "gossip" "tflush_log" of
Nothing -> 3
Just seconds -> seconds
noBuffer = case configGetValue config "gossip" "no_buffer_log" of
Nothing -> False
Just bool -> bool
logMVar <- case logFile of
Nothing -> newMVar Nothing
Just filepath -> if noBuffer
then (openFile filepath AppendMode) >>=
(\x -> do hSetBuffering x NoBuffering; return x) >>=
newMVar . Just
else (openFile filepath AppendMode) >>=
newMVar . Just
(contact, myIdMVar) <-
case (contactIP, contactPort) of
(Just ip, Just port) -> do
writeLog logMVar "This node is NOT the contact node."
let contact = Just (ID ip port 0)
myIdMVar <- newMVar (ID bindIP bindPort 0)
return (contact, myIdMVar)
(_, _) -> do
writeLog logMVar "This node IS the contact node."
(TOD now _) <- getClockTime
myIdMVar <- newMVar (ID bindIP bindPort now)
return (Nothing, myIdMVar)
memberMVar <- newMVar Map.empty
alive <- newEmptyMVar
listener <- forkIO $ listenForGossip alive myIdMVar ipv6Interface logMVar memberMVar
tmr <- repeatedTimer (doUpdate contact tFail myIdMVar ipv6Interface logMVar memberMVar) $ sDelay tGossip
flusher <- repeatedTimer (modifyMVar logMVar flushLog) $ sDelay tflush
return $ Gossip listener tmr flusher logMVar alive memberMVar
-- | A function to stop the gossip protocol.
stopGossip :: Gossip -> IO ()
stopGossip (Gossip listen timer flusher log alive members) = do
killThread listen
stopTimer timer
stopTimer flusher
takeMVar alive
modifyMVar log (\l -> case l of
Nothing -> return (Nothing, ())
Just handle -> do
hFlush handle
hClose handle
return (Nothing,()))
modifyMVar members (\_ -> return (Map.empty, ()))
putMVar alive False
return ()
-- | Return a list of hostnames that are currently alive in the cluster
getMembers :: Gossip -> IO [HostName]
getMembers (Gossip _ _ _ _ _ members) = do
membermap <- readMVar members
return $ Prelude.map (\(ID host _ _) -> host) $ keys $ filterDead membermap
-- | Block until the gossip protocol dies (i.e. until something stops it)
waitGossip :: Gossip -> IO ()
waitGossip (Gossip _ _ _ _ alive _) = do
takeMVar alive
putMVar alive False
return ()
-- | A function that allows asynchronous writes to a log file.
writeLog :: MVar (Maybe Handle) -> String -> IO ()
writeLog logMVar text = do
modifyMVar logMVar (\mHandle -> case mHandle of
Nothing -> return (mHandle, ())
Just handle -> do hPutStrLn handle text
return (mHandle, ()))
flushLog :: Maybe Handle -> IO (Maybe Handle,())
flushLog Nothing = return (Nothing,())
flushLog (Just handle) = do
hFlush handle
return (Just handle, ())
|
knusbaum/Distributed-Haskell
|
Distributed/Gossip/IO.hs
|
bsd-3-clause
| 10,090 | 0 | 23 | 2,685 | 2,788 | 1,370 | 1,418 | 177 | 7 |
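A hedged usage sketch of the module above. The config path is a placeholder; per runGossip the file must at least supply the [gossip] bindip, bindport, tfail and tgossip settings, plus contactip/contactport on non-contact nodes.

import Control.Concurrent (threadDelay)
import Distributed.Gossip.IO (runGossip, getMembers, stopGossip)

main :: IO ()
main = do
  gossip <- runGossip "gossip.conf"   -- "gossip.conf" is a hypothetical path
  threadDelay (10 * 1000000)          -- let a few gossip rounds run
  members <- getMembers gossip        -- hostnames currently considered alive
  mapM_ putStrLn members
  stopGossip gossip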
{-
SockeyeParser.hs: Parser for Sockeye
Part of Sockeye
Copyright (c) 2018, ETH Zurich.
All rights reserved.
This file is distributed under the terms in the attached LICENSE file.
If you do not find this file, copies can be found by writing to:
ETH Zurich D-INFK, CAB F.78, Universitaetstrasse 6, CH-8092 Zurich,
Attn: Systems Group.
-}
module SockeyeParser
( parseSockeye ) where
import System.FilePath
import Text.Parsec
import Text.Parsec.Expr
import qualified Text.Parsec.Token as P
import Text.Parsec.Language (emptyDef)
import SockeyeASTMeta
import qualified SockeyeParserAST as AST
{- Parser main function -}
parseSockeye :: String -> String -> Either ParseError AST.SockeyeFile
parseSockeye = parse sockeyeFile
data TopLevel
= ModuleDecl AST.Module
| TypeDecl AST.NamedType
data ModuleBody
= ConstDecl AST.NamedConstant
| InstDecl AST.InstanceDeclaration
| NodeDecl AST.NodeDeclaration
| Def AST.Definition
{- Sockeye parsing -}
sockeyeFile = do
whiteSpace
pos <- getPositionMeta
imports <- many sockeyeImport
(modules, types) <- do
stmts <- many $ choice [moduleDecl, typeDecl]
return $ foldr splitDecl ([], []) stmts
eof
return AST.SockeyeFile
{ AST.sockeyeFileMeta = pos
, AST.imports = imports
, AST.modules = modules
, AST.types = types
}
where
moduleDecl = fmap ModuleDecl sockeyeModule
typeDecl = fmap TypeDecl namedType
splitDecl (ModuleDecl m) (ms, ts) = (m:ms, ts)
splitDecl (TypeDecl t) (ms, ts) = (ms, t:ts)
sockeyeImport = do
pos <- getPositionMeta
reserved "import"
path <- many1 (alphaNum <|> char '_' <|> char '-' <|> char '/') <* whiteSpace
explImports <- optionMaybe (parens $ commaSep1 importAlias)
return AST.Import
{ AST.importMeta = pos
, AST.importFile = path <.> "soc"
, AST.explImports = explImports
}
<?> "import"
importAlias = do
pos <- getPositionMeta
origName <- identString <?> "module or type to import"
alias <- option origName importAlias
return AST.ImportAlias
{ AST.importAliasMeta = pos
, AST.originalName = origName
, AST.importAlias = alias
}
where
importAlias = do
reserved "as"
identString <?> "import alias"
sockeyeModule = do
pos <- getPositionMeta
extern <- option False moduleExtern
reserved "module"
name <- moduleName
params <- option [] (parens $ commaSep moduleParam)
(consts, insts, nodes, defs) <- braces moduleBody
return AST.Module
{ AST.moduleMeta = pos
, AST.moduleExtern = extern
, AST.moduleName = name
, AST.parameters = params
, AST.constants = consts
, AST.instDecls = insts
, AST.nodeDecls = nodes
, AST.definitions = defs
}
<?> "module specification"
moduleExtern = do
reserved "extern"
return True
moduleParam = do
pos <- getPositionMeta
range <- parens naturalSet <?> "parameter range"
paramName <- parameterName
return AST.ModuleParameter
{ AST.paramMeta = pos
, AST.paramName = paramName
, AST.paramRange = range
}
moduleBody = do
body <- many $ choice [constDecl, instDecl, nodeDecl, def]
return $ foldr splitBody ([], [], [], []) body
where
constDecl = fmap ConstDecl namedConstant
instDecl = fmap InstDecl instanceDeclaration
nodeDecl = fmap NodeDecl nodeDeclaration
def = fmap Def definition
splitBody (ConstDecl c) (cs, is, ns, ds) = (c:cs, is, ns, ds)
splitBody (InstDecl i) (cs, is, ns, ds) = (cs, i:is, ns, ds)
splitBody (NodeDecl n) (cs, is, ns, ds) = (cs, is, n:ns, ds)
splitBody (Def d) (cs, is, ns, ds) = (cs, is, ns, d:ds)
instanceDeclaration = do
pos <- getPositionMeta
reserved "instance"
name <- identifierName
size <- optionMaybe arraySize
reserved "of"
modName <- moduleName
return AST.InstanceDeclaration
{ AST.instDeclMeta = pos
, AST.instName = name
, AST.instModName = modName
, AST.instArrSize = size
}
<?> "instance declaration"
nodeDeclaration = do
pos <- getPositionMeta
kind <- nodeKind
t <- nodeType
name <- identifierName
size <- optionMaybe arraySize
return AST.NodeDeclaration
{ AST.nodeDeclMeta = pos
, AST.nodeKind = kind
, AST.nodeType = t
, AST.nodeName = name
, AST.nodeArrSize = size
}
<?> "node declaration"
nodeKind = option AST.InternalNode $ choice [input, output]
where
input = do
reserved "input"
return AST.InputPort
output = do
reserved "output"
return AST.OutputPort
nodeType = do
pos <- getPositionMeta
originDomain <- domain
originType <- edgeType
(targetDomain, targetType) <- option (originDomain, Nothing) $ do
reserved "to"
d <- domain
t <- optionMaybe edgeType
return (d, t)
return AST.NodeType
{ AST.nodeTypeMeta = pos
, AST.originDomain = originDomain
, AST.originType = originType
, AST.targetDomain = targetDomain
, AST.targetType = targetType
}
domain = choice [memory, intr, power, clock] <?> "node domain"
where
memory = do
reserved "memory"
return AST.Memory
intr = do
reserved "intr"
return AST.Interrupt
power = do
reserved "power"
return AST.Power
clock = do
reserved "clock"
return AST.Clock
edgeType = choice [literal, named]
where
literal = do
pos <- getPositionMeta
addrType <- addressType
return $ AST.TypeLiteral pos addrType
named = do
pos <- getPositionMeta
name <- parens typeName
return $ AST.TypeName pos name
<?> "(<type name>)"
definition = choice [forall, def]
where
def = do
receiver <- unqualifiedRef
choice
[ accepts receiver
, maps receiver
, converts receiver
, overlays receiver
, blockoverlays receiver
, instantiates receiver
, binds receiver
]
accepts node = do
pos <- getPositionMeta
reserved "accepts"
blocks <- brackets $ semiSep addressBlock
return $ AST.Accepts pos node blocks
maps node = do
pos <- getPositionMeta
reserved "maps"
maps <- brackets $ semiSep mapSpec
return $ AST.Maps pos node maps
mapSpec = do
pos <- getPositionMeta
addr <- addressBlock
reserved "to"
targets <- commaSep1 mapTarget
return $ AST.MapSpec pos addr targets
where
mapTarget = do
pos <- getPositionMeta
node <- nodeReference
reserved "at"
addr <- addressBlock
return $ AST.MapTarget pos node addr
converts node = do
pos <- getPositionMeta
reserved "converts"
converts <- brackets $ semiSep convertSpec
return $ AST.Converts pos node converts
convertSpec = mapSpec
blockoverlays node = do
pos <- getPositionMeta
reserved "blockoverlays"
overlay <- nodeReference
reserved "bits"
blocksizes <- parens $ commaSep1 natural
return $ AST.BlockOverlays pos node overlay blocksizes
overlays node = do
pos <- getPositionMeta
reserved "overlays"
overlay <- nodeReference
return $ AST.Overlays pos node overlay
instantiates inst = do
pos <- getPositionMeta
reserved "instantiates"
modName <- moduleName
args <- option [] (parens $ commaSep naturalExpr)
return AST.Instantiates
{ AST.defMeta = pos
, AST.inst = inst
, AST.instModule = modName
, AST.arguments = args
}
binds inst = do
pos <- getPositionMeta
reserved "binds"
bindings <- brackets $ semiSep portBinding
return $ AST.Binds pos inst bindings
where
portBinding = do
pos <- getPositionMeta
port <- unqualifiedRef
reserved "to"
node <- nodeReference
return $ AST.PortBinding pos port node
forall = do
pos <- getPositionMeta
reserved "forall"
var <- variableName
reserved "in"
range <- parens naturalSet
body <- braces $ many definition
return AST.Forall
{ AST.defMeta = pos
, AST.boundVarName = var
, AST.varRange = range
, AST.quantifierBody = body
}
unqualifiedRef = do
pos <- getPositionMeta
name <- identifierName
index <- optionMaybe arrayIndex
return AST.UnqualifiedRef
{ AST.refMeta = pos
, AST.refName = name
, AST.refIndex = index
}
nodeReference = do
pos <- getPositionMeta
ref1 <- unqualifiedRef
ref2 <- optionMaybe $ (reservedOp "." >> unqualifiedRef)
return $ maybe (AST.InternalNodeRef pos ref1) (AST.InputPortRef pos ref1) ref2
namedType = do
pos <- getPositionMeta
reserved "type"
name <- typeName
addrType <- addressType
return $ AST.NamedType pos name addrType
<?> "named type"
namedConstant = do
pos <- getPositionMeta
reserved "const"
name <- constName
value <- natural
return $ AST.NamedConstant pos name value
<?> "named constant"
addressType = do
pos <- getPositionMeta
addrType <- parens $ semiSep1 naturalSet
return $ AST.AddressType pos addrType
<?> "address type literal"
address = do
pos <- getPositionMeta
addr <- parens $ semiSep1 wildcardSet
return $ AST.Address pos addr
<?> "address tuple"
addressBlock = do
pos <- getPositionMeta
addr <- address
props <- option AST.True propertyExpr
return $ AST.AddressBlock pos addr props
arraySize = do
pos <- getPositionMeta
size <- brackets $ semiSep1 naturalSet
return $ AST.ArraySize pos size
<?> "array size"
arrayIndex = do
pos <- getPositionMeta
index <- brackets $ semiSep1 wildcardSet
return $ AST.ArrayIndex pos index
<?> "array index"
naturalSet = do
pos <- getPositionMeta
ranges <- commaSep1 naturalRange
return $ AST.NaturalSet pos ranges
<?> "set of naturals"
wildcardSet = choice [wildcard, explicit]
where
explicit = do
pos <- getPositionMeta
set <- naturalSet
return $ AST.ExplicitSet pos set
wildcard = do
pos <- getPositionMeta
reservedOp "*"
return $ AST.Wildcard pos
naturalRange = do
pos <- getPositionMeta
base <- naturalExpr
choice [bits pos base, limit pos base, singleton pos base]
<?> "range of naturals"
where
bits pos base = do
reserved "bits"
bits <- naturalExpr
return $ AST.BitsRange pos base bits
limit pos base = do
reserved "to"
limit <- naturalExpr
return $ AST.LimitRange pos base limit
singleton pos base = return $ AST.SingletonRange pos base
naturalExpr = buildExpressionParser opTable term <?> "arithmetic expression"
where
term = parens naturalExpr <|> var <|> lit
opTable =
[ [ Postfix slice ]
, [ Infix mult AssocLeft ]
, [ Infix add AssocLeft, Infix sub AssocLeft ]
, [ Infix concat AssocLeft ]
]
var = do
pos <- getPositionMeta
name <- variableName
return $ AST.Variable pos name
lit = do
pos <- getPositionMeta
value <- natural
return $ AST.Literal pos value
slice = do
pos <- getPositionMeta
range <- brackets naturalRange
return $ flip (AST.Slice pos) range
mult = do
pos <- getPositionMeta
reservedOp "*"
return $ AST.Multiplication pos
add = do
pos <- getPositionMeta
reservedOp "+"
return $ AST.Addition pos
sub = do
pos <- getPositionMeta
reservedOp "-"
return $ AST.Subtraction pos
concat = do
pos <- getPositionMeta
reservedOp "++"
return $ AST.Concat pos
propertyExpr = buildExpressionParser opTable term <?> "property expression"
where
term = parens propertyExpr <|> prop
opTable =
[ [ Prefix not ]
, [ Infix and AssocLeft, Infix or AssocLeft ]
]
prop = do
pos <- getPositionMeta
name <- propertyName
return $ AST.Property pos name
not = do
pos <- getPositionMeta
reservedOp "!"
return $ AST.Not pos
and = do
pos <- getPositionMeta
reservedOp "&&"
return $ AST.And pos
or = do
pos <- getPositionMeta
reservedOp "||"
return $ AST.Or pos
{- Helper functions -}
lexer = P.makeTokenParser (
emptyDef {
{- List of reserved names -}
P.reservedNames =
[ "import", "as"
, "extern"
, "module"
, "input", "output"
, "type", "const"
, "memory", "intr", "power", "clock", "instance"
, "of"
, "forall", "in"
, "accepts", "maps", "converts", "overlays", "blockoverlays"
, "instantiates", "binds"
, "to", "at"
, "bits"
],
{- List of operators -}
P.reservedOpNames =
[ "+", "-", "*", "/", "++"
, "!", "&&", "||"
, "."
],
{- Valid identifiers -}
P.identStart = letter,
P.identLetter = alphaNum <|> char '_',
{- comment start and end -}
P.commentStart = "/*",
P.commentEnd = "*/",
P.commentLine = "//",
P.nestedComments = False,
{- Sockeye is case sensitive -}
P.caseSensitive = True
})
whiteSpace = P.whiteSpace lexer
reserved = P.reserved lexer
reservedOp = P.reservedOp lexer
parens = P.parens lexer
brackets = P.brackets lexer
braces = P.braces lexer
commaSep = P.commaSep lexer
commaSep1 = P.commaSep1 lexer
semiSep = P.semiSep lexer
semiSep1 = P.semiSep1 lexer
identString = P.identifier lexer
natural = P.natural lexer
typeName = identString <?> "type name"
constName = identString <?> "constant name"
moduleName = identString <?> "module name"
parameterName = identString <?> "parameter name"
variableName = identString <?> "variable name"
propertyName = identString <?> "property name"
identifierName = identString <?> "identifier"
getPositionMeta = fmap ParserMeta getPosition
|
kishoredbn/barrelfish
|
tools/sockeye/SockeyeParser.hs
|
mit
| 15,135 | 0 | 15 | 5,042 | 4,038 | 2,006 | 2,032 | 433 | 4 |
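A hedged sketch of driving the parser above. The source name is only used in error messages, and the one-line input is a guess at a minimal module (optional imports, then "module" <name> "{" "}") inferred from the grammar, not taken from Sockeye documentation.

import SockeyeParser (parseSockeye)

main :: IO ()
main =
  case parseSockeye "example.soc" "module Example {}" of
    Left err -> putStrLn ("parse error: " ++ show err)
    Right _  -> putStrLn "parsed"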
main :: [()]
main = [() | True <- []]
|
roberth/uu-helium
|
test/staticwarnings/Generator4.hs
|
gpl-3.0
| 39 | 0 | 8 | 11 | 31 | 17 | 14 | 2 | 1 |
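The comprehension above can never produce an element: its generator draws from the empty list, and `True <-` is a failable pattern that would also discard any `False` values, which is presumably what this static-warnings test exercises. A small sketch of the same semantics (standard Haskell behaviour; hypothetical names):

example1 :: [()]
example1 = [() | True <- [False, False]]   -- [], every match fails and is skipped

example2 :: [()]
example2 = [() | True <- [False, True]]    -- [()], only the successful match survives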
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<helpset version="2.0" xml:lang="sr-CS">
<title>Tips and Tricks | ZAP Extension</title>
<maps>
<homeID>top</homeID>
<mapref location="map.jhm"/>
</maps>
<view>
<name>TOC</name>
<label>Contents</label>
<type>org.zaproxy.zap.extension.help.ZapTocView</type>
<data>toc.xml</data>
</view>
<view>
<name>Index</name>
<label>Index</label>
<type>javax.help.IndexView</type>
<data>index.xml</data>
</view>
<view>
<name>Search</name>
<label>Search</label>
<type>javax.help.SearchView</type>
<data engine="com.sun.java.help.search.DefaultSearchEngine">
JavaHelpSearch
</data>
</view>
<view>
<name>Favorites</name>
<label>Favorites</label>
<type>javax.help.FavoritesView</type>
</view>
</helpset>
|
veggiespam/zap-extensions
|
addOns/tips/src/main/javahelp/org/zaproxy/zap/extension/tips/resources/help_sr_CS/helpset_sr_CS.hs
|
apache-2.0
| 977 | 80 | 66 | 161 | 417 | 211 | 206 | -1 | -1 |
-- CIS 194 Homework 2
module Week2.Log where
import Control.Applicative
data MessageType = Info
| Warning
| Error Int
deriving (Show, Eq)
type TimeStamp = Int
data LogMessage = LogMessage MessageType TimeStamp String
| Unknown String
deriving (Show, Eq)
data MessageTree = Leaf
| Node MessageTree LogMessage MessageTree
deriving (Show, Eq)
-- | @testParse p n f@ tests the log file parser @p@ by running it
-- on the first @n@ lines of file @f@.
testParse :: (String -> [LogMessage])
-> Int
-> FilePath
-> IO [LogMessage]
testParse parse n file = take n . parse <$> readFile file
-- | @testWhatWentWrong p w f@ tests the log file parser @p@ and
-- warning message extractor @w@ by running them on the log file
-- @f@.
testWhatWentWrong :: (String -> [LogMessage])
-> ([LogMessage] -> [String])
-> FilePath
-> IO [String]
testWhatWentWrong parse whatWentWrong file
= whatWentWrong . parse <$> readFile file
|
gneuvill/haskell-exos
|
src/Week2/Log.hs
|
bsd-3-clause
| 1,079 | 0 | 9 | 324 | 233 | 130 | 103 | 24 | 1 |
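A hedged sketch of exercising the helper above. The parser is a deliberately naive stand-in (writing the real parse function is the point of the exercise) and "error.log" is a placeholder file name.

import Week2.Log

-- Naive stand-in parser: treats every line as Unknown. Not the homework solution.
naiveParse :: String -> [LogMessage]
naiveParse = map Unknown . lines

main :: IO ()
main = do
  msgs <- testParse naiveParse 5 "error.log"
  mapM_ print msgs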
{-# LANGUAGE CPP #-}
-----------------------------------------------------------------------------
-- |
-- Module : Main
-- Copyright : (c) David Himmelstrup 2005
-- License : BSD-like
--
-- Maintainer : [email protected]
-- Stability : provisional
-- Portability : portable
--
-- Entry point to the default cabal-install front-end.
-----------------------------------------------------------------------------
module Main (main) where
import Distribution.Client.Setup
( GlobalFlags(..), globalCommand, globalRepos
, ConfigFlags(..)
, ConfigExFlags(..), defaultConfigExFlags, configureExCommand
, BuildFlags(..), BuildExFlags(..), SkipAddSourceDepsCheck(..)
, buildCommand, replCommand, testCommand, benchmarkCommand
, InstallFlags(..), defaultInstallFlags
, installCommand, upgradeCommand
, FetchFlags(..), fetchCommand
, FreezeFlags(..), freezeCommand
, GetFlags(..), getCommand, unpackCommand
, checkCommand
, formatCommand
, updateCommand
, ListFlags(..), listCommand
, InfoFlags(..), infoCommand
, UploadFlags(..), uploadCommand
, ReportFlags(..), reportCommand
, runCommand
, InitFlags(initVerbosity), initCommand
, SDistFlags(..), SDistExFlags(..), sdistCommand
, Win32SelfUpgradeFlags(..), win32SelfUpgradeCommand
, SandboxFlags(..), sandboxCommand
, ExecFlags(..), execCommand
, UserConfigFlags(..), userConfigCommand
, reportCommand
)
import Distribution.Simple.Setup
( HaddockFlags(..), haddockCommand, defaultHaddockFlags
, HscolourFlags(..), hscolourCommand
, ReplFlags(..)
, CopyFlags(..), copyCommand
, RegisterFlags(..), registerCommand
, CleanFlags(..), cleanCommand
, TestFlags(..), BenchmarkFlags(..)
, Flag(..), fromFlag, fromFlagOrDefault, flagToMaybe, toFlag
, configAbsolutePaths
)
import Distribution.Client.SetupWrapper
( setupWrapper, SetupScriptOptions(..), defaultSetupScriptOptions )
import Distribution.Client.Config
( SavedConfig(..), loadConfig, defaultConfigFile, userConfigDiff
, userConfigUpdate )
import Distribution.Client.Targets
( readUserTargets )
import qualified Distribution.Client.List as List
( list, info )
import Distribution.Client.Install (install)
import Distribution.Client.Configure (configure)
import Distribution.Client.Update (update)
import Distribution.Client.Exec (exec)
import Distribution.Client.Fetch (fetch)
import Distribution.Client.Freeze (freeze)
import Distribution.Client.Check as Check (check)
--import Distribution.Client.Clean (clean)
import Distribution.Client.Upload as Upload (upload, check, report)
import Distribution.Client.Run (run, splitRunArgs)
import Distribution.Client.SrcDist (sdist)
import Distribution.Client.Get (get)
import Distribution.Client.Sandbox (sandboxInit
,sandboxAddSource
,sandboxDelete
,sandboxDeleteSource
,sandboxListSources
,sandboxHcPkg
,dumpPackageEnvironment
,getSandboxConfigFilePath
,loadConfigOrSandboxConfig
,initPackageDBIfNeeded
,maybeWithSandboxDirOnSearchPath
,maybeWithSandboxPackageInfo
,WereDepsReinstalled(..)
,maybeReinstallAddSourceDeps
,tryGetIndexFilePath
,sandboxBuildDir
,updateSandboxConfigFileFlag
,configCompilerAux'
,configPackageDB')
import Distribution.Client.Sandbox.PackageEnvironment
(setPackageDB
,userPackageEnvironmentFile)
import Distribution.Client.Sandbox.Timestamp (maybeAddCompilerTimestampRecord)
import Distribution.Client.Sandbox.Types (UseSandbox(..), whenUsingSandbox)
import Distribution.Client.Init (initCabal)
import qualified Distribution.Client.Win32SelfUpgrade as Win32SelfUpgrade
import Distribution.Client.Utils (determineNumJobs
#if defined(mingw32_HOST_OS)
,relaxEncodingErrors
#endif
,existsAndIsMoreRecentThan)
import Distribution.PackageDescription
( Executable(..), benchmarkName, benchmarkBuildInfo, testName
, testBuildInfo, buildable )
import Distribution.PackageDescription.Parse
( readPackageDescription )
import Distribution.PackageDescription.PrettyPrint
( writeGenericPackageDescription )
import Distribution.Simple.Build
( startInterpreter )
import Distribution.Simple.Command
( CommandParse(..), CommandUI(..), Command
, commandsRun, commandAddAction, hiddenCommand )
import Distribution.Simple.Compiler
( Compiler(..) )
import Distribution.Simple.Configure
( checkPersistBuildConfigOutdated, configCompilerAuxEx
, ConfigStateFileError(..), localBuildInfoFile
, getPersistBuildConfig, tryGetPersistBuildConfig )
import qualified Distribution.Simple.LocalBuildInfo as LBI
import Distribution.Simple.Program (defaultProgramConfiguration
,configureAllKnownPrograms)
import qualified Distribution.Simple.Setup as Cabal
import Distribution.Simple.Utils
( cabalVersion, die, notice, info, topHandler
, findPackageDesc, tryFindPackageDesc )
import Distribution.Text
( display )
import Distribution.Verbosity as Verbosity
( Verbosity, normal )
import Distribution.Version
( Version(..), orLaterVersion )
import qualified Paths_cabal_install (version)
import System.Environment (getArgs, getProgName)
import System.Exit (exitFailure)
import System.FilePath (splitExtension, takeExtension)
import System.IO ( BufferMode(LineBuffering), hSetBuffering
#ifdef mingw32_HOST_OS
, stderr
#endif
, stdout )
import System.Directory (doesFileExist, getCurrentDirectory)
import Data.List (intercalate)
import Data.Maybe (mapMaybe)
#if !MIN_VERSION_base(4,8,0)
import Data.Monoid (Monoid(..))
#endif
import Control.Monad (when, unless)
-- | Entry point
--
main :: IO ()
main = do
-- Enable line buffering so that we can get fast feedback even when piped.
-- This is especially important for CI and build systems.
hSetBuffering stdout LineBuffering
-- The default locale encoding for Windows CLI is not UTF-8 and printing
-- Unicode characters to it will fail unless we relax the handling of encoding
-- errors when writing to stderr and stdout.
#ifdef mingw32_HOST_OS
relaxEncodingErrors stdout
relaxEncodingErrors stderr
#endif
getArgs >>= mainWorker
mainWorker :: [String] -> IO ()
mainWorker args = topHandler $
case commandsRun (globalCommand commands) commands args of
CommandHelp help -> printGlobalHelp help
CommandList opts -> printOptionsList opts
CommandErrors errs -> printErrors errs
CommandReadyToGo (globalFlags, commandParse) ->
case commandParse of
_ | fromFlagOrDefault False (globalVersion globalFlags)
-> printVersion
| fromFlagOrDefault False (globalNumericVersion globalFlags)
-> printNumericVersion
CommandHelp help -> printCommandHelp help
CommandList opts -> printOptionsList opts
CommandErrors errs -> printErrors errs
CommandReadyToGo action -> do
globalFlags' <- updateSandboxConfigFileFlag globalFlags
action globalFlags'
where
printCommandHelp help = do
pname <- getProgName
putStr (help pname)
printGlobalHelp help = do
pname <- getProgName
configFile <- defaultConfigFile
putStr (help pname)
putStr $ "\nYou can edit the cabal configuration file to set defaults:\n"
++ " " ++ configFile ++ "\n"
exists <- doesFileExist configFile
when (not exists) $
putStrLn $ "This file will be generated with sensible "
++ "defaults if you run 'cabal update'."
printOptionsList = putStr . unlines
printErrors errs = die $ intercalate "\n" errs
printNumericVersion = putStrLn $ display Paths_cabal_install.version
printVersion = putStrLn $ "cabal-install version "
++ display Paths_cabal_install.version
++ "\nusing version "
++ display cabalVersion
++ " of the Cabal library "
commands =
[installCommand `commandAddAction` installAction
,updateCommand `commandAddAction` updateAction
,listCommand `commandAddAction` listAction
,infoCommand `commandAddAction` infoAction
,fetchCommand `commandAddAction` fetchAction
,freezeCommand `commandAddAction` freezeAction
,getCommand `commandAddAction` getAction
,hiddenCommand $
unpackCommand `commandAddAction` unpackAction
,checkCommand `commandAddAction` checkAction
,sdistCommand `commandAddAction` sdistAction
,uploadCommand `commandAddAction` uploadAction
,reportCommand `commandAddAction` reportAction
,runCommand `commandAddAction` runAction
,initCommand `commandAddAction` initAction
,configureExCommand `commandAddAction` configureAction
,buildCommand `commandAddAction` buildAction
,replCommand `commandAddAction` replAction
,sandboxCommand `commandAddAction` sandboxAction
,haddockCommand `commandAddAction` haddockAction
,execCommand `commandAddAction` execAction
,userConfigCommand `commandAddAction` userConfigAction
,cleanCommand `commandAddAction` cleanAction
,wrapperAction copyCommand
copyVerbosity copyDistPref
,wrapperAction hscolourCommand
hscolourVerbosity hscolourDistPref
,wrapperAction registerCommand
regVerbosity regDistPref
,testCommand `commandAddAction` testAction
,benchmarkCommand `commandAddAction` benchmarkAction
,hiddenCommand $
formatCommand `commandAddAction` formatAction
,hiddenCommand $
upgradeCommand `commandAddAction` upgradeAction
,hiddenCommand $
win32SelfUpgradeCommand`commandAddAction` win32SelfUpgradeAction
]
wrapperAction :: Monoid flags
=> CommandUI flags
-> (flags -> Flag Verbosity)
-> (flags -> Flag String)
-> Command (GlobalFlags -> IO ())
wrapperAction command verbosityFlag distPrefFlag =
commandAddAction command
{ commandDefaultFlags = mempty } $ \flags extraArgs _globalFlags -> do
let verbosity = fromFlagOrDefault normal (verbosityFlag flags)
setupScriptOptions = defaultSetupScriptOptions {
useDistPref = fromFlagOrDefault
(useDistPref defaultSetupScriptOptions)
(distPrefFlag flags)
}
setupWrapper verbosity setupScriptOptions Nothing
command (const flags) extraArgs
configureAction :: (ConfigFlags, ConfigExFlags)
-> [String] -> GlobalFlags -> IO ()
configureAction (configFlags, configExFlags) extraArgs globalFlags = do
let verbosity = fromFlagOrDefault normal (configVerbosity configFlags)
(useSandbox, config) <- loadConfigOrSandboxConfig verbosity
globalFlags (configUserInstall configFlags)
let configFlags' = savedConfigureFlags config `mappend` configFlags
configExFlags' = savedConfigureExFlags config `mappend` configExFlags
globalFlags' = savedGlobalFlags config `mappend` globalFlags
(comp, platform, conf) <- configCompilerAuxEx configFlags'
-- If we're working inside a sandbox and the user has set the -w option, we
-- may need to create a sandbox-local package DB for this compiler and add a
-- timestamp record for this compiler to the timestamp file.
let configFlags'' = case useSandbox of
NoSandbox -> configFlags'
(UseSandbox sandboxDir) -> setPackageDB sandboxDir
comp platform configFlags'
whenUsingSandbox useSandbox $ \sandboxDir -> do
initPackageDBIfNeeded verbosity configFlags'' comp conf
-- NOTE: We do not write the new sandbox package DB location to
-- 'cabal.sandbox.config' here because 'configure -w' must not affect
-- subsequent 'install' (for UI compatibility with non-sandboxed mode).
indexFile <- tryGetIndexFilePath config
maybeAddCompilerTimestampRecord verbosity sandboxDir indexFile
(compilerId comp) platform
maybeWithSandboxDirOnSearchPath useSandbox $
configure verbosity
(configPackageDB' configFlags'')
(globalRepos globalFlags')
comp platform conf configFlags'' configExFlags' extraArgs
buildAction :: (BuildFlags, BuildExFlags) -> [String] -> GlobalFlags -> IO ()
buildAction (buildFlags, buildExFlags) extraArgs globalFlags = do
let distPref = fromFlagOrDefault (useDistPref defaultSetupScriptOptions)
(buildDistPref buildFlags)
verbosity = fromFlagOrDefault normal (buildVerbosity buildFlags)
noAddSource = fromFlagOrDefault DontSkipAddSourceDepsCheck
(buildOnly buildExFlags)
-- Calls 'configureAction' to do the real work, so nothing special has to be
-- done to support sandboxes.
(useSandbox, config) <- reconfigure verbosity distPref
mempty [] globalFlags noAddSource
(buildNumJobs buildFlags) (const Nothing)
maybeWithSandboxDirOnSearchPath useSandbox $
build verbosity config distPref buildFlags extraArgs
-- | Actually do the work of building the package. This is separate from
-- 'buildAction' so that 'testAction' and 'benchmarkAction' do not invoke
-- 'reconfigure' twice.
build :: Verbosity -> SavedConfig -> FilePath -> BuildFlags -> [String] -> IO ()
build verbosity config distPref buildFlags extraArgs =
setupWrapper verbosity setupOptions Nothing
(Cabal.buildCommand progConf) mkBuildFlags extraArgs
where
progConf = defaultProgramConfiguration
setupOptions = defaultSetupScriptOptions { useDistPref = distPref }
mkBuildFlags version = filterBuildFlags version config buildFlags'
buildFlags' = buildFlags
{ buildVerbosity = toFlag verbosity
, buildDistPref = toFlag distPref
}
-- | Make sure that we don't pass new flags to setup scripts compiled against
-- old versions of Cabal.
filterBuildFlags :: Version -> SavedConfig -> BuildFlags -> BuildFlags
filterBuildFlags version config buildFlags
| version >= Version [1,19,1] [] = buildFlags_latest
-- Cabal < 1.19.1 doesn't support 'build -j'.
| otherwise = buildFlags_pre_1_19_1
where
buildFlags_pre_1_19_1 = buildFlags {
buildNumJobs = NoFlag
}
buildFlags_latest = buildFlags {
-- Take the 'jobs' setting '~/.cabal/config' into account.
buildNumJobs = Flag . Just . determineNumJobs $
(numJobsConfigFlag `mappend` numJobsCmdLineFlag)
}
numJobsConfigFlag = installNumJobs . savedInstallFlags $ config
numJobsCmdLineFlag = buildNumJobs buildFlags
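-- A minimal illustrative sketch (hypothetical name, not part of cabal-install)
-- of the version gate used by 'filterBuildFlags' above: a flag is only passed
-- through when the setup script's Cabal library is new enough to understand it.
supportsParallelBuild :: Version -> Bool
supportsParallelBuild cabalLibVersion = cabalLibVersion >= Version [1,19,1] []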
replAction :: (ReplFlags, BuildExFlags) -> [String] -> GlobalFlags -> IO ()
replAction (replFlags, buildExFlags) extraArgs globalFlags = do
cwd <- getCurrentDirectory
pkgDesc <- findPackageDesc cwd
either (const onNoPkgDesc) (const onPkgDesc) pkgDesc
where
verbosity = fromFlagOrDefault normal (replVerbosity replFlags)
-- There is a .cabal file in the current directory: start a REPL and load
-- the project's modules.
onPkgDesc = do
let distPref = fromFlagOrDefault (useDistPref defaultSetupScriptOptions)
(replDistPref replFlags)
noAddSource = case replReload replFlags of
Flag True -> SkipAddSourceDepsCheck
_ -> fromFlagOrDefault DontSkipAddSourceDepsCheck
(buildOnly buildExFlags)
progConf = defaultProgramConfiguration
setupOptions = defaultSetupScriptOptions
{ useCabalVersion = orLaterVersion $ Version [1,18,0] []
, useDistPref = distPref
}
replFlags' = replFlags
{ replVerbosity = toFlag verbosity
, replDistPref = toFlag distPref
}
-- Calls 'configureAction' to do the real work, so nothing special has to
-- be done to support sandboxes.
(useSandbox, _config) <- reconfigure verbosity distPref
mempty [] globalFlags noAddSource NoFlag
(const Nothing)
maybeWithSandboxDirOnSearchPath useSandbox $
setupWrapper verbosity setupOptions Nothing
(Cabal.replCommand progConf) (const replFlags') extraArgs
-- No .cabal file in the current directory: just start the REPL (possibly
-- using the sandbox package DB).
onNoPkgDesc = do
(_useSandbox, config) <- loadConfigOrSandboxConfig verbosity globalFlags
mempty
let configFlags = savedConfigureFlags config
(comp, _platform, programDb) <- configCompilerAux' configFlags
startInterpreter verbosity programDb comp (configPackageDB' configFlags)
-- | Re-configure the package in the current directory if needed. Deciding
-- when to reconfigure and with which options is convoluted:
--
-- If we are reconfiguring, we must always run @configure@ with the
-- verbosity option we are given; however, the fact that a previous
-- configuration used a different verbosity setting is not reason enough to
-- reconfigure.
--
-- The package should be configured to use the same \"dist\" prefix as
-- given to the @build@ command, otherwise the build will probably
-- fail. Not only does this determine the \"dist\" prefix setting if we
-- need to reconfigure anyway, but an existing configuration should be
-- invalidated if its \"dist\" prefix differs.
--
-- If the package has never been configured (i.e., there is no
-- LocalBuildInfo), we must configure first, using the default options.
--
-- If the package has been configured, there will be a 'LocalBuildInfo'.
-- If there is no package description file, we assume that the
-- 'PackageDescription' is up to date, though the configuration may need
-- to be updated for other reasons (see above). If there is a package
-- description file, and it has been modified since the 'LocalBuildInfo'
-- was generated, then we need to reconfigure.
--
-- The caller of this function may also have specific requirements
-- regarding the flags the last configuration used. For example,
-- 'testAction' requires that the package be configured with test suites
-- enabled. The caller may pass the required settings to this function
-- along with a function to check the validity of the saved 'ConfigFlags';
-- these required settings will be checked first upon determining that
-- a previous configuration exists.
reconfigure :: Verbosity -- ^ Verbosity setting
-> FilePath -- ^ \"dist\" prefix
-> ConfigFlags -- ^ Additional config flags to set. These flags
-- will be 'mappend'ed to the last used or
-- default 'ConfigFlags' as appropriate, so
-- this value should be 'mempty' with only the
-- required flags set. The required verbosity
-- and \"dist\" prefix flags will be set
-- automatically because they are always
-- required; therefore, it is not necessary to
-- set them here.
-> [String] -- ^ Extra arguments
-> GlobalFlags -- ^ Global flags
-> SkipAddSourceDepsCheck
-- ^ Should we skip the timestamp check for modified
-- add-source dependencies?
-> Flag (Maybe Int)
-- ^ -j flag for reinstalling add-source deps.
-> (ConfigFlags -> Maybe String)
-- ^ Check that the required flags are set in
-- the last used 'ConfigFlags'. If the required
-- flags are not set, provide a message to the
-- user explaining the reason for
-- reconfiguration. Because the correct \"dist\"
-- prefix setting is always required, it is checked
-- automatically; this function need not check
-- for it.
-> IO (UseSandbox, SavedConfig)
reconfigure verbosity distPref addConfigFlags extraArgs globalFlags
skipAddSourceDepsCheck numJobsFlag checkFlags = do
eLbi <- tryGetPersistBuildConfig distPref
case eLbi of
Left err -> onNoBuildConfig err
Right lbi -> onBuildConfig lbi
where
-- We couldn't load the saved package config file.
--
-- If we're in a sandbox: add-source deps don't have to be reinstalled
-- (since we don't know the compiler & platform).
onNoBuildConfig :: ConfigStateFileError -> IO (UseSandbox, SavedConfig)
onNoBuildConfig err = do
let msg = case err of
ConfigStateFileMissing -> "Package has never been configured."
ConfigStateFileNoParse -> "Saved package config file seems "
++ "to be corrupt."
_ -> show err
case err of
ConfigStateFileBadVersion _ _ _ -> info verbosity msg
_ -> do
notice verbosity
$ msg ++ " Configuring with default flags." ++ configureManually
configureAction (defaultFlags, defaultConfigExFlags)
extraArgs globalFlags
loadConfigOrSandboxConfig verbosity globalFlags mempty
-- Package has been configured, but the configuration may be out of
-- date or required flags may not be set.
--
-- If we're in a sandbox: reinstall the modified add-source deps and
-- force reconfigure if we did.
onBuildConfig :: LBI.LocalBuildInfo -> IO (UseSandbox, SavedConfig)
onBuildConfig lbi = do
let configFlags = LBI.configFlags lbi
flags = mconcat [configFlags, addConfigFlags, distVerbFlags]
-- Was the sandbox created after the package was already configured? We
-- may need to skip reinstallation of add-source deps and force
-- reconfigure.
let buildConfig = localBuildInfoFile distPref
sandboxConfig <- getSandboxConfigFilePath globalFlags
isSandboxConfigNewer <-
sandboxConfig `existsAndIsMoreRecentThan` buildConfig
let skipAddSourceDepsCheck'
| isSandboxConfigNewer = SkipAddSourceDepsCheck
| otherwise = skipAddSourceDepsCheck
when (skipAddSourceDepsCheck' == SkipAddSourceDepsCheck) $
info verbosity "Skipping add-source deps check..."
(useSandbox, config, depsReinstalled) <-
case skipAddSourceDepsCheck' of
DontSkipAddSourceDepsCheck ->
maybeReinstallAddSourceDeps verbosity numJobsFlag flags globalFlags
SkipAddSourceDepsCheck -> do
(useSandbox, config) <- loadConfigOrSandboxConfig verbosity
globalFlags (configUserInstall flags)
return (useSandbox, config, NoDepsReinstalled)
-- Is the @cabal.config@ file newer than @dist/setup.config@? Then we need
-- to force reconfigure. Note that it's possible to use @cabal.config@
-- even without sandboxes.
isUserPackageEnvironmentFileNewer <-
userPackageEnvironmentFile `existsAndIsMoreRecentThan` buildConfig
-- Determine whether we need to reconfigure and which message to show to
-- the user if that is the case.
mMsg <- determineMessageToShow lbi configFlags depsReinstalled
isSandboxConfigNewer
isUserPackageEnvironmentFileNewer
case mMsg of
-- No message for the user indicates that reconfiguration
-- is not required.
Nothing -> return (useSandbox, config)
-- Show the message and reconfigure.
Just msg -> do
notice verbosity msg
configureAction (flags, defaultConfigExFlags)
extraArgs globalFlags
return (useSandbox, config)
-- Determine what message, if any, to display to the user if reconfiguration
-- is required.
determineMessageToShow :: LBI.LocalBuildInfo -> ConfigFlags
-> WereDepsReinstalled -> Bool -> Bool
-> IO (Maybe String)
determineMessageToShow _ _ _ True _ =
-- The sandbox was created after the package was already configured.
return $! Just $! sandboxConfigNewerMessage
determineMessageToShow _ _ _ False True =
-- The user package environment file was modified.
return $! Just $! userPackageEnvironmentFileModifiedMessage
determineMessageToShow lbi configFlags depsReinstalled False False = do
let savedDistPref = fromFlagOrDefault
(useDistPref defaultSetupScriptOptions)
(configDistPref configFlags)
case depsReinstalled of
ReinstalledSomeDeps ->
-- Some add-source deps were reinstalled.
return $! Just $! reinstalledDepsMessage
NoDepsReinstalled ->
case checkFlags configFlags of
-- Flag required by the caller is not set.
Just msg -> return $! Just $! msg ++ configureManually
Nothing
-- Required "dist" prefix is not set.
| savedDistPref /= distPref ->
return $! Just distPrefMessage
-- All required flags are set, but the configuration
-- may be outdated.
| otherwise -> case LBI.pkgDescrFile lbi of
Nothing -> return Nothing
Just pdFile -> do
outdated <- checkPersistBuildConfigOutdated
distPref pdFile
return $! if outdated
then Just $! outdatedMessage pdFile
else Nothing
defaultFlags = mappend addConfigFlags distVerbFlags
distVerbFlags = mempty
{ configVerbosity = toFlag verbosity
, configDistPref = toFlag distPref
}
reconfiguringMostRecent = " Re-configuring with most recently used options."
configureManually = " If this fails, please run configure manually."
sandboxConfigNewerMessage =
"The sandbox was created after the package was already configured."
++ reconfiguringMostRecent
++ configureManually
userPackageEnvironmentFileModifiedMessage =
"The user package environment file ('"
++ userPackageEnvironmentFile ++ "') was modified."
++ reconfiguringMostRecent
++ configureManually
distPrefMessage =
"Package previously configured with different \"dist\" prefix."
++ reconfiguringMostRecent
++ configureManually
outdatedMessage pdFile =
pdFile ++ " has been changed."
++ reconfiguringMostRecent
++ configureManually
reinstalledDepsMessage =
"Some add-source dependencies have been reinstalled."
++ reconfiguringMostRecent
++ configureManually
installAction :: (ConfigFlags, ConfigExFlags, InstallFlags, HaddockFlags)
-> [String] -> GlobalFlags -> IO ()
installAction (configFlags, _, installFlags, _) _ _globalFlags
| fromFlagOrDefault False (installOnly installFlags)
= let verbosity = fromFlagOrDefault normal (configVerbosity configFlags)
in setupWrapper verbosity defaultSetupScriptOptions Nothing
installCommand (const mempty) []
installAction (configFlags, configExFlags, installFlags, haddockFlags)
extraArgs globalFlags = do
let verbosity = fromFlagOrDefault normal (configVerbosity configFlags)
(useSandbox, config) <- loadConfigOrSandboxConfig verbosity
globalFlags (configUserInstall configFlags)
targets <- readUserTargets verbosity extraArgs
-- TODO: It'd be nice if 'cabal install' picked up the '-w' flag passed to
-- 'configure' when run inside a sandbox. Right now, running
--
-- $ cabal sandbox init && cabal configure -w /path/to/ghc
-- && cabal build && cabal install
--
-- performs the compilation twice unless you also pass -w to 'install'.
-- However, this is the same behaviour that 'cabal install' has in the normal
-- mode of operation, so we stick to it for consistency.
let sandboxDistPref = case useSandbox of
NoSandbox -> NoFlag
UseSandbox sandboxDir -> Flag $ sandboxBuildDir sandboxDir
configFlags' = maybeForceTests installFlags' $
savedConfigureFlags config `mappend` configFlags
configExFlags' = defaultConfigExFlags `mappend`
savedConfigureExFlags config `mappend` configExFlags
installFlags' = defaultInstallFlags `mappend`
savedInstallFlags config `mappend` installFlags
haddockFlags' = defaultHaddockFlags `mappend`
savedHaddockFlags config `mappend` haddockFlags
globalFlags' = savedGlobalFlags config `mappend` globalFlags
(comp, platform, conf) <- configCompilerAux' configFlags'
-- TODO: Redesign ProgramDB API to prevent such problems as #2241 in the future.
conf' <- configureAllKnownPrograms verbosity conf
-- If we're working inside a sandbox and the user has set the -w option, we
-- may need to create a sandbox-local package DB for this compiler and add a
-- timestamp record for this compiler to the timestamp file.
configFlags'' <- case useSandbox of
NoSandbox -> configAbsolutePaths $ configFlags'
(UseSandbox sandboxDir) ->
return $ (setPackageDB sandboxDir comp platform configFlags') {
configDistPref = sandboxDistPref
}
whenUsingSandbox useSandbox $ \sandboxDir -> do
initPackageDBIfNeeded verbosity configFlags'' comp conf'
indexFile <- tryGetIndexFilePath config
maybeAddCompilerTimestampRecord verbosity sandboxDir indexFile
(compilerId comp) platform
-- FIXME: Passing 'SandboxPackageInfo' to install unconditionally here means
-- that 'cabal install some-package' inside a sandbox will sometimes reinstall
-- modified add-source deps, even if they are not among the dependencies of
-- 'some-package'. This can also prevent packages that depend on older
-- versions of add-source'd packages from building (see #1362).
maybeWithSandboxPackageInfo verbosity configFlags'' globalFlags'
comp platform conf useSandbox $ \mSandboxPkgInfo ->
maybeWithSandboxDirOnSearchPath useSandbox $
install verbosity
(configPackageDB' configFlags'')
(globalRepos globalFlags')
comp platform conf'
useSandbox mSandboxPkgInfo
globalFlags' configFlags'' configExFlags'
installFlags' haddockFlags'
targets
where
-- '--run-tests' implies '--enable-tests'.
maybeForceTests installFlags' configFlags' =
if fromFlagOrDefault False (installRunTests installFlags')
then configFlags' { configTests = toFlag True }
else configFlags'
testAction :: (TestFlags, BuildFlags, BuildExFlags) -> [String] -> GlobalFlags
-> IO ()
testAction (testFlags, buildFlags, buildExFlags) extraArgs globalFlags = do
let verbosity = fromFlagOrDefault normal (testVerbosity testFlags)
distPref = fromFlagOrDefault (useDistPref defaultSetupScriptOptions)
(testDistPref testFlags)
setupOptions = defaultSetupScriptOptions { useDistPref = distPref }
buildFlags' = buildFlags { buildVerbosity = testVerbosity testFlags
, buildDistPref = testDistPref testFlags }
addConfigFlags = mempty { configTests = toFlag True }
checkFlags flags
| fromFlagOrDefault False (configTests flags) = Nothing
| otherwise = Just "Re-configuring with test suites enabled."
noAddSource = fromFlagOrDefault DontSkipAddSourceDepsCheck
(buildOnly buildExFlags)
-- reconfigure also checks if we're in a sandbox and reinstalls add-source
-- deps if needed.
(useSandbox, config) <- reconfigure verbosity distPref addConfigFlags []
globalFlags noAddSource
(buildNumJobs buildFlags') checkFlags
-- the package was just configured, so the LBI must be available
lbi <- getPersistBuildConfig distPref
let pkgDescr = LBI.localPkgDescr lbi
nameTestsOnly =
LBI.foldComponent
(const Nothing)
(const Nothing)
(\t ->
if buildable (testBuildInfo t)
then Just (testName t)
else Nothing)
(const Nothing)
tests = mapMaybe nameTestsOnly $ LBI.pkgComponents pkgDescr
extraArgs'
| null extraArgs = tests
| otherwise = extraArgs
if null tests
then notice verbosity "Package has no buildable test suites."
else do
maybeWithSandboxDirOnSearchPath useSandbox $
build verbosity config distPref buildFlags' extraArgs'
maybeWithSandboxDirOnSearchPath useSandbox $
setupWrapper verbosity setupOptions Nothing
Cabal.testCommand (const testFlags) extraArgs'
benchmarkAction :: (BenchmarkFlags, BuildFlags, BuildExFlags)
-> [String] -> GlobalFlags
-> IO ()
benchmarkAction (benchmarkFlags, buildFlags, buildExFlags)
extraArgs globalFlags = do
let verbosity = fromFlagOrDefault normal
(benchmarkVerbosity benchmarkFlags)
distPref = fromFlagOrDefault (useDistPref defaultSetupScriptOptions)
(benchmarkDistPref benchmarkFlags)
setupOptions = defaultSetupScriptOptions { useDistPref = distPref }
buildFlags' = buildFlags
{ buildVerbosity = benchmarkVerbosity benchmarkFlags
, buildDistPref = benchmarkDistPref benchmarkFlags }
addConfigFlags = mempty { configBenchmarks = toFlag True }
checkFlags flags
| fromFlagOrDefault False (configBenchmarks flags) = Nothing
| otherwise = Just "Re-configuring with benchmarks enabled."
noAddSource = fromFlagOrDefault DontSkipAddSourceDepsCheck
(buildOnly buildExFlags)
-- reconfigure also checks if we're in a sandbox and reinstalls add-source
-- deps if needed.
(useSandbox, config) <- reconfigure verbosity distPref addConfigFlags []
globalFlags noAddSource (buildNumJobs buildFlags')
checkFlags
-- the package was just configured, so the LBI must be available
lbi <- getPersistBuildConfig distPref
let pkgDescr = LBI.localPkgDescr lbi
nameBenchsOnly =
LBI.foldComponent
(const Nothing)
(const Nothing)
(const Nothing)
(\b ->
if buildable (benchmarkBuildInfo b)
then Just (benchmarkName b)
else Nothing)
benchs = mapMaybe nameBenchsOnly $ LBI.pkgComponents pkgDescr
extraArgs'
| null extraArgs = benchs
| otherwise = extraArgs
if null benchs
then notice verbosity "Package has no buildable benchmarks."
else do
maybeWithSandboxDirOnSearchPath useSandbox $
build verbosity config distPref buildFlags' extraArgs'
maybeWithSandboxDirOnSearchPath useSandbox $
setupWrapper verbosity setupOptions Nothing
Cabal.benchmarkCommand (const benchmarkFlags) extraArgs'
haddockAction :: HaddockFlags -> [String] -> GlobalFlags -> IO ()
haddockAction haddockFlags extraArgs globalFlags = do
let verbosity = fromFlag (haddockVerbosity haddockFlags)
(_useSandbox, config) <- loadConfigOrSandboxConfig verbosity globalFlags mempty
let haddockFlags' = defaultHaddockFlags `mappend`
savedHaddockFlags config `mappend` haddockFlags
setupScriptOptions = defaultSetupScriptOptions {
useDistPref = fromFlagOrDefault
(useDistPref defaultSetupScriptOptions)
(haddockDistPref haddockFlags')
}
setupWrapper verbosity setupScriptOptions Nothing
haddockCommand (const haddockFlags') extraArgs
cleanAction :: CleanFlags -> [String] -> GlobalFlags -> IO ()
cleanAction cleanFlags extraArgs _globalFlags =
setupWrapper verbosity setupScriptOptions Nothing
cleanCommand (const cleanFlags) extraArgs
where
verbosity = fromFlagOrDefault normal (cleanVerbosity cleanFlags)
setupScriptOptions = defaultSetupScriptOptions {
useDistPref = fromFlagOrDefault
(useDistPref defaultSetupScriptOptions)
(cleanDistPref cleanFlags),
useWin32CleanHack = True
}
listAction :: ListFlags -> [String] -> GlobalFlags -> IO ()
listAction listFlags extraArgs globalFlags = do
let verbosity = fromFlag (listVerbosity listFlags)
(_useSandbox, config) <- loadConfigOrSandboxConfig verbosity globalFlags mempty
let configFlags' = savedConfigureFlags config
configFlags = configFlags' {
configPackageDBs = configPackageDBs configFlags'
`mappend` listPackageDBs listFlags
}
globalFlags' = savedGlobalFlags config `mappend` globalFlags
(comp, _, conf) <- configCompilerAux' configFlags
List.list verbosity
(configPackageDB' configFlags)
(globalRepos globalFlags')
comp
conf
listFlags
extraArgs
infoAction :: InfoFlags -> [String] -> GlobalFlags -> IO ()
infoAction infoFlags extraArgs globalFlags = do
let verbosity = fromFlag (infoVerbosity infoFlags)
targets <- readUserTargets verbosity extraArgs
(_useSandbox, config) <- loadConfigOrSandboxConfig verbosity globalFlags mempty
let configFlags' = savedConfigureFlags config
configFlags = configFlags' {
configPackageDBs = configPackageDBs configFlags'
`mappend` infoPackageDBs infoFlags
}
globalFlags' = savedGlobalFlags config `mappend` globalFlags
(comp, _, conf) <- configCompilerAuxEx configFlags
List.info verbosity
(configPackageDB' configFlags)
(globalRepos globalFlags')
comp
conf
globalFlags'
infoFlags
targets
updateAction :: Flag Verbosity -> [String] -> GlobalFlags -> IO ()
updateAction verbosityFlag extraArgs globalFlags = do
unless (null extraArgs) $
die $ "'update' doesn't take any extra arguments: " ++ unwords extraArgs
let verbosity = fromFlag verbosityFlag
(_useSandbox, config) <- loadConfigOrSandboxConfig verbosity globalFlags NoFlag
let globalFlags' = savedGlobalFlags config `mappend` globalFlags
update verbosity (globalRepos globalFlags')
upgradeAction :: (ConfigFlags, ConfigExFlags, InstallFlags, HaddockFlags)
-> [String] -> GlobalFlags -> IO ()
upgradeAction _ _ _ = die $
"Use the 'cabal install' command instead of 'cabal upgrade'.\n"
++ "You can install the latest version of a package using 'cabal install'. "
++ "The 'cabal upgrade' command has been removed because people found it "
++ "confusing and it often led to broken packages.\n"
++ "If you want the old upgrade behaviour then use the install command "
++ "with the --upgrade-dependencies flag (but check first with --dry-run "
++ "to see what would happen). This will try to pick the latest versions "
++ "of all dependencies, rather than the usual behaviour of trying to pick "
++ "installed versions of all dependencies. If you do use "
++ "--upgrade-dependencies, it is recommended that you do not upgrade core "
++ "packages (e.g. by using appropriate --constraint= flags)."
fetchAction :: FetchFlags -> [String] -> GlobalFlags -> IO ()
fetchAction fetchFlags extraArgs globalFlags = do
let verbosity = fromFlag (fetchVerbosity fetchFlags)
targets <- readUserTargets verbosity extraArgs
config <- loadConfig verbosity (globalConfigFile globalFlags) mempty
let configFlags = savedConfigureFlags config
globalFlags' = savedGlobalFlags config `mappend` globalFlags
(comp, platform, conf) <- configCompilerAux' configFlags
fetch verbosity
(configPackageDB' configFlags)
(globalRepos globalFlags')
comp platform conf globalFlags' fetchFlags
targets
freezeAction :: FreezeFlags -> [String] -> GlobalFlags -> IO ()
freezeAction freezeFlags _extraArgs globalFlags = do
let verbosity = fromFlag (freezeVerbosity freezeFlags)
(useSandbox, config) <- loadConfigOrSandboxConfig verbosity globalFlags mempty
let configFlags = savedConfigureFlags config
globalFlags' = savedGlobalFlags config `mappend` globalFlags
(comp, platform, conf) <- configCompilerAux' configFlags
maybeWithSandboxPackageInfo verbosity configFlags globalFlags'
comp platform conf useSandbox $ \mSandboxPkgInfo ->
maybeWithSandboxDirOnSearchPath useSandbox $
freeze verbosity
(configPackageDB' configFlags)
(globalRepos globalFlags')
comp platform conf
mSandboxPkgInfo
globalFlags' freezeFlags
uploadAction :: UploadFlags -> [String] -> GlobalFlags -> IO ()
uploadAction uploadFlags extraArgs globalFlags = do
let verbosity = fromFlag (uploadVerbosity uploadFlags)
config <- loadConfig verbosity (globalConfigFile globalFlags) mempty
let uploadFlags' = savedUploadFlags config `mappend` uploadFlags
globalFlags' = savedGlobalFlags config `mappend` globalFlags
tarfiles = extraArgs
checkTarFiles extraArgs
if fromFlag (uploadCheck uploadFlags')
then Upload.check verbosity tarfiles
else upload verbosity
(globalRepos globalFlags')
(flagToMaybe $ uploadUsername uploadFlags')
(flagToMaybe $ uploadPassword uploadFlags')
tarfiles
where
checkTarFiles tarfiles
| null tarfiles
= die "the 'upload' command expects one or more .tar.gz packages."
| not (null otherFiles)
= die $ "the 'upload' command expects only .tar.gz packages: "
++ intercalate ", " otherFiles
| otherwise = sequence_
[ do exists <- doesFileExist tarfile
unless exists $ die $ "file not found: " ++ tarfile
| tarfile <- tarfiles ]
where otherFiles = filter (not . isTarGzFile) tarfiles
isTarGzFile file = case splitExtension file of
(file', ".gz") -> takeExtension file' == ".tar"
_ -> False
checkAction :: Flag Verbosity -> [String] -> GlobalFlags -> IO ()
checkAction verbosityFlag extraArgs _globalFlags = do
unless (null extraArgs) $
die $ "'check' doesn't take any extra arguments: " ++ unwords extraArgs
allOk <- Check.check (fromFlag verbosityFlag)
unless allOk exitFailure
formatAction :: Flag Verbosity -> [String] -> GlobalFlags -> IO ()
formatAction verbosityFlag extraArgs _globalFlags = do
let verbosity = fromFlag verbosityFlag
path <- case extraArgs of
[] -> do cwd <- getCurrentDirectory
tryFindPackageDesc cwd
(p:_) -> return p
pkgDesc <- readPackageDescription verbosity path
-- Uses 'writeFileAtomic' under the hood.
writeGenericPackageDescription path pkgDesc
sdistAction :: (SDistFlags, SDistExFlags) -> [String] -> GlobalFlags -> IO ()
sdistAction (sdistFlags, sdistExFlags) extraArgs _globalFlags = do
unless (null extraArgs) $
die $ "'sdist' doesn't take any extra arguments: " ++ unwords extraArgs
sdist sdistFlags sdistExFlags
reportAction :: ReportFlags -> [String] -> GlobalFlags -> IO ()
reportAction reportFlags extraArgs globalFlags = do
unless (null extraArgs) $
die $ "'report' doesn't take any extra arguments: " ++ unwords extraArgs
let verbosity = fromFlag (reportVerbosity reportFlags)
config <- loadConfig verbosity (globalConfigFile globalFlags) mempty
let globalFlags' = savedGlobalFlags config `mappend` globalFlags
reportFlags' = savedReportFlags config `mappend` reportFlags
Upload.report verbosity (globalRepos globalFlags')
(flagToMaybe $ reportUsername reportFlags')
(flagToMaybe $ reportPassword reportFlags')
runAction :: (BuildFlags, BuildExFlags) -> [String] -> GlobalFlags -> IO ()
runAction (buildFlags, buildExFlags) extraArgs globalFlags = do
let verbosity = fromFlagOrDefault normal (buildVerbosity buildFlags)
distPref = fromFlagOrDefault (useDistPref defaultSetupScriptOptions)
(buildDistPref buildFlags)
noAddSource = fromFlagOrDefault DontSkipAddSourceDepsCheck
(buildOnly buildExFlags)
-- reconfigure also checks if we're in a sandbox and reinstalls add-source
-- deps if needed.
(useSandbox, config) <- reconfigure verbosity distPref mempty []
globalFlags noAddSource (buildNumJobs buildFlags)
(const Nothing)
lbi <- getPersistBuildConfig distPref
(exe, exeArgs) <- splitRunArgs lbi extraArgs
maybeWithSandboxDirOnSearchPath useSandbox $
build verbosity config distPref buildFlags ["exe:" ++ exeName exe]
maybeWithSandboxDirOnSearchPath useSandbox $
run verbosity lbi exe exeArgs
getAction :: GetFlags -> [String] -> GlobalFlags -> IO ()
getAction getFlags extraArgs globalFlags = do
let verbosity = fromFlag (getVerbosity getFlags)
targets <- readUserTargets verbosity extraArgs
config <- loadConfig verbosity (globalConfigFile globalFlags) mempty
let globalFlags' = savedGlobalFlags config `mappend` globalFlags
get verbosity
(globalRepos (savedGlobalFlags config))
globalFlags'
getFlags
targets
unpackAction :: GetFlags -> [String] -> GlobalFlags -> IO ()
unpackAction getFlags extraArgs globalFlags = do
getAction getFlags extraArgs globalFlags
initAction :: InitFlags -> [String] -> GlobalFlags -> IO ()
initAction initFlags _extraArgs globalFlags = do
let verbosity = fromFlag (initVerbosity initFlags)
(_useSandbox, config) <- loadConfigOrSandboxConfig verbosity globalFlags mempty
let configFlags = savedConfigureFlags config
(comp, _, conf) <- configCompilerAux' configFlags
initCabal verbosity
(configPackageDB' configFlags)
comp
conf
initFlags
sandboxAction :: SandboxFlags -> [String] -> GlobalFlags -> IO ()
sandboxAction sandboxFlags extraArgs globalFlags = do
let verbosity = fromFlag (sandboxVerbosity sandboxFlags)
case extraArgs of
-- Basic sandbox commands.
["init"] -> sandboxInit verbosity sandboxFlags globalFlags
["delete"] -> sandboxDelete verbosity sandboxFlags globalFlags
("add-source":extra) -> do
when (noExtraArgs extra) $
die "The 'sandbox add-source' command expects at least one argument"
sandboxAddSource verbosity extra sandboxFlags globalFlags
("delete-source":extra) -> do
when (noExtraArgs extra) $
die "The 'sandbox delete-source' command expects \
\at least one argument"
sandboxDeleteSource verbosity extra sandboxFlags globalFlags
["list-sources"] -> sandboxListSources verbosity sandboxFlags globalFlags
-- More advanced commands.
("hc-pkg":extra) -> do
when (noExtraArgs extra) $
die $ "The 'sandbox hc-pkg' command expects at least one argument"
sandboxHcPkg verbosity sandboxFlags globalFlags extra
["buildopts"] -> die "Not implemented!"
-- Hidden commands.
["dump-pkgenv"] -> dumpPackageEnvironment verbosity sandboxFlags globalFlags
-- Error handling.
[] -> die $ "Please specify a subcommand (see 'help sandbox')"
_ -> die $ "Unknown 'sandbox' subcommand: " ++ unwords extraArgs
where
noExtraArgs = (<1) . length
execAction :: ExecFlags -> [String] -> GlobalFlags -> IO ()
execAction execFlags extraArgs globalFlags = do
let verbosity = fromFlag (execVerbosity execFlags)
(useSandbox, config) <- loadConfigOrSandboxConfig verbosity globalFlags
mempty
let configFlags = savedConfigureFlags config
(comp, platform, conf) <- configCompilerAux' configFlags
exec verbosity useSandbox comp platform conf extraArgs
userConfigAction :: UserConfigFlags -> [String] -> GlobalFlags -> IO ()
userConfigAction ucflags extraArgs globalFlags = do
let verbosity = fromFlag (userConfigVerbosity ucflags)
case extraArgs of
("diff":_) -> mapM_ putStrLn =<< userConfigDiff globalFlags
("update":_) -> userConfigUpdate verbosity globalFlags
-- Error handling.
[] -> die $ "Please specify a subcommand (see 'help user-config')"
_ -> die $ "Unknown 'user-config' subcommand: " ++ unwords extraArgs
-- | See 'Distribution.Client.Install.withWin32SelfUpgrade' for details.
--
win32SelfUpgradeAction :: Win32SelfUpgradeFlags -> [String] -> GlobalFlags
-> IO ()
win32SelfUpgradeAction selfUpgradeFlags (pid:path:_extraArgs) _globalFlags = do
let verbosity = fromFlag (win32SelfUpgradeVerbosity selfUpgradeFlags)
Win32SelfUpgrade.deleteOldExeFile verbosity (read pid) path
win32SelfUpgradeAction _ _ _ = return ()
|
DavidAlphaFox/ghc
|
libraries/Cabal/cabal-install/Main.hs
|
bsd-3-clause
| 51,412 | 0 | 24 | 14,196 | 8,986 | 4,733 | 4,253 | 825 | 13 |
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE DataKinds #-}
--------------------------------------------------------------------------------
-- | Assert folding: add assertions that floating-point expressions are not NaN or infinite.
--------------------------------------------------------------------------------
module Ivory.Opts.FP
( fpFold
) where
import Ivory.Opts.AssertFold
import qualified Ivory.Language.Syntax.AST as I
import qualified Ivory.Language.Syntax.Type as I
--------------------------------------------------------------------------------
fpFold :: I.Proc -> I.Proc
fpFold = procFold "fp" (expFoldDefault fpAssert)
--------------------------------------------------------------------------------
-- We're assuming we don't have to check lits---that you'd never actually
-- construct a literal inf or NaN value!
fpAssert :: I.Type -> I.Expr -> FolderStmt ()
fpAssert ty e = case ty of
I.TyFloat -> asst
I.TyDouble -> asst
_ -> return ()
where asst = insert (mkAssert ty e)
mkAssert :: I.Type -> I.Expr -> I.Stmt
mkAssert ty e = I.CompilerAssert $ I.ExpOp I.ExpAnd
[ I.ExpOp I.ExpNot [I.ExpOp (I.ExpIsNan ty) [e]]
, I.ExpOp I.ExpNot [I.ExpOp (I.ExpIsInf ty) [e]] ]
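-- As a rough sketch of the intent (an illustration added here, not part of the
-- original module): for a Float- or Double-typed expression @e@, the inserted
-- statement corresponds to the runtime check
--
-- >   compiler_assert( !isnan(e) && !isinf(e) );
--
-- so a NaN or infinity flowing through that program point trips the assertion.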
|
GaloisInc/ivory
|
ivory-opts/src/Ivory/Opts/FP.hs
|
bsd-3-clause
| 1,233 | 0 | 13 | 176 | 268 | 149 | 119 | 20 | 3 |
{-# LANGUAGE FlexibleInstances, TypeSynonymInstances #-}
-- | BlazeMarkup is a markup combinator library. It provides a way to embed
-- markup languages like HTML and SVG in Haskell in an efficient and convenient
-- way, with a light-weight syntax.
--
-- To use the library, one needs to import a set of combinators. For example,
-- you can use HTML 4 Strict from the BlazeHtml package.
--
-- > {-# LANGUAGE OverloadedStrings #-}
-- > import Prelude hiding (head, id, div)
-- > import Text.Blaze.Html4.Strict hiding (map)
-- > import Text.Blaze.Html4.Strict.Attributes hiding (title)
--
-- To render the page later on, you need a so-called Renderer. The recommended
-- renderer is a UTF-8 renderer which produces a lazy bytestring.
--
-- > import Text.Blaze.Renderer.Utf8 (renderMarkup)
--
-- Now, you can describe pages using the imported combinators.
--
-- > page1 :: Markup
-- > page1 = html $ do
-- > head $ do
-- > title "Introduction page."
-- > link ! rel "stylesheet" ! type_ "text/css" ! href "screen.css"
-- > body $ do
-- > div ! id "header" $ "Syntax"
-- > p "This is an example of BlazeMarkup syntax."
-- > ul $ mapM_ (li . toMarkup . show) [1, 2, 3]
--
-- The resulting HTML can now be extracted using:
--
-- > renderMarkup page1
--
module Text.Blaze
(
-- * Important types.
Markup
, Tag
, Attribute
, AttributeValue
-- * Creating attributes.
, dataAttribute
, customAttribute
-- * Converting values to Markup.
, ToMarkup (..)
, unsafeByteString
, unsafeLazyByteString
-- * Creating tags.
, textTag
, stringTag
-- * Converting values to attribute values.
, ToValue (..)
, unsafeByteStringValue
, unsafeLazyByteStringValue
-- * Setting attributes
, (!)
, (!?)
-- * Modifiying Markup trees
, contents
) where
import Data.Int (Int32, Int64)
import Data.Word (Word, Word32, Word64)
import Data.Text (Text)
import qualified Data.Text.Lazy as LT
import Text.Blaze.Internal
-- | Class allowing us to use a single function for Markup values
--
class ToMarkup a where
-- | Convert a value to Markup.
--
toMarkup :: a -> Markup ev
-- | Convert a value to Markup without escaping
--
preEscapedToMarkup :: a -> Markup ev
preEscapedToMarkup = toMarkup
{-# INLINE preEscapedToMarkup #-}
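-- A small usage sketch (illustrative, not part of the original module):
--
-- > toMarkup (42 :: Int)                      -- rendered as the text "42", HTML-escaped
-- > preEscapedToMarkup ("<b>raw</b>" :: Text) -- inserted verbatim, no escaping applied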
-- instance ToMarkup (Markup ev) where
-- toMarkup = id
-- {-# INLINE toMarkup #-}
--
-- instance ToMarkup [Markup ev] where
-- toMarkup = mconcat
-- {-# INLINE toMarkup #-}
instance ToMarkup Text where
toMarkup = text
{-# INLINE toMarkup #-}
preEscapedToMarkup = preEscapedText
{-# INLINE preEscapedToMarkup #-}
instance ToMarkup LT.Text where
toMarkup = lazyText
{-# INLINE toMarkup #-}
preEscapedToMarkup = preEscapedLazyText
{-# INLINE preEscapedToMarkup #-}
instance ToMarkup String where
toMarkup = string
{-# INLINE toMarkup #-}
preEscapedToMarkup = preEscapedString
{-# INLINE preEscapedToMarkup #-}
instance ToMarkup Int where
toMarkup = string . show
{-# INLINE toMarkup #-}
instance ToMarkup Int32 where
toMarkup = string . show
{-# INLINE toMarkup #-}
instance ToMarkup Int64 where
toMarkup = string . show
{-# INLINE toMarkup #-}
instance ToMarkup Char where
toMarkup = string . return
{-# INLINE toMarkup #-}
instance ToMarkup Bool where
toMarkup = string . show
{-# INLINE toMarkup #-}
instance ToMarkup Integer where
toMarkup = string . show
{-# INLINE toMarkup #-}
instance ToMarkup Float where
toMarkup = string . show
{-# INLINE toMarkup #-}
instance ToMarkup Double where
toMarkup = string . show
{-# INLINE toMarkup #-}
instance ToMarkup Word where
toMarkup = string . show
{-# INLINE toMarkup #-}
instance ToMarkup Word32 where
toMarkup = string . show
{-# INLINE toMarkup #-}
instance ToMarkup Word64 where
toMarkup = string . show
{-# INLINE toMarkup #-}
-- | Class allowing us to use a single function for attribute values
--
class ToValue a where
-- | Convert a value to an attribute value
--
toValue :: a -> AttributeValue
-- | Convert a value to an attribute value without escaping
--
preEscapedToValue :: a -> AttributeValue
preEscapedToValue = toValue
{-# INLINE preEscapedToValue #-}
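-- An analogous usage sketch (illustrative): attribute values can be built
-- from plain Haskell values instead of string literals, e.g.
--
-- > toValue (3.14 :: Double)          -- attribute value "3.14"
-- > preEscapedToValue ("a&b" :: Text) -- kept as-is, no escaping applied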
instance ToValue AttributeValue where
toValue = id
{-# INLINE toValue #-}
instance ToValue Text where
toValue = textValue
{-# INLINE toValue #-}
preEscapedToValue = preEscapedTextValue
{-# INLINE preEscapedToValue #-}
instance ToValue LT.Text where
toValue = lazyTextValue
{-# INLINE toValue #-}
preEscapedToValue = preEscapedLazyTextValue
{-# INLINE preEscapedToValue #-}
instance ToValue String where
toValue = stringValue
{-# INLINE toValue #-}
preEscapedToValue = preEscapedStringValue
{-# INLINE preEscapedToValue #-}
instance ToValue Int where
toValue = stringValue . show
{-# INLINE toValue #-}
instance ToValue Int32 where
toValue = stringValue . show
{-# INLINE toValue #-}
instance ToValue Int64 where
toValue = stringValue . show
{-# INLINE toValue #-}
instance ToValue Char where
toValue = stringValue . return
{-# INLINE toValue #-}
instance ToValue Bool where
toValue = stringValue . show
{-# INLINE toValue #-}
instance ToValue Integer where
toValue = stringValue . show
{-# INLINE toValue #-}
instance ToValue Float where
toValue = stringValue . show
{-# INLINE toValue #-}
instance ToValue Double where
toValue = stringValue . show
{-# INLINE toValue #-}
instance ToValue Word where
toValue = stringValue . show
{-# INLINE toValue #-}
instance ToValue Word32 where
toValue = stringValue . show
{-# INLINE toValue #-}
instance ToValue Word64 where
toValue = stringValue . show
{-# INLINE toValue #-}
|
MoixaEnergy/blaze-react
|
src/Text/Blaze.hs
|
mit
| 5,986 | 0 | 8 | 1,435 | 791 | 484 | 307 | 134 | 0 |
{-| Generic code to work with jobs, e.g. submit jobs and check their status.
-}
{-
Copyright (C) 2009, 2010, 2011, 2012 Google Inc.
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.
-}
module Ganeti.Jobs
( submitJobs
, execJobsWait
, execJobsWaitOk
, waitForJobs
) where
import Control.Concurrent (threadDelay)
import Data.List
import Ganeti.BasicTypes
import Ganeti.Errors
import qualified Ganeti.Luxi as L
import Ganeti.OpCodes
import Ganeti.Types
-- | Submits a set of jobs and returns their job IDs without waiting for
-- completion.
submitJobs :: [[MetaOpCode]] -> L.Client -> IO (Result [L.JobId])
submitJobs opcodes client = do
jids <- L.submitManyJobs client opcodes
return (case jids of
Bad e -> Bad $ "Job submission error: " ++ formatError e
Ok jids' -> Ok jids')
-- | Executes a set of jobs and waits for their completion, returning their
-- status.
execJobsWait :: [[MetaOpCode]] -- ^ The list of jobs
-> ([L.JobId] -> IO ()) -- ^ Post-submission callback
-> L.Client -- ^ The Luxi client
-> IO (Result [(L.JobId, JobStatus)])
execJobsWait opcodes callback client = do
jids <- submitJobs opcodes client
case jids of
Bad e -> return $ Bad e
Ok jids' -> do
callback jids'
waitForJobs jids' client
-- | Polls a set of jobs at an increasing interval until all are finished one
-- way or another.
waitForJobs :: [L.JobId] -> L.Client -> IO (Result [(L.JobId, JobStatus)])
waitForJobs jids client = waitForJobs' 500000 15000000
where
waitForJobs' delay maxdelay = do
-- TODO: this should use WaitForJobChange once it's available in Haskell
-- land, instead of a fixed schedule of sleeping intervals.
threadDelay $ min delay maxdelay
sts <- L.queryJobsStatus client jids
case sts of
Bad e -> return . Bad $ "Checking job status: " ++ formatError e
Ok sts' -> if any (<= JOB_STATUS_RUNNING) sts' then
waitForJobs' (delay * 2) maxdelay
else
return . Ok $ zip jids sts'
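-- Note on the schedule above (added remark): the first sleep is 500000
-- microseconds (0.5s) and the delay doubles on every poll, so the intervals
-- are 0.5s, 1s, 2s, 4s, 8s, ... capped at the 15s 'maxdelay'.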
-- | Execute jobs and return @Ok@ only if all of them succeeded.
execJobsWaitOk :: [[MetaOpCode]] -> L.Client -> IO (Result ())
execJobsWaitOk opcodes client = do
let nullog = const (return () :: IO ())
failed = filter ((/=) JOB_STATUS_SUCCESS . snd)
fmtfail (i, s) = show (fromJobId i) ++ "=>" ++ jobStatusToRaw s
sts <- execJobsWait opcodes nullog client
case sts of
Bad e -> return $ Bad e
Ok sts' -> return (if null $ failed sts' then
Ok ()
else
Bad ("The following jobs failed: " ++
(intercalate ", " . map fmtfail $ failed sts')))
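-- A hedged usage sketch (not from the original module; the way the Luxi
-- client is obtained below, and the socket path, are assumptions that may
-- differ per Ganeti version; 'someMetaOpCode' is hypothetical):
--
-- > main = do
-- >   client <- L.getClient "/var/run/ganeti/socket/ganeti-master"
-- >   result <- execJobsWaitOk [[someMetaOpCode]] client
-- >   case result of
-- >     Ok ()   -> putStrLn "all jobs succeeded"
-- >     Bad msg -> putStrLn ("job failure: " ++ msg)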
|
damoxc/ganeti
|
src/Ganeti/Jobs.hs
|
gpl-2.0
| 3,420 | 0 | 19 | 896 | 714 | 364 | 350 | 51 | 3 |
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<helpset version="2.0" xml:lang="es-ES">
<title>Access Control Testing | ZAP Extension</title>
<maps>
<homeID>top</homeID>
<mapref location="map.jhm"/>
</maps>
<view>
<name>TOC</name>
<label>Contents</label>
<type>org.zaproxy.zap.extension.help.ZapTocView</type>
<data>toc.xml</data>
</view>
<view>
<name>Index</name>
<label>Índice</label>
<type>javax.help.IndexView</type>
<data>index.xml</data>
</view>
<view>
<name>Favorites</name>
<label>Favorites</label>
<type>javax.help.FavoritesView</type>
</view>
</helpset>
|
0xkasun/security-tools
|
src/org/zaproxy/zap/extension/accessControl/resources/help_es_ES/helpset_es_ES.hs
|
apache-2.0
| 779 | 65 | 53 | 121 | 339 | 171 | 168 | -1 | -1 |
module Reddit.Actions.UserSpec where
import Reddit.Actions.User
import Reddit.Types.Comment
import Reddit.Types.Listing
import Reddit.Types.Options
import Reddit.Types.User
import ConfigLoad
import Control.Monad
import Data.Maybe
import Data.Time.Clock
import Test.Hspec
isRight :: Either a b -> Bool
isRight = const False `either` const True
main :: IO ()
main = hspec spec
spec :: Spec
spec = describe "Reddit.Actions.User" $ do
(reddit, username, _) <- runIO loadConfig
time <- runIO getCurrentTime
it "should be able to get the user's most recent comments" $ do
res <- run reddit $ getUserComments username
res `shouldSatisfy` isRight
case res of
Left _ -> expectationFailure "something failed"
Right (Listing _ _ cs) ->
forM_ cs $ \c -> do
author c `shouldBe` username
replies c `shouldSatisfy` (\(Listing _ _ x) -> null x)
created c `shouldSatisfy` (< time)
it "should be able to get multiple pages of user comments" $ do
res <- run reddit $ getUserComments' (Options Nothing (Just 1)) username
res `shouldSatisfy` isRight
case res of
Right (Listing _ (Just a) (c:[])) -> do
next <- run reddit $ getUserComments' (Options (Just $ After a) (Just 1)) username
next `shouldSatisfy` isRight
case next of
Right (Listing _ _ (d:[])) ->
c `shouldSatisfy` (/= d)
_ -> expectationFailure "something failed"
_ -> expectationFailure "something failed"
it "should be able to get the user's about me info" $ do
res <- run reddit aboutMe
res `shouldSatisfy` isRight
case res of
Left _ -> expectationFailure "something failed"
Right user -> do
userName user `shouldBe` username
userCreated user `shouldSatisfy` (< time)
hasMail user `shouldSatisfy` isJust
isFriend user `shouldBe` False
it "should be able to get the user info for a user" $ do
res <- run reddit $ getUserInfo username
res `shouldSatisfy` isRight
it "should be able to check if a username is available" $ do
res <- run reddit $ isUsernameAvailable username
res `shouldSatisfy` isRight
case res of
Left _ -> return ()
Right avail -> avail `shouldBe` False
|
FranklinChen/reddit
|
test-io/Reddit/Actions/UserSpec.hs
|
bsd-2-clause
| 2,266 | 0 | 24 | 571 | 730 | 362 | 368 | 60 | 6 |
-- Error messages when you use 'forall' *without* the RankN flags
-- Test cases similar to #2114
module ShouldFail where
f :: forall a. a->a
f = error "ur"
g :: Int -> (forall a. a-> a) -> Int
g = error "ur"
data S = MkS (forall a. a->a)
-- This one complains about 'a' and 'forall' not in scope
-- because they aren't implicitly quantified,
-- whereas implicit quantification deals with the first two
|
sdiehl/ghc
|
testsuite/tests/rename/should_fail/rnfail052.hs
|
bsd-3-clause
| 413 | 0 | 9 | 87 | 85 | 50 | 35 | -1 | -1 |
module Turbinado.Environment.Request (
HTTP.Request(..),
addRequestToEnvironment,
) where
import qualified Network.HTTP as HTTP
import Network.URI
import Turbinado.Utility.General
import qualified Data.Map as M
import Control.Monad
import Control.Monad.State
import Data.Maybe
import Turbinado.Environment.Types
addRequestToEnvironment :: (HasEnvironment m) => HTTP.Request String -> m ()
addRequestToEnvironment req = do e <- getEnvironment
setEnvironment $ e {getRequest = Just $ req}
|
alsonkemp/turbinado-website
|
Turbinado/Environment/Request.hs
|
bsd-3-clause
| 548 | 0 | 10 | 115 | 133 | 79 | 54 | 14 | 1 |
{-# LANGUAGE Trustworthy #-}
{-# LANGUAGE CPP, NoImplicitPrelude, ScopedTypeVariables,
MagicHash, BangPatterns #-}
-----------------------------------------------------------------------------
-- |
-- Module : Data.List
-- Copyright : (c) The University of Glasgow 2001
-- License : BSD-style (see the file libraries/base/LICENSE)
--
-- Maintainer  :  libraries@haskell.org
-- Stability : stable
-- Portability : portable
--
-- Operations on lists.
--
-----------------------------------------------------------------------------
module Data.OldList
(
-- * Basic functions
(++)
, head
, last
, tail
, init
, uncons
, null
, length
-- * List transformations
, map
, reverse
, intersperse
, intercalate
, transpose
, subsequences
, permutations
-- * Reducing lists (folds)
, foldl
, foldl'
, foldl1
, foldl1'
, foldr
, foldr1
-- ** Special folds
, concat
, concatMap
, and
, or
, any
, all
, sum
, product
, maximum
, minimum
-- * Building lists
-- ** Scans
, scanl
, scanl'
, scanl1
, scanr
, scanr1
-- ** Accumulating maps
, mapAccumL
, mapAccumR
-- ** Infinite lists
, iterate
, iterate'
, repeat
, replicate
, cycle
-- ** Unfolding
, unfoldr
-- * Sublists
-- ** Extracting sublists
, take
, drop
, splitAt
, takeWhile
, dropWhile
, dropWhileEnd
, span
, break
, stripPrefix
, group
, inits
, tails
-- ** Predicates
, isPrefixOf
, isSuffixOf
, isInfixOf
-- * Searching lists
-- ** Searching by equality
, elem
, notElem
, lookup
-- ** Searching with a predicate
, find
, filter
, partition
-- * Indexing lists
-- | These functions treat a list @xs@ as an indexed collection,
-- with indices ranging from 0 to @'length' xs - 1@.
, (!!)
, elemIndex
, elemIndices
, findIndex
, findIndices
-- * Zipping and unzipping lists
, zip
, zip3
, zip4, zip5, zip6, zip7
, zipWith
, zipWith3
, zipWith4, zipWith5, zipWith6, zipWith7
, unzip
, unzip3
, unzip4, unzip5, unzip6, unzip7
-- * Special lists
-- ** Functions on strings
, lines
, words
, unlines
, unwords
-- ** \"Set\" operations
, nub
, delete
, (\\)
, union
, intersect
-- ** Ordered lists
, sort
, sortOn
, insert
-- * Generalized functions
-- ** The \"@By@\" operations
-- | By convention, overloaded functions have a non-overloaded
-- counterpart whose name is suffixed with \`@By@\'.
--
-- It is often convenient to use these functions together with
-- 'Data.Function.on', for instance @'sortBy' ('compare'
-- \`on\` 'fst')@.
-- *** User-supplied equality (replacing an @Eq@ context)
-- | The predicate is assumed to define an equivalence.
, nubBy
, deleteBy
, deleteFirstsBy
, unionBy
, intersectBy
, groupBy
-- *** User-supplied comparison (replacing an @Ord@ context)
-- | The function is assumed to define a total ordering.
, sortBy
, insertBy
, maximumBy
, minimumBy
-- ** The \"@generic@\" operations
-- | The prefix \`@generic@\' indicates an overloaded function that
-- is a generalized version of a "Prelude" function.
, genericLength
, genericTake
, genericDrop
, genericSplitAt
, genericIndex
, genericReplicate
) where
import Data.Maybe
import Data.Bits ( (.&.) )
import Data.Char ( isSpace )
import Data.Ord ( comparing )
import Data.Tuple ( fst, snd )
import GHC.Num
import GHC.Real
import GHC.List
import GHC.Base
infix 5 \\ -- comment to fool cpp: https://www.haskell.org/ghc/docs/latest/html/users_guide/options-phases.html#cpp-string-gaps
-- -----------------------------------------------------------------------------
-- List functions
-- | The 'dropWhileEnd' function drops the largest suffix of a list
-- in which the given predicate holds for all elements. For example:
--
-- >>> dropWhileEnd isSpace "foo\n"
-- "foo"
--
-- >>> dropWhileEnd isSpace "foo bar"
-- "foo bar"
--
-- > dropWhileEnd isSpace ("foo\n" ++ undefined) == "foo" ++ undefined
--
-- @since 4.5.0.0
dropWhileEnd :: (a -> Bool) -> [a] -> [a]
dropWhileEnd p = foldr (\x xs -> if p x && null xs then [] else x : xs) []
-- | The 'stripPrefix' function drops the given prefix from a list.
-- It returns 'Nothing' if the list did not start with the prefix
-- given, or 'Just' the list after the prefix, if it does.
--
-- >>> stripPrefix "foo" "foobar"
-- Just "bar"
--
-- >>> stripPrefix "foo" "foo"
-- Just ""
--
-- >>> stripPrefix "foo" "barfoo"
-- Nothing
--
-- >>> stripPrefix "foo" "barfoobaz"
-- Nothing
stripPrefix :: Eq a => [a] -> [a] -> Maybe [a]
stripPrefix [] ys = Just ys
stripPrefix (x:xs) (y:ys)
| x == y = stripPrefix xs ys
stripPrefix _ _ = Nothing
-- | The 'elemIndex' function returns the index of the first element
-- in the given list which is equal (by '==') to the query element,
-- or 'Nothing' if there is no such element.
--
-- >>> elemIndex 4 [0..]
-- Just 4
elemIndex :: Eq a => a -> [a] -> Maybe Int
elemIndex x = findIndex (x==)
-- | The 'elemIndices' function extends 'elemIndex', by returning the
-- indices of all elements equal to the query element, in ascending order.
--
-- >>> elemIndices 'o' "Hello World"
-- [4,7]
elemIndices :: Eq a => a -> [a] -> [Int]
elemIndices x = findIndices (x==)
-- | The 'find' function takes a predicate and a list and returns the
-- first element in the list matching the predicate, or 'Nothing' if
-- there is no such element.
--
-- >>> find (> 4) [1..]
-- Just 5
--
-- >>> find (< 0) [1..10]
-- Nothing
find :: (a -> Bool) -> [a] -> Maybe a
find p = listToMaybe . filter p
-- | The 'findIndex' function takes a predicate and a list and returns
-- the index of the first element in the list satisfying the predicate,
-- or 'Nothing' if there is no such element.
--
-- >>> findIndex isSpace "Hello World!"
-- Just 5
findIndex :: (a -> Bool) -> [a] -> Maybe Int
findIndex p = listToMaybe . findIndices p
-- | The 'findIndices' function extends 'findIndex', by returning the
-- indices of all elements satisfying the predicate, in ascending order.
--
-- >>> findIndices (`elem` "aeiou") "Hello World!"
-- [1,4,7]
findIndices :: (a -> Bool) -> [a] -> [Int]
#if defined(USE_REPORT_PRELUDE)
findIndices p xs = [ i | (x,i) <- zip xs [0..], p x]
#else
-- Efficient definition, adapted from Data.Sequence
{-# INLINE findIndices #-}
findIndices p ls = build $ \c n ->
let go x r k | p x = I# k `c` r (k +# 1#)
| otherwise = r (k +# 1#)
in foldr go (\_ -> n) ls 0#
#endif /* USE_REPORT_PRELUDE */
-- | The 'isPrefixOf' function takes two lists and returns 'True'
-- iff the first list is a prefix of the second.
--
-- >>> "Hello" `isPrefixOf` "Hello World!"
-- True
--
-- >>> "Hello" `isPrefixOf` "Wello Horld!"
-- False
isPrefixOf :: (Eq a) => [a] -> [a] -> Bool
isPrefixOf [] _ = True
isPrefixOf _ [] = False
isPrefixOf (x:xs) (y:ys)= x == y && isPrefixOf xs ys
-- | The 'isSuffixOf' function takes two lists and returns 'True' iff
-- the first list is a suffix of the second. The second list must be
-- finite.
--
-- >>> "ld!" `isSuffixOf` "Hello World!"
-- True
--
-- >>> "World" `isSuffixOf` "Hello World!"
-- False
isSuffixOf :: (Eq a) => [a] -> [a] -> Bool
ns `isSuffixOf` hs = maybe False id $ do
delta <- dropLengthMaybe ns hs
return $ ns == dropLength delta hs
-- Since dropLengthMaybe ns hs succeeded, we know that (if hs is finite)
-- length ns + length delta = length hs
-- so dropping the length of delta from hs will yield a suffix exactly
-- the length of ns.
-- A version of drop that drops the length of the first argument from the
-- second argument. If xs is longer than ys, xs will not be traversed in its
-- entirety. dropLength is also generally faster than (drop . length)
-- Both this and dropLengthMaybe could be written as folds over their first
-- arguments, but this reduces clarity with no benefit to isSuffixOf.
--
-- >>> dropLength "Hello" "Holla world"
-- " world"
--
-- >>> dropLength [1..] [1,2,3]
-- []
dropLength :: [a] -> [b] -> [b]
dropLength [] y = y
dropLength _ [] = []
dropLength (_:x') (_:y') = dropLength x' y'
-- A version of dropLength that returns Nothing if the second list runs out of
-- elements before the first.
--
-- >>> dropLengthMaybe [1..] [1,2,3]
-- Nothing
dropLengthMaybe :: [a] -> [b] -> Maybe [b]
dropLengthMaybe [] y = Just y
dropLengthMaybe _ [] = Nothing
dropLengthMaybe (_:x') (_:y') = dropLengthMaybe x' y'
-- | The 'isInfixOf' function takes two lists and returns 'True'
-- iff the first list is contained, wholly and intact,
-- anywhere within the second.
--
-- >>> isInfixOf "Haskell" "I really like Haskell."
-- True
--
-- >>> isInfixOf "Ial" "I really like Haskell."
-- False
isInfixOf :: (Eq a) => [a] -> [a] -> Bool
isInfixOf needle haystack = any (isPrefixOf needle) (tails haystack)
-- | /O(n^2)/. The 'nub' function removes duplicate elements from a list.
-- In particular, it keeps only the first occurrence of each element.
-- (The name 'nub' means \`essence\'.)
-- It is a special case of 'nubBy', which allows the programmer to supply
-- their own equality test.
--
-- >>> nub [1,2,3,4,3,2,1,2,4,3,5]
-- [1,2,3,4,5]
nub :: (Eq a) => [a] -> [a]
nub = nubBy (==)
-- | The 'nubBy' function behaves just like 'nub', except it uses a
-- user-supplied equality predicate instead of the overloaded '=='
-- function.
--
-- >>> nubBy (\x y -> mod x 3 == mod y 3) [1,2,4,5,6]
-- [1,2,6]
nubBy :: (a -> a -> Bool) -> [a] -> [a]
#if defined(USE_REPORT_PRELUDE)
nubBy eq [] = []
nubBy eq (x:xs) = x : nubBy eq (filter (\ y -> not (eq x y)) xs)
#else
-- stolen from HBC
nubBy eq l = nubBy' l []
where
nubBy' [] _ = []
nubBy' (y:ys) xs
| elem_by eq y xs = nubBy' ys xs
| otherwise = y : nubBy' ys (y:xs)
-- Not exported:
-- Note that we keep the call to `eq` with arguments in the
-- same order as in the reference (prelude) implementation,
-- and that this order is different from how `elem` calls (==).
-- See #2528, #3280 and #7913.
-- 'xs' is the list of things we've seen so far,
-- 'y' is the potential new element
elem_by :: (a -> a -> Bool) -> a -> [a] -> Bool
elem_by _ _ [] = False
elem_by eq y (x:xs) = x `eq` y || elem_by eq y xs
#endif
-- | 'delete' @x@ removes the first occurrence of @x@ from its list argument.
-- For example,
--
-- >>> delete 'a' "banana"
-- "bnana"
--
-- It is a special case of 'deleteBy', which allows the programmer to
-- supply their own equality test.
delete :: (Eq a) => a -> [a] -> [a]
delete = deleteBy (==)
-- | The 'deleteBy' function behaves like 'delete', but takes a
-- user-supplied equality predicate.
--
-- >>> deleteBy (<=) 4 [1..10]
-- [1,2,3,5,6,7,8,9,10]
deleteBy :: (a -> a -> Bool) -> a -> [a] -> [a]
deleteBy _ _ [] = []
deleteBy eq x (y:ys) = if x `eq` y then ys else y : deleteBy eq x ys
-- | The '\\' function is list difference (non-associative).
-- In the result of @xs@ '\\' @ys@, the first occurrence of each element of
-- @ys@ in turn (if any) has been removed from @xs@. Thus
--
-- > (xs ++ ys) \\ xs == ys.
--
-- >>> "Hello World!" \\ "ell W"
-- "Hoorld!"
--
-- It is a special case of 'deleteFirstsBy', which allows the programmer
-- to supply their own equality test.
(\\) :: (Eq a) => [a] -> [a] -> [a]
(\\) = foldl (flip delete)
-- | The 'union' function returns the list union of the two lists.
-- For example,
--
-- >>> "dog" `union` "cow"
-- "dogcw"
--
-- Duplicates, and elements of the first list, are removed from the
-- second list, but if the first list contains duplicates, so will
-- the result.
-- It is a special case of 'unionBy', which allows the programmer to supply
-- their own equality test.
union :: (Eq a) => [a] -> [a] -> [a]
union = unionBy (==)
-- | The 'unionBy' function is the non-overloaded version of 'union'.
unionBy :: (a -> a -> Bool) -> [a] -> [a] -> [a]
unionBy eq xs ys = xs ++ foldl (flip (deleteBy eq)) (nubBy eq ys) xs
-- | The 'intersect' function takes the list intersection of two lists.
-- For example,
--
-- >>> [1,2,3,4] `intersect` [2,4,6,8]
-- [2,4]
--
-- If the first list contains duplicates, so will the result.
--
-- >>> [1,2,2,3,4] `intersect` [6,4,4,2]
-- [2,2,4]
--
-- It is a special case of 'intersectBy', which allows the programmer to
-- supply their own equality test. If the element is found in both the first
-- and the second list, the element from the first list will be used.
intersect :: (Eq a) => [a] -> [a] -> [a]
intersect = intersectBy (==)
-- | The 'intersectBy' function is the non-overloaded version of 'intersect'.
intersectBy :: (a -> a -> Bool) -> [a] -> [a] -> [a]
intersectBy _ [] _ = []
intersectBy _ _ [] = []
intersectBy eq xs ys = [x | x <- xs, any (eq x) ys]
-- | The 'intersperse' function takes an element and a list and
-- \`intersperses\' that element between the elements of the list.
-- For example,
--
-- >>> intersperse ',' "abcde"
-- "a,b,c,d,e"
intersperse :: a -> [a] -> [a]
intersperse _ [] = []
intersperse sep (x:xs) = x : prependToAll sep xs
-- Not exported:
-- We want to make every element in the 'intersperse'd list available
-- as soon as possible to avoid space leaks. Experiments suggested that
-- a separate top-level helper is more efficient than a local worker.
prependToAll :: a -> [a] -> [a]
prependToAll _ [] = []
prependToAll sep (x:xs) = sep : x : prependToAll sep xs
-- | 'intercalate' @xs xss@ is equivalent to @('concat' ('intersperse' xs xss))@.
-- It inserts the list @xs@ in between the lists in @xss@ and concatenates the
-- result.
--
-- >>> intercalate ", " ["Lorem", "ipsum", "dolor"]
-- "Lorem, ipsum, dolor"
intercalate :: [a] -> [[a]] -> [a]
intercalate xs xss = concat (intersperse xs xss)
-- | The 'transpose' function transposes the rows and columns of its argument.
-- For example,
--
-- >>> transpose [[1,2,3],[4,5,6]]
-- [[1,4],[2,5],[3,6]]
--
-- If some of the rows are shorter than the following rows, their elements are skipped:
--
-- >>> transpose [[10,11],[20],[],[30,31,32]]
-- [[10,20,30],[11,31],[32]]
transpose :: [[a]] -> [[a]]
transpose [] = []
transpose ([] : xss) = transpose xss
transpose ((x:xs) : xss) = (x : [h | (h:_) <- xss]) : transpose (xs : [ t | (_:t) <- xss])
-- | The 'partition' function takes a predicate and a list and returns
-- the pair of lists of elements which do and do not satisfy the
-- predicate, respectively; i.e.,
--
-- > partition p xs == (filter p xs, filter (not . p) xs)
--
-- >>> partition (`elem` "aeiou") "Hello World!"
-- ("eoo","Hll Wrld!")
partition :: (a -> Bool) -> [a] -> ([a],[a])
{-# INLINE partition #-}
partition p xs = foldr (select p) ([],[]) xs
select :: (a -> Bool) -> a -> ([a], [a]) -> ([a], [a])
select p x ~(ts,fs) | p x = (x:ts,fs)
| otherwise = (ts, x:fs)
-- | The 'mapAccumL' function behaves like a combination of 'map' and
-- 'foldl'; it applies a function to each element of a list, passing
-- an accumulating parameter from left to right, and returning a final
-- value of this accumulator together with the new list.
mapAccumL :: (acc -> x -> (acc, y)) -- Function of elt of input list
-- and accumulator, returning new
-- accumulator and elt of result list
-> acc -- Initial accumulator
-> [x] -- Input list
-> (acc, [y]) -- Final accumulator and result list
{-# NOINLINE [1] mapAccumL #-}
mapAccumL _ s [] = (s, [])
mapAccumL f s (x:xs) = (s'',y:ys)
where (s', y ) = f s x
(s'',ys) = mapAccumL f s' xs
{-# RULES
"mapAccumL" [~1] forall f s xs . mapAccumL f s xs = foldr (mapAccumLF f) pairWithNil xs s
"mapAccumLList" [1] forall f s xs . foldr (mapAccumLF f) pairWithNil xs s = mapAccumL f s xs
#-}
pairWithNil :: acc -> (acc, [y])
{-# INLINE [0] pairWithNil #-}
pairWithNil x = (x, [])
mapAccumLF :: (acc -> x -> (acc, y)) -> x -> (acc -> (acc, [y])) -> acc -> (acc, [y])
{-# INLINE [0] mapAccumLF #-}
mapAccumLF f = \x r -> oneShot (\s ->
let (s', y) = f s x
(s'', ys) = r s'
in (s'', y:ys))
-- See Note [Left folds via right fold]
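-- A worked example for 'mapAccumL' (added illustration, doctest-style):
--
-- >>> mapAccumL (\acc x -> (acc + x, (x, acc + x))) 0 [1,2,3]
-- (6,[(1,1),(2,3),(3,6)])
--
-- The accumulator is the running sum, threaded left to right, and each output
-- element is paired with the sum seen up to and including it.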
-- | The 'mapAccumR' function behaves like a combination of 'map' and
-- 'foldr'; it applies a function to each element of a list, passing
-- an accumulating parameter from right to left, and returning a final
-- value of this accumulator together with the new list.
mapAccumR :: (acc -> x -> (acc, y)) -- Function of elt of input list
-- and accumulator, returning new
-- accumulator and elt of result list
-> acc -- Initial accumulator
-> [x] -- Input list
-> (acc, [y]) -- Final accumulator and result list
mapAccumR _ s [] = (s, [])
mapAccumR f s (x:xs) = (s'', y:ys)
where (s'',y ) = f s' x
(s', ys) = mapAccumR f s xs
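-- A worked example for 'mapAccumR' (added illustration): a running sum
-- threaded from the right, so the rightmost element sees the initial
-- accumulator first:
--
-- >>> mapAccumR (\acc x -> (acc + x, acc + x)) 0 [1,2,3]
-- (6,[6,5,3])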
-- | The 'insert' function takes an element and a list and inserts the
-- element into the list at the first position where it is less
-- than or equal to the next element. In particular, if the list
-- is sorted before the call, the result will also be sorted.
-- It is a special case of 'insertBy', which allows the programmer to
-- supply their own comparison function.
--
-- >>> insert 4 [1,2,3,5,6,7]
-- [1,2,3,4,5,6,7]
insert :: Ord a => a -> [a] -> [a]
insert e ls = insertBy (compare) e ls
-- | The non-overloaded version of 'insert'.
insertBy :: (a -> a -> Ordering) -> a -> [a] -> [a]
insertBy _ x [] = [x]
insertBy cmp x ys@(y:ys')
= case cmp x y of
GT -> y : insertBy cmp x ys'
_ -> x : ys
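-- An added illustration: with a reversed comparison, 'insertBy' keeps a
-- descending list descending:
--
-- >>> insertBy (flip compare) 4 [6,5,3,1]
-- [6,5,4,3,1]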
-- | The 'maximumBy' function takes a comparison function and a list
-- and returns the greatest element of the list by the comparison function.
-- The list must be finite and non-empty.
--
-- We can use this to find the longest entry of a list:
--
-- >>> maximumBy (\x y -> compare (length x) (length y)) ["Hello", "World", "!", "Longest", "bar"]
-- "Longest"
maximumBy :: (a -> a -> Ordering) -> [a] -> a
maximumBy _ [] = errorWithoutStackTrace "List.maximumBy: empty list"
maximumBy cmp xs = foldl1 maxBy xs
where
maxBy x y = case cmp x y of
GT -> x
_ -> y
-- | The 'minimumBy' function takes a comparison function and a list
-- and returns the least element of the list by the comparison function.
-- The list must be finite and non-empty.
--
-- We can use this to find the shortest entry of a list:
--
-- >>> minimumBy (\x y -> compare (length x) (length y)) ["Hello", "World", "!", "Longest", "bar"]
-- "!"
minimumBy :: (a -> a -> Ordering) -> [a] -> a
minimumBy _ [] = errorWithoutStackTrace "List.minimumBy: empty list"
minimumBy cmp xs = foldl1 minBy xs
where
minBy x y = case cmp x y of
GT -> y
_ -> x
-- | The 'genericLength' function is an overloaded version of 'length'. In
-- particular, instead of returning an 'Int', it returns any type which is
-- an instance of 'Num'. It is, however, less efficient than 'length'.
genericLength :: (Num i) => [a] -> i
{-# NOINLINE [1] genericLength #-}
genericLength [] = 0
genericLength (_:l) = 1 + genericLength l
{-# RULES
"genericLengthInt" genericLength = (strictGenericLength :: [a] -> Int);
"genericLengthInteger" genericLength = (strictGenericLength :: [a] -> Integer);
#-}
strictGenericLength :: (Num i) => [b] -> i
strictGenericLength l = gl l 0
where
gl [] a = a
gl (_:xs) a = let a' = a + 1 in a' `seq` gl xs a'
-- | The 'genericTake' function is an overloaded version of 'take', which
-- accepts any 'Integral' value as the number of elements to take.
genericTake :: (Integral i) => i -> [a] -> [a]
genericTake n _ | n <= 0 = []
genericTake _ [] = []
genericTake n (x:xs) = x : genericTake (n-1) xs
-- | The 'genericDrop' function is an overloaded version of 'drop', which
-- accepts any 'Integral' value as the number of elements to drop.
genericDrop :: (Integral i) => i -> [a] -> [a]
genericDrop n xs | n <= 0 = xs
genericDrop _ [] = []
genericDrop n (_:xs) = genericDrop (n-1) xs
-- | The 'genericSplitAt' function is an overloaded version of 'splitAt', which
-- accepts any 'Integral' value as the position at which to split.
genericSplitAt :: (Integral i) => i -> [a] -> ([a], [a])
genericSplitAt n xs | n <= 0 = ([],xs)
genericSplitAt _ [] = ([],[])
genericSplitAt n (x:xs) = (x:xs',xs'') where
(xs',xs'') = genericSplitAt (n-1) xs
-- | The 'genericIndex' function is an overloaded version of '!!', which
-- accepts any 'Integral' value as the index.
genericIndex :: (Integral i) => [a] -> i -> a
genericIndex (x:_) 0 = x
genericIndex (_:xs) n
| n > 0 = genericIndex xs (n-1)
| otherwise = errorWithoutStackTrace "List.genericIndex: negative argument."
genericIndex _ _ = errorWithoutStackTrace "List.genericIndex: index too large."
-- | The 'genericReplicate' function is an overloaded version of 'replicate',
-- which accepts any 'Integral' value as the number of repetitions to make.
genericReplicate :: (Integral i) => i -> a -> [a]
genericReplicate n x = genericTake n (repeat x)
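-- An added illustration of the generic family: the count may be any
-- 'Integral' (or the result any 'Num' for 'genericLength') rather than 'Int':
--
-- >>> genericTake (2 :: Integer) "hello"
-- "he"
--
-- >>> genericLength [1,2,3] :: Integer
-- 3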
-- | The 'zip4' function takes four lists and returns a list of
-- quadruples, analogous to 'zip'.
zip4 :: [a] -> [b] -> [c] -> [d] -> [(a,b,c,d)]
zip4 = zipWith4 (,,,)
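-- An added illustration for 'zip4' (the longer zips behave analogously,
-- truncating at the shortest input):
--
-- >>> zip4 [1,2] "ab" [True,False] [0.5,1.5]
-- [(1,'a',True,0.5),(2,'b',False,1.5)]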
-- | The 'zip5' function takes five lists and returns a list of
-- five-tuples, analogous to 'zip'.
zip5 :: [a] -> [b] -> [c] -> [d] -> [e] -> [(a,b,c,d,e)]
zip5 = zipWith5 (,,,,)
-- | The 'zip6' function takes six lists and returns a list of six-tuples,
-- analogous to 'zip'.
zip6 :: [a] -> [b] -> [c] -> [d] -> [e] -> [f] ->
[(a,b,c,d,e,f)]
zip6 = zipWith6 (,,,,,)
-- | The 'zip7' function takes seven lists and returns a list of
-- seven-tuples, analogous to 'zip'.
zip7 :: [a] -> [b] -> [c] -> [d] -> [e] -> [f] ->
[g] -> [(a,b,c,d,e,f,g)]
zip7 = zipWith7 (,,,,,,)
-- | The 'zipWith4' function takes a function which combines four
-- elements, as well as four lists and returns a list of their point-wise
-- combination, analogous to 'zipWith'.
zipWith4 :: (a->b->c->d->e) -> [a]->[b]->[c]->[d]->[e]
zipWith4 z (a:as) (b:bs) (c:cs) (d:ds)
= z a b c d : zipWith4 z as bs cs ds
zipWith4 _ _ _ _ _ = []
-- | The 'zipWith5' function takes a function which combines five
-- elements, as well as five lists and returns a list of their point-wise
-- combination, analogous to 'zipWith'.
zipWith5 :: (a->b->c->d->e->f) ->
[a]->[b]->[c]->[d]->[e]->[f]
zipWith5 z (a:as) (b:bs) (c:cs) (d:ds) (e:es)
= z a b c d e : zipWith5 z as bs cs ds es
zipWith5 _ _ _ _ _ _ = []
-- | The 'zipWith6' function takes a function which combines six
-- elements, as well as six lists and returns a list of their point-wise
-- combination, analogous to 'zipWith'.
zipWith6 :: (a->b->c->d->e->f->g) ->
[a]->[b]->[c]->[d]->[e]->[f]->[g]
zipWith6 z (a:as) (b:bs) (c:cs) (d:ds) (e:es) (f:fs)
= z a b c d e f : zipWith6 z as bs cs ds es fs
zipWith6 _ _ _ _ _ _ _ = []
-- | The 'zipWith7' function takes a function which combines seven
-- elements, as well as seven lists and returns a list of their point-wise
-- combination, analogous to 'zipWith'.
zipWith7 :: (a->b->c->d->e->f->g->h) ->
[a]->[b]->[c]->[d]->[e]->[f]->[g]->[h]
zipWith7 z (a:as) (b:bs) (c:cs) (d:ds) (e:es) (f:fs) (g:gs)
= z a b c d e f g : zipWith7 z as bs cs ds es fs gs
zipWith7 _ _ _ _ _ _ _ _ = []
-- | The 'unzip4' function takes a list of quadruples and returns four
-- lists, analogous to 'unzip'.
unzip4 :: [(a,b,c,d)] -> ([a],[b],[c],[d])
unzip4 = foldr (\(a,b,c,d) ~(as,bs,cs,ds) ->
(a:as,b:bs,c:cs,d:ds))
([],[],[],[])
-- | The 'unzip5' function takes a list of five-tuples and returns five
-- lists, analogous to 'unzip'.
unzip5 :: [(a,b,c,d,e)] -> ([a],[b],[c],[d],[e])
unzip5 = foldr (\(a,b,c,d,e) ~(as,bs,cs,ds,es) ->
(a:as,b:bs,c:cs,d:ds,e:es))
([],[],[],[],[])
-- | The 'unzip6' function takes a list of six-tuples and returns six
-- lists, analogous to 'unzip'.
unzip6 :: [(a,b,c,d,e,f)] -> ([a],[b],[c],[d],[e],[f])
unzip6 = foldr (\(a,b,c,d,e,f) ~(as,bs,cs,ds,es,fs) ->
(a:as,b:bs,c:cs,d:ds,e:es,f:fs))
([],[],[],[],[],[])
-- | The 'unzip7' function takes a list of seven-tuples and returns
-- seven lists, analogous to 'unzip'.
unzip7 :: [(a,b,c,d,e,f,g)] -> ([a],[b],[c],[d],[e],[f],[g])
unzip7 = foldr (\(a,b,c,d,e,f,g) ~(as,bs,cs,ds,es,fs,gs) ->
(a:as,b:bs,c:cs,d:ds,e:es,f:fs,g:gs))
([],[],[],[],[],[],[])
-- | The 'deleteFirstsBy' function takes a predicate and two lists and
-- returns the first list with the first occurrence of each element of
-- the second list removed.
deleteFirstsBy :: (a -> a -> Bool) -> [a] -> [a] -> [a]
deleteFirstsBy eq = foldl (flip (deleteBy eq))
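-- An added illustration: each element of the second list removes its first
-- match from the first list (this is the generalisation behind '\\'):
--
-- >>> deleteFirstsBy (==) "banana" "an"
-- "bana"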
-- | The 'group' function takes a list and returns a list of lists such
-- that the concatenation of the result is equal to the argument. Moreover,
-- each sublist in the result contains only equal elements. For example,
--
-- >>> group "Mississippi"
-- ["M","i","ss","i","ss","i","pp","i"]
--
-- It is a special case of 'groupBy', which allows the programmer to supply
-- their own equality test.
group :: Eq a => [a] -> [[a]]
group = groupBy (==)
-- | The 'groupBy' function is the non-overloaded version of 'group'.
groupBy :: (a -> a -> Bool) -> [a] -> [[a]]
groupBy _ [] = []
groupBy eq (x:xs) = (x:ys) : groupBy eq zs
where (ys,zs) = span (eq x) xs
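-- An added illustration: grouping by sign instead of by equality:
--
-- >>> groupBy (\a b -> (a > 0) == (b > 0)) [1,2,-3,-4,5]
-- [[1,2],[-3,-4],[5]]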
-- | The 'inits' function returns all initial segments of the argument,
-- shortest first. For example,
--
-- >>> inits "abc"
-- ["","a","ab","abc"]
--
-- Note that 'inits' has the following strictness property:
-- @inits (xs ++ _|_) = inits xs ++ _|_@
--
-- In particular,
-- @inits _|_ = [] : _|_@
inits :: [a] -> [[a]]
inits = map toListSB . scanl' snocSB emptySB
{-# NOINLINE inits #-}
-- We do not allow inits to inline, because it plays havoc with Call Arity
-- if it fuses with a consumer, and it would generally lead to serious
-- loss of sharing if allowed to fuse with a producer.
-- | The 'tails' function returns all final segments of the argument,
-- longest first. For example,
--
-- >>> tails "abc"
-- ["abc","bc","c",""]
--
-- Note that 'tails' has the following strictness property:
-- @tails _|_ = _|_ : _|_@
tails :: [a] -> [[a]]
{-# INLINABLE tails #-}
tails lst = build (\c n ->
let tailsGo xs = xs `c` case xs of
[] -> n
_ : xs' -> tailsGo xs'
in tailsGo lst)
-- | The 'subsequences' function returns the list of all subsequences of the argument.
--
-- >>> subsequences "abc"
-- ["","a","b","ab","c","ac","bc","abc"]
subsequences :: [a] -> [[a]]
subsequences xs = [] : nonEmptySubsequences xs
-- | The 'nonEmptySubsequences' function returns the list of all subsequences of the argument,
-- except for the empty list.
--
-- >>> nonEmptySubsequences "abc"
-- ["a","b","ab","c","ac","bc","abc"]
nonEmptySubsequences :: [a] -> [[a]]
nonEmptySubsequences [] = []
nonEmptySubsequences (x:xs) = [x] : foldr f [] (nonEmptySubsequences xs)
where f ys r = ys : (x : ys) : r
-- | The 'permutations' function returns the list of all permutations of the argument.
--
-- >>> permutations "abc"
-- ["abc","bac","cba","bca","cab","acb"]
permutations :: [a] -> [[a]]
permutations xs0 = xs0 : perms xs0 []
where
perms [] _ = []
perms (t:ts) is = foldr interleave (perms ts (t:is)) (permutations is)
where interleave xs r = let (_,zs) = interleave' id xs r in zs
interleave' _ [] r = (ts, r)
interleave' f (y:ys) r = let (us,zs) = interleave' (f . (y:)) ys r
in (y:us, f (t:y:us) : zs)
------------------------------------------------------------------------------
-- Quick Sort algorithm taken from HBC's QSort library.
-- | The 'sort' function implements a stable sorting algorithm.
-- It is a special case of 'sortBy', which allows the programmer to supply
-- their own comparison function.
--
-- Elements are arranged from lowest to highest, keeping duplicates in
-- the order they appeared in the input.
--
-- >>> sort [1,6,4,3,2,5]
-- [1,2,3,4,5,6]
sort :: (Ord a) => [a] -> [a]
-- | The 'sortBy' function is the non-overloaded version of 'sort'.
--
-- >>> sortBy (\(a,_) (b,_) -> compare a b) [(2, "world"), (4, "!"), (1, "Hello")]
-- [(1,"Hello"),(2,"world"),(4,"!")]
sortBy :: (a -> a -> Ordering) -> [a] -> [a]
#if defined(USE_REPORT_PRELUDE)
sort = sortBy compare
sortBy cmp = foldr (insertBy cmp) []
#else
{-
GHC's mergesort replaced by a better implementation, 24/12/2009.
This code originally contributed to the nhc12 compiler by Thomas Nordin
in 2002. Rumoured to have been based on code by Lennart Augustsson, e.g.
http://www.mail-archive.com/[email protected]/msg01822.html
and possibly to bear similarities to a 1982 paper by Richard O'Keefe:
"A smooth applicative merge sort".
Benchmarks show it to be often 2x the speed of the previous implementation.
Fixes ticket http://ghc.haskell.org/trac/ghc/ticket/2143
-}
sort = sortBy compare
sortBy cmp = mergeAll . sequences
where
sequences (a:b:xs)
| a `cmp` b == GT = descending b [a] xs
| otherwise = ascending b (a:) xs
sequences xs = [xs]
descending a as (b:bs)
| a `cmp` b == GT = descending b (a:as) bs
descending a as bs = (a:as): sequences bs
ascending a as (b:bs)
| a `cmp` b /= GT = ascending b (\ys -> as (a:ys)) bs
ascending a as bs = let !x = as [a]
in x : sequences bs
mergeAll [x] = x
mergeAll xs = mergeAll (mergePairs xs)
mergePairs (a:b:xs) = let !x = merge a b
in x : mergePairs xs
mergePairs xs = xs
merge as@(a:as') bs@(b:bs')
| a `cmp` b == GT = b:merge as bs'
| otherwise = a:merge as' bs
merge [] bs = bs
merge as [] = as
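-- A worked illustration of the algorithm above (added for clarity; not part
-- of the original source): for the input [3,1,2,5,4] with the default
-- comparison, 'sequences' first splits the list into the monotonic runs
-- [[1,3],[2,5],[4]]; 'mergeAll' then merges these runs pairwise until a
-- single sorted run [1,2,3,4,5] remains.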
{-
sortBy cmp l = mergesort cmp l
sort l = mergesort compare l
Quicksort replaced by mergesort, 14/5/2002.
From: Ian Lynagh <[email protected]>
I am curious as to why the List.sort implementation in GHC is a
quicksort algorithm rather than an algorithm that guarantees n log n
time in the worst case? I have attached a mergesort implementation along
with a few scripts to time its performance, the results of which are
shown below (* means it didn't finish successfully - in all cases this
was due to a stack overflow).
If I heap profile the random_list case with only 10000 then I see
random_list peaks at using about 2.5M of memory, whereas in the same
program using List.sort it uses only 100k.
Input style Input length Sort data Sort alg User time
stdin 10000 random_list sort 2.82
stdin 10000 random_list mergesort 2.96
stdin 10000 sorted sort 31.37
stdin 10000 sorted mergesort 1.90
stdin 10000 revsorted sort 31.21
stdin 10000 revsorted mergesort 1.88
stdin 100000 random_list sort *
stdin 100000 random_list mergesort *
stdin 100000 sorted sort *
stdin 100000 sorted mergesort *
stdin 100000 revsorted sort *
stdin 100000 revsorted mergesort *
func 10000 random_list sort 0.31
func 10000 random_list mergesort 0.91
func 10000 sorted sort 19.09
func 10000 sorted mergesort 0.15
func 10000 revsorted sort 19.17
func 10000 revsorted mergesort 0.16
func 100000 random_list sort 3.85
func 100000 random_list mergesort *
func 100000 sorted sort 5831.47
func 100000 sorted mergesort 2.23
func 100000 revsorted sort 5872.34
func 100000 revsorted mergesort 2.24
mergesort :: (a -> a -> Ordering) -> [a] -> [a]
mergesort cmp = mergesort' cmp . map wrap
mergesort' :: (a -> a -> Ordering) -> [[a]] -> [a]
mergesort' _ [] = []
mergesort' _ [xs] = xs
mergesort' cmp xss = mergesort' cmp (merge_pairs cmp xss)
merge_pairs :: (a -> a -> Ordering) -> [[a]] -> [[a]]
merge_pairs _ [] = []
merge_pairs _ [xs] = [xs]
merge_pairs cmp (xs:ys:xss) = merge cmp xs ys : merge_pairs cmp xss
merge :: (a -> a -> Ordering) -> [a] -> [a] -> [a]
merge _ [] ys = ys
merge _ xs [] = xs
merge cmp (x:xs) (y:ys)
= case x `cmp` y of
GT -> y : merge cmp (x:xs) ys
_ -> x : merge cmp xs (y:ys)
wrap :: a -> [a]
wrap x = [x]
OLDER: qsort version
-- qsort is stable and does not concatenate.
qsort :: (a -> a -> Ordering) -> [a] -> [a] -> [a]
qsort _ [] r = r
qsort _ [x] r = x:r
qsort cmp (x:xs) r = qpart cmp x xs [] [] r
-- qpart partitions and sorts the sublists
qpart :: (a -> a -> Ordering) -> a -> [a] -> [a] -> [a] -> [a] -> [a]
qpart cmp x [] rlt rge r =
-- rlt and rge are in reverse order and must be sorted with an
-- anti-stable sorting
rqsort cmp rlt (x:rqsort cmp rge r)
qpart cmp x (y:ys) rlt rge r =
case cmp x y of
GT -> qpart cmp x ys (y:rlt) rge r
_ -> qpart cmp x ys rlt (y:rge) r
-- rqsort is as qsort but anti-stable, i.e. reverses equal elements
rqsort :: (a -> a -> Ordering) -> [a] -> [a] -> [a]
rqsort _ [] r = r
rqsort _ [x] r = x:r
rqsort cmp (x:xs) r = rqpart cmp x xs [] [] r
rqpart :: (a -> a -> Ordering) -> a -> [a] -> [a] -> [a] -> [a] -> [a]
rqpart cmp x [] rle rgt r =
qsort cmp rle (x:qsort cmp rgt r)
rqpart cmp x (y:ys) rle rgt r =
case cmp y x of
GT -> rqpart cmp x ys rle (y:rgt) r
_ -> rqpart cmp x ys (y:rle) rgt r
-}
#endif /* USE_REPORT_PRELUDE */
-- | Sort a list by comparing the results of a key function applied to each
-- element. @sortOn f@ is equivalent to @sortBy (comparing f)@, but has the
-- performance advantage of only evaluating @f@ once for each element in the
-- input list. This is called the decorate-sort-undecorate paradigm, or
-- Schwartzian transform.
--
-- Elements are arranged from lowest to highest, keeping duplicates in
-- the order they appeared in the input.
--
-- >>> sortOn fst [(2, "world"), (4, "!"), (1, "Hello")]
-- [(1,"Hello"),(2,"world"),(4,"!")]
--
-- @since 4.8.0.0
sortOn :: Ord b => (a -> b) -> [a] -> [a]
sortOn f =
map snd . sortBy (comparing fst) . map (\x -> let y = f x in y `seq` (y, x))
-- | The 'unfoldr' function is a \`dual\' to 'foldr': while 'foldr'
-- reduces a list to a summary value, 'unfoldr' builds a list from
-- a seed value. The function takes the element and returns 'Nothing'
-- if it is done producing the list or returns 'Just' @(a,b)@, in which
-- case, @a@ is prepended to the list and @b@ is used as the next
-- element in a recursive call. For example,
--
-- > iterate f == unfoldr (\x -> Just (x, f x))
--
-- In some cases, 'unfoldr' can undo a 'foldr' operation:
--
-- > unfoldr f' (foldr f z xs) == xs
--
-- if the following holds:
--
-- > f' (f x y) = Just (x,y)
-- > f' z = Nothing
--
-- A simple use of unfoldr:
--
-- >>> unfoldr (\b -> if b == 0 then Nothing else Just (b, b-1)) 10
-- [10,9,8,7,6,5,4,3,2,1]
--
-- Note [INLINE unfoldr]
-- We treat unfoldr a little differently from some other forms for list fusion
-- for two reasons:
--
-- 1. We don't want to use a rule to rewrite a basic form to a fusible
-- form because this would inline before constant floating. As Simon Peyton-
-- Jones and others have pointed out, this could reduce sharing in some cases
-- where sharing is beneficial. Thus we simply INLINE it, which is, for
-- example, how enumFromTo::Int becomes eftInt. Unfortunately, we don't seem
-- to get enough of an inlining discount to get a version of eftInt based on
-- unfoldr to inline as readily as the usual one. We know that all the Maybe
-- nonsense will go away, but the compiler does not.
--
-- 2. The benefit of inlining unfoldr is likely to be huge in many common cases,
-- even apart from list fusion. In particular, inlining unfoldr often
-- allows GHC to erase all the Maybes. This appears to be critical if unfoldr
-- is to be used in high-performance code. A small increase in code size
-- in the relatively rare cases when this does not happen looks like a very
-- small price to pay.
--
-- Doing a back-and-forth dance doesn't seem to accomplish anything if the
-- final form has to be inlined in any case.
unfoldr :: (b -> Maybe (a, b)) -> b -> [a]
{-# INLINE unfoldr #-} -- See Note [INLINE unfoldr]
unfoldr f b0 = build (\c n ->
let go b = case f b of
Just (a, new_b) -> a `c` go new_b
Nothing -> n
in go b0)
-- -----------------------------------------------------------------------------
-- Functions on strings
-- | 'lines' breaks a string up into a list of strings at newline
-- characters. The resulting strings do not contain newlines.
--
-- Note that after splitting the string at newline characters, the
-- last part of the string is considered a line even if it doesn't end
-- with a newline. For example,
--
-- >>> lines ""
-- []
--
-- >>> lines "\n"
-- [""]
--
-- >>> lines "one"
-- ["one"]
--
-- >>> lines "one\n"
-- ["one"]
--
-- >>> lines "one\n\n"
-- ["one",""]
--
-- >>> lines "one\ntwo"
-- ["one","two"]
--
-- >>> lines "one\ntwo\n"
-- ["one","two"]
--
-- Thus @'lines' s@ contains at least as many elements as newlines in @s@.
lines :: String -> [String]
lines "" = []
-- Somehow GHC doesn't detect the selector thunks in the below code,
-- so s' keeps a reference to the first line via the pair and we have
-- a space leak (cf. #4334).
-- So we need to make GHC see the selector thunks with a trick.
lines s = cons (case break (== '\n') s of
(l, s') -> (l, case s' of
[] -> []
_:s'' -> lines s''))
where
cons ~(h, t) = h : t
-- | 'unlines' is an inverse operation to 'lines'.
-- It joins lines, after appending a terminating newline to each.
--
-- >>> unlines ["Hello", "World", "!"]
-- "Hello\nWorld\n!\n"
unlines :: [String] -> String
#if defined(USE_REPORT_PRELUDE)
unlines = concatMap (++ "\n")
#else
-- HBC version (stolen)
-- here's a more efficient version
unlines [] = []
unlines (l:ls) = l ++ '\n' : unlines ls
#endif
-- | 'words' breaks a string up into a list of words, which were delimited
-- by white space.
--
-- >>> words "Lorem ipsum\ndolor"
-- ["Lorem","ipsum","dolor"]
words :: String -> [String]
{-# NOINLINE [1] words #-}
words s = case dropWhile {-partain:Char.-}isSpace s of
"" -> []
s' -> w : words s''
where (w, s'') =
break {-partain:Char.-}isSpace s'
{-# RULES
"words" [~1] forall s . words s = build (\c n -> wordsFB c n s)
"wordsList" [1] wordsFB (:) [] = words
#-}
wordsFB :: ([Char] -> b -> b) -> b -> String -> b
{-# INLINE [0] wordsFB #-} -- See Note [Inline FB functions] in GHC.List
wordsFB c n = go
where
go s = case dropWhile isSpace s of
"" -> n
s' -> w `c` go s''
where (w, s'') = break isSpace s'
-- | 'unwords' is an inverse operation to 'words'.
-- It joins words with separating spaces.
--
-- >>> unwords ["Lorem", "ipsum", "dolor"]
-- "Lorem ipsum dolor"
unwords :: [String] -> String
#if defined(USE_REPORT_PRELUDE)
unwords [] = ""
unwords ws = foldr1 (\w s -> w ++ ' ':s) ws
#else
-- Here's a lazier version that can get the last element of a
-- _|_-terminated list.
{-# NOINLINE [1] unwords #-}
unwords [] = ""
unwords (w:ws) = w ++ go ws
where
go [] = ""
go (v:vs) = ' ' : (v ++ go vs)
-- In general, the foldr-based version is probably slightly worse
-- than the HBC version, because it adds an extra space and then takes
-- it back off again. But when it fuses, it reduces allocation. How much
-- depends entirely on the average word length--it's most effective when
-- the words are on the short side.
{-# RULES
"unwords" [~1] forall ws .
unwords ws = tailUnwords (foldr unwordsFB "" ws)
"unwordsList" [1] forall ws .
tailUnwords (foldr unwordsFB "" ws) = unwords ws
#-}
{-# INLINE [0] tailUnwords #-}
tailUnwords :: String -> String
tailUnwords [] = []
tailUnwords (_:xs) = xs
{-# INLINE [0] unwordsFB #-}
unwordsFB :: String -> String -> String
unwordsFB w r = ' ' : w ++ r
#endif
{- A "SnocBuilder" is a version of Chris Okasaki's banker's queue that supports
toListSB instead of uncons. In single-threaded use, its performance
characteristics are similar to John Hughes's functional difference lists, but
likely somewhat worse. In heavily persistent settings, however, it does much
better, because it takes advantage of sharing. The banker's queue guarantees
(amortized) O(1) snoc and O(1) uncons, meaning that we can think of toListSB as
an O(1) conversion to a list-like structure a constant factor slower than
normal lists--we pay the O(n) cost incrementally as we consume the list. Using
functional difference lists, on the other hand, we would have to pay the whole
cost up front for each output list. -}
{- We store a front list, a rear list, and the length of the queue. Because we
only snoc onto the queue and never uncons, we know it's time to rotate when the
length of the queue plus 1 is a power of 2. Note that we rely on the value of
the length field only for performance. In the unlikely event of overflow, the
performance will suffer but the semantics will remain correct. -}
data SnocBuilder a = SnocBuilder {-# UNPACK #-} !Word [a] [a]
{- Smart constructor that rotates the builder when lp is one less than a power of
2. Does not rotate very small builders because doing so is not worth the
trouble. The lp < 255 test goes first because the power-of-2 test gives awful
branch prediction for very small n (there are 5 powers of 2 between 1 and
16). Putting the well-predicted lp < 255 test first avoids branching on the
power-of-2 test until powers of 2 have become sufficiently rare to be predicted
well. -}
{-# INLINE sb #-}
sb :: Word -> [a] -> [a] -> SnocBuilder a
sb lp f r
| lp < 255 || (lp .&. (lp + 1)) /= 0 = SnocBuilder lp f r
| otherwise = SnocBuilder lp (f ++ reverse r) []
-- The empty builder
emptySB :: SnocBuilder a
emptySB = SnocBuilder 0 [] []
-- Add an element to the end of a queue.
snocSB :: SnocBuilder a -> a -> SnocBuilder a
snocSB (SnocBuilder lp f r) x = sb (lp + 1) f (x:r)
-- Convert a builder to a list
toListSB :: SnocBuilder a -> [a]
toListSB (SnocBuilder _ f r) = f ++ reverse r
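-- An illustrative use of the builder (added for clarity; not part of the
-- original source): elements come back out of 'toListSB' in the order in
-- which they were added with 'snocSB'.
--
-- >>> toListSB (snocSB (snocSB (snocSB emptySB 'a') 'b') 'c')
-- "abc"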
|
ezyang/ghc
|
libraries/base/Data/OldList.hs
|
bsd-3-clause
| 45,592 | 0 | 16 | 12,849 | 7,594 | 4,435 | 3,159 | 456 | 8 |
{-# OPTIONS -dcore-lint -fglasgow-exts #-}
-- Fails GHC 5.04.2 with -dcore-lint
-- The issue arises when you have a method that
-- constrains a class variable
module Test where
class C a where
f :: (Eq a) => a
instance C () where
f = f
|
hvr/jhc
|
regress/tests/1_typecheck/2_pass/ghc/tc165.hs
|
mit
| 248 | 0 | 8 | 58 | 46 | 27 | 19 | 6 | 0 |
{-| Module abstracting the node and instance container implementation.
This is currently implemented on top of an 'IntMap', which seems to
give the best performance for our workload.
-}
{-
Copyright (C) 2009, 2010, 2011 Google Inc.
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.
-}
module Ganeti.HTools.Container
( -- * Types
Container
, Key
-- * Creation
, IntMap.empty
, IntMap.singleton
, IntMap.fromList
-- * Query
, IntMap.size
, IntMap.null
, find
, IntMap.findMax
, IntMap.member
-- * Update
, add
, addTwo
, IntMap.map
, IntMap.mapAccum
, IntMap.filter
-- * Conversion
, IntMap.elems
, IntMap.keys
-- * Element functions
, nameOf
, findByName
) where
import qualified Data.IntMap as IntMap
import qualified Ganeti.HTools.Types as T
-- | Our key type.
type Key = IntMap.Key
-- | Our container type.
type Container = IntMap.IntMap
-- | Locate a key in the map (must exist).
find :: Key -> Container a -> a
find k = (IntMap.! k)
-- | Add or update one element to the map.
add :: Key -> a -> Container a -> Container a
add = IntMap.insert
-- | Add or update two elements of the map.
addTwo :: Key -> a -> Key -> a -> Container a -> Container a
addTwo k1 v1 k2 v2 = add k1 v1 . add k2 v2
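-- A small illustrative example (not part of the original module): build a
-- two-element container with 'addTwo' and look a value up with 'find'.
--
-- >>> find 2 (addTwo 1 "node-a" 2 "node-b" IntMap.empty)
-- "node-b"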
-- | Compute the name of an element in a container.
nameOf :: (T.Element a) => Container a -> Key -> String
nameOf c k = T.nameOf $ find k c
-- | Find an element by name in a Container; this is a very slow function.
findByName :: (T.Element a, Monad m) =>
Container a -> String -> m a
findByName c n =
let all_elems = IntMap.elems c
result = filter ((n `elem`) . T.allNames) all_elems
in case result of
[item] -> return item
_ -> fail $ "Wrong number of elems found with name " ++ n
|
vladimir-ipatov/ganeti
|
src/Ganeti/HTools/Container.hs
|
gpl-2.0
| 2,417 | 0 | 12 | 537 | 422 | 237 | 185 | 41 | 2 |
-- Used to present a consistent shell view for :! commands in GHCi
-- scripts. We're assuming that sh is in the path and that it
-- is a Bourne-compatible shell.
import System.Exit
import System.Process (rawSystem)
shell :: String -> IO ExitCode
shell s = rawSystem "sh" ["-c", s]
|
sdiehl/ghc
|
testsuite/tests/ghci/shell.hs
|
bsd-3-clause
| 284 | 0 | 6 | 52 | 50 | 28 | 22 | 4 | 1 |
module Main (main) where
import Control.Monad (forever)
import System.Environment (getArgs)
import qualified Data.Text as T
import Application
import Types
main :: IO ()
main = do
[jid, pass] <- getArgs
handler <- app
handler Ready
handler (UpdateAccount (T.pack jid) (T.pack pass))
forever (readLn >>= handler)
|
singpolyma/txtmpp
|
Main.hs
|
isc
| 320 | 0 | 12 | 53 | 127 | 68 | 59 | 13 | 1 |
module Named.Parser where
import Named.Data
import Text.ParserCombinators.Parsec
import Control.Applicative ((<*),(*>))
parseNamed = parse parseLambda "Lambda"
lexeme :: Parser a -> Parser a
lexeme a = a <* spaces
variable :: Parser String
variable = do
first <- letter
rest <- many (letter <|> digit)
return (first:rest)
variableExp :: Parser Expression
variableExp = Var <$> variable
call :: Parser Expression
call = chainl1 (lexeme parseExprExceptCall) (return Call)
func :: Parser Expression
func = do
char '\\' <|> char 'λ'
v <- lexeme variable
char '.'
spaces
e <- parseExprTotal
return $ Func v e
parseExprExceptCall :: Parser Expression
parseExprExceptCall =
func <|>
variableExp <|>
(between (char '(') (char ')') parseExprTotal)
parseExprTotal :: Parser Expression
parseExprTotal =
lexeme call <|> lexeme parseExprExceptCall
parseLambda :: Parser Expression
parseLambda = do
spaces
result <- parseExprTotal
return result
|
mcapodici/haskelllearn
|
firstlang/simple/src/Named/Parser.hs
|
mit
| 981 | 0 | 10 | 182 | 327 | 162 | 165 | 37 | 1 |
-- STM 1
module Account1 where
import System.IO
import Control.Concurrent.STM
type Account = TVar Int
withdraw :: Account -> Int -> STM ()
withdraw acc amount = do
bal <- readTVar acc
writeTVar acc (bal - amount)
deposit :: Account -> Int -> STM ()
deposit acc amount = withdraw acc (- amount)
transfer :: Account -> Account -> Int -> IO ()
transfer from to amount
= atomically (do deposit to amount
withdraw from amount)
showAccount :: Account -> IO Int
showAccount acc = atomically (readTVar acc)
main = do
from <- atomically (newTVar 200)
to <- atomically (newTVar 100)
transfer from to 50
v1 <- showAccount from
v2 <- showAccount to
putStrLn $ (show v1) ++ ", " ++ (show v2)
|
NickAger/LearningHaskell
|
ParallelConcurrent/2-BankAccount.hsproj/Account3.hs
|
mit
| 746 | 0 | 11 | 190 | 295 | 142 | 153 | 23 | 1 |
module Main where
import Data.List (foldl')
import Data.List.Split (splitOn)
import System.IO
import SimpleStatistics
getData :: IO [[Double]]
getData = do
input <- readFile "../data-raw.txt"
let input' = foldl' (\xs x -> if null x then xs else x:xs) [] (lines input)
parsed = map (map (read . drop 1 . dropWhile (/= ':')) . splitOn ",") input'
return parsed
getCrossedData :: IO [[Double]]
getCrossedData = do
input <- getData
let zipd = zip [1..] input
return [str | (x, str) <- zipd, x `mod` 2 == 0]
getNonData :: IO [[Double]]
getNonData = do
input <- getData
let zipd = zip [1..] input
return [str | (x, str) <- zipd, x `mod` 2 == 1]
getInfoAt i d = map (!! i) d
main :: IO ()
main = return ()
-- public static Tuple<int,int> collisionBounds = new Tuple<int, int>(-100, 100);
-- public static Tuple<float,float> greekBounds = new Tuple<float, float>(0.0f, 1.0f);
-- public static Tuple<int,int> popSizeBounds = new Tuple<int, int>(2, 10);
-- public static Tuple<int,int> subPathCountBounds = new Tuple<int, int>(2, 5);
-- public static Tuple<float,float> subPathLengthBounds = new Tuple<float, float>(0.5f, 4.0f);
-- public static Tuple<int,int> generationCountBounds = new Tuple<int, int>(1, 10);
-- public static Tuple<int,int> modeBounds = new Tuple<int, int>(1, 2);
-- public static Tuple<float,float> maxDeviationBounds = new Tuple<float, float>(0.5f, 5.0f);
-- public static Tuple<float,float> speedBounds = new Tuple<float, float>(0.5f, 2.75f);
|
cirquit/hycogen
|
plots/hycogen-plots/app/Main.hs
|
mit
| 1,619 | 0 | 17 | 383 | 369 | 198 | 171 | 24 | 2 |
triples = [(a, b, c) | c <- [1..10], a <- [1..c], b <- [1..a], a^2 + b^2 == c^2, a+b+c==24]
|
v0lkan/learning-haskell
|
session-002/triangle.hs
|
mit
| 93 | 0 | 11 | 22 | 97 | 52 | 45 | 1 | 1 |
longueur' xs = case xs of
[] -> 0
_:xs' -> 1 + longueur' xs'
|
Debaerdm/L3-MIAGE
|
Programmation Fonctionnel/TP/TP1/case.hs
|
mit
| 64 | 0 | 9 | 18 | 38 | 18 | 20 | 3 | 2 |
module Problem2 where
fibs = 1 : 2 : zipWith (+) fibs (tail fibs)
sumWhile :: (Int -> Bool) -> [Int] -> Int
sumWhile p xs = sum (takeWhile p xs)
problem2 :: IO ()
problem2 = print $ sumWhile (< 4000000) (filter even fibs)
|
Strikingwolf/project-euler
|
src/Problem2.hs
|
mit
| 225 | 0 | 8 | 48 | 112 | 60 | 52 | 6 | 1 |
{-# LANGUAGE NoImplicitPrelude #-}
import Prelude(Show, (.), Int, map)
class Functor f where
fmap :: (a -> b) -> f a -> f b
instance Functor [] where
fmap _ [] = []
fmap g (x:xs) = g x : fmap g xs
data Maybe a = Just a | Nothing
deriving (Show)
instance Functor Maybe where
fmap _ Nothing = Nothing
fmap g (Just x) = Just (g x)
data Either e a = Left e | Right a
deriving (Show)
instance Functor (Either e) where
fmap _ (Left x) = Left x
fmap g (Right x) = Right (g x)
instance Functor ((->) e) where
fmap = (.)
instance Functor ((,) e) where
fmap g (a, b) = (a, g b)
data Pair a = Pair a a
instance Functor Pair where
fmap g (Pair x y) = Pair (g x) (g y)
data ITree a = Leaf (Int -> a) | Node [ITree a]
instance Functor ITree where
fmap g (Node trees) = Node (map (fmap g) trees)
fmap g (Leaf h) = Leaf (g . h)
composeFunctors :: (Functor f0, Functor f1) => (a -> b) -> f0 (f1 a) -> f0 (f1 b)
composeFunctors = fmap . fmap
-- An example of a non-functor of type * -> * is ...
data K a = K (a -> Int)
-- Example of a functor that satisfies the 'function composition equivalence' functor law, but not the 'id' functor law.
data BogusFunctor a = Bog a | Us a deriving Show
instance Functor BogusFunctor where
-- This changes the context and therefore breaks the identity law, even though it still satisfies the composition law.
fmap g (Bog x) = Us (g x)
fmap g (Us x) = Us (g x)
-- Evil functor instance (left here as a comment, since a second
-- @instance Functor []@ would clash with the lawful instance above and the
-- module would not compile):
-- instance Functor [] where
--     fmap _ [] = []
--     fmap g (x:xs) = g x : g x : fmap g xs
-- The above functor instance violates both functor laws.
-- It violates the identity law because:
-- fmap id [1,2,3] => [1,1,2,2,3,3]
-- It violates the composition identity because:
-- fmap ((/ 2) . read) ["1", "2"] => [0.5, 0.5, 1.0, 1.0], but:
-- ((fmap (/2)) . (fmap read)) ["1", "2"] => [0.5, 0.5, 0.5, 0.5, 1.0, 1.0, 1.0, 1.0]
|
josiah14/typclassopedia-learning
|
functor.hs
|
mit
| 1,892 | 0 | 10 | 457 | 694 | 366 | 328 | 38 | 1 |
{-# htermination plusFM_C :: Ord a => (b -> b -> b) -> FiniteMap (Maybe a) b -> FiniteMap (Maybe a) b -> FiniteMap (Maybe a) b #-}
import FiniteMap
|
ComputationWithBoundedResources/ara-inference
|
doc/tpdb_trs/Haskell/full_haskell/FiniteMap_plusFM_C_9.hs
|
mit
| 148 | 0 | 3 | 30 | 5 | 3 | 2 | 1 | 0 |
module FromLambdaProlog (parseLP) where
import Prelude
import Data.String.Utils
import Data.Maybe
import TypeSystem
import Text.Parsec
import Text.Parsec.Expr
import Text.Parsec.String (Parser)
import Text.Parsec.Language (emptyDef)
import qualified Text.Parsec.Token as Token
type LogicalLine = (String, (Int, Int))
type ContextInfo = Maybe [(Int,[Int])]
contextTag = "% context"
errorHandlerTag = "% errorHandler"
contraTag = "% variance"
elimTag = "% eliminator"
invLabel = "INV"
covLabel = "COV"
contraLabel = "CONTRA"
myTags = [contextTag, errorHandlerTag, contraTag, elimTag, invLabel, covLabel, contraLabel]
startsWithMyTags line = any (\tag -> startswith tag line) myTags
defaultInfo :: [TypeEntry] -> Either VarianceInfo Int
defaultInfo entries = case (toStringTE (last entries)) of
"typ" -> (Left (replicate ((length entries)-1) Cov))
_ -> (Right 0)
-- Stores, per predicate, how many of its leading arguments are inputs: typeOf is input/output, while subtype is input/input.
modeTable :: [(String, Int)]
modeTable = [ ("typeOf", 1),
("subtype", 2)
]
lexer :: Token.TokenParser ()
lexer = Token.makeTokenParser style
where style = emptyDef {
Token.reservedNames = ["(pi x\\", "=> typeOf",":-",".","(",")","x","X","type","->", "[", "]"] ++ myTags}
parens :: Parser a -> Parser a
parens = Token.parens lexer
reserved :: String -> Parser ()
reserved = Token.reserved lexer
identifier :: Parser String
identifier = Token.identifier lexer
conId :: Parser String
conId = do { c <- lower
; cs <- many alphaNum
; return $ (c:cs) }
number :: Parser Int
number = do { cs <- many1 digit
; return $ read cs }
numberBracket :: Parser Int
numberBracket = do { cs <- many1 digit
; _ <- char '['
; return $ read cs }
-- Parser
{-
variable :: Parser Term
variable = Var `fmap` identifier
-}
-- (map (adjustByContra contraEntries) sig) is the adjusted sig.
-- (it also returns a pair (contexts, errorHandler))
tsParse :: Parser (TypeSystem, ([(String, [(Int, [Int])])] , [String]))
tsParse = do { sig <- many signature
; rules <- many ruleGram
; contexts <- many contextTags
; elimEntries <- many elimTags
; errorHandler <- many errorHandlerTags
; contraEntries <- many contravarTags
; let newsig = sig_insertElimininators elimEntries (sig_insertVariance contraEntries sig)
-- ; return $ (Ts newsig (rules ++ (completeRules (Ts newsig rules) contexts))) }
; return $ (Ts newsig rules, (contexts, errorHandler)) }
contextTags :: Parser (String, [(Int, [Int])])
contextTags = do {
; reserved contextTag
; c <- Token.lexeme lexer conId
; positions <- Token.commaSep1 lexer ctx_position
; reserved "."
; return (c, positions) }
errorHandlerTags :: Parser (String)
errorHandlerTags = do {
; reserved errorHandlerTag
; c <- Token.lexeme lexer conId
; reserved "."
; return c }
ctx_position :: Parser (Int, [Int])
ctx_position = do {
; pos <- numberBracket -- Token.lexeme lexer number
-- ; reserved "["
; values <- Token.commaSep lexer number
; reserved "]"
; return (pos, values) }
contravarTags :: Parser (String, VarianceInfo)
contravarTags = do {
; reserved contraTag
; c <- Token.lexeme lexer conId
; labels <- many contravarLabel
; reserved "."
; return (c, labels) }
contravarLabel :: Parser VarianceLabel
contravarLabel =
(do { reserved invLabel ; return Inv }) <|>
(do { reserved covLabel ; return Cov }) <|>
(do { reserved contraLabel ; return Contra })
elimTags :: Parser (String, ElimInfo)
elimTags = do {
; reserved elimTag
; c <- Token.lexeme lexer conId
; n <- Token.lexeme lexer number -- eliminating argument
; reserved "."
; return (c, n) }
ruleP :: Parser Rule
ruleP = do
myformula <- formula
reserved ":-"
prems <- Token.commaSep1 lexer premise
reserved "."
return (Rule prems myformula)
fact :: Parser Rule
fact = do
frm <- formula
reserved "."
return (Rule [] frm)
ruleGram :: Parser Rule
ruleGram = try fact <|> ruleP
formula :: Parser Premise
formula = do
pred <- Token.lexeme lexer conId
tterms <- many term
case lookup pred modeTable of { Nothing -> return (Formula pred [] tterms []) ; Just numero -> return (Formula pred [] (take numero tterms) (drop numero tterms))}
hypothetical :: Parser Premise
hypothetical = do
reserved "(pi x\\"
-- reserved "("
formula1 <- (formula)
reserved "=>"
formula2 <- (formula)
-- reserved ")"
reserved ")"
return (Hypothetical formula1 formula2)
generic :: Parser Premise
generic = do
reserved "(pi x\\"
mypremise <- (premise) -- notice: premise, not formula
reserved ")"
return (Generic mypremise)
premise :: Parser Premise
premise = try (generic) <|> (formula) <|> (hypothetical) -- <|> parens (formula pkg)
term :: Parser Term
--term pkg = try variable <|> boundVar <|> (constructor pkg) <|> application -- <|> (parens (term pkg))
term = try (parens (applicationBin)) <|> try (parens (applicationTri)) <|> boundVar <|> variable <|> (parens (constructor))
constructor :: Parser Term
constructor = do {
-- reserved "("
-- ;
c <- Token.lexeme lexer conId
; terms <- many (term)
-- ; reserved ")"
; return (Constructor c terms) }
applicationBin :: Parser Term
applicationBin = do {
-- reserved "("
-- ;
var <- variable
; myterm <- (term)--(boundVar <|> variable)
-- ; reserved ")"
; return (Application var myterm)}
applicationTri :: Parser Term
applicationTri = do {
-- reserved "("
-- ;
var <- variable
; term1 <- (term)--(boundVar <|> variable)
; term2 <- (term)--(boundVar <|> variable)
-- ; reserved ")"
; return (Application (Application var term1) term2)}
boundVar :: Parser Term
boundVar = do { reserved "x" ; return (Bound "x")} <|> do { reserved "X" ; return (Bound "X")}
variable :: Parser Term
variable = do
name <- Token.lexeme lexer varId
return (Var name)
varId :: Parser String
varId = do { c <- upper
; cs <- many (alphaNum <|> char '\'')
; return (c:cs) }
{-
withParens :: Parser Term
withParens = do
reserved "("
tterm <- term
reserved ")"
return tterm
-}
allOf :: Parser a -> Parser a
allOf p = do
(Token.whiteSpace lexer)
r <- p
eof
return r
parseLP :: [String] -> (TypeSystem, ([(String, [(Int, [Int])])] , [String]))
parseLP t = let stuffToParse = unlines (filter (not . parseGarbage) t) in
case parse (allOf tsParse) "stdin" stuffToParse of
Left err -> error (show err)
Right ast -> ast
parseGarbage :: String -> Bool
parseGarbage line =
startswith "sig" line
|| startswith "kind" line
|| startswith "module" line
|| startswith "\n" line
|| (startswith "%" line && (not (startsWithMyTags line)))
signature :: Parser SignatureEntry
signature = do
reserved "type"
c <- Token.lexeme lexer conId
entries <- sepBy typeEntry (do {Token.whiteSpace lexer; string "->"; Token.whiteSpace lexer})
reserved "."
return (Decl c (toStringTE (last entries)) (defaultInfo entries) (init entries))
typeEntry :: Parser TypeEntry
typeEntry = try simple <|> parens hoas
simple :: Parser TypeEntry
simple = do
item <- Token.lexeme lexer conId
return (Simple item)
hoas :: Parser TypeEntry
hoas = do
items <- sepBy (Token.lexeme lexer conId) (do {Token.whiteSpace lexer; string "->"; Token.whiteSpace lexer})
if (length items) == 2 then return (Abs (items !! 0) (items !! 1)) else error "Parsing Error: At the moment, The use of HOAS is restriced to only abstractions of the form (type1 -> type2) (i.e. only one abstracted argument) "
sig_insertVariance :: [(String, VarianceInfo)] -> Signature -> Signature
sig_insertVariance [] sig = sig
sig_insertVariance (entry:rest) sig =
case entry of (c, varianceLabels) -> case (searchDeclByName sig c) of { Nothing -> error ("ERROR: sig_insertVariance. not found: " ++ (show sig) ++ c) ; Just (Decl c1 typ info entries) -> (Decl c1 typ (Left varianceLabels) entries):(sig_insertVariance rest (deleteDeclByName sig c)) }
sig_insertElimininators :: [(String, ElimInfo)] -> Signature -> Signature
sig_insertElimininators [] sig = sig
sig_insertElimininators (entry:rest) sig =
case entry of (c, n) -> case (searchDeclByName sig c) of { Nothing -> error ("ERROR: sig_insertElimininators. not found: " ++ (show sig) ++ c) ; Just (Decl c1 typ info entries) -> (Decl c1 typ (Right n) entries):(sig_insertElimininators rest (deleteDeclByName sig c)) }
completeRules :: TypeSystem -> ([(String, [(Int, [Int])])] , [String]) -> [Rule]
completeRules (Ts sig rules) (contexts, errorHandlers) = (map fstOf3 tripleOfRulesContextAndContainsAndError)
++ [Rule [] (Formula "contains" [] [Var "E", Var "E"] [])] ++ (map sndOf3 tripleOfRulesContextAndContainsAndError)
++ if errorHandlers == [] then [] else [Rule [] (Formula "containsError" [] [Var "E", Var "E"] [])] ++ getRidOfNone (map thirdOf3 tripleOfRulesContextAndContainsAndError)
where
third triplette = snd (snd triplette)
tripleOfRulesContextAndContainsAndError = concat $ map contextForEach contexts
getValue n = Formula "value" [] [Var ("E" ++ show n)] []
toProgressStep n = Formula "step" [] [Var ("E" ++ show n)] [Var ("E" ++ show n ++ "'")]
toContains containPred n = Formula containPred [] [Var ("E" ++ show n)] [Var "E"]
contextForEach cAndLines = map (contextForEachLine (fst cAndLines)) (snd cAndLines)
contextForEachLine c ctxAndValues = case (searchDeclByName sig c) of
Nothing -> error ("ERROR: adjustByContext" ++ (show sig) ++ c)
Just (Decl c1 typ info entries) ->
let newvars = map (\n -> Var ("E" ++ (show n))) [1 .. (length entries)] in
let n = (fst ctxAndValues) in
(
Rule (toProgressStep n : (map getValue (snd ctxAndValues))) (Formula "step" [] [Constructor c newvars] [Constructor c (replaceAtIndex (n-1) (Var ("E" ++ show n ++ "'")) newvars)]),
Rule (toContains "contains" n : (map getValue (snd ctxAndValues))) (Formula "contains" [] [Constructor c newvars] [Var "E"]),
if (elem c1 errorHandlers) || errorHandlers == [] then Nothing else Just (Rule (toContains "containsError" n : (map getValue (snd ctxAndValues))) (Formula "containsError" [] [Constructor c newvars] [Var "E"]))
)
getRidOfNone :: [Maybe Rule] -> [Rule]
getRidOfNone maybelist = map fromJust $ filter isJust maybelist
|
mcimini/GradualizerDynamicSemantics
|
FromLambdaProlog.hs
|
mit
| 10,513 | 78 | 27 | 2,197 | 3,793 | 1,959 | 1,834 | 223 | 4 |
{-# LANGUAGE RecordWildCards #-}
module Game.GameWorld where
import Game.Position
import Game.Tile (Tile(..))
import Game.Unit
import qualified Data.Array as A
import Data.Maybe (catMaybes, fromMaybe)
import qualified Data.List as L
import Control.Applicative
import Data.Binary
type Map = A.Array (Int, Int) Tile
-- The game-map field is named gamemap (rather than map)
-- to avoid a name clash with Prelude.map
data GameWorld = GameWorld { gamemap :: Map
, units :: [[Unit]]
, turn :: Int
} deriving (Show)
instance Binary GameWorld where
put GameWorld{..} = do
put gamemap
put units
put turn
get = do
gamemap <- get
units <- get
turn <- get
return GameWorld{..}
initialGameWorld :: IO GameWorld
initialGameWorld = GameWorld
<$> return mappi
<*> initialUnits
<*> return 0
where
mappi = convertMap 12 12 testmap
testmap = concat [ "...o........"
, ".#####.Y...."
, ".#...#...o.."
, ".##.##.O..Y."
, ".,,.T......."
, ".,,,..Y....."
, "===,.....o.."
, ",,==........"
, ",,O==,..O..."
, ".,,======,,,"
, "....,,..===="
, ".Y....o....o" ]
-- | Creates an empty game map
blankMap :: Int -> Int -> Map
blankMap w h = A.listArray ((0,0), (h,w)) (repeat GrassTile)
-- | Converts a string into a game map
convertMap :: Int       -- ^ Map width
           -> Int       -- ^ Map height
           -> String    -- ^ Map as a string
-> Map
convertMap width height = A.listArray ((0,0), (width-1, height-1)) . map charToTile
where
charToTile '.' = GrassTile
charToTile '#' = BlockTile
charToTile 'T' = TreeTile
charToTile 'Y' = Tree2Tile
charToTile ',' = MudTile
charToTile 'o' = RockTile
charToTile 'O' = Rock2Tile
charToTile '=' = WaterTile
charToTile _ = GrassTile
initialUnits :: IO [[Unit]]
initialUnits = mapM sequence [[basicUnit "Matti" (0, 0) 0, basicUnit "Esko" (1, 0) 0, basicUnit "Jussi" (0, 1) 0],
[basicUnit "Yarr" (11, 11) 1, basicUnit "Yarhar" (10, 11) 1, basicUnit "Argh" (11, 10) 1]]
-- | Updates a unit in the game world
updateUnit :: GameWorld -> Unit -> GameWorld
updateUnit gw nu = setUnits gw [replaceUnit r | r <- units gw]
where replaceUnit row = [if u == nu then nu else u | u <- row]
-- | Removes a unit from the game world
removeUnit :: GameWorld -> Unit -> GameWorld
removeUnit gw ru = setUnits gw [filter (/= ru) r | r <- units gw]
setUnits :: GameWorld -> [[Unit]] -> GameWorld
setUnits (GameWorld m _ t) us = GameWorld m us t
animateUnits :: GameWorld -> GameWorld
animateUnits world = world { units = map (map animateUnit) (units world) }
where
animateUnit :: Unit -> Unit
animateUnit u = u { animFrame = (animFrame u + 1) `mod` maxFrames u }
-- | Finds a unit matching the given predicate
getUnit :: GameWorld -> (Unit -> Bool) -> Maybe Unit
getUnit world p = toMaybe $ filter p . concat $ units world
where
toMaybe :: [a] -> Maybe a
toMaybe (x:_) = Just x
toMaybe _ = Nothing
-- | Finds the unit at a given position on the map
getUnitAt :: GameWorld -> Position -> Maybe Unit
getUnitAt world pos = getUnit world (\u -> position u == pos)
-- | Checks whether the given coordinate is on the game map
insideMap :: Map -> Position -> Bool
insideMap gmap = A.inRange (A.bounds gmap)
-- | Returns the tile the given unit is standing on
getUnitTile :: GameWorld -> Unit -> Tile
getUnitTile gw u = gamemap gw A.! position u
-- | Returns the units that are one move away from the given unit
getAdjUnits :: GameWorld -> Unit -> [Unit]
getAdjUnits gw u = catMaybes [getUnitAt gw p | p <- getAdjPositions gw (position u)]
getAdjPositions :: GameWorld -> Position -> [Position]
getAdjPositions gw pos = [(x, y) | (y,x) <- adjs pos, isBetween minX maxX x && isBetween minY maxY y]
where
((minX, minY), (maxX, maxY)) = A.bounds $ gamemap gw
adjs (y,x) = [(y+y',x+x') | y' <- [-1..1], x' <- [-1..1], (x',y') /= (0,0)]
isBetween :: Ord a => a -> a -> a -> Bool
isBetween lower upper x = x >= lower && x <= upper
-- | Resets each unit's action points (AP) to its default value
resetAps :: GameWorld -> GameWorld
resetAps gw = gw { units = map (\team -> [getAp u | u <- team]) (units gw) }
where
getAp u = u { ap = 10 + traitApBonus (traits u) }
traitApBonus ts = sum [fromMaybe 0 (traitAp t) | t <- ts]
|
maqqr/psycho-bongo-fight
|
Game/GameWorld.hs
|
mit
| 4,664 | 0 | 12 | 1,302 | 1,517 | 815 | 702 | 96 | 9 |
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE OverloadedStrings #-}
module Main where
------------------------------------------------------------------------------
import Control.Exception (SomeException, try)
import qualified Data.Text as T
import Snap.Http.Server
import Snap.Snaplet
import Snap.Snaplet.Config
import Snap.Core
import System.IO
import Control.Applicative
import Control.Lens
import Data.ByteString (ByteString)
import Snap.Snaplet.Heist
import Snap.Snaplet.Jamelgo
import Snap.Util.FileServe
import Heist
import qualified Heist.Interpreted as I
data App = App
{ _heist :: Snaplet (Heist App)
, _jamelgo :: Snaplet Jamelgo
}
$(makeLenses ''App)
instance HasHeist App where
heistLens = subSnaplet heist
main :: IO ()
main = serveSnaplet defaultConfig app
where
routes :: [(ByteString, Handler App App ())]
routes = [ ("", serveDirectory "static")
]
app :: SnapletInit App App
app = makeSnaplet "app" "An snaplet example application." Nothing $ do
h <- nestSnaplet "" heist $ heistInit "templates"
j <- nestSnaplet "" jamelgo $ jamelgoInit
addRoutes routes
return $ App h j
|
danidiaz/jamelgo
|
src/Main.hs
|
mit
| 1,350 | 0 | 12 | 371 | 314 | 175 | 139 | 35 | 1 |
{- |
Module : Language.Egison.Data.Utils
Licence : MIT
This module provides some helper functions for handling Egison data.
-}
module Language.Egison.Data.Utils
( evalRef
, evalObj
, writeObjectRef
, newEvaluatedObjectRef
, tupleToRefs
, tupleToListWHNF
, tupleToList
, makeTuple
, makeITuple
, pmIndices
, updateHash
) where
import Control.Monad
import Control.Monad.State (liftIO)
import Control.Monad.Except (throwError)
import Data.IORef
import qualified Data.HashMap.Lazy as HL
import Language.Egison.Data
import Language.Egison.IExpr
evalRef :: ObjectRef -> EvalM WHNFData
evalRef ref = do
obj <- liftIO $ readIORef ref
case obj of
WHNF val -> return val
Thunk thunk -> do
val <- thunk
writeObjectRef ref val
return val
evalObj :: Object -> EvalM WHNFData
evalObj (WHNF val) = return val
evalObj (Thunk thunk) = thunk
writeObjectRef :: ObjectRef -> WHNFData -> EvalM ()
writeObjectRef ref val = liftIO . writeIORef ref $ WHNF val
newEvaluatedObjectRef :: WHNFData -> EvalM ObjectRef
newEvaluatedObjectRef = liftIO . newIORef . WHNF
tupleToRefs :: WHNFData -> EvalM [ObjectRef]
tupleToRefs (ITuple refs) = return refs
tupleToRefs (Value (Tuple vals)) = mapM (newEvaluatedObjectRef . Value) vals
tupleToRefs whnf = return <$> newEvaluatedObjectRef whnf
tupleToListWHNF :: WHNFData -> EvalM [WHNFData]
tupleToListWHNF (ITuple refs) = mapM evalRef refs
tupleToListWHNF (Value (Tuple vals)) = return $ map Value vals
tupleToListWHNF whnf = return [whnf]
tupleToList :: EgisonValue -> [EgisonValue]
tupleToList (Tuple vals) = vals
tupleToList val = [val]
makeTuple :: [EgisonValue] -> EgisonValue
makeTuple [] = Tuple []
makeTuple [x] = x
makeTuple xs = Tuple xs
makeITuple :: [WHNFData] -> EvalM WHNFData
makeITuple [] = return (ITuple [])
makeITuple [x] = return x
makeITuple xs = ITuple <$> mapM newEvaluatedObjectRef xs
pmIndices :: [Index (Maybe Var)] -> [Index EgisonValue] -> EvalM [Binding]
pmIndices [] [] = return []
pmIndices (MultiSub (Just a) s (Just e):xs) vs = do
let (vs1, vs2) = span isSub vs
let l = fromIntegral (length vs1)
eRef <- newEvaluatedObjectRef (Value (toEgison l))
let hash = (IIntHash HL.empty)
hash <- foldM (\hash (i, v) -> updateHash [i] v hash) hash (zip [s..(s + l - 1)] (map (\(Sub v) -> Value v) vs1))
aRef <- newEvaluatedObjectRef hash
bs <- pmIndices xs vs2
return ((a, aRef) : (e, eRef) : bs)
where
isSub (Sub _) = True
isSub _ = False
pmIndices (MultiSup (Just a) s (Just e):xs) vs = do
let (vs1, vs2) = span isSup vs
let l = fromIntegral (length vs1)
eRef <- newEvaluatedObjectRef (Value (toEgison l))
let hash = (IIntHash HL.empty)
hash <- foldM (\hash (i, v) -> updateHash [i] v hash) hash (zip [s..(s + l - 1)] (map (\(Sup v) -> Value v) vs1))
aRef <- newEvaluatedObjectRef hash
bs <- pmIndices xs vs2
return ((a, aRef) : (e, eRef) : bs)
where
isSup (Sup _) = True
isSup _ = False
pmIndices (x:xs) (v:vs) = do
bs <- pmIndex x v
bs' <- pmIndices xs vs
return (bs ++ bs')
pmIndices _ _ = throwErrorWithTrace InconsistentTensorIndex
pmIndex :: Index (Maybe Var) -> Index EgisonValue -> EvalM [Binding]
pmIndex (Sub (Just var)) (Sub val) = do
ref <- newEvaluatedObjectRef (Value val)
return [(var, ref)]
pmIndex (Sup (Just var)) (Sup val) = do
ref <- newEvaluatedObjectRef (Value val)
return [(var, ref)]
pmIndex _ _ = throwErrorWithTrace InconsistentTensorIndex
updateHash :: [Integer] -> WHNFData -> WHNFData -> EvalM WHNFData
updateHash [index] tgt (IIntHash hash) = do
targetRef <- newEvaluatedObjectRef tgt
return . IIntHash $ HL.insert index targetRef hash
updateHash (index:indices) tgt (IIntHash hash) = do
val <- maybe (return $ IIntHash HL.empty) evalRef $ HL.lookup index hash
ref <- updateHash indices tgt val >>= newEvaluatedObjectRef
return . IIntHash $ HL.insert index ref hash
updateHash indices tgt (Value (IntHash hash)) = do
let keys = HL.keys hash
vals <- mapM (newEvaluatedObjectRef . Value) $ HL.elems hash
updateHash indices tgt (IIntHash $ HL.fromList $ zip keys vals)
updateHash _ _ v = throwError $ Default $ "expected hash value: " ++ show v
|
egison/egison
|
hs-src/Language/Egison/Data/Utils.hs
|
mit
| 4,328 | 0 | 16 | 956 | 1,738 | 866 | 872 | 104 | 3 |
{-# LANGUAGE TypeSynonymInstances, FlexibleInstances #-}
module IHaskell.Display.Blaze () where
import IHaskell.Display
import Text.Printf
import Text.Blaze.Html
import Text.Blaze.Renderer.Pretty
import Text.Blaze.Internal
import Control.Monad
instance IHaskellDisplay (MarkupM a) where
display val = return $ Display [stringDisplay, htmlDisplay]
where
str = renderMarkup (void val)
stringDisplay = plain str
htmlDisplay = html str
|
aostiles/LiveHaskell
|
ihaskell-display/ihaskell-blaze/IHaskell/Display/Blaze.hs
|
mit
| 460 | 0 | 10 | 76 | 112 | 64 | 48 | 13 | 0 |
{-# OPTIONS_GHC -fno-warn-orphans #-}
module Graphics.Urho3D.Graphics.Skeleton(
Skeleton
, Bone(..)
, BoneCollisionShape(..)
, BoneCollisionShapeFlags
, HasName(..)
, HasParentIndex(..)
, HasInitialPosition(..)
, HasInitialRotation(..)
, HasInitialScale(..)
, HasOffsetMatrix(..)
, HasAnimated(..)
, HasCollisionMask(..)
, HasRadius(..)
, HasBoundingBox(..)
, HasNode(..)
, skeletonContext
) where
import qualified Language.C.Inline as C
import qualified Language.C.Inline.Cpp as C
import Data.Monoid
import Foreign
import Foreign.C.String
import Graphics.Urho3D.Core.Context
import Graphics.Urho3D.Creatable
import Graphics.Urho3D.Monad
import Text.RawString.QQ
import Graphics.Urho3D.Graphics.Internal.Skeleton
import Graphics.Urho3D.Container.FlagSet
import Graphics.Urho3D.Container.Ptr
import Graphics.Urho3D.Math.BoundingBox
import Graphics.Urho3D.Math.Matrix3x4
import Graphics.Urho3D.Math.Quaternion
import Graphics.Urho3D.Math.Vector3
import Graphics.Urho3D.Scene.Node
C.context (C.cppCtx
<> skeletonCntx
<> contextContext
<> vector3Context
<> quaternionContext
<> matrix3x4Context
<> boundingBoxContext
<> nodeContext)
C.include "<Urho3D/Graphics/Skeleton.h>"
C.using "namespace Urho3D"
C.verbatim "typedef WeakPtr<Node> WeakNode;"
skeletonContext :: C.Context
skeletonContext = skeletonCntx
instance Creatable (Ptr Skeleton) where
type CreationOptions (Ptr Skeleton) = ()
newObject _ = liftIO $ [C.exp| Skeleton* { new Skeleton() } |]
deleteObject ptr = liftIO $ [C.exp| void { delete $(Skeleton* ptr) } |]
C.verbatim [r|
template <class T>
class Traits
{
public:
struct AlignmentFinder
{
char a;
T b;
};
enum {AlignmentOf = sizeof(AlignmentFinder) - sizeof(T)};
};
|]
instance Storable Bone where
sizeOf _ = fromIntegral $ [C.pure| int { (int)sizeof(Bone) } |]
alignment _ = fromIntegral $ [C.pure| int { (int)Traits<Bone>::AlignmentOf } |]
peek ptr = do
_boneName <- peekCString =<< [C.exp| const char* { $(Bone* ptr)->name_.CString() } |]
_boneParentIndex <- fromIntegral <$> [C.exp| unsigned int { $(Bone* ptr)->parentIndex_} |]
_boneInitialPosition <- peek =<< [C.exp| Vector3* { &$(Bone* ptr)->initialPosition_} |]
_boneInitialRotation <- peek =<< [C.exp| Quaternion* { &$(Bone* ptr)->initialRotation_} |]
_boneInitialScale <- peek =<< [C.exp| Vector3* { &$(Bone* ptr)->initialScale_} |]
_boneOffsetMatrix <- peek =<< [C.exp| Matrix3x4* { &$(Bone* ptr)->offsetMatrix_} |]
_boneAnimated <- toBool <$> [C.exp| int { (int)$(Bone* ptr)->animated_} |]
_boneCollisionMask <- FlagSet . fromIntegral <$> [C.exp| unsigned char { (unsigned char)$(Bone* ptr)->collisionMask_} |]
_boneRadius <- realToFrac <$> [C.exp| float { $(Bone* ptr)->radius_} |]
_boneBoundingBox <- peek =<< [C.exp| BoundingBox* { &$(Bone* ptr)->boundingBox_} |]
_boneNode <- peekWeakPtr =<< [C.exp| WeakNode* { new WeakPtr<Node>($(Bone* ptr)->node_)} |]
return $ Bone {..}
poke ptr Bone{..} =
withCString _boneName $ \_boneName' ->
withObject _boneName $ \_boneNameHash' ->
with _boneInitialPosition $ \_boneInitialPosition' ->
with _boneInitialRotation $ \_boneInitialRotation' ->
with _boneInitialScale $ \_boneInitialScale' ->
with _boneOffsetMatrix $ \_boneOffsetMatrix' ->
with _boneBoundingBox $ \_boneBoundingBox' -> do
let _boneParentIndex' = fromIntegral _boneParentIndex
_boneAnimated' = fromBool _boneAnimated
_boneRadius' = realToFrac _boneRadius
_boneNode' = parentPointer _boneNode
_boneCollisionMask' = fromIntegral . unFlagSet $ _boneCollisionMask
[C.block| void {
$(Bone* ptr)->name_ = String($(const char* _boneName'));
$(Bone* ptr)->nameHash_ = *$(StringHash* _boneNameHash');
$(Bone* ptr)->parentIndex_ = $(unsigned int _boneParentIndex');
$(Bone* ptr)->initialPosition_ = *$(Vector3* _boneInitialPosition');
$(Bone* ptr)->initialRotation_ = *$(Quaternion* _boneInitialRotation');
$(Bone* ptr)->initialScale_ = *$(Vector3* _boneInitialScale');
$(Bone* ptr)->offsetMatrix_ = *$(Matrix3x4* _boneOffsetMatrix');
$(Bone* ptr)->animated_ = $(int _boneAnimated') != 0;
$(Bone* ptr)->collisionMask_ = BoneCollisionShapeFlags($(unsigned char _boneCollisionMask'));
$(Bone* ptr)->radius_ = $(float _boneRadius');
$(Bone* ptr)->boundingBox_ = *$(BoundingBox* _boneBoundingBox');
$(Bone* ptr)->node_ = WeakPtr<Node>($(Node* _boneNode'));
} |]
|
Teaspot-Studio/Urho3D-Haskell
|
src/Graphics/Urho3D/Graphics/Skeleton.hs
|
mit
| 4,567 | 0 | 26 | 793 | 795 | 470 | 325 | -1 | -1 |
module Main where
import qualified Scripting.Fuligite.REPL as REPL
import Scripting.Fuligite.RunFile
import System.Environment (getArgs)
-- | Main function: check whether we've been supplied a file;
-- if so, run it, otherwise drop into the REPL
main :: IO ()
main = do
args <- getArgs
if null args
then REPL.main
else runFile $ head args
--runFile :: String -> IO ()
--runFile fileName = do
-- result <- parseFromFile objectfile fileName
-- case result of
-- Right props -> do
-- runPropFile props
-- Left err -> print err
--
--runObjectFile :: Object -> IO ()
--
--runPropFile :: [(ObjKey, Expr)] -> IO ()
--runPropFile props = do
-- -- set up the environment
-- let env = foldr
-- (\(key,expr) acc -> Map.insert key expr acc)
-- defaultEnv props
--
-- let mMain = Map.lookup (StrKey "main") env
-- case mMain of
-- Just expr -> do
-- let eResult = runStateT (eval expr) (makeEvalState env mkObj)
-- case eResult of
-- Left err -> print err
-- Right (expr',st) -> do
-- print $ maybe "" id (getStdOut st)
--
-- Nothing -> print "No main"
--
--getStdOut :: EvalState -> Maybe String
--getStdOut st = do
-- let env = getEnv st
-- expr <- case env of
-- [] -> Nothing
-- top:_ -> Map.lookup (StrKey "stdout") top
-- lit <- case expr of
-- Lit l -> Just l
-- _ -> Nothing
-- return $ toString lit
|
tomjefferys/fuligite
|
src/Main.hs
|
mit
| 1,494 | 0 | 9 | 452 | 114 | 82 | 32 | 10 | 2 |
module Main where
import System.Environment
main :: IO ()
main = do
args <- getArgs
let a = read (args !! 0)
putStrLn (show (a ^ 3))
|
mmwtsn/write-yourself-a-scheme
|
01-first-steps/cube.hs
|
mit
| 146 | 0 | 12 | 40 | 69 | 35 | 34 | 7 | 1 |
{-# LANGUAGE OverloadedStrings #-}
module Data.BlockChain.Block.Blocks where
import Control.Arrow ((&&&))
import Data.Aeson
import Data.Maybe (fromJust)
import Network.HTTP.Conduit
-- available from the @1HaskellADay git repository
import qualified Data.BlockChain.Block.Summary as Smy
import Data.BlockChain.Block.Transactions (Transaction)
import qualified Data.BlockChain.Block.Transactions as Txn
import Data.BlockChain.Block.Types
import Data.Tree.Merkle
{--
There is a reason the summary is called the summary, for, when we download the
entire block we get:
-rw-r--r-- 1 geophf staff 4020041 Sep 6 00:55 lateblk.json
-rw-r--r-- 1 geophf staff 12763 Sep 6 00:40 latesum.json
or, put another way, the latest block summary is 12k, but the entire block,
with all its transactions is 4M!
Eep!
Let's just deal with blocks over the wire for now, instead of putting 4 meg
into this git repository with one push.
From the above Summary import, we know how to load the latest block summary.
Do that, get the hash for the block, and from the hash, download the entire
block into memory and extract the list of transactions.
--}
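-- A sketch of the flow described above (added for illustration; it assumes
-- network access and uses 'Smy.latestSummary' and 'Smy.blockHash', the same
-- functions the GHCi transcript further down relies on):
--
-- latestTransactions :: IO [Transaction]
-- latestTransactions = do
--   smmy <- Smy.latestSummary
--   blk  <- readBlock rawBlockURL (Smy.blockHash smmy)
--   return (tx blk)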
data Block = Block { blockhash :: Hash, ver :: Integer, prevBlock :: String,
merkleRoot :: String, time, bits, fee, nonce :: Integer,
nTx, size :: Int, blockIdx :: Integer, mainChain :: Bool,
height, receivedTime :: Integer, relayedBy :: String,
tx :: [Transaction] }
deriving (Eq, Ord, Show)
instance Sheesh Block where
hash = blockhash
instance FromJSON Block where
parseJSON (Object o) = Block <$> o .: "hash" <*> o .: "ver"
<*> o .: "prev_block" <*> o .: "mrkl_root"
<*> o .: "time" <*> o .: "bits"
<*> o .: "fee" <*> o .: "nonce"
<*> o .: "n_tx" <*> o .: "size"
<*> o .: "block_index" <*> o .: "main_chain"
<*> o .: "height" <*> o .: "received_time"
<*> o .: "relayed_by" <*> o .: "tx"
rawBlockURL :: FilePath
rawBlockURL = "https://blockchain.info/rawblock/"
readBlock :: FilePath -> Hash -> IO Block
readBlock url hash = fmap (fromJust . decode) $ simpleHttp (url ++ hash)
-- hint: look at the Summary import above to see how to read in the latest summary, then
-- look at how to extract the block hash from the summary report.
{--
*Y2016.M09.D06.Solution> Smy.latestSummary ~> smmy
*Y2016.M09.D06.Solution> readBlock rawBlockURL (Smy.blockHash smmy)
How many transactions are there in this block? What is the average size of
the transactions?
For block (hash: "0000000000000000039eb091b53b88042b9dc578285604914f6837c470c075df"):
*Y2016.M09.D06.Solution> length (tx blk) ~> 191
*Y2016.M09.D06.Solution> Txn.transactionsMeanSize (tx blk) ~> 5211
-- We should be able to link the transaction in this full block with the
summary block transaction index ... we'll get to doing that another day.
--}
-- From a set of hashes we want to fetch the blocks:
fetchBlocks :: [Hash] -> IO [Block]
fetchBlocks = mapM (readBlock rawBlockURL)
-- Now we want a specific kind of Merkle leaf. As we already have the hash,
-- we use the block hash for the leaf hash
mkBlockLeaf :: Block -> Leaf Block
mkBlockLeaf = uncurry Leaf . (blockhash &&& id)
|
geophf/1HaskellADay
|
exercises/HAD/Data/BlockChain/Block/Blocks.hs
|
mit
| 3,434 | 0 | 37 | 864 | 484 | 284 | 200 | 36 | 1 |
module Intro where
-- Exercise 1
toDigits :: Integer -> [Integer]
toDigits n = splitDigits n []
where
splitDigits num acc
| num <= 0 = acc
| otherwise = splitDigits next (digit:acc)
where
digit = num `mod` 10
next = num `div` 10
toDigitsRev :: Integer -> [Integer]
toDigitsRev = reverse . toDigits
-- Exercise 2
doubleEveryOther :: [Integer] -> [Integer]
doubleEveryOther = double [] False
where
double acc _ [] = acc
double acc True (x:xs) = double ((2*x):acc) False xs
double acc False (x:xs) = double (x:acc) True xs
-- Exercise 3
sumDigits :: [Integer] -> Integer
sumDigits = sum . concatMap toDigits
-- Exercise 4
validate :: Integer -> Bool
validate n = sum' `mod` 10 == 0
where
sum' = sumDigits $ doubleEveryOther $ toDigitsRev n
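-- For example (added as an illustrative check; 4012888888881881 is a
-- commonly used valid test number):
--
-- >>> validate 4012888888881881
-- True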
-- Exercise 5
type Peg = String
type Move = (Peg, Peg)
hanoi :: Integer -> Peg -> Peg -> Peg -> [Move]
hanoi 0 _ _ _ = []
hanoi n source dest int =
hanoi (n - 1) source int dest ++ [(source, dest)] ++ hanoi (n - 1) int dest source
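-- For example (added for illustration), moving two discs from peg "a" to
-- peg "b" with "c" as temporary storage:
--
-- >>> hanoi 2 "a" "b" "c"
-- [("a","c"),("a","b"),("c","b")]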
|
slogsdon/haskell-exercises
|
cis194/Intro.hs
|
mit
| 1,047 | 0 | 11 | 264 | 430 | 233 | 197 | 26 | 3 |
{-# LANGUAGE OverloadedStrings #-}
module Main where
import System.ZMQ4.Monadic
import ZHelpers (dumpSock)
main :: IO ()
main =
runZMQ $ do
sink <- socket Router
bind sink "inproc://example"
anonymous <- socket Req
connect anonymous "inproc://example"
send anonymous [] "ROUTER uses a generated 5 byte identity"
dumpSock sink
identified <- socket Req
setIdentity (restrict "PEER2") identified
connect identified "inproc://example"
send identified [] "ROUTER socket uses REQ's socket identity"
dumpSock sink
|
soscpd/bee
|
root/tests/zguide/examples/Haskell/identity.hs
|
mit
| 632 | 0 | 10 | 194 | 142 | 64 | 78 | 18 | 1 |
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE ExistentialQuantification #-}
-- |
-- Stability: experimental
module Test.Hspec.Core.Format (
Format
, FormatConfig(..)
, Event(..)
, Progress
, Path
, Location(..)
, Seconds(..)
, Item(..)
, Result(..)
, FailureReason(..)
, monadic
) where
import Prelude ()
import Test.Hspec.Core.Compat
import Control.Exception
import Control.Concurrent
import Control.Concurrent.Async (async)
import qualified Control.Concurrent.Async as Async
import Control.Monad.IO.Class
import Test.Hspec.Core.Example (Progress, Location(..), FailureReason(..))
import Test.Hspec.Core.Util (Path)
import Test.Hspec.Core.Clock (Seconds(..))
type Format = Event -> IO ()
data Item = Item {
itemLocation :: Maybe Location
, itemDuration :: Seconds
, itemInfo :: String
, itemResult :: Result
} deriving Show
data Result =
Success
| Pending (Maybe Location) (Maybe String)
| Failure (Maybe Location) FailureReason
deriving Show
data Event =
Started
| GroupStarted Path
| GroupDone Path
| Progress Path Progress
| ItemStarted Path
| ItemDone Path Item
| Done [(Path, Item)]
deriving Show
data FormatConfig = FormatConfig {
formatConfigUseColor :: Bool
, formatConfigOutputUnicode :: Bool
, formatConfigUseDiff :: Bool
, formatConfigPrettyPrint :: Bool
, formatConfigPrintTimes :: Bool
, formatConfigHtmlOutput :: Bool
, formatConfigPrintCpuTime :: Bool
, formatConfigUsedSeed :: Integer
, formatConfigExpectedTotalCount :: Int
} deriving (Eq, Show)
data Signal = Ok | NotOk SomeException
monadic :: MonadIO m => (m () -> IO ()) -> (Event -> m ()) -> IO Format
monadic run format = do
mvar <- newEmptyMVar
done <- newEmptyMVar
let
putEvent :: Event -> IO ()
putEvent = putMVar mvar
takeEvent :: MonadIO m => m Event
takeEvent = liftIO $ takeMVar mvar
signal :: MonadIO m => Signal -> m ()
signal = liftIO . putMVar done
wait :: IO Signal
wait = takeMVar done
go = do
event <- takeEvent
format event
case event of
Done {} -> return ()
_ -> do
signal Ok
go
t <- async $ do
(run go >> signal Ok) `catch` (signal . NotOk)
return $ \ event -> do
running <- Async.poll t
case running of
Just _ -> return ()
Nothing -> do
putEvent event
r <- wait
case r of
Ok -> return ()
NotOk err -> do
Async.wait t
throwIO err
|
hspec/hspec
|
hspec-core/src/Test/Hspec/Core/Format.hs
|
mit
| 2,549 | 0 | 22 | 681 | 792 | 440 | 352 | 92 | 4 |
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
module Lab5 where
import Control.Monad
import Control.Concurrent
import Control.Monad.Cont
import Control.Monad.Trans
data Action
= Atom (IO Action)
| Fork Action Action
| Stop
data Concurrent a = Concurrent ((a -> Action) -> Action)
instance Show Action where
show (Atom x) = "atom"
show (Fork x y) = "fork " ++ show x ++ " " ++ show y
show Stop = "stop"
-- ===================================
-- Ex. 0
-- ===================================
fromConcurrent :: Concurrent a -> ((a -> Action) -> Action)
fromConcurrent (Concurrent a) = a
action :: Concurrent a -> Action
action a = fromConcurrent a (const Stop)
-- ===================================
-- Ex. 1
-- ===================================
stop :: Concurrent a
stop = Concurrent (\_ -> Stop)
-- ===================================
-- Ex. 2
-- ===================================
atom :: IO a -> Concurrent a
atom a = Concurrent (\k -> Atom (
do
x <- a
return (k x)
))
-- ===================================
-- Ex. 3
-- ===================================
fork :: Concurrent a -> Concurrent ()
fork a = Concurrent (\k -> Fork (action a) (k ()))
par :: Concurrent a -> Concurrent a -> Concurrent a
par a1 a2 = Concurrent (\k -> Fork (fromConcurrent a1 k) (fromConcurrent a2 k))
-- ===================================
-- Ex. 4
-- ===================================
--instance Monad Concurrent where
-- (Concurrent f) >>= g = Concurrent (\k -> f (\x -> fromConcurrent (g x) k))
-- return x = Concurrent (\c -> c x)
instance Monad Concurrent where
f >>= g = Concurrent (\k -> (fromConcurrent f (\x -> (fromConcurrent (g x) k))))
return x = Concurrent (\c -> c x)
-- ===================================
-- Ex. 5
-- ===================================
roundRobin :: [Action] -> IO ()
roundRobin [] = return ()
roundRobin (Atom m : as) = do
a' <- m
roundRobin $ as ++ [a']
roundRobin (Fork a1 a2 : as) = roundRobin $ as ++ [a1,a2]
roundRobin (Stop : as) = roundRobin as
-- ===================================
-- Tests
-- ===================================
ex0 :: Concurrent ()
ex0 = par (loop (genRandom 1337)) (loop (genRandom 2600) >> atom (putStrLn ""))
ex1 :: Concurrent ()
ex1 = do atom (putStr "Haskell")
fork (loop $ genRandom 7331)
loop $ genRandom 42
atom (putStrLn "")
-- ===================================
-- Helper Functions
-- ===================================
run :: Concurrent a -> IO ()
run x = roundRobin [action x]
genRandom :: Int -> [Int]
genRandom 1337 = [1, 96, 36, 11, 42, 47, 9, 1, 62, 73]
genRandom 7331 = [17, 73, 92, 36, 22, 72, 19, 35, 6, 74]
genRandom 2600 = [83, 98, 35, 84, 44, 61, 54, 35, 83, 9]
genRandom 42 = [71, 71, 17, 14, 16, 91, 18, 71, 58, 75]
loop :: [Int] -> Concurrent ()
loop xs = mapM_ (atom . putStr . show) xs
|
AlexMckey/FP101x-ItFP_Haskell
|
Sources/my_pmctemplate.hs
|
cc0-1.0
| 3,075 | 0 | 16 | 769 | 1,006 | 540 | 466 | 56 | 1 |
import Control.Applicative
import Control.Monad
import qualified Data.ByteString.Char8 as BS
import qualified Data.List as L
import qualified Data.Map as M
import Data.Maybe
import qualified Data.Set as S
import qualified Data.Tuple as T
addToSet :: (Num a, Ord a) => S.Set (a, Int) -> [(Int, a)] -> S.Set (a, Int)
addToSet set xs = L.foldl' (\ acc x -> S.insert (T.swap x) acc) set xs
addToMap :: Ord k => M.Map k a -> [(k, a)] -> M.Map k a
addToMap m xs = L.foldl' (\ acc (x, d) -> M.insert x d acc) m xs
-- | Dijkstra's algorithm in O(n log^2 n), using a set as the priority queue
dijkstra :: (Num a, Ord a) =>
M.Map Int [(Int, a)] -- ^ graph structure
-> Int -- ^ source node
-> M.Map Int a -- ^ result
dijkstra graph source = dijkstra' graph (S.singleton (0, source))
(M.singleton source 0)
dijkstra' :: (Ord t, Num t) =>
M.Map Int [(Int, t)] -- ^ graph structure
-> S.Set (t, Int) -- ^ priority queue
-> M.Map Int t
-> M.Map Int t
dijkstra' graph pq res
| S.null pq = res
| otherwise = dijkstra' graph (addToSet pq' info) (addToMap res info)
where
fwd = M.findWithDefault
((dis, node), pq') = S.deleteFindMin pq
nodes = fwd [] node graph
update = filter (\ (x, e) -> (fwd (-1) x res) == (-1) ||
dis + e < (fwd (-1) x res)) nodes
info = map (\ (x, e) -> (x, dis + e)) update
buildGraph :: Ord t => [(t, t1, t2)] -> M.Map t [(t1, t2)]
buildGraph edges =
let g = L.groupBy (\ (x, _, _) (y, _, _) -> x == y) edges
in M.fromList (map (\ x -> (f1 $ head x, map (\ (_, y, z) -> (y, z)) x)) g)
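-- Illustrative example (added, not part of the original solution):
-- dijkstra (buildGraph [(1,2,7), (1,3,2), (3,2,1)]) 1
--   == M.fromList [(1,0), (2,3), (3,2)]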
readInt' :: BS.ByteString -> Int
readInt' = fst . fromJust . BS.readInt
f1 :: (t, t1, t2) -> t
f1 (x, _, _) = x
f3 :: (t, t1, t2) -> t2
f3 (_, _, x) = x
main :: IO ()
main = do
contents <- BS.lines <$> BS.getContents
let edges = map (\ [x, y, z] -> (x, y, z))
(map (\ x -> map readInt' (BS.words x)) contents)
n = f1 $ head edges
res = dijkstra (buildGraph $ L.sort $ tail edges) (f3 $ head edges)
forM_ [0..(n - 1)] $ \i -> do
if M.member i res
then print $ M.findWithDefault (-1) i res
else putStrLn "INF"
|
m00nlight/hackerrank
|
library/haskell/dijkstra.hs
|
gpl-2.0
| 2,386 | 0 | 18 | 811 | 1,101 | 601 | 500 | 53 | 2 |
{-
Copyright (C) 2005 John Goerzen <[email protected]>
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
-}
module Utils where
import Types
import Config
import MissingH.Maybe
import MissingH.Path
import Control.Concurrent
import Control.Exception
import System.IO
--import Foreign.C.String
import Data.List
getFSPath :: GAddress -> FilePath
getFSPath ga =
forceMaybeMsg ("getFSPath1 " ++ show ga) . secureAbsNormPath (baseDir ++ "/gopher") $ base
where base = (host ga) ++ "/" ++ (show $ port ga) ++ "/" ++
(path ga) ++ case (dtype ga) of
'1' -> "/.gophermap"
_ -> ""
newLock :: IO Lock
newLock = newEmptyMVar
acquire :: Lock -> IO ()
acquire l =
do t <- myThreadId
putMVar l t
release :: Lock -> IO ()
release l =
do t <- myThreadId
r <- tryTakeMVar l
case r of
Nothing -> do msg $ "Warning: released lock which was unheld."
Just x -> if x == t
then return ()
else fail $ "Thread " ++ (show t) ++
" released lock held by thread " ++
(show x)
withLock :: Lock -> (IO a) -> IO a
withLock l action = bracket_ (acquire l) (release l) action
msg :: String -> IO ()
msg l =
do t <- myThreadId
let disp = (show t) ++ ": " ++ l ++ "\n"
putStr disp
hFlush stdout
--withCStringLen disp (\(c, len) -> hPutBuf stdout c len >> hFlush stdout)
ce :: String -> String
ce i =
'\'' :
(concat $ map (\c -> if c == '\'' then "''" else [c]) i)
++ "'"
|
jgoerzen/gopherbot
|
Utils.hs
|
gpl-2.0
| 2,325 | 0 | 15 | 728 | 504 | 255 | 249 | 46 | 3 |
-- find the k'th element of a list
p3 :: [a] -> Int -> a
p3 (x:xs) 1 = x
p3 (x:xs) n = p3 xs (n-1)
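-- Illustrative example (added): p3 [1,2,3,4] 2 == 2
-- Note that the function is partial: indices outside 1..length fail with a
-- pattern-match error because the empty-list case is not handled.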
|
yalpul/CENG242
|
H99/1-10/p3.hs
|
gpl-3.0
| 100 | 0 | 7 | 28 | 64 | 34 | 30 | 3 | 1 |
{-# LANGUAGE GADTs #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE StandaloneDeriving #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE PolyKinds #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE UndecidableInstances #-}
{-# LANGUAGE ExplicitNamespaces #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE UnicodeSyntax #-}
-- | Defines equality relation on types and provides
-- a simple interface to write proof on haskell types
module Relation.Equality where
import Data.Singletons (Sing)
infix 4 :≡
-- | Equality relation for types
data (a :: k) :≡ (b :: k) where
Refl ∷ ∀ a. a :≡ a
-- | Operator version of "trans"
(∾) ∷ ∀ a b c. (a :≡ b) → (b :≡ c) → (a :≡ c)
(∾) = trans
-- | Congruence property
cong ∷ ∀ a b c f. (Sing a → Sing b) → (a :≡ c) → (f a :≡ f c)
cong _ Refl = Refl
-- | Transitive property
trans ∷ ∀ a b c. (a :≡ b) → (b :≡ c) → (a :≡ c)
trans Refl Refl = Refl
-- | Symmetric property
sym ∷ ∀ a b. (a :≡ b) → (b :≡ a)
sym Refl = Refl
-- | Type-safe cast using propositional equality
castWith ∷ ∀ a b. (a :≡ b) → a → b
castWith Refl x = x
-- | Generalized form of cast
gcastWith ∷ ∀ a b r. (a :≡ b) → ((a ~ b) ⇒ r) → r
gcastWith Refl x = x
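-- | Illustrative lemma (an addition, not part of the original module):
-- composing a proof with its own inverse yields reflexivity.
roundTrip ∷ ∀ a b. (a :≡ b) → (a :≡ a)
roundTrip p = p ∾ sym p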
|
rnhmjoj/interlude
|
src/Relation/Equality.hs
|
gpl-3.0
| 1,382 | 0 | 11 | 334 | 363 | 219 | 144 | 29 | 1 |
module HEP.Physics.TTBar.Cuts where
import HEP.Parser.LHCOAnalysis.PhysObj hiding (FourMomentum,fourmomfrometaphipt,trd3)
import Data.List
check :: Bool -> Maybe Bool
check True = Just True
check False = Nothing
met_event :: PhyEventClassified -> Maybe Double
met_event p = Just $ (snd.phiptmet.met) p
numbjet :: PhyEventClassified -> Maybe Double
numbjet = Just . fromIntegral . Prelude.length . bjetlst
numjet :: PhyEventClassified -> Maybe Double
numjet = Just . fromIntegral . Prelude.length . jetlst
numbjet_PGS :: PhyEventClassified -> Int
numbjet_PGS = numofobj BJet -- Prelude.length . bjetlst
numlepton_PGS :: PhyEventClassified -> Int
numlepton_PGS p = let numm = numofobj Muon p
nume = numofobj Electron p
in nume + numm
leptonlst_eta_pt :: PhyEventClassified -> [(Double,Double)]
leptonlst_eta_pt p = let elst = electronlst p
mlst = muonlst p
etapt (_,x) = (eta x,pt x)
in map etapt elst ++ map etapt mlst
missingET :: PhyEventClassified -> Double
missingET = snd . phiptmet . met
cut_photon_veto :: PhyEventClassified -> Bool
cut_photon_veto p = numofobj Photon p == 0
cut_morethanzero_bjet :: PhyEventClassified -> Bool
cut_morethanzero_bjet p = numbjet_PGS p >= 1
cut_single_lepton :: PhyEventClassified -> Bool
cut_single_lepton p = let ne = numofobj Electron p
nm = numofobj Muon p
nt = numofobj Tau p
in ne + nm == 1 && nt == 0
cut_missing_pt :: Double -> PhyEventClassified -> Bool
cut_missing_pt ptcut p = missingET p > ptcut
cut_lepton_eta_pt :: Double -> Double -> PhyEventClassified -> Bool
cut_lepton_eta_pt etacut ptcut p =
let (eta_, pt_) = head $ leptonlst_eta_pt p
in (abs eta_ < etacut) && (pt_ > ptcut)
cut_n_jet_eta_pt :: Int -> Double -> Double -> PhyEventClassified -> Bool
cut_n_jet_eta_pt n etacut ptcut p =
let nj = numofobj Jet p
nbj = numofobj BJet p
in if (nj + nbj) < n
then False
else let jlst = map snd $ jetlst p
jlst_filtered = map pt $ filter (\x-> abs ( eta x )< etacut ) jlst
bjlst = map snd $ bjetlst p
bjlst_filtered = map pt $ filter (\x-> abs (eta x) < etacut) bjlst
combined = filter (>ptcut) $ sortBy (flip compare) $ jlst_filtered ++ bjlst_filtered
in length combined >= n
cut_central_bjet_eta :: Double -> PhyEventClassified -> Bool
cut_central_bjet_eta etacut p =
let bjlst = map snd $ bjetlst p
bjlst_filtered = map pt $ filter (\x-> abs(eta x) < etacut) bjlst
in length bjlst_filtered >= 1
checkall_cuts :: [(a->Bool)] -> a -> Bool
checkall_cuts cset p = and (map (\f -> f p) cset)
comboM :: Monad m => (a-> m b) -> (a -> m c) -> a -> m (b,c)
comboM f g x = do y1 <- f x
y2 <- g x
return (y1,y2)
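-- Illustrative examples (added, not part of the original module):
-- checkall_cuts [(> 20), (< 100)] (50 :: Double) == True
-- comboM Just (\x -> Just (x + 1)) (3 :: Int) == Just (3, 4)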
|
wavewave/ttbar
|
lib/HEP/Physics/TTBar/Cuts.hs
|
gpl-3.0
| 2,980 | 0 | 19 | 863 | 1,031 | 522 | 509 | 63 | 2 |
{-# LANGUAGE OverloadedStrings, RecordWildCards #-}
-- parsing all of this pricing information is pretty brutal...
-- ... this is disgusting.. sorry.
module System.DevUtils.Base.Cloud.Amazon (
JSONLocations (..),
Pricing (..),
GeneralPricing (..),
populate,
defaultJSONLocations,
generalize,
ec2ToGP,
ec2ToGP'ri'or'di,
ebsToGP,
rdsToGP,
rdsToGP'ri'or'di,
ecToGP,
ecToGP'ri'or'di,
s3ToGP
) where
import qualified System.DevUtils.Base.Cloud.Amazon.EC2 as EC2
import qualified System.DevUtils.Base.Cloud.Amazon.EC2.Reserved as EC2Reserved
import qualified System.DevUtils.Base.Cloud.Amazon.ElastiCache as EC
import qualified System.DevUtils.Base.Cloud.Amazon.ElastiCache.Reserved as ECReserved
import qualified System.DevUtils.Base.Cloud.Amazon.EBS as EBS
import qualified System.DevUtils.Base.Cloud.Amazon.RDS as RDS
import qualified System.DevUtils.Base.Cloud.Amazon.RDS.Reserved as RDSReserved
import qualified System.DevUtils.Base.Cloud.Amazon.S3 as S3
import qualified System.DevUtils.Base.Cloud.Amazon.Misc
import System.DevUtils.Base.Cloud.Amazon.Misc
import System.DevUtils.Base.Finance.Misc
import qualified Data.ByteString.Lazy as B
import Data.Aeson
import Data.Maybe
import Data.Char
data JSONLocations = JSONLocations {
ebsPath :: FilePath,
ec2Path'onDemand :: FilePath,
ec2ReservedPath'Light :: FilePath,
ec2ReservedPath'Medium :: FilePath,
ec2ReservedPath'Heavy :: FilePath,
ec2DedicatedPath'onDemand :: FilePath,
ec2DedicatedPath'Light :: FilePath,
ec2DedicatedPath'Medium :: FilePath,
ec2DedicatedPath'Heavy :: FilePath,
ec2Path'onDemand'Legacy :: FilePath,
ec2ReservedPath'Light'Legacy :: FilePath,
ec2ReservedPath'Medium'Legacy :: FilePath,
ec2ReservedPath'Heavy'Legacy :: FilePath,
ecPath'onDemand :: FilePath,
ecReservedPath'Light :: FilePath,
ecReservedPath'Medium :: FilePath,
ecReservedPath'Heavy :: FilePath,
rdsPath'onDemand :: FilePath,
rdsReservedPath'Light :: FilePath,
rdsReservedPath'Medium :: FilePath,
rdsReservedPath'Heavy :: FilePath,
s3Path :: FilePath
} deriving (Show, Read)
data Pricing = Pricing {
ebs :: EBS.EBSRoot,
ec2'onDemand :: EC2.EC2Root,
ec2'ri'light :: EC2Reserved.EC2Root,
ec2'ri'medium :: EC2Reserved.EC2Root,
ec2'ri'heavy :: EC2Reserved.EC2Root,
ec2'di'onDemand :: EC2Reserved.EC2Root,
ec2'di'light :: EC2Reserved.EC2Root,
ec2'di'medium :: EC2Reserved.EC2Root,
ec2'di'heavy :: EC2Reserved.EC2Root,
ec2'onDemand'legacy :: EC2.EC2Root,
ec2'ri'light'legacy :: EC2Reserved.EC2Root,
ec2'ri'medium'legacy :: EC2Reserved.EC2Root,
ec2'ri'heavy'legacy :: EC2Reserved.EC2Root,
ec'onDemand :: EC.ECRoot,
ec'ri'light :: ECReserved.ECRoot,
ec'ri'medium :: ECReserved.ECRoot,
ec'ri'heavy :: ECReserved.ECRoot,
rds'onDemand :: RDS.RDSRoot,
rds'ri'light :: RDSReserved.RDSRoot,
rds'ri'medium :: RDSReserved.RDSRoot,
rds'ri'heavy :: RDSReserved.RDSRoot,
s3 :: S3.S3Root
} deriving (Show, Read)
defaultJSONLocations = JSONLocations {
ebsPath = p "EBS",
ec2Path'onDemand = p "EC2-Linux-OnDemand",
ec2ReservedPath'Light = p "EC2-Linux-RI-Light",
ec2ReservedPath'Medium = p "EC2-Linux-RI-Medium",
ec2ReservedPath'Heavy = p "EC2-Linux-RI-Heavy",
ec2DedicatedPath'onDemand = p "EC2-Linux-OnDemand",
ec2DedicatedPath'Light = p "EC2-Linux-DI-RI-Light",
ec2DedicatedPath'Medium = p "EC2-Linux-DI-RI-Medium",
ec2DedicatedPath'Heavy = p "EC2-Linux-DI-RI-Heavy",
ec2Path'onDemand'Legacy = p "EC2-Linux-OnDemand-Legacy",
ec2ReservedPath'Light'Legacy = p "EC2-Linux-RI-Light-Legacy",
ec2ReservedPath'Medium'Legacy = p "EC2-Linux-RI-Medium-Legacy",
ec2ReservedPath'Heavy'Legacy = p "EC2-Linux-RI-Heavy-Legacy",
ecPath'onDemand = p "ElastiCache-Cleaned-Standard",
ecReservedPath'Light = p "ElastiCache-Cleaned-RI-Light-Standard",
ecReservedPath'Medium = p "ElastiCache-Cleaned-RI-Medium-Standard",
ecReservedPath'Heavy = p "ElastiCache-Cleaned-RI-Heavy-Standard",
rdsReservedPath'Light = p "RDS-MySQL-Reserved-Light",
rdsReservedPath'Medium = p "RDS-MySQL-Reserved-Medium",
rdsReservedPath'Heavy = p "RDS-MySQL-Reserved-Heavy",
rdsPath'onDemand = p "RDS-MySQL-Standard",
s3Path = p "S3"
}
where
p s = "etc/Cloud/Amazon/Pricing/" ++ s ++ ".json"
populate JSONLocations{..} = do
_ebs <- B.readFile ebsPath
_ec2'onDemand <- B.readFile ec2Path'onDemand
_ec2'ri'light <- B.readFile ec2ReservedPath'Light
_ec2'ri'medium <- B.readFile ec2ReservedPath'Medium
_ec2'ri'heavy <- B.readFile ec2ReservedPath'Heavy
_ec2'di'light <- B.readFile ec2DedicatedPath'Light
_ec2'di'medium <- B.readFile ec2DedicatedPath'Medium
_ec2'di'heavy <- B.readFile ec2DedicatedPath'Heavy
_ec2'onDemand'legacy <- B.readFile ec2Path'onDemand'Legacy
_ec2'ri'light'legacy <- B.readFile ec2ReservedPath'Light'Legacy
_ec2'ri'medium'legacy <- B.readFile ec2ReservedPath'Medium'Legacy
_ec2'ri'heavy'legacy <- B.readFile ec2ReservedPath'Heavy'Legacy
_ec'onDemand <- B.readFile ecPath'onDemand
_ec'ri'light <- B.readFile ecReservedPath'Light
_ec'ri'medium <- B.readFile ecReservedPath'Medium
_ec'ri'heavy <- B.readFile ecReservedPath'Heavy
_rds'onDemand <- B.readFile rdsPath'onDemand
_rds'ri'light <- B.readFile rdsReservedPath'Light
_rds'ri'medium <- B.readFile rdsReservedPath'Medium
_rds'ri'heavy <- B.readFile rdsReservedPath'Heavy
_s3 <- B.readFile s3Path
return $ Pricing {
ebs = fromJust (decode' _ebs :: Maybe EBS.EBSRoot),
ec2'onDemand = fromJust (decode' _ec2'onDemand :: Maybe EC2.EC2Root),
ec2'ri'light = fromJust (decode' _ec2'ri'light :: Maybe EC2Reserved.EC2Root),
ec2'ri'medium = fromJust (decode' _ec2'ri'medium :: Maybe EC2Reserved.EC2Root),
ec2'ri'heavy = fromJust (decode' _ec2'ri'heavy :: Maybe EC2Reserved.EC2Root),
ec2'di'onDemand = fromJust (decode' _ec2'di'light :: Maybe EC2Reserved.EC2Root),
ec2'di'light = fromJust (decode' _ec2'di'light :: Maybe EC2Reserved.EC2Root),
ec2'di'medium = fromJust (decode' _ec2'di'medium :: Maybe EC2Reserved.EC2Root),
ec2'di'heavy = fromJust (decode' _ec2'di'heavy :: Maybe EC2Reserved.EC2Root),
ec2'onDemand'legacy = fromJust (decode' _ec2'onDemand'legacy :: Maybe EC2.EC2Root),
ec2'ri'light'legacy = fromJust (decode' _ec2'ri'light'legacy :: Maybe EC2Reserved.EC2Root),
ec2'ri'medium'legacy = fromJust (decode' _ec2'ri'medium'legacy :: Maybe EC2Reserved.EC2Root),
ec2'ri'heavy'legacy = fromJust (decode' _ec2'ri'heavy'legacy :: Maybe EC2Reserved.EC2Root),
ec'onDemand = fromJust (decode' _ec'onDemand :: Maybe EC.ECRoot),
ec'ri'light = fromJust (decode' _ec'ri'light :: Maybe ECReserved.ECRoot),
ec'ri'medium = fromJust (decode' _ec'ri'medium :: Maybe ECReserved.ECRoot),
ec'ri'heavy = fromJust (decode' _ec'ri'heavy :: Maybe ECReserved.ECRoot),
rds'onDemand = fromJust (decode' _rds'onDemand :: Maybe RDS.RDSRoot),
rds'ri'light = fromJust (decode' _rds'ri'light :: Maybe RDSReserved.RDSRoot),
rds'ri'medium = fromJust (decode' _rds'ri'medium :: Maybe RDSReserved.RDSRoot),
rds'ri'heavy = fromJust (decode' _rds'ri'heavy :: Maybe RDSReserved.RDSRoot),
s3 = fromJust (decode' _s3 :: Maybe S3.S3Root)
}
generalize :: Pricing -> [GeneralPricing]
generalize Pricing{..} =
concat
[
ec2ToGP ec2'onDemand'legacy,
ec2ToGP'ri'or'di ec2'ri'light'legacy "ri-light-",
ec2ToGP'ri'or'di ec2'ri'medium'legacy "ri-medium-",
ec2ToGP'ri'or'di ec2'ri'heavy'legacy "ri-heavy-",
ec2ToGP ec2'onDemand,
ec2ToGP'ri'or'di ec2'ri'light "ri-light-",
ec2ToGP'ri'or'di ec2'ri'medium "ri-medium-",
ec2ToGP'ri'or'di ec2'ri'heavy "ri-heavy-",
ec2ToGP'ri'or'di ec2'di'light "di-light-",
ec2ToGP'ri'or'di ec2'di'medium "di-medium-",
ec2ToGP'ri'or'di ec2'di'heavy "di-heavy-",
ebsToGP ebs,
ecToGP ec'onDemand,
ecToGP'ri'or'di ec'ri'light "ri-light-",
ecToGP'ri'or'di ec'ri'medium "ri-medium-",
ecToGP'ri'or'di ec'ri'heavy "ri-heavy-",
rdsToGP rds'onDemand,
rdsToGP'ri'or'di rds'ri'light "ri-light-",
rdsToGP'ri'or'di rds'ri'medium "ri-medium-",
rdsToGP'ri'or'di rds'ri'heavy "ri-heavy-",
s3ToGP s3
]
ec2ToGP ec2 =
concat $ concat $ map (\region ->
map (\instanceType ->
map (\size ->
GeneralPricing { fam = "ec2", region = EC2.region region, name = EC2.size size, rate'type = "onDemand", upfront = 0.0, rate = readCurrency (usd $ EC2.prices $ head $ EC2.valueColumns size) }
) $ EC2.sizes instanceType
) $ EC2.instanceType region
) $ EC2.regions $ EC2.config ec2
ec2ToGP'ri'or'di ec2 rate'type'prefix =
concat $ concat $ concat $ map (\region ->
map (\instanceType ->
map (\size ->
let
v = EC2Reserved.valueColumns size
y1 = v !! 0
y1hr = v !! 1
y3 = v !! 2
y3hr = v !! 3
in
case (any (\x -> (usd $ EC2Reserved.prices x) =="N/A") v) of
True -> []
otherwise ->
[GeneralPricing { fam = "ec2", region = EC2Reserved.region region, name = EC2Reserved.size size, rate'type = rate'type'prefix ++ "y1", upfront = readCurrency (usd $ EC2Reserved.prices y1), rate = readCurrency (usd $ EC2Reserved.prices y1hr) },
GeneralPricing { fam = "ec2", region = EC2Reserved.region region, name = EC2Reserved.size size, rate'type = rate'type'prefix ++ "y3", upfront = readCurrency (usd $ EC2Reserved.prices y3), rate = readCurrency (usd $ EC2Reserved.prices y3hr) }]
) $ EC2Reserved.sizes instanceType
) $ EC2Reserved.instanceType region
) $ EC2Reserved.regions $ EC2Reserved.config ec2
ebsToGP ebs =
concat $ concat $ map (\region ->
map (\types ->
map (\values ->
GeneralPricing { fam = "ebs", region = EBS.region region, name = EBS.name types, rate'type = EBS.rateV values, upfront = 0.0, rate = readCurrency (usd $ EBS.prices values) }
) $ EBS.values types
) $ EBS.types region
) $ EBS.regions $ EBS.config ebs
rdsToGP rds =
concat $ concat $ map (\region ->
map (\types ->
map (\tier ->
GeneralPricing { fam = "rds", region = RDS.region region, name = RDS.name tier, rate'type = "onDemand", upfront = 0.0, rate = readCurrency (usd $ RDS.prices tier) }
) $ RDS.tiers types
) $ RDS.types region
) $ RDS.regions $ RDS.config rds
rdsToGP'ri'or'di rds rate'type'prefix =
concat $ concat $ concat $ map (\region ->
map (\instanceType ->
map (\size ->
let
v = RDSReserved.valueColumns size
y1 = v !! 0
y1hr = v !! 1
y3 = v !! 2
y3hr = v !! 3
in
case (any (\x -> (usd $ RDSReserved.prices x) =="N/A") v) of
True -> []
otherwise ->
[GeneralPricing { fam = "rds", region = RDSReserved.region region, name = RDSReserved.size size, rate'type = rate'type'prefix ++ "y1", upfront = readCurrency (usd $ RDSReserved.prices y1), rate = readCurrency (usd $ RDSReserved.prices y1hr) },
GeneralPricing { fam = "rds", region = RDSReserved.region region, name = RDSReserved.size size, rate'type = rate'type'prefix ++ "y3", upfront = readCurrency (usd $ RDSReserved.prices y3), rate = readCurrency (usd $ RDSReserved.prices y3hr) }]
) $ RDSReserved.tiers instanceType
) $ RDSReserved.instanceType region
) $ RDSReserved.regions $ RDSReserved.config rds
ecToGP ec =
concat $ concat $ map (\region ->
map (\types ->
map (\tier ->
GeneralPricing { fam = "ec", region = EC.region region, name = EC.nameT tier, rate'type = "onDemand", upfront = 0.0, rate = readCurrency (usd $ EC.prices tier) }
) $ EC.tiers types
) $ EC.types region
) $ EC.regions $ EC.config ec
ecToGP'ri'or'di ec rate'type'prefix =
concat $ concat $ concat $ map (\region ->
map (\instanceType ->
map (\size ->
let
v = ECReserved.valueColumns size
y1 = v !! 0
y1hr = v !! 1
y3 = v !! 2
y3hr = v !! 3
in
case (any (\x -> (usd $ ECReserved.prices x) =="N/A") v) of
True -> []
otherwise ->
[GeneralPricing { fam = "ec", region = ECReserved.region region, name = ECReserved.size size, rate'type = rate'type'prefix ++ "y1", upfront = readCurrency (usd $ ECReserved.prices y1), rate = readCurrency (usd $ ECReserved.prices y1hr) },
GeneralPricing { fam = "ec", region = ECReserved.region region, name = ECReserved.size size, rate'type = rate'type'prefix ++ "y3", upfront = readCurrency (usd $ ECReserved.prices y3), rate = readCurrency (usd $ ECReserved.prices y3hr) }]
) $ ECReserved.tiers instanceType
) $ ECReserved.instanceType region
) $ ECReserved.regions $ ECReserved.config ec
s3ToGP s3 =
concat $ concat $ map (\region ->
map (\tier ->
map (\storageType ->
GeneralPricing { fam = "s3", region = S3.region region, name = S3.name tier, rate'type = S3.typeV storageType, upfront = 0.0, rate = readCurrency (usd $ S3.prices storageType) :: Double }
) $ S3.storageTypes tier
) $ S3.tiers region
) $ S3.regions $ S3.config s3
readCurrency s = read (filter (\c -> isDigit c || c == '.') s) :: Double
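-- Illustrative example (added): readCurrency "$0.095 per hour" == 0.095 for a
-- hypothetical price string. Note that the function is partial: inputs with no
-- digits at all (e.g. "N/A") make 'read' fail, which is why such rows are
-- filtered out above.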
|
adarqui/DevUtils-Base
|
src/System/DevUtils/Base/Cloud/Amazon.hs
|
gpl-3.0
| 12,675 | 0 | 31 | 1,972 | 3,624 | 1,974 | 1,650 | 268 | 2 |
module NumberTheory.PrimeRelated.PrimeTreeDraw
where
import Data.Tree
import NumberTheory.PrimeRelated
primeFacTree :: (FactorMethod, FactorMethod) -> Integer -> Either (Tree Integer) ([Tree Integer])
primeFacTree (fm1, fm2) n
| primeQ n = Left $ unfoldTree fctr n
| not $ primeQ n = Right $ unfoldForest fctr (factorList fm1 (n))
where
fctr = (\x -> if x == 2 then (x, []) else (x, factorList fm1 (x - 1)))
drawArbTree :: (Show a) => Tree a -> String
drawArbTree tree = drawTree $ fmap show tree
drawArbForest :: (Show a) => Forest a -> String
drawArbForest forest = drawForest $ map (fmap show) forest
drawPFT :: Either (Tree Integer) ([Tree Integer]) -> String
drawPFT ft = either (drawArbTree) (drawArbForest) ft
|
mathlover2/number-theory
|
NumberTheory/PrimeRelated/PrimeTreeDraw.hs
|
gpl-3.0
| 764 | 2 | 13 | 162 | 313 | 164 | 149 | 14 | 2 |
module QHaskell.Type.ADT where
import QHaskell.MyPrelude
import QHaskell.Nat.ADT
data Typ =
Wrd
| Bol
| Flt
| Arr Typ Typ
| Tpl Typ Typ
| TVr Nat
deriving instance Eq Typ
deriving instance Show Typ
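-- Illustrative reading (added, not part of the original module): a function
-- from machine words to booleans would be represented as @Arr Wrd Bol@, and a
-- pair of a float and a word as @Tpl Flt Wrd@.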
|
shayan-najd/QHaskell
|
QHaskell/Type/ADT.hs
|
gpl-3.0
| 218 | 0 | 6 | 54 | 66 | 39 | 27 | -1 | -1 |
{-# LANGUAGE TemplateHaskell, TypeApplications, GADTs, DerivingVia #-}
module Lamdu.Sugar.Convert.Option
( Result(..), rTexts, rExpr, rDeps, rAllowEmptyQuery, rWithTypeAnnotations
, ResultQuery(..), _QueryTexts, _QueryNewTag
, simpleResult
, ResultGroups(..), filterResults
, Matches, matchResult
, TypeMatch(..)
, makeTagRes, makeNoms, makeForType, makeLocals
, getListing, makeGlobals
, tagTexts, recTexts, caseTexts, ifTexts, symTexts, lamTexts
, makeOption
, taggedVar
) where
import qualified Control.Lens as Lens
import Control.Monad ((>=>))
import Control.Monad.Once (OnceT)
import Control.Monad.Transaction (MonadTransaction(..))
import Data.Containers.ListUtils (nubOrd)
import Data.List (sortOn)
import Data.Property (MkProperty', getP, modP, pureModify, pVal)
import qualified Data.Text as Text
import GUI.Momentu.Direction (Layout(..))
import Hyper
import Hyper.Infer
import Hyper.Recurse
import Hyper.Syntax (funcIn)
import Hyper.Syntax.Nominal (nScheme)
import Hyper.Syntax.Row (freExtends)
import Hyper.Syntax.Scheme (sTyp)
import Hyper.Type.Functor (_F)
import Hyper.Unify (UVar, applyBindings, unify)
import Hyper.Unify.Generalize (instantiate)
import qualified Lamdu.Annotations as Annotations
import Lamdu.Calc.Definition (Deps, depsNominals, depsGlobalTypes)
import qualified Lamdu.Calc.Infer as Infer
import qualified Lamdu.Calc.Term as V
import qualified Lamdu.Calc.Type as T
import qualified Lamdu.Data.Anchors as Anchors
import qualified Lamdu.Data.Definition as Def
import qualified Lamdu.Data.Tag as Tag
import qualified Lamdu.Expr.IRef as ExprIRef
import qualified Lamdu.Expr.Load as Load
import Lamdu.Expr.UniqueId (ToUUID(..))
import qualified Lamdu.I18N.Code as Texts
import qualified Lamdu.I18N.CodeUI as Texts
import qualified Lamdu.I18N.Name as Texts
import Lamdu.I18N.UnicodeAlts (unicodeAlts)
import Lamdu.Sugar.Annotations (ShowAnnotation, MarkAnnotations(..), alwaysShowAnnotations)
import Lamdu.Sugar.Convert.Annotation (makeAnnotation)
import Lamdu.Sugar.Convert.Binder (convertBinder)
import Lamdu.Sugar.Convert.Binder.Params (mkVarInfo)
import qualified Lamdu.Sugar.Convert.Input as Input
import Lamdu.Sugar.Convert.Monad (ConvertM)
import qualified Lamdu.Sugar.Convert.Monad as ConvertM
import Lamdu.Sugar.Convert.Suggest (suggestTopLevelVal)
import Lamdu.Sugar.Internal
import Lamdu.Sugar.Internal.EntityId (EntityId(..))
import qualified Lamdu.Sugar.Internal.EntityId as EntityId
import Lamdu.Sugar.Lens.Annotations (HAnnotations(..))
import qualified Lamdu.Sugar.Types as Sugar
import Revision.Deltum.Hyper (Write(..), writeRecursively)
import qualified Revision.Deltum.IRef as IRef
import Revision.Deltum.Transaction (Transaction)
import qualified Revision.Deltum.Transaction as Transaction
import Lamdu.Prelude
type T = Transaction
data Matches a = Matches
{ _mExact :: a
, _mPrefix :: a
, _mInfix :: a
, _mCreateNew :: a
} deriving (Functor, Foldable, Generic, Traversable)
deriving (Monoid, Semigroup) via (Generically (Matches a))
Lens.makeLenses ''Matches
data ResultQuery
= QueryTexts !(Sugar.TagSuffixes -> Sugar.QueryLangInfo -> [Text])
| QueryNewTag T.Tag
Lens.makePrisms ''ResultQuery
data Result a = Result
{ _rDeps :: !Deps
, _rExpr :: !a
, _rTexts :: !ResultQuery
, _rWithTypeAnnotations :: !Bool
, _rAllowEmptyQuery :: !Bool
} deriving (Functor, Foldable, Traversable)
Lens.makeLenses ''Result
simpleResult :: a -> (Sugar.TagSuffixes -> Sugar.QueryLangInfo -> [Text]) -> Result a
simpleResult expr texts =
Result
{ _rDeps = mempty
, _rExpr = expr
, _rTexts = QueryTexts texts
, _rWithTypeAnnotations = False
, _rAllowEmptyQuery = True
}
data TypeMatch = TypeMatches | TypeMismatch deriving (Eq, Ord)
data ResultGroups a = ResultGroups
{ gSyntax :: a
, gDefs :: a
, gLocals :: a
, gInjects :: a
, gToNoms :: a
, gFromNoms :: a
, gForType :: a
, gGetFields :: a
, gWrapInRecs :: a
} deriving (Functor, Foldable, Traversable, Generic)
deriving (Semigroup, Monoid) via (Generically (ResultGroups a))
filterResults ::
(Monad m, Ord b) =>
MkProperty' (T m) (Set T.Tag) ->
(TypeMatch -> a -> b) ->
ResultGroups (OnceT (T m) [Result (a, Sugar.Option t name i (T m))]) -> Sugar.Query ->
OnceT (T m) [Sugar.Option t name i (T m)]
filterResults tagsProp order res query =
resGroups <&> (^. traverse)
where
resGroups
| "" == query ^. Sugar.qSearchTerm = groups (gForType <> gSyntax <> gLocals)
| "." `Text.isPrefixOf` (query ^. Sugar.qSearchTerm) =
groups (gForType <> gSyntax <> gDefs <> gFromNoms <> gGetFields)
| "'" `Text.isPrefixOf` (query ^. Sugar.qSearchTerm) = groups (gForType <> gToNoms <> gInjects)
| "{" `Text.isPrefixOf` (query ^. Sugar.qSearchTerm) =
groups (gForType <> gSyntax <> gWrapInRecs)
| otherwise =
-- Within certain search-term matching level (exact/prefix/infix),
-- prefer locals over globals even for type mismatches
groups (gForType <> gLocals) <> groups (gSyntax <> gDefs <> gToNoms <> gFromNoms)
groups f =
f res
<&> Lens.mapped . Lens.filteredBy (rTexts . _QueryNewTag) <. rExpr . _2 . Sugar.optionPick %@~
(\t -> (modP tagsProp (Lens.contains t .~ True) <>))
<&> foldMap (matchResult query)
<&> fmap ((^.. traverse . _2) . sortOn s)
s (i, opt) = order (if opt ^. Sugar.optionTypeMatch then TypeMatches else TypeMismatch) i
matchResult :: Sugar.Query -> Result a -> Matches [a]
matchResult query result
| query ^. Sugar.qSearchTerm == "" && not (result ^. rAllowEmptyQuery) = mempty
| otherwise =
case result ^. rTexts of
QueryTexts makeTexts
| s `elem` texts -> mempty & mExact .~ e
| any (Text.isPrefixOf s) texts -> mempty & mPrefix .~ e
| any (Text.isInfixOf s) texts -> mempty & mInfix .~ e
| otherwise -> mempty
where
texts =
makeTexts (query ^. Sugar.qTagSuffixes) (query ^. Sugar.qLangInfo)
<&> Text.toLower >>= unicodeAlts
QueryNewTag{} -> mempty & mCreateNew .~ e
where
e = [result ^. rExpr]
s = query ^. Sugar.qSearchTerm & Text.toLower
makeTagRes ::
Monad m =>
T.Tag ->
Text ->
(T.Tag -> a) ->
ConvertM m [Result a]
makeTagRes newTag prefix f =
getListing Anchors.tags >>= traverse mk
<&> (newTagRes :)
where
mk tag =
ExprIRef.readTagData tag & transaction <&> tagTexts Nothing
<&> Lens.mapped . Lens.mapped . traverse %~ (prefix <>)
<&> simpleResult (f tag)
newTagRes =
Result
{ _rDeps = mempty
, _rAllowEmptyQuery = False
, _rExpr = f newTag
, _rWithTypeAnnotations = False
, _rTexts = QueryNewTag newTag
}
taggedVar :: (Monad m, ToUUID a) => a -> T.Tag -> Transaction m (Sugar.TagSuffixes -> Sugar.QueryLangInfo -> [Text])
taggedVar v t = ExprIRef.readTagData t <&> tagTexts (Just (Sugar.TaggedVarId (toUUID v) t))
symTexts :: (Monad m, ToUUID a) => Text -> a -> T m (Sugar.TagSuffixes -> Sugar.QueryLangInfo -> [Text])
symTexts prefix tid =
getP (Anchors.assocTag tid) >>= taggedVar tid
<&> Lens.mapped . Lens.mapped . traverse %~ (prefix <>)
tagWithPrefix :: Monad m => Text -> T.Tag -> T m (Sugar.TagSuffixes -> Sugar.QueryLangInfo -> [Text])
tagWithPrefix prefix t =
ExprIRef.readTagData t <&> tagTexts Nothing
<&> Lens.mapped . Lens.mapped . traverse %~ (prefix <>)
makeNoms ::
Monad m =>
[T.NominalId] ->
Text ->
(Pure # T.Type -> T.NominalId -> T m [Result a]) ->
ConvertM m [Result a]
makeNoms avoid prefix f =
getListing Anchors.tids >>= traverse (transaction . mk) <&> (^.. traverse . Lens._Just . traverse)
where
mk tid
| tid `elem` avoid = pure Nothing
| otherwise =
Load.nominal tid <&> (^? Lens._Right) >>= Lens._Just %%~
\d ->
do
texts <- symTexts prefix tid
f (d ^. _Pure . nScheme . sTyp) tid
<&> traverse %~ (rDeps . depsNominals . Lens.at tid ?~ d) . (rTexts . _QueryTexts <>~ texts)
makeGlobals :: Monad m => (V.Var -> Pure # T.Type -> T m (Maybe a)) -> ConvertM m [Result a]
makeGlobals f =
do
deps <-
do
recRef <- Lens.view (ConvertM.scScopeInfo . ConvertM.siRecursiveRef)
Lens.view (ConvertM.scFrozenDeps . pVal . depsGlobalTypes) <&> maybe id addRecRef recRef
-- For globals currently in deps, use their frozen type,
-- to avoid using parameters inconsistent with frozen type.
let filt d = Lens.nullOf (Lens.ix (ExprIRef.globalId d)) deps
sequenceA
[ deps ^@.. Lens.itraversed & traverse existingGlobal & transaction
, getListing Anchors.globals <&> filter filt >>= transaction . traverse newGlobal
] <&> mconcat
<&> (^.. traverse . Lens._Just)
<&> Lens.mapped %~ rWithTypeAnnotations .~ True
where
addRecRef r = Lens.at (ExprIRef.globalId (r ^. ConvertM.rrDefI)) ?~ r ^. ConvertM.rrDefType
existingGlobal (x, s) = f x (s ^. _Pure . sTyp) >>= Lens._Just (\r -> symTexts "" x <&> simpleResult r)
newGlobal x =
do
s <- Transaction.readIRef x <&> (^. Def.defType)
f v (s ^. _Pure . sTyp)
>>= Lens._Just (\r -> symTexts "" x <&> (simpleResult r <&> rDeps . depsGlobalTypes . Lens.at v ?~ s))
where
v = ExprIRef.globalId x
getListing ::
Monad m =>
(Anchors.CodeAnchors m -> MkProperty' (T m) (Set a)) ->
ConvertM m [a]
getListing anchor =
Lens.view Anchors.codeAnchors
>>= transaction . getP . anchor
<&> (^.. Lens.folded)
makeForType :: Monad m => Pure # T.Type -> T m [Result (Pure # V.Term)]
makeForType t =
suggestTopLevelVal t
>>= traverse (\(deps, v) -> mkTexts v <&> (simpleResult v <&> rDeps .~ deps))
where
mkTexts v =
case v ^. _Pure of
V.BRecExtend{} -> pure (const recTexts)
V.BLeaf V.LRecEmpty -> pure (const recTexts)
V.BCase{} -> pure (const caseTexts)
V.BLeaf V.LAbsurd -> pure (const caseTexts)
V.BLeaf (V.LFromNom nomId) -> symTexts "." nomId
V.BLeaf (V.LGetField tag) -> tagWithPrefix "." tag
V.BLeaf (V.LInject tag) -> tagWithPrefix "'" tag
V.BApp (V.App (Pure (V.BLeaf (V.LInject tag))) _) -> tagWithPrefix "'" tag
_ -> mempty
tagTexts :: Maybe Sugar.TaggedVarId -> Tag.Tag -> Sugar.TagSuffixes -> Sugar.QueryLangInfo -> [Text]
tagTexts v t suffixes l
| null names = l ^.. Sugar.qNameTexts . Texts.unnamed
| otherwise = names
where
names =
t ^..
( Tag.tagTexts . Lens.ix (l ^. Sugar.qLangId) . (Tag.name <> Tag.abbreviation . Lens._Just)
<> Tag.tagSymbol . (Tag._UniversalSymbol <> Tag._DirectionalSymbol . dir)
) <&> addSuffix
addSuffix =
case v of
Nothing -> id
Just tv -> suffixes ^. Lens.at tv & maybe id (flip mappend . Text.pack . show)
dir =
case l ^. Sugar.qLangDir of
LeftToRight -> Tag.opLeftToRight
RightToLeft -> Tag.opRightToLeft
recTexts :: Sugar.QueryLangInfo -> [Text]
recTexts = (^.. Sugar.qCodeTexts . Texts.recordOpener) <> (^.. Sugar.qCodeTexts . Texts.recordCloser)
caseTexts :: Sugar.QueryLangInfo -> [Text]
caseTexts = (<&> ("." <>)) . (^.. Sugar.qCodeTexts . Texts.case_)
lamTexts :: Pure # T.Type -> Sugar.QueryLangInfo -> [Text]
lamTexts typ =
(^.. Sugar.qUITexts . Texts.lambda) <> const ("\\" : pipe)
where
pipe = ["|" | Lens.has (_Pure . T._TFun . funcIn . _Pure . T._TRecord . _Pure . T._REmpty) typ]
ifTexts :: Sugar.QueryLangInfo -> [Text]
ifTexts = (^.. Sugar.qCodeTexts . Texts.if_)
makeOption ::
Monad m =>
Input.Payload m # V.Term ->
Result [(a, Ann (Write m) # V.Term)] ->
ConvertM m (Result (a, Sugar.Option Sugar.HoleOpt InternalName (OnceT (T m)) (T m)))
makeOption dstPl res =
do
curCtx <- Lens.view ConvertM.scInferContext
let (scope, ctx0) =
Infer.runPureInfer (dstPl ^. Input.inferScope) curCtx
(Infer.loadDeps (res ^. rDeps) ?? dstPl ^. Input.inferScope)
^?! Lens._Right
let inferResults0 = res ^. rExpr <&> _2 %~ Infer.runPureInfer scope ctx0 . infer
let inferResults1 =
inferResults0 ^@.. traverse . Lens.filteredBy _1 <. _2 . Lens._Right <&>
\(idx, (e, ctx)) ->
Infer.runPureInfer () ctx (inferUVarsApplyBindings e) <&> (,) (e, ctx, idx)
let errInfo =
show
( zip
(res ^.. rExpr . traverse . _2 <&> (hPlain #) . unwrap (const (^. hVal)))
(inferResults0 <&> void . (^. _2))
, inferResults1 <&> void
, scope ^. V.scopeLevel
)
let ((iExpr, ctx1, i), (inferred, _)) =
inferResults1 ^? traverse . Lens._Right
& fromMaybe (error ("inference of all options failed: " <> errInfo))
let unifyResult =
Infer.runPureInfer () ctx1
(unify (dstPl ^. Input.inferredTypeUVar) (inferred ^. hAnn . _2 . inferResult . _2)
*> inferUVarsApplyBindings iExpr)
let inferred1 =
case unifyResult of
Left _err -> inferred
Right (newInferred, _) -> newInferred
(written, changes) <-
inferred1 & hflipped %~ hmap (const markToPrune)
& hAnn . _2 . _1 .~ Const False
& writeRecursively
<&> prune
<&> ExprIRef.toHRefs (dstPl ^. Input.stored . ExprIRef.setIref)
<&> hflipped %~ hmap (const mkPayload)
-- The forked transaction serves two purposes:
-- No actual data is written to the db for generating an option
-- The results cache is not invalidated due to writing to the database
& Transaction.fork & transaction
let resExpr =
case written ^? hVal . V._BApp of
Just (V.App (Ann _ f) x) ->
-- For applying arguments to fragmented funcs,
-- prune replaces the func in the expr with a hole or fragmented hole.
-- We extract the argument of it.
case f of
V.BLeaf V.LHole -> x
V.BApp (V.App (Ann _ (V.BLeaf V.LHole)) (Ann _ (V.BLeaf V.LHole))) -> x
_ -> written
_ -> written
& Input.preprocess (dstPl ^. Input.inferScope) (dstPl ^. Input.localsInScope)
recordVars <- Lens.view (ConvertM.scScopeInfo . ConvertM.siRecordParams)
let recordVarTags =
do
v <- resExpr ^? hVal . V._BLeaf . V._LVar
recordVars ^. Lens.at v
<&> map (nameWithContext Nothing v) . nubOrd . map head . (^.. Lens.folded)
s <-
convertBinder resExpr <&> annValue %~
case recordVarTags of
Just t -> const (Sugar.HoleVarsRecord t)
Nothing -> Sugar.HoleBinder
& local (ConvertM.scInferContext .~ ctx1)
& -- Updated deps are required to sugar labeled apply
Lens.locally (ConvertM.scFrozenDeps . pVal) (<> res ^. rDeps)
<&> markNodeAnnotations @_ @(Sugar.HoleOpt (ShowAnnotation, EvalPrep) InternalName _ _)
<&> hflipped %~ hmap (const (Lens._Wrapped %~
\(showAnn, x) -> convertPayload x & Sugar.plAnnotation %~ (,) showAnn
))
-- We explicitly do want annotations of variables such as global defs to appear
<&> Lens.filteredBy (hVal . Sugar._HoleBinder . Sugar.bBody . Sugar._BinderTerm . Sugar._BodyLeaf . Sugar._LeafGetVar) .
annotation . Sugar.plAnnotation . _1 .~ alwaysShowAnnotations
<&> hVal . Sugar._HoleBinder . Sugar.bBody . Sugar._BinderTerm . Sugar._BodySimpleApply . Sugar.appFunc .
Lens.filteredBy (hVal . Sugar._BodyLeaf . Sugar._LeafGetVar) .
annotation . Sugar.plAnnotation . _1 .~ alwaysShowAnnotations
>>= hAnnotations mkAnn
depsProp <- Lens.view ConvertM.scFrozenDeps
pick <- ConvertM.typeProtectedSetToVal ?? dstPl ^. Input.stored <&> Lens.mapped %~ void
res & rExpr .~
( i
, Sugar.Option
{ Sugar._optionPick =
do
pureModify depsProp (<> res ^. rDeps)
Transaction.merge changes
pick (written ^. hAnn . Input.stored . ExprIRef.iref)
, Sugar._optionExpr = s
, Sugar._optionTypeMatch = Lens.has Lens._Right unifyResult
, Sugar._optionMNewTag = res ^? rTexts . _QueryNewTag
}
) & pure
where
mkAnn x
| res ^. rWithTypeAnnotations = makeAnnotation Annotations.Evaluation x <&> Sugar._AnnotationVal .~ ()
| otherwise = pure Sugar.AnnotationNone
mkPayload (stored :*: inferRes) =
Input.Payload
{ Input._entityId = stored ^. ExprIRef.iref . _F & IRef.uuid & EntityId
, Input._stored = stored
, Input._inferScope = V.emptyScope
, Input._varRefsOfLambda = []
, Input._localsInScope = []
, Input._inferRes = inferRes
}
markToPrune (w :*: a) = w :*: Const (Lens.has ExprIRef._ExistingRef w) :*: a
-- Replace emplaced fragment expression with hole.
-- This avoids sugaring it (it may be large) but also conversion to
-- fragment value is based on it (in case of chained postfix funcs).
prune :: Ann (a :*: Const Bool :*: b) # V.Term -> Ann (a :*: b) # V.Term
prune (Ann (a :*: Const p :*: b) x) =
(if p
then V.BLeaf V.LHole
else
hmap
( \case
HWitness V.W_Term_Term -> prune
HWitness V.W_Term_HCompose_Prune_Type -> hflipped %~ hmap (const (\(la :*: _ :*: lb) -> la :*: lb))
) x
) & Ann (a :*: b)
makeLocals ::
Monad m =>
(Pure # T.Type -> Pure # V.Term -> T m a) ->
V.Scope # UVar -> ConvertM m [Result a]
makeLocals f scope =
do
ctx <- Lens.view ConvertM.scInferContext
fieldParams <-
Lens.view (ConvertM.scScopeInfo . ConvertM.siRecordParams)
<&> (^@.. Lens.itraversed <. Lens.folded)
>>= transaction . traverse (mkGetField ctx)
recRef <-
Lens.view (ConvertM.scScopeInfo . ConvertM.siRecursiveRef)
<&> (^? Lens._Just . ConvertM.rrDefI . Lens.to ExprIRef.globalId)
deps <- Lens.view (ConvertM.scFrozenDeps . pVal . depsGlobalTypes)
Infer.runPureInfer scope ctx
(scope ^@.. V.scopeVarTypes . Lens.itraversed
& filter (\(k, _) ->
-- Avoid repeating globals
Lens.nullOf (Lens.ix k) deps &&
Just k /= recRef)
& (traverse . _2) (instantiate . (^. _HFlip) >=> applyBindings)
) ^?! Lens._Right . _1
-- Avoid unit variables (like those hidden in pipe syntax)
& filter (Lens.hasn't (_2 . _Pure . T._TRecord . _Pure . T._REmpty))
& traverse mkVar
<&> (<> fieldParams)
where
mkVar (var, typ) =
simpleResult
<$> transaction (f typ (_Pure . V._BLeaf . V._LVar # var))
<*> localName typ var
mkGetField ctx (var, tags) =
simpleResult
<$> f typ (foldr V.BAppP (V.BLeafP (V.LVar var)) (reverse tags <&> V.BLeafP . V.LGetField) ^. hPlain)
<*> taggedVar (foldl EntityId.ofTaggedEntity (EntityId.ofBinder var) xs) x
where
(xs, x) = tags ^?! Lens._Snoc
typ =
foldl (\ty t -> ty ^?! _Pure . T._TRecord . T.flatRow . freExtends . Lens.ix t)
(Infer.runPureInfer scope ctx
(instantiate (scope ^?! V.scopeVarTypes . Lens.ix var . _HFlip) >>= applyBindings)
^?! Lens._Right . _1)
tags
mkEvalPrep :: ConvertPayload m -> EvalPrep
mkEvalPrep pl =
EvalPrep
{ _eType = pl ^. pUnsugared . hAnn . Input.inferredType
, _eEvalId = pl ^. pEntityId
}
convertPayload :: ConvertPayload m -> Sugar.Payload EvalPrep (T m)
convertPayload pl =
Sugar.Payload
{ Sugar._plAnnotation = mkEvalPrep pl
, Sugar._plActions = pl ^. pActions
, Sugar._plEntityId = pl ^. pEntityId
, Sugar._plParenInfo = Sugar.ParenInfo 0 False
, Sugar._plHiddenEntityIds = []
}
-- Duplicate name-gen behaviour for locals
localName :: MonadTransaction n m => Pure # T.Type -> V.Var -> m (Sugar.TagSuffixes -> Sugar.QueryLangInfo -> [Text])
localName typ var =
do
tag <- Anchors.assocTag var & getP & transaction
if tag == Anchors.anonTag
then mkVarInfo typ <&> autoName
else pure tag
>>= transaction . taggedVar var
|
lamdu/lamdu
|
src/Lamdu/Sugar/Convert/Option.hs
|
gpl-3.0
| 21,996 | 0 | 37 | 6,763 | 6,752 | 3,567 | 3,185 | -1 | -1 |
module WebParsing.PostParser
(addPostToDatabase) where
import Control.Monad.Trans (liftIO)
import Data.Either (fromRight)
import Data.Functor (void)
import Data.List (find)
import Data.List.Split (keepDelimsL, split, splitWhen, whenElt)
import Data.Text (strip)
import qualified Data.Text as T
import Database.DataType (PostType (..))
import Database.Persist (insertUnique)
import Database.Persist.Sqlite (SqlPersistM, insert_)
import Database.Tables
import Text.HTML.TagSoup
import Text.HTML.TagSoup.Match
import qualified Text.Parsec as P
import Text.Parsec.Text (Parser)
import WebParsing.ParsecCombinators (parseUntil, text)
import WebParsing.ReqParser (parseReqs)
addPostToDatabase :: [Tag T.Text] -> SqlPersistM ()
addPostToDatabase programElements = do
let fullPostName = maybe "" (strip . fromTagText) $ find isTagText programElements
postHtml = sections isRequirementSection programElements
requirementLines = if null postHtml then [] else reqHtmlToLines $ last postHtml
requirements = concatMap parseRequirement requirementLines
liftIO $ print fullPostName
case P.parse postInfoParser "POSt information" fullPostName of
Left _ -> return ()
Right post -> do
postExists <- insertUnique post
case postExists of
Just key ->
mapM_ (insert_ . PostCategory key) requirements
Nothing -> return ()
where
isRequirementSection tag = tagOpenAttrNameLit "div" "class" (T.isInfixOf "views-field-field-enrolment-requirements") tag || tagOpenAttrNameLit "div" "class" (T.isInfixOf "views-field-field-completion-requirements") tag
-- | Parse a Post value from its title.
-- Titles are usually of the form "Actuarial Science Major (Science Program)".
postInfoParser :: Parser Post
postInfoParser = do
deptName <- parseDepartmentName
postType <- parsePostType P.<|> return Other
return $ Post postType deptName "" ""
where
parseDepartmentName :: Parser T.Text
parseDepartmentName = parseUntil $ P.choice [
void (P.lookAhead parsePostType),
void (P.char '(')
]
parsePostType :: Parser PostType
parsePostType = do
postTypeName <- P.choice $ map (P.try . text) ["Specialist", "Major", "Minor"]
return $ read $ T.unpack postTypeName
-- | Split requirements HTML into individual lines.
reqHtmlToLines :: [Tag T.Text] -> [[T.Text]]
reqHtmlToLines tags =
let sects = split (keepDelimsL $ whenElt isSectionSplit) tags
sectionsNoNotes = filter (not . isNoteSection) sects
paragraphs = concatMap (splitWhen (isTagOpenName "p")) sectionsNoNotes
lines' = map (map (T.strip . convertLine) . splitLines) paragraphs
in
lines'
where
isSectionSplit :: Tag T.Text -> Bool
isSectionSplit tag =
isTagText tag &&
any (flip T.isInfixOf $ fromTagText tag) ["First", "Second", "Third", "Higher", "Notes", "NOTES"]
isNoteSection :: [Tag T.Text] -> Bool
isNoteSection (sectionTitleTag:_) =
isTagText sectionTitleTag && any (flip T.isInfixOf $ fromTagText sectionTitleTag) ["Notes", "NOTES"]
isNoteSection [] = False
splitLines :: [Tag T.Text] -> [[Tag T.Text]]
splitLines = splitWhen (\tag -> isTagOpenName "br" tag || isTagOpenName "li" tag)
convertLine :: [Tag T.Text] -> T.Text
convertLine [] = ""
convertLine (t:ts)
| isTagOpenName "li" t = T.append "0." (innerText ts)
| otherwise = innerText (t:ts)
parseRequirement :: [T.Text] -> [T.Text]
parseRequirement requirement = map parseSingleReq $ filter isReq requirement
where
isReq t = T.length t >= 7 &&
not (any (`T.isInfixOf` t) ["First", "Second", "Third", "Higher"])
parseSingleReq =
T.pack . show .
parseReqs . -- Using parser for new Req type
T.unpack .
fromRight "" .
P.parse getLineText "Reading a requirement line" .
T.strip
-- Strips the optional leading numbering (#.) from a line.
getLineText :: Parser T.Text
getLineText = do
P.optional (P.digit >> P.char '.' >> P.space)
parseUntil P.eof
|
Courseography/courseography
|
app/WebParsing/PostParser.hs
|
gpl-3.0
| 4,332 | 0 | 18 | 1,092 | 1,223 | 635 | 588 | 85 | 4 |
-- grid is a game written in Haskell
-- Copyright (C) 2018 [email protected]
--
-- This file is part of grid.
--
-- grid is free software: you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation, either version 3 of the License, or
-- (at your option) any later version.
--
-- grid is distributed in the hope that it will be useful,
-- but WITHOUT ANY WARRANTY; without even the implied warranty of
-- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
--
-- You should have received a copy of the GNU General Public License
-- along with grid. If not, see <http://www.gnu.org/licenses/>.
--
module Game.Helpers
(
tickClockRunGet,
tickClockRunSet,
tickClockRunSetSpeed,
tickClockLevelPuzzleModeGet,
tickClockLevelPuzzleModeSet,
tickClockLevelPuzzleModeSetSpeed,
tickClockMemoryModeGet,
tickClockMemoryModeSet,
tickClockMemoryModeSetSpeed,
createDynamicData,
basenameFromString,
resourceGameData,
) where
import MyPrelude
import File
import Data.Char
import Game.MEnv
import Game.GameData
import Game.Grid.GridData
import Game.LevelPuzzleMode.LevelPuzzleWorld
import Game.LevelPuzzleMode.LevelPuzzleData
import OpenGL
import OpenGL.Helpers
--------------------------------------------------------------------------------
-- clocks for each mode
tickClockRunGet :: MEnv' TickT
tickClockRunGet = tickClockFGet
tickClockRunSet :: TickT -> MEnv' ()
tickClockRunSet = tickClockFSet
tickClockRunSetSpeed :: Double -> MEnv' ()
tickClockRunSetSpeed = tickClockFSetSpeed
tickClockLevelPuzzleModeGet :: MEnv' TickT
tickClockLevelPuzzleModeGet = tickClockAGet
tickClockLevelPuzzleModeSet :: TickT -> MEnv' ()
tickClockLevelPuzzleModeSet = tickClockASet
tickClockLevelPuzzleModeSetSpeed :: Double -> MEnv' ()
tickClockLevelPuzzleModeSetSpeed = tickClockASetSpeed
tickClockMemoryModeGet :: MEnv' TickT
tickClockMemoryModeGet = tickClockCGet
tickClockMemoryModeSet :: TickT -> MEnv' ()
tickClockMemoryModeSet = tickClockCSet
tickClockMemoryModeSetSpeed :: Double -> MEnv' ()
tickClockMemoryModeSetSpeed = tickClockCSetSpeed
--------------------------------------------------------------------------------
--
-- | create dynamic data to be used by application, if not present
createDynamicData :: MEnv' ()
createDynamicData = io $ do
-- directories
createDirectory "players/empty/Run/"
createDirectory "players/empty/LevelPuzzle/"
createDirectory "LevelPuzzle/"
-- files
copyStaticFile "players/empty/Run/world"
copyStaticFile "LevelPuzzle/world"
where
createDirectory p = do
fileDynamicData p >>= createDirectoryIfMissing True
copyStaticFile p = do
path <- fileDynamicData p
exists <- doesFileExist path
--unless exists $ do
do -- overwirte!!
putStrLn "overwriting dynamic file!"
path' <- fileStaticData p
copyFile path' path
--------------------------------------------------------------------------------
--
{-
filePlayerPathEmpty :: FilePath -> MEnv' FilePath
filePlayerPathEmptyplayer path =
(fileDynamicData "players/empty/LevelPuzzle/") ++ path
filePlayerPath :: Player -> FilePath -> MEnv' FilePath
filePlayerPath player path =
(fileDynamicData "players/") ++ filenameFromPlayer player ++ "/LevelPuzzle/" ++ path
-}
-- | path to LevelPuzzleWorld with name
fileLevelPuzzleWorld :: String -> MEnv' FilePath
fileLevelPuzzleWorld name = io $
fileDynamicData $ "LevelPuzzle/" ++ name
basenameFromString :: String -> FilePath
basenameFromString str =
map helper str
where
-- substitute non-alphanum characters with '_'
helper c =
case isAlphaNum c of
False -> '_'
True -> c
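-- Illustrative example (added): basenameFromString "my level #2!" == "my_level__2_"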
--------------------------------------------------------------------------------
-- resources
resourceGameData :: MEnv' GameData
resourceGameData =
resourceGet
--------------------------------------------------------------------------------
-- GUI
|
karamellpelle/grid
|
designer/source/Game/Helpers.hs
|
gpl-3.0
| 4,204 | 0 | 12 | 776 | 504 | 277 | 227 | 71 | 2 |
-- |
-- Module : Network.AWS.Internal.Body
-- Copyright : (c) 2013-2015 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : provisional
-- Portability : non-portable (GHC extensions)
--
module Network.AWS.Internal.Body where
import Control.Applicative
import Control.Monad
import Control.Monad.IO.Class
import Control.Monad.Morph
import Control.Monad.Trans.Resource
import Data.Conduit
import qualified Data.Conduit.Binary as Conduit
import Data.Int
import Network.AWS.Prelude
import qualified Network.HTTP.Client as Client
import Network.HTTP.Conduit
import System.IO
import Prelude
-- | Construct a 'RqBody' from a source, manually specifying the
-- SHA256 hash and file size.
sourceBody :: Digest SHA256
-> Int64
-> Source (ResourceT IO) ByteString
-> RqBody
sourceBody h n = RqBody h . requestBodySource n
-- | Construct a 'RqBody' from a 'Handle', manually specifying the
-- SHA256 hash and file size.
sourceHandle :: Digest SHA256 -> Int64 -> Handle -> RqBody
sourceHandle h n = sourceBody h n . Conduit.sourceHandle
-- | Construct a 'RqBody' from a 'FilePath', manually specifying the
-- SHA256 hash and file size.
sourceFile :: Digest SHA256 -> Int64 -> FilePath -> RqBody
sourceFile h n = sourceBody h n . Conduit.sourceFile
-- | Construct a 'RqBody' from a 'FilePath', calculating the SHA256 hash
-- and file size.
--
-- /Note:/ While this function will perform in constant space, it will enumerate the
-- entirety of the file contents _twice_. Firstly to calculate the SHA256 and
-- lastly to stream the contents to the socket during sending.
sourceFileIO :: MonadIO m => FilePath -> m RqBody
sourceFileIO f = liftIO $
RqBody <$> runResourceT (Conduit.sourceFile f $$ sinkSHA256)
<*> Client.streamFile f
-- | Convenience function for obtaining the size of a file.
getFileSize :: MonadIO m => FilePath -> m Int64
getFileSize f = liftIO $ fromIntegral `liftM` withBinaryFile f ReadMode hFileSize
-- | Connect a 'Sink' to a response body.
sinkBody :: MonadResource m => RsBody -> Sink ByteString m a -> m a
sinkBody (RsBody src) sink = hoist liftResourceT src $$+- sink
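-- Illustrative usage (added, not part of the original module): given some
-- @body :: RsBody@, stream it to a hypothetical local file within ResourceT:
-- runResourceT $ sinkBody body (Conduit.sinkFile "/tmp/object.bin")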
sinkMD5 :: Monad m => Consumer ByteString m (Digest MD5)
sinkMD5 = sinkHash
sinkSHA256 :: Monad m => Consumer ByteString m (Digest SHA256)
sinkSHA256 = sinkHash
-- | A cryptonite compatible incremental hash sink.
sinkHash :: (Monad m, HashAlgorithm a) => Consumer ByteString m (Digest a)
sinkHash = sink hashInit
where
sink ctx = do
b <- await
case b of
Nothing -> return $! hashFinalize ctx
Just bs -> sink $! hashUpdate ctx bs
|
fmapfmapfmap/amazonka
|
amazonka/src/Network/AWS/Internal/Body.hs
|
mpl-2.0
| 2,827 | 0 | 13 | 649 | 556 | 298 | 258 | 42 | 2 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.Books.Volumes.Mybooks.List
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Return a list of books in My Library.
--
-- /See:/ <https://developers.google.com/books/docs/v1/getting_started Books API Reference> for @books.volumes.mybooks.list@.
module Network.Google.Resource.Books.Volumes.Mybooks.List
(
-- * REST Resource
VolumesMybooksListResource
-- * Creating a Request
, volumesMybooksList
, VolumesMybooksList
-- * Request Lenses
, vmlProcessingState
, vmlAcquireMethod
, vmlCountry
, vmlLocale
, vmlSource
, vmlStartIndex
, vmlMaxResults
) where
import Network.Google.Books.Types
import Network.Google.Prelude
-- | A resource alias for @books.volumes.mybooks.list@ method which the
-- 'VolumesMybooksList' request conforms to.
type VolumesMybooksListResource =
"books" :>
"v1" :>
"volumes" :>
"mybooks" :>
QueryParams "processingState"
VolumesMybooksListProcessingState
:>
QueryParams "acquireMethod"
VolumesMybooksListAcquireMethod
:>
QueryParam "country" Text :>
QueryParam "locale" Text :>
QueryParam "source" Text :>
QueryParam "startIndex" (Textual Word32) :>
QueryParam "maxResults" (Textual Word32) :>
QueryParam "alt" AltJSON :> Get '[JSON] Volumes
-- | Return a list of books in My Library.
--
-- /See:/ 'volumesMybooksList' smart constructor.
data VolumesMybooksList = VolumesMybooksList'
{ _vmlProcessingState :: !(Maybe [VolumesMybooksListProcessingState])
, _vmlAcquireMethod :: !(Maybe [VolumesMybooksListAcquireMethod])
, _vmlCountry :: !(Maybe Text)
, _vmlLocale :: !(Maybe Text)
, _vmlSource :: !(Maybe Text)
, _vmlStartIndex :: !(Maybe (Textual Word32))
, _vmlMaxResults :: !(Maybe (Textual Word32))
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'VolumesMybooksList' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'vmlProcessingState'
--
-- * 'vmlAcquireMethod'
--
-- * 'vmlCountry'
--
-- * 'vmlLocale'
--
-- * 'vmlSource'
--
-- * 'vmlStartIndex'
--
-- * 'vmlMaxResults'
volumesMybooksList
:: VolumesMybooksList
volumesMybooksList =
VolumesMybooksList'
{ _vmlProcessingState = Nothing
, _vmlAcquireMethod = Nothing
, _vmlCountry = Nothing
, _vmlLocale = Nothing
, _vmlSource = Nothing
, _vmlStartIndex = Nothing
, _vmlMaxResults = Nothing
}
-- | The processing state of the user-uploaded volumes to be returned.
-- Applicable only if UPLOADED is specified in the acquireMethod.
vmlProcessingState :: Lens' VolumesMybooksList [VolumesMybooksListProcessingState]
vmlProcessingState
= lens _vmlProcessingState
(\ s a -> s{_vmlProcessingState = a})
. _Default
. _Coerce
-- | How the book was acquired
vmlAcquireMethod :: Lens' VolumesMybooksList [VolumesMybooksListAcquireMethod]
vmlAcquireMethod
= lens _vmlAcquireMethod
(\ s a -> s{_vmlAcquireMethod = a})
. _Default
. _Coerce
-- | ISO-3166-1 code to override the IP-based location.
vmlCountry :: Lens' VolumesMybooksList (Maybe Text)
vmlCountry
= lens _vmlCountry (\ s a -> s{_vmlCountry = a})
-- | ISO-639-1 language and ISO-3166-1 country code. Ex:\'en_US\'. Used for
-- generating recommendations.
vmlLocale :: Lens' VolumesMybooksList (Maybe Text)
vmlLocale
= lens _vmlLocale (\ s a -> s{_vmlLocale = a})
-- | String to identify the originator of this request.
vmlSource :: Lens' VolumesMybooksList (Maybe Text)
vmlSource
= lens _vmlSource (\ s a -> s{_vmlSource = a})
-- | Index of the first result to return (starts at 0)
vmlStartIndex :: Lens' VolumesMybooksList (Maybe Word32)
vmlStartIndex
= lens _vmlStartIndex
(\ s a -> s{_vmlStartIndex = a})
. mapping _Coerce
-- | Maximum number of results to return.
vmlMaxResults :: Lens' VolumesMybooksList (Maybe Word32)
vmlMaxResults
= lens _vmlMaxResults
(\ s a -> s{_vmlMaxResults = a})
. mapping _Coerce
instance GoogleRequest VolumesMybooksList where
type Rs VolumesMybooksList = Volumes
type Scopes VolumesMybooksList =
'["https://www.googleapis.com/auth/books"]
requestClient VolumesMybooksList'{..}
= go (_vmlProcessingState ^. _Default)
(_vmlAcquireMethod ^. _Default)
_vmlCountry
_vmlLocale
_vmlSource
_vmlStartIndex
_vmlMaxResults
(Just AltJSON)
booksService
where go
= buildClient
(Proxy :: Proxy VolumesMybooksListResource)
mempty
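-- Usage sketch (illustrative, not part of the generated module): build a
-- request for the first page of My Library volumes using the lenses
-- documented above. The operators (&) and (?~) are assumed to be
-- re-exported by Network.Google.Prelude (as in the lens package), and the
-- page size of 10 is arbitrary.
exampleMybooksRequest :: VolumesMybooksList
exampleMybooksRequest =
    volumesMybooksList
      & vmlStartIndex ?~ 0
      & vmlMaxResults ?~ 10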
|
rueshyna/gogol
|
gogol-books/gen/Network/Google/Resource/Books/Volumes/Mybooks/List.hs
|
mpl-2.0
| 5,644 | 0 | 19 | 1,439 | 861 | 497 | 364 | 126 | 1 |
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE UndecidableInstances #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE FunctionalDependencies #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE TypeFamilies #-}
module PolyvariadicFunctions where
class PolyAdd a where
-- | `polyAdd` sums its arguments, all `Int`s.
polyAdd' :: Integer -> a
--
instance (PolyAdd f, Integral n) => PolyAdd (n -> f) where
polyAdd' a = polyAdd' . (a +) . toInteger
--
instance PolyAdd Integer where
polyAdd' = id
--
instance PolyAdd Int where
polyAdd' = fromInteger
--
polyAdd :: PolyAdd a => a
polyAdd = polyAdd' 0
class PolyList a r | r -> a where
polyList' :: [a] -> r
--
a ->> b = b ++ [a]
instance (a ~ b, PolyList a r) => PolyList a (b -> r) where
polyList' l = polyList' . (l ++) . return
--
instance PolyList a [a] where
polyList' = id
--
polyList :: PolyList a b => b
polyList = polyList' []
class PolyWord a where
-- | `polyWords` turns its arguments into a spaced string.
polyWords' :: String -> a
--
instance PolyWord String where
polyWords' [] = []
polyWords' ls = tail ls
--
instance PolyWord f => PolyWord (String -> f) where
polyWords' a = polyWords' . ((a ++ " ") ++)
--
polyWords :: PolyWord a => a
polyWords = polyWords' []
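-- Usage sketch (illustrative, GHCi-style): the return-type annotation at the
-- call site is what selects the terminating instance for each polyvariadic
-- function.
--
-- >>> polyAdd 1 2 3 :: Int
-- 6
-- >>> polyList 'a' 'b' 'c' :: String
-- "abc"
-- >>> polyWords "hello" "polyvariadic" "world" :: String
-- "hello polyvariadic world"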
|
ice1000/OI-codes
|
codewars/301-400/three-simple-polyvariadic-haskell-functions.hs
|
agpl-3.0
| 1,291 | 0 | 9 | 267 | 398 | 218 | 180 | 36 | 1 |
module Plugin where
import API
resource = Interface { field = 7 :: Num a => a }
|
Changaco/haskell-plugins
|
testsuite/pdynload/spj4/Plugin.hs
|
lgpl-2.1
| 82 | 0 | 8 | 20 | 30 | 18 | 12 | 3 | 1 |
{-# OPTIONS -fglasgow-exts #-}
-----------------------------------------------------------------------------
{-| Module : QAbstractScrollArea_h.hs
Copyright : (c) David Harley 2010
Project : qtHaskell
Version : 1.1.4
Modified : 2010-09-02 17:02:30
Warning : this file is machine generated - do not modify.
--}
-----------------------------------------------------------------------------
module Qtc.Gui.QAbstractScrollArea_h where
import Qtc.Enums.Base
import Qtc.Enums.Gui.QPaintDevice
import Qtc.Enums.Core.Qt
import Qtc.Classes.Base
import Qtc.Classes.Qccs_h
import Qtc.Classes.Core_h
import Qtc.ClassTypes.Core
import Qth.ClassTypes.Core
import Qtc.Classes.Gui_h
import Qtc.ClassTypes.Gui
import Foreign.Marshal.Array
instance QunSetUserMethod (QAbstractScrollArea ()) where
unSetUserMethod qobj evid
= withBoolResult $
withObjectPtr qobj $ \cobj_qobj ->
qtc_QAbstractScrollArea_unSetUserMethod cobj_qobj (toCInt 0) (toCInt evid)
foreign import ccall "qtc_QAbstractScrollArea_unSetUserMethod" qtc_QAbstractScrollArea_unSetUserMethod :: Ptr (TQAbstractScrollArea a) -> CInt -> CInt -> IO (CBool)
instance QunSetUserMethod (QAbstractScrollAreaSc a) where
unSetUserMethod qobj evid
= withBoolResult $
withObjectPtr qobj $ \cobj_qobj ->
qtc_QAbstractScrollArea_unSetUserMethod cobj_qobj (toCInt 0) (toCInt evid)
instance QunSetUserMethodVariant (QAbstractScrollArea ()) where
unSetUserMethodVariant qobj evid
= withBoolResult $
withObjectPtr qobj $ \cobj_qobj ->
qtc_QAbstractScrollArea_unSetUserMethod cobj_qobj (toCInt 1) (toCInt evid)
instance QunSetUserMethodVariant (QAbstractScrollAreaSc a) where
unSetUserMethodVariant qobj evid
= withBoolResult $
withObjectPtr qobj $ \cobj_qobj ->
qtc_QAbstractScrollArea_unSetUserMethod cobj_qobj (toCInt 1) (toCInt evid)
instance QunSetUserMethodVariantList (QAbstractScrollArea ()) where
unSetUserMethodVariantList qobj evid
= withBoolResult $
withObjectPtr qobj $ \cobj_qobj ->
qtc_QAbstractScrollArea_unSetUserMethod cobj_qobj (toCInt 2) (toCInt evid)
instance QunSetUserMethodVariantList (QAbstractScrollAreaSc a) where
unSetUserMethodVariantList qobj evid
= withBoolResult $
withObjectPtr qobj $ \cobj_qobj ->
qtc_QAbstractScrollArea_unSetUserMethod cobj_qobj (toCInt 2) (toCInt evid)
instance QsetUserMethod (QAbstractScrollArea ()) (QAbstractScrollArea x0 -> IO ()) where
setUserMethod _eobj _eid _handler
= do
funptr <- wrapSetUserMethod_QAbstractScrollArea setHandlerWrapper
stptr <- newStablePtr (Wrap _handler)
funptr_d <- wrapSetUserMethod_QAbstractScrollArea_d setHandlerWrapper_d
withObjectPtr _eobj $ \cobj_eobj ->
qtc_QAbstractScrollArea_setUserMethod cobj_eobj (toCInt _eid) (toCFunPtr funptr) (castStablePtrToPtr stptr) (toCFunPtr funptr_d)
return ()
where
setHandlerWrapper :: Ptr (TQAbstractScrollArea x0) -> IO ()
setHandlerWrapper x0
= do
x0obj <- objectFromPtr_nf x0
if (objectIsNull x0obj)
then return ()
else _handler x0obj
setHandlerWrapper_d :: Ptr fun -> Ptr () -> Ptr fun_d -> IO ()
setHandlerWrapper_d funptr stptr funptr_d
= do when (stptr/=ptrNull)
(freeStablePtr (castPtrToStablePtr stptr))
when (funptr/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr))
when (funptr_d/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr_d))
return ()
foreign import ccall "qtc_QAbstractScrollArea_setUserMethod" qtc_QAbstractScrollArea_setUserMethod :: Ptr (TQAbstractScrollArea a) -> CInt -> Ptr (Ptr (TQAbstractScrollArea x0) -> IO ()) -> Ptr () -> Ptr (Ptr fun -> Ptr state -> Ptr fun_d -> IO ()) -> IO ()
foreign import ccall "wrapper" wrapSetUserMethod_QAbstractScrollArea :: (Ptr (TQAbstractScrollArea x0) -> IO ()) -> IO (FunPtr (Ptr (TQAbstractScrollArea x0) -> IO ()))
foreign import ccall "wrapper" wrapSetUserMethod_QAbstractScrollArea_d :: (Ptr fun -> Ptr state -> Ptr fun_d -> IO ()) -> IO (FunPtr (Ptr fun -> Ptr state -> Ptr fun_d -> IO ()))
instance QsetUserMethod (QAbstractScrollAreaSc a) (QAbstractScrollArea x0 -> IO ()) where
setUserMethod _eobj _eid _handler
= do
funptr <- wrapSetUserMethod_QAbstractScrollArea setHandlerWrapper
stptr <- newStablePtr (Wrap _handler)
funptr_d <- wrapSetUserMethod_QAbstractScrollArea_d setHandlerWrapper_d
withObjectPtr _eobj $ \cobj_eobj ->
qtc_QAbstractScrollArea_setUserMethod cobj_eobj (toCInt _eid) (toCFunPtr funptr) (castStablePtrToPtr stptr) (toCFunPtr funptr_d)
return ()
where
setHandlerWrapper :: Ptr (TQAbstractScrollArea x0) -> IO ()
setHandlerWrapper x0
= do
x0obj <- objectFromPtr_nf x0
if (objectIsNull x0obj)
then return ()
else _handler x0obj
setHandlerWrapper_d :: Ptr fun -> Ptr () -> Ptr fun_d -> IO ()
setHandlerWrapper_d funptr stptr funptr_d
= do when (stptr/=ptrNull)
(freeStablePtr (castPtrToStablePtr stptr))
when (funptr/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr))
when (funptr_d/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr_d))
return ()
instance QsetUserMethod (QAbstractScrollArea ()) (QAbstractScrollArea x0 -> QVariant () -> IO (QVariant ())) where
setUserMethod _eobj _eid _handler
= do
funptr <- wrapSetUserMethodVariant_QAbstractScrollArea setHandlerWrapper
stptr <- newStablePtr (Wrap _handler)
funptr_d <- wrapSetUserMethodVariant_QAbstractScrollArea_d setHandlerWrapper_d
withObjectPtr _eobj $ \cobj_eobj ->
qtc_QAbstractScrollArea_setUserMethodVariant cobj_eobj (toCInt _eid) (toCFunPtr funptr) (castStablePtrToPtr stptr) (toCFunPtr funptr_d)
return ()
where
setHandlerWrapper :: Ptr (TQAbstractScrollArea x0) -> Ptr (TQVariant ()) -> IO (Ptr (TQVariant ()))
setHandlerWrapper x0 x1
= do
x0obj <- objectFromPtr_nf x0
x1obj <- objectFromPtr_nf x1
rv <- if (objectIsNull x0obj)
then return $ objectCast x0obj
else _handler x0obj x1obj
withObjectPtr rv $ \cobj_rv -> return cobj_rv
setHandlerWrapper_d :: Ptr fun -> Ptr () -> Ptr fun_d -> IO ()
setHandlerWrapper_d funptr stptr funptr_d
= do when (stptr/=ptrNull)
(freeStablePtr (castPtrToStablePtr stptr))
when (funptr/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr))
when (funptr_d/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr_d))
return ()
foreign import ccall "qtc_QAbstractScrollArea_setUserMethodVariant" qtc_QAbstractScrollArea_setUserMethodVariant :: Ptr (TQAbstractScrollArea a) -> CInt -> Ptr (Ptr (TQAbstractScrollArea x0) -> Ptr (TQVariant ()) -> IO (Ptr (TQVariant ()))) -> Ptr () -> Ptr (Ptr fun -> Ptr state -> Ptr fun_d -> IO ()) -> IO ()
foreign import ccall "wrapper" wrapSetUserMethodVariant_QAbstractScrollArea :: (Ptr (TQAbstractScrollArea x0) -> Ptr (TQVariant ()) -> IO (Ptr (TQVariant ()))) -> IO (FunPtr (Ptr (TQAbstractScrollArea x0) -> Ptr (TQVariant ()) -> IO (Ptr (TQVariant ()))))
foreign import ccall "wrapper" wrapSetUserMethodVariant_QAbstractScrollArea_d :: (Ptr fun -> Ptr state -> Ptr fun_d -> IO ()) -> IO (FunPtr (Ptr fun -> Ptr state -> Ptr fun_d -> IO ()))
instance QsetUserMethod (QAbstractScrollAreaSc a) (QAbstractScrollArea x0 -> QVariant () -> IO (QVariant ())) where
setUserMethod _eobj _eid _handler
= do
funptr <- wrapSetUserMethodVariant_QAbstractScrollArea setHandlerWrapper
stptr <- newStablePtr (Wrap _handler)
funptr_d <- wrapSetUserMethodVariant_QAbstractScrollArea_d setHandlerWrapper_d
withObjectPtr _eobj $ \cobj_eobj ->
qtc_QAbstractScrollArea_setUserMethodVariant cobj_eobj (toCInt _eid) (toCFunPtr funptr) (castStablePtrToPtr stptr) (toCFunPtr funptr_d)
return ()
where
setHandlerWrapper :: Ptr (TQAbstractScrollArea x0) -> Ptr (TQVariant ()) -> IO (Ptr (TQVariant ()))
setHandlerWrapper x0 x1
= do
x0obj <- objectFromPtr_nf x0
x1obj <- objectFromPtr_nf x1
rv <- if (objectIsNull x0obj)
then return $ objectCast x0obj
else _handler x0obj x1obj
withObjectPtr rv $ \cobj_rv -> return cobj_rv
setHandlerWrapper_d :: Ptr fun -> Ptr () -> Ptr fun_d -> IO ()
setHandlerWrapper_d funptr stptr funptr_d
= do when (stptr/=ptrNull)
(freeStablePtr (castPtrToStablePtr stptr))
when (funptr/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr))
when (funptr_d/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr_d))
return ()
instance QunSetHandler (QAbstractScrollArea ()) where
unSetHandler qobj evid
= withBoolResult $
withObjectPtr qobj $ \cobj_qobj ->
withCWString evid $ \cstr_evid ->
qtc_QAbstractScrollArea_unSetHandler cobj_qobj cstr_evid
foreign import ccall "qtc_QAbstractScrollArea_unSetHandler" qtc_QAbstractScrollArea_unSetHandler :: Ptr (TQAbstractScrollArea a) -> CWString -> IO (CBool)
instance QunSetHandler (QAbstractScrollAreaSc a) where
unSetHandler qobj evid
= withBoolResult $
withObjectPtr qobj $ \cobj_qobj ->
withCWString evid $ \cstr_evid ->
qtc_QAbstractScrollArea_unSetHandler cobj_qobj cstr_evid
instance QsetHandler (QAbstractScrollArea ()) (QAbstractScrollArea x0 -> QEvent t1 -> IO ()) where
setHandler _eobj _eid _handler
= do
funptr <- wrapSetHandler_QAbstractScrollArea1 setHandlerWrapper
stptr <- newStablePtr (Wrap _handler)
funptr_d <- wrapSetHandler_QAbstractScrollArea1_d setHandlerWrapper_d
withObjectPtr _eobj $ \cobj_eobj ->
withCWString _eid $ \cstr_eid ->
qtc_QAbstractScrollArea_setHandler1 cobj_eobj cstr_eid (toCFunPtr funptr) (castStablePtrToPtr stptr) (toCFunPtr funptr_d)
return()
where
setHandlerWrapper :: Ptr (TQAbstractScrollArea x0) -> Ptr (TQEvent t1) -> IO ()
setHandlerWrapper x0 x1
= do x0obj <- qAbstractScrollAreaFromPtr x0
x1obj <- objectFromPtr_nf x1
if (objectIsNull x0obj)
then return ()
else _handler x0obj x1obj
setHandlerWrapper_d :: Ptr fun -> Ptr () -> Ptr fun_d -> IO ()
setHandlerWrapper_d funptr stptr funptr_d
= do when (stptr/=ptrNull)
(freeStablePtr (castPtrToStablePtr stptr))
when (funptr/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr))
when (funptr_d/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr_d))
return ()
foreign import ccall "qtc_QAbstractScrollArea_setHandler1" qtc_QAbstractScrollArea_setHandler1 :: Ptr (TQAbstractScrollArea a) -> CWString -> Ptr (Ptr (TQAbstractScrollArea x0) -> Ptr (TQEvent t1) -> IO ()) -> Ptr () -> Ptr (Ptr fun -> Ptr state -> Ptr fun_d -> IO ()) -> IO ()
foreign import ccall "wrapper" wrapSetHandler_QAbstractScrollArea1 :: (Ptr (TQAbstractScrollArea x0) -> Ptr (TQEvent t1) -> IO ()) -> IO (FunPtr (Ptr (TQAbstractScrollArea x0) -> Ptr (TQEvent t1) -> IO ()))
foreign import ccall "wrapper" wrapSetHandler_QAbstractScrollArea1_d :: (Ptr fun -> Ptr state -> Ptr fun_d -> IO ()) -> IO (FunPtr (Ptr fun -> Ptr state -> Ptr fun_d -> IO ()))
instance QsetHandler (QAbstractScrollAreaSc a) (QAbstractScrollArea x0 -> QEvent t1 -> IO ()) where
setHandler _eobj _eid _handler
= do
funptr <- wrapSetHandler_QAbstractScrollArea1 setHandlerWrapper
stptr <- newStablePtr (Wrap _handler)
funptr_d <- wrapSetHandler_QAbstractScrollArea1_d setHandlerWrapper_d
withObjectPtr _eobj $ \cobj_eobj ->
withCWString _eid $ \cstr_eid ->
qtc_QAbstractScrollArea_setHandler1 cobj_eobj cstr_eid (toCFunPtr funptr) (castStablePtrToPtr stptr) (toCFunPtr funptr_d)
return()
where
setHandlerWrapper :: Ptr (TQAbstractScrollArea x0) -> Ptr (TQEvent t1) -> IO ()
setHandlerWrapper x0 x1
= do x0obj <- qAbstractScrollAreaFromPtr x0
x1obj <- objectFromPtr_nf x1
if (objectIsNull x0obj)
then return ()
else _handler x0obj x1obj
setHandlerWrapper_d :: Ptr fun -> Ptr () -> Ptr fun_d -> IO ()
setHandlerWrapper_d funptr stptr funptr_d
= do when (stptr/=ptrNull)
(freeStablePtr (castPtrToStablePtr stptr))
when (funptr/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr))
when (funptr_d/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr_d))
return ()
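-- Usage sketch (illustrative, not part of the machine-generated binding):
-- register a Haskell callback on a scroll area via the QsetHandler instance
-- above. The event-id string passed to 'setHandler' is an assumption about
-- qtHaskell's naming scheme (consult the qtHaskell documentation for the
-- exact identifier), and the name 'installKeyHandler' is hypothetical.
installKeyHandler :: QAbstractScrollArea () -> IO ()
installKeyHandler area = setHandler area "keyPressEvent(QKeyEvent*)" handler
  where
    -- The handler type matches the (QAbstractScrollArea x0 -> QEvent t1 -> IO ())
    -- instance defined above.
    handler :: QAbstractScrollArea () -> QEvent () -> IO ()
    handler _obj _ev = putStrLn "key pressed on the scroll area"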
instance QcontextMenuEvent_h (QAbstractScrollArea ()) ((QContextMenuEvent t1)) where
contextMenuEvent_h x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QAbstractScrollArea_contextMenuEvent cobj_x0 cobj_x1
foreign import ccall "qtc_QAbstractScrollArea_contextMenuEvent" qtc_QAbstractScrollArea_contextMenuEvent :: Ptr (TQAbstractScrollArea a) -> Ptr (TQContextMenuEvent t1) -> IO ()
instance QcontextMenuEvent_h (QAbstractScrollAreaSc a) ((QContextMenuEvent t1)) where
contextMenuEvent_h x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QAbstractScrollArea_contextMenuEvent cobj_x0 cobj_x1
instance QdragEnterEvent_h (QAbstractScrollArea ()) ((QDragEnterEvent t1)) where
dragEnterEvent_h x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QAbstractScrollArea_dragEnterEvent cobj_x0 cobj_x1
foreign import ccall "qtc_QAbstractScrollArea_dragEnterEvent" qtc_QAbstractScrollArea_dragEnterEvent :: Ptr (TQAbstractScrollArea a) -> Ptr (TQDragEnterEvent t1) -> IO ()
instance QdragEnterEvent_h (QAbstractScrollAreaSc a) ((QDragEnterEvent t1)) where
dragEnterEvent_h x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QAbstractScrollArea_dragEnterEvent cobj_x0 cobj_x1
instance QdragLeaveEvent_h (QAbstractScrollArea ()) ((QDragLeaveEvent t1)) where
dragLeaveEvent_h x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QAbstractScrollArea_dragLeaveEvent cobj_x0 cobj_x1
foreign import ccall "qtc_QAbstractScrollArea_dragLeaveEvent" qtc_QAbstractScrollArea_dragLeaveEvent :: Ptr (TQAbstractScrollArea a) -> Ptr (TQDragLeaveEvent t1) -> IO ()
instance QdragLeaveEvent_h (QAbstractScrollAreaSc a) ((QDragLeaveEvent t1)) where
dragLeaveEvent_h x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QAbstractScrollArea_dragLeaveEvent cobj_x0 cobj_x1
instance QdragMoveEvent_h (QAbstractScrollArea ()) ((QDragMoveEvent t1)) where
dragMoveEvent_h x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QAbstractScrollArea_dragMoveEvent cobj_x0 cobj_x1
foreign import ccall "qtc_QAbstractScrollArea_dragMoveEvent" qtc_QAbstractScrollArea_dragMoveEvent :: Ptr (TQAbstractScrollArea a) -> Ptr (TQDragMoveEvent t1) -> IO ()
instance QdragMoveEvent_h (QAbstractScrollAreaSc a) ((QDragMoveEvent t1)) where
dragMoveEvent_h x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QAbstractScrollArea_dragMoveEvent cobj_x0 cobj_x1
instance QdropEvent_h (QAbstractScrollArea ()) ((QDropEvent t1)) where
dropEvent_h x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QAbstractScrollArea_dropEvent cobj_x0 cobj_x1
foreign import ccall "qtc_QAbstractScrollArea_dropEvent" qtc_QAbstractScrollArea_dropEvent :: Ptr (TQAbstractScrollArea a) -> Ptr (TQDropEvent t1) -> IO ()
instance QdropEvent_h (QAbstractScrollAreaSc a) ((QDropEvent t1)) where
dropEvent_h x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QAbstractScrollArea_dropEvent cobj_x0 cobj_x1
instance QsetHandler (QAbstractScrollArea ()) (QAbstractScrollArea x0 -> QEvent t1 -> IO (Bool)) where
setHandler _eobj _eid _handler
= do
funptr <- wrapSetHandler_QAbstractScrollArea2 setHandlerWrapper
stptr <- newStablePtr (Wrap _handler)
funptr_d <- wrapSetHandler_QAbstractScrollArea2_d setHandlerWrapper_d
withObjectPtr _eobj $ \cobj_eobj ->
withCWString _eid $ \cstr_eid ->
qtc_QAbstractScrollArea_setHandler2 cobj_eobj cstr_eid (toCFunPtr funptr) (castStablePtrToPtr stptr) (toCFunPtr funptr_d)
return()
where
setHandlerWrapper :: Ptr (TQAbstractScrollArea x0) -> Ptr (TQEvent t1) -> IO (CBool)
setHandlerWrapper x0 x1
= do x0obj <- qAbstractScrollAreaFromPtr x0
x1obj <- objectFromPtr_nf x1
let rv =
if (objectIsNull x0obj)
then return False
else _handler x0obj x1obj
rvf <- rv
return (toCBool rvf)
setHandlerWrapper_d :: Ptr fun -> Ptr () -> Ptr fun_d -> IO ()
setHandlerWrapper_d funptr stptr funptr_d
= do when (stptr/=ptrNull)
(freeStablePtr (castPtrToStablePtr stptr))
when (funptr/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr))
when (funptr_d/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr_d))
return ()
foreign import ccall "qtc_QAbstractScrollArea_setHandler2" qtc_QAbstractScrollArea_setHandler2 :: Ptr (TQAbstractScrollArea a) -> CWString -> Ptr (Ptr (TQAbstractScrollArea x0) -> Ptr (TQEvent t1) -> IO (CBool)) -> Ptr () -> Ptr (Ptr fun -> Ptr state -> Ptr fun_d -> IO ()) -> IO ()
foreign import ccall "wrapper" wrapSetHandler_QAbstractScrollArea2 :: (Ptr (TQAbstractScrollArea x0) -> Ptr (TQEvent t1) -> IO (CBool)) -> IO (FunPtr (Ptr (TQAbstractScrollArea x0) -> Ptr (TQEvent t1) -> IO (CBool)))
foreign import ccall "wrapper" wrapSetHandler_QAbstractScrollArea2_d :: (Ptr fun -> Ptr state -> Ptr fun_d -> IO ()) -> IO (FunPtr (Ptr fun -> Ptr state -> Ptr fun_d -> IO ()))
instance QsetHandler (QAbstractScrollAreaSc a) (QAbstractScrollArea x0 -> QEvent t1 -> IO (Bool)) where
setHandler _eobj _eid _handler
= do
funptr <- wrapSetHandler_QAbstractScrollArea2 setHandlerWrapper
stptr <- newStablePtr (Wrap _handler)
funptr_d <- wrapSetHandler_QAbstractScrollArea2_d setHandlerWrapper_d
withObjectPtr _eobj $ \cobj_eobj ->
withCWString _eid $ \cstr_eid ->
qtc_QAbstractScrollArea_setHandler2 cobj_eobj cstr_eid (toCFunPtr funptr) (castStablePtrToPtr stptr) (toCFunPtr funptr_d)
return()
where
setHandlerWrapper :: Ptr (TQAbstractScrollArea x0) -> Ptr (TQEvent t1) -> IO (CBool)
setHandlerWrapper x0 x1
= do x0obj <- qAbstractScrollAreaFromPtr x0
x1obj <- objectFromPtr_nf x1
let rv =
if (objectIsNull x0obj)
then return False
else _handler x0obj x1obj
rvf <- rv
return (toCBool rvf)
setHandlerWrapper_d :: Ptr fun -> Ptr () -> Ptr fun_d -> IO ()
setHandlerWrapper_d funptr stptr funptr_d
= do when (stptr/=ptrNull)
(freeStablePtr (castPtrToStablePtr stptr))
when (funptr/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr))
when (funptr_d/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr_d))
return ()
instance Qevent_h (QAbstractScrollArea ()) ((QEvent t1)) where
event_h x0 (x1)
= withBoolResult $
withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QAbstractScrollArea_event cobj_x0 cobj_x1
foreign import ccall "qtc_QAbstractScrollArea_event" qtc_QAbstractScrollArea_event :: Ptr (TQAbstractScrollArea a) -> Ptr (TQEvent t1) -> IO CBool
instance Qevent_h (QAbstractScrollAreaSc a) ((QEvent t1)) where
event_h x0 (x1)
= withBoolResult $
withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QAbstractScrollArea_event cobj_x0 cobj_x1
instance QkeyPressEvent_h (QAbstractScrollArea ()) ((QKeyEvent t1)) where
keyPressEvent_h x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QAbstractScrollArea_keyPressEvent cobj_x0 cobj_x1
foreign import ccall "qtc_QAbstractScrollArea_keyPressEvent" qtc_QAbstractScrollArea_keyPressEvent :: Ptr (TQAbstractScrollArea a) -> Ptr (TQKeyEvent t1) -> IO ()
instance QkeyPressEvent_h (QAbstractScrollAreaSc a) ((QKeyEvent t1)) where
keyPressEvent_h x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QAbstractScrollArea_keyPressEvent cobj_x0 cobj_x1
instance QsetHandler (QAbstractScrollArea ()) (QAbstractScrollArea x0 -> IO (QSize t0)) where
setHandler _eobj _eid _handler
= do
funptr <- wrapSetHandler_QAbstractScrollArea3 setHandlerWrapper
stptr <- newStablePtr (Wrap _handler)
funptr_d <- wrapSetHandler_QAbstractScrollArea3_d setHandlerWrapper_d
withObjectPtr _eobj $ \cobj_eobj ->
withCWString _eid $ \cstr_eid ->
qtc_QAbstractScrollArea_setHandler3 cobj_eobj cstr_eid (toCFunPtr funptr) (castStablePtrToPtr stptr) (toCFunPtr funptr_d)
return()
where
setHandlerWrapper :: Ptr (TQAbstractScrollArea x0) -> IO (Ptr (TQSize t0))
setHandlerWrapper x0
= do x0obj <- qAbstractScrollAreaFromPtr x0
let rv =
if (objectIsNull x0obj)
then return $ objectCast x0obj
else _handler x0obj
rvf <- rv
withObjectPtr rvf $ \cobj_rvf -> return (cobj_rvf)
setHandlerWrapper_d :: Ptr fun -> Ptr () -> Ptr fun_d -> IO ()
setHandlerWrapper_d funptr stptr funptr_d
= do when (stptr/=ptrNull)
(freeStablePtr (castPtrToStablePtr stptr))
when (funptr/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr))
when (funptr_d/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr_d))
return ()
foreign import ccall "qtc_QAbstractScrollArea_setHandler3" qtc_QAbstractScrollArea_setHandler3 :: Ptr (TQAbstractScrollArea a) -> CWString -> Ptr (Ptr (TQAbstractScrollArea x0) -> IO (Ptr (TQSize t0))) -> Ptr () -> Ptr (Ptr fun -> Ptr state -> Ptr fun_d -> IO ()) -> IO ()
foreign import ccall "wrapper" wrapSetHandler_QAbstractScrollArea3 :: (Ptr (TQAbstractScrollArea x0) -> IO (Ptr (TQSize t0))) -> IO (FunPtr (Ptr (TQAbstractScrollArea x0) -> IO (Ptr (TQSize t0))))
foreign import ccall "wrapper" wrapSetHandler_QAbstractScrollArea3_d :: (Ptr fun -> Ptr state -> Ptr fun_d -> IO ()) -> IO (FunPtr (Ptr fun -> Ptr state -> Ptr fun_d -> IO ()))
instance QsetHandler (QAbstractScrollAreaSc a) (QAbstractScrollArea x0 -> IO (QSize t0)) where
setHandler _eobj _eid _handler
= do
funptr <- wrapSetHandler_QAbstractScrollArea3 setHandlerWrapper
stptr <- newStablePtr (Wrap _handler)
funptr_d <- wrapSetHandler_QAbstractScrollArea3_d setHandlerWrapper_d
withObjectPtr _eobj $ \cobj_eobj ->
withCWString _eid $ \cstr_eid ->
qtc_QAbstractScrollArea_setHandler3 cobj_eobj cstr_eid (toCFunPtr funptr) (castStablePtrToPtr stptr) (toCFunPtr funptr_d)
return()
where
setHandlerWrapper :: Ptr (TQAbstractScrollArea x0) -> IO (Ptr (TQSize t0))
setHandlerWrapper x0
= do x0obj <- qAbstractScrollAreaFromPtr x0
let rv =
if (objectIsNull x0obj)
then return $ objectCast x0obj
else _handler x0obj
rvf <- rv
withObjectPtr rvf $ \cobj_rvf -> return (cobj_rvf)
setHandlerWrapper_d :: Ptr fun -> Ptr () -> Ptr fun_d -> IO ()
setHandlerWrapper_d funptr stptr funptr_d
= do when (stptr/=ptrNull)
(freeStablePtr (castPtrToStablePtr stptr))
when (funptr/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr))
when (funptr_d/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr_d))
return ()
instance QqminimumSizeHint_h (QAbstractScrollArea ()) (()) where
qminimumSizeHint_h x0 ()
= withQSizeResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QAbstractScrollArea_minimumSizeHint cobj_x0
foreign import ccall "qtc_QAbstractScrollArea_minimumSizeHint" qtc_QAbstractScrollArea_minimumSizeHint :: Ptr (TQAbstractScrollArea a) -> IO (Ptr (TQSize ()))
instance QqminimumSizeHint_h (QAbstractScrollAreaSc a) (()) where
qminimumSizeHint_h x0 ()
= withQSizeResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QAbstractScrollArea_minimumSizeHint cobj_x0
instance QminimumSizeHint_h (QAbstractScrollArea ()) (()) where
minimumSizeHint_h x0 ()
= withSizeResult $ \csize_ret_w csize_ret_h ->
withObjectPtr x0 $ \cobj_x0 ->
qtc_QAbstractScrollArea_minimumSizeHint_qth cobj_x0 csize_ret_w csize_ret_h
foreign import ccall "qtc_QAbstractScrollArea_minimumSizeHint_qth" qtc_QAbstractScrollArea_minimumSizeHint_qth :: Ptr (TQAbstractScrollArea a) -> Ptr CInt -> Ptr CInt -> IO ()
instance QminimumSizeHint_h (QAbstractScrollAreaSc a) (()) where
minimumSizeHint_h x0 ()
= withSizeResult $ \csize_ret_w csize_ret_h ->
withObjectPtr x0 $ \cobj_x0 ->
qtc_QAbstractScrollArea_minimumSizeHint_qth cobj_x0 csize_ret_w csize_ret_h
instance QmouseDoubleClickEvent_h (QAbstractScrollArea ()) ((QMouseEvent t1)) where
mouseDoubleClickEvent_h x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QAbstractScrollArea_mouseDoubleClickEvent cobj_x0 cobj_x1
foreign import ccall "qtc_QAbstractScrollArea_mouseDoubleClickEvent" qtc_QAbstractScrollArea_mouseDoubleClickEvent :: Ptr (TQAbstractScrollArea a) -> Ptr (TQMouseEvent t1) -> IO ()
instance QmouseDoubleClickEvent_h (QAbstractScrollAreaSc a) ((QMouseEvent t1)) where
mouseDoubleClickEvent_h x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QAbstractScrollArea_mouseDoubleClickEvent cobj_x0 cobj_x1
instance QmouseMoveEvent_h (QAbstractScrollArea ()) ((QMouseEvent t1)) where
mouseMoveEvent_h x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QAbstractScrollArea_mouseMoveEvent cobj_x0 cobj_x1
foreign import ccall "qtc_QAbstractScrollArea_mouseMoveEvent" qtc_QAbstractScrollArea_mouseMoveEvent :: Ptr (TQAbstractScrollArea a) -> Ptr (TQMouseEvent t1) -> IO ()
instance QmouseMoveEvent_h (QAbstractScrollAreaSc a) ((QMouseEvent t1)) where
mouseMoveEvent_h x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QAbstractScrollArea_mouseMoveEvent cobj_x0 cobj_x1
instance QmousePressEvent_h (QAbstractScrollArea ()) ((QMouseEvent t1)) where
mousePressEvent_h x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QAbstractScrollArea_mousePressEvent cobj_x0 cobj_x1
foreign import ccall "qtc_QAbstractScrollArea_mousePressEvent" qtc_QAbstractScrollArea_mousePressEvent :: Ptr (TQAbstractScrollArea a) -> Ptr (TQMouseEvent t1) -> IO ()
instance QmousePressEvent_h (QAbstractScrollAreaSc a) ((QMouseEvent t1)) where
mousePressEvent_h x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QAbstractScrollArea_mousePressEvent cobj_x0 cobj_x1
instance QmouseReleaseEvent_h (QAbstractScrollArea ()) ((QMouseEvent t1)) where
mouseReleaseEvent_h x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QAbstractScrollArea_mouseReleaseEvent cobj_x0 cobj_x1
foreign import ccall "qtc_QAbstractScrollArea_mouseReleaseEvent" qtc_QAbstractScrollArea_mouseReleaseEvent :: Ptr (TQAbstractScrollArea a) -> Ptr (TQMouseEvent t1) -> IO ()
instance QmouseReleaseEvent_h (QAbstractScrollAreaSc a) ((QMouseEvent t1)) where
mouseReleaseEvent_h x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QAbstractScrollArea_mouseReleaseEvent cobj_x0 cobj_x1
instance QpaintEvent_h (QAbstractScrollArea ()) ((QPaintEvent t1)) where
paintEvent_h x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QAbstractScrollArea_paintEvent cobj_x0 cobj_x1
foreign import ccall "qtc_QAbstractScrollArea_paintEvent" qtc_QAbstractScrollArea_paintEvent :: Ptr (TQAbstractScrollArea a) -> Ptr (TQPaintEvent t1) -> IO ()
instance QpaintEvent_h (QAbstractScrollAreaSc a) ((QPaintEvent t1)) where
paintEvent_h x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QAbstractScrollArea_paintEvent cobj_x0 cobj_x1
instance QresizeEvent_h (QAbstractScrollArea ()) ((QResizeEvent t1)) where
resizeEvent_h x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QAbstractScrollArea_resizeEvent cobj_x0 cobj_x1
foreign import ccall "qtc_QAbstractScrollArea_resizeEvent" qtc_QAbstractScrollArea_resizeEvent :: Ptr (TQAbstractScrollArea a) -> Ptr (TQResizeEvent t1) -> IO ()
instance QresizeEvent_h (QAbstractScrollAreaSc a) ((QResizeEvent t1)) where
resizeEvent_h x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QAbstractScrollArea_resizeEvent cobj_x0 cobj_x1
instance QsetHandler (QAbstractScrollArea ()) (QAbstractScrollArea x0 -> Int -> Int -> IO ()) where
setHandler _eobj _eid _handler
= do
funptr <- wrapSetHandler_QAbstractScrollArea4 setHandlerWrapper
stptr <- newStablePtr (Wrap _handler)
funptr_d <- wrapSetHandler_QAbstractScrollArea4_d setHandlerWrapper_d
withObjectPtr _eobj $ \cobj_eobj ->
withCWString _eid $ \cstr_eid ->
qtc_QAbstractScrollArea_setHandler4 cobj_eobj cstr_eid (toCFunPtr funptr) (castStablePtrToPtr stptr) (toCFunPtr funptr_d)
return()
where
setHandlerWrapper :: Ptr (TQAbstractScrollArea x0) -> CInt -> CInt -> IO ()
setHandlerWrapper x0 x1 x2
= do x0obj <- qAbstractScrollAreaFromPtr x0
let x1int = fromCInt x1
let x2int = fromCInt x2
if (objectIsNull x0obj)
then return ()
else _handler x0obj x1int x2int
setHandlerWrapper_d :: Ptr fun -> Ptr () -> Ptr fun_d -> IO ()
setHandlerWrapper_d funptr stptr funptr_d
= do when (stptr/=ptrNull)
(freeStablePtr (castPtrToStablePtr stptr))
when (funptr/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr))
when (funptr_d/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr_d))
return ()
foreign import ccall "qtc_QAbstractScrollArea_setHandler4" qtc_QAbstractScrollArea_setHandler4 :: Ptr (TQAbstractScrollArea a) -> CWString -> Ptr (Ptr (TQAbstractScrollArea x0) -> CInt -> CInt -> IO ()) -> Ptr () -> Ptr (Ptr fun -> Ptr state -> Ptr fun_d -> IO ()) -> IO ()
foreign import ccall "wrapper" wrapSetHandler_QAbstractScrollArea4 :: (Ptr (TQAbstractScrollArea x0) -> CInt -> CInt -> IO ()) -> IO (FunPtr (Ptr (TQAbstractScrollArea x0) -> CInt -> CInt -> IO ()))
foreign import ccall "wrapper" wrapSetHandler_QAbstractScrollArea4_d :: (Ptr fun -> Ptr state -> Ptr fun_d -> IO ()) -> IO (FunPtr (Ptr fun -> Ptr state -> Ptr fun_d -> IO ()))
instance QsetHandler (QAbstractScrollAreaSc a) (QAbstractScrollArea x0 -> Int -> Int -> IO ()) where
setHandler _eobj _eid _handler
= do
funptr <- wrapSetHandler_QAbstractScrollArea4 setHandlerWrapper
stptr <- newStablePtr (Wrap _handler)
funptr_d <- wrapSetHandler_QAbstractScrollArea4_d setHandlerWrapper_d
withObjectPtr _eobj $ \cobj_eobj ->
withCWString _eid $ \cstr_eid ->
qtc_QAbstractScrollArea_setHandler4 cobj_eobj cstr_eid (toCFunPtr funptr) (castStablePtrToPtr stptr) (toCFunPtr funptr_d)
return()
where
setHandlerWrapper :: Ptr (TQAbstractScrollArea x0) -> CInt -> CInt -> IO ()
setHandlerWrapper x0 x1 x2
= do x0obj <- qAbstractScrollAreaFromPtr x0
let x1int = fromCInt x1
let x2int = fromCInt x2
if (objectIsNull x0obj)
then return ()
else _handler x0obj x1int x2int
setHandlerWrapper_d :: Ptr fun -> Ptr () -> Ptr fun_d -> IO ()
setHandlerWrapper_d funptr stptr funptr_d
= do when (stptr/=ptrNull)
(freeStablePtr (castPtrToStablePtr stptr))
when (funptr/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr))
when (funptr_d/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr_d))
return ()
instance QscrollContentsBy_h (QAbstractScrollArea ()) ((Int, Int)) where
scrollContentsBy_h x0 (x1, x2)
= withObjectPtr x0 $ \cobj_x0 ->
qtc_QAbstractScrollArea_scrollContentsBy cobj_x0 (toCInt x1) (toCInt x2)
foreign import ccall "qtc_QAbstractScrollArea_scrollContentsBy" qtc_QAbstractScrollArea_scrollContentsBy :: Ptr (TQAbstractScrollArea a) -> CInt -> CInt -> IO ()
instance QscrollContentsBy_h (QAbstractScrollAreaSc a) ((Int, Int)) where
scrollContentsBy_h x0 (x1, x2)
= withObjectPtr x0 $ \cobj_x0 ->
qtc_QAbstractScrollArea_scrollContentsBy cobj_x0 (toCInt x1) (toCInt x2)
instance QqsizeHint_h (QAbstractScrollArea ()) (()) where
qsizeHint_h x0 ()
= withQSizeResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QAbstractScrollArea_sizeHint cobj_x0
foreign import ccall "qtc_QAbstractScrollArea_sizeHint" qtc_QAbstractScrollArea_sizeHint :: Ptr (TQAbstractScrollArea a) -> IO (Ptr (TQSize ()))
instance QqsizeHint_h (QAbstractScrollAreaSc a) (()) where
qsizeHint_h x0 ()
= withQSizeResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QAbstractScrollArea_sizeHint cobj_x0
instance QsizeHint_h (QAbstractScrollArea ()) (()) where
sizeHint_h x0 ()
= withSizeResult $ \csize_ret_w csize_ret_h ->
withObjectPtr x0 $ \cobj_x0 ->
qtc_QAbstractScrollArea_sizeHint_qth cobj_x0 csize_ret_w csize_ret_h
foreign import ccall "qtc_QAbstractScrollArea_sizeHint_qth" qtc_QAbstractScrollArea_sizeHint_qth :: Ptr (TQAbstractScrollArea a) -> Ptr CInt -> Ptr CInt -> IO ()
instance QsizeHint_h (QAbstractScrollAreaSc a) (()) where
sizeHint_h x0 ()
= withSizeResult $ \csize_ret_w csize_ret_h ->
withObjectPtr x0 $ \cobj_x0 ->
qtc_QAbstractScrollArea_sizeHint_qth cobj_x0 csize_ret_w csize_ret_h
instance QviewportEvent_h (QAbstractScrollArea ()) ((QEvent t1)) where
viewportEvent_h x0 (x1)
= withBoolResult $
withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QAbstractScrollArea_viewportEvent cobj_x0 cobj_x1
foreign import ccall "qtc_QAbstractScrollArea_viewportEvent" qtc_QAbstractScrollArea_viewportEvent :: Ptr (TQAbstractScrollArea a) -> Ptr (TQEvent t1) -> IO CBool
instance QviewportEvent_h (QAbstractScrollAreaSc a) ((QEvent t1)) where
viewportEvent_h x0 (x1)
= withBoolResult $
withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QAbstractScrollArea_viewportEvent cobj_x0 cobj_x1
instance QwheelEvent_h (QAbstractScrollArea ()) ((QWheelEvent t1)) where
wheelEvent_h x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QAbstractScrollArea_wheelEvent cobj_x0 cobj_x1
foreign import ccall "qtc_QAbstractScrollArea_wheelEvent" qtc_QAbstractScrollArea_wheelEvent :: Ptr (TQAbstractScrollArea a) -> Ptr (TQWheelEvent t1) -> IO ()
instance QwheelEvent_h (QAbstractScrollAreaSc a) ((QWheelEvent t1)) where
wheelEvent_h x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QAbstractScrollArea_wheelEvent cobj_x0 cobj_x1
instance QchangeEvent_h (QAbstractScrollArea ()) ((QEvent t1)) where
changeEvent_h x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QAbstractScrollArea_changeEvent cobj_x0 cobj_x1
foreign import ccall "qtc_QAbstractScrollArea_changeEvent" qtc_QAbstractScrollArea_changeEvent :: Ptr (TQAbstractScrollArea a) -> Ptr (TQEvent t1) -> IO ()
instance QchangeEvent_h (QAbstractScrollAreaSc a) ((QEvent t1)) where
changeEvent_h x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QAbstractScrollArea_changeEvent cobj_x0 cobj_x1
instance QactionEvent_h (QAbstractScrollArea ()) ((QActionEvent t1)) where
actionEvent_h x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QAbstractScrollArea_actionEvent cobj_x0 cobj_x1
foreign import ccall "qtc_QAbstractScrollArea_actionEvent" qtc_QAbstractScrollArea_actionEvent :: Ptr (TQAbstractScrollArea a) -> Ptr (TQActionEvent t1) -> IO ()
instance QactionEvent_h (QAbstractScrollAreaSc a) ((QActionEvent t1)) where
actionEvent_h x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QAbstractScrollArea_actionEvent cobj_x0 cobj_x1
instance QcloseEvent_h (QAbstractScrollArea ()) ((QCloseEvent t1)) where
closeEvent_h x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QAbstractScrollArea_closeEvent cobj_x0 cobj_x1
foreign import ccall "qtc_QAbstractScrollArea_closeEvent" qtc_QAbstractScrollArea_closeEvent :: Ptr (TQAbstractScrollArea a) -> Ptr (TQCloseEvent t1) -> IO ()
instance QcloseEvent_h (QAbstractScrollAreaSc a) ((QCloseEvent t1)) where
closeEvent_h x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QAbstractScrollArea_closeEvent cobj_x0 cobj_x1
instance QsetHandler (QAbstractScrollArea ()) (QAbstractScrollArea x0 -> IO (Int)) where
setHandler _eobj _eid _handler
= do
funptr <- wrapSetHandler_QAbstractScrollArea5 setHandlerWrapper
stptr <- newStablePtr (Wrap _handler)
funptr_d <- wrapSetHandler_QAbstractScrollArea5_d setHandlerWrapper_d
withObjectPtr _eobj $ \cobj_eobj ->
withCWString _eid $ \cstr_eid ->
qtc_QAbstractScrollArea_setHandler5 cobj_eobj cstr_eid (toCFunPtr funptr) (castStablePtrToPtr stptr) (toCFunPtr funptr_d)
return()
where
setHandlerWrapper :: Ptr (TQAbstractScrollArea x0) -> IO (CInt)
setHandlerWrapper x0
= do x0obj <- qAbstractScrollAreaFromPtr x0
let rv =
if (objectIsNull x0obj)
then return 0
else _handler x0obj
rvf <- rv
return (toCInt rvf)
setHandlerWrapper_d :: Ptr fun -> Ptr () -> Ptr fun_d -> IO ()
setHandlerWrapper_d funptr stptr funptr_d
= do when (stptr/=ptrNull)
(freeStablePtr (castPtrToStablePtr stptr))
when (funptr/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr))
when (funptr_d/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr_d))
return ()
foreign import ccall "qtc_QAbstractScrollArea_setHandler5" qtc_QAbstractScrollArea_setHandler5 :: Ptr (TQAbstractScrollArea a) -> CWString -> Ptr (Ptr (TQAbstractScrollArea x0) -> IO (CInt)) -> Ptr () -> Ptr (Ptr fun -> Ptr state -> Ptr fun_d -> IO ()) -> IO ()
foreign import ccall "wrapper" wrapSetHandler_QAbstractScrollArea5 :: (Ptr (TQAbstractScrollArea x0) -> IO (CInt)) -> IO (FunPtr (Ptr (TQAbstractScrollArea x0) -> IO (CInt)))
foreign import ccall "wrapper" wrapSetHandler_QAbstractScrollArea5_d :: (Ptr fun -> Ptr state -> Ptr fun_d -> IO ()) -> IO (FunPtr (Ptr fun -> Ptr state -> Ptr fun_d -> IO ()))
instance QsetHandler (QAbstractScrollAreaSc a) (QAbstractScrollArea x0 -> IO (Int)) where
setHandler _eobj _eid _handler
= do
funptr <- wrapSetHandler_QAbstractScrollArea5 setHandlerWrapper
stptr <- newStablePtr (Wrap _handler)
funptr_d <- wrapSetHandler_QAbstractScrollArea5_d setHandlerWrapper_d
withObjectPtr _eobj $ \cobj_eobj ->
withCWString _eid $ \cstr_eid ->
qtc_QAbstractScrollArea_setHandler5 cobj_eobj cstr_eid (toCFunPtr funptr) (castStablePtrToPtr stptr) (toCFunPtr funptr_d)
return()
where
setHandlerWrapper :: Ptr (TQAbstractScrollArea x0) -> IO (CInt)
setHandlerWrapper x0
= do x0obj <- qAbstractScrollAreaFromPtr x0
let rv =
if (objectIsNull x0obj)
then return 0
else _handler x0obj
rvf <- rv
return (toCInt rvf)
setHandlerWrapper_d :: Ptr fun -> Ptr () -> Ptr fun_d -> IO ()
setHandlerWrapper_d funptr stptr funptr_d
= do when (stptr/=ptrNull)
(freeStablePtr (castPtrToStablePtr stptr))
when (funptr/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr))
when (funptr_d/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr_d))
return ()
instance QdevType_h (QAbstractScrollArea ()) (()) where
devType_h x0 ()
= withIntResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QAbstractScrollArea_devType cobj_x0
foreign import ccall "qtc_QAbstractScrollArea_devType" qtc_QAbstractScrollArea_devType :: Ptr (TQAbstractScrollArea a) -> IO CInt
instance QdevType_h (QAbstractScrollAreaSc a) (()) where
devType_h x0 ()
= withIntResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QAbstractScrollArea_devType cobj_x0
instance QenterEvent_h (QAbstractScrollArea ()) ((QEvent t1)) where
enterEvent_h x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QAbstractScrollArea_enterEvent cobj_x0 cobj_x1
foreign import ccall "qtc_QAbstractScrollArea_enterEvent" qtc_QAbstractScrollArea_enterEvent :: Ptr (TQAbstractScrollArea a) -> Ptr (TQEvent t1) -> IO ()
instance QenterEvent_h (QAbstractScrollAreaSc a) ((QEvent t1)) where
enterEvent_h x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QAbstractScrollArea_enterEvent cobj_x0 cobj_x1
instance QfocusInEvent_h (QAbstractScrollArea ()) ((QFocusEvent t1)) where
focusInEvent_h x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QAbstractScrollArea_focusInEvent cobj_x0 cobj_x1
foreign import ccall "qtc_QAbstractScrollArea_focusInEvent" qtc_QAbstractScrollArea_focusInEvent :: Ptr (TQAbstractScrollArea a) -> Ptr (TQFocusEvent t1) -> IO ()
instance QfocusInEvent_h (QAbstractScrollAreaSc a) ((QFocusEvent t1)) where
focusInEvent_h x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QAbstractScrollArea_focusInEvent cobj_x0 cobj_x1
instance QfocusOutEvent_h (QAbstractScrollArea ()) ((QFocusEvent t1)) where
focusOutEvent_h x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QAbstractScrollArea_focusOutEvent cobj_x0 cobj_x1
foreign import ccall "qtc_QAbstractScrollArea_focusOutEvent" qtc_QAbstractScrollArea_focusOutEvent :: Ptr (TQAbstractScrollArea a) -> Ptr (TQFocusEvent t1) -> IO ()
instance QfocusOutEvent_h (QAbstractScrollAreaSc a) ((QFocusEvent t1)) where
focusOutEvent_h x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QAbstractScrollArea_focusOutEvent cobj_x0 cobj_x1
instance QsetHandler (QAbstractScrollArea ()) (QAbstractScrollArea x0 -> Int -> IO (Int)) where
setHandler _eobj _eid _handler
= do
funptr <- wrapSetHandler_QAbstractScrollArea6 setHandlerWrapper
stptr <- newStablePtr (Wrap _handler)
funptr_d <- wrapSetHandler_QAbstractScrollArea6_d setHandlerWrapper_d
withObjectPtr _eobj $ \cobj_eobj ->
withCWString _eid $ \cstr_eid ->
qtc_QAbstractScrollArea_setHandler6 cobj_eobj cstr_eid (toCFunPtr funptr) (castStablePtrToPtr stptr) (toCFunPtr funptr_d)
return()
where
setHandlerWrapper :: Ptr (TQAbstractScrollArea x0) -> CInt -> IO (CInt)
setHandlerWrapper x0 x1
= do x0obj <- qAbstractScrollAreaFromPtr x0
let x1int = fromCInt x1
let rv =
if (objectIsNull x0obj)
then return 0
else _handler x0obj x1int
rvf <- rv
return (toCInt rvf)
setHandlerWrapper_d :: Ptr fun -> Ptr () -> Ptr fun_d -> IO ()
setHandlerWrapper_d funptr stptr funptr_d
= do when (stptr/=ptrNull)
(freeStablePtr (castPtrToStablePtr stptr))
when (funptr/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr))
when (funptr_d/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr_d))
return ()
foreign import ccall "qtc_QAbstractScrollArea_setHandler6" qtc_QAbstractScrollArea_setHandler6 :: Ptr (TQAbstractScrollArea a) -> CWString -> Ptr (Ptr (TQAbstractScrollArea x0) -> CInt -> IO (CInt)) -> Ptr () -> Ptr (Ptr fun -> Ptr state -> Ptr fun_d -> IO ()) -> IO ()
foreign import ccall "wrapper" wrapSetHandler_QAbstractScrollArea6 :: (Ptr (TQAbstractScrollArea x0) -> CInt -> IO (CInt)) -> IO (FunPtr (Ptr (TQAbstractScrollArea x0) -> CInt -> IO (CInt)))
foreign import ccall "wrapper" wrapSetHandler_QAbstractScrollArea6_d :: (Ptr fun -> Ptr state -> Ptr fun_d -> IO ()) -> IO (FunPtr (Ptr fun -> Ptr state -> Ptr fun_d -> IO ()))
instance QsetHandler (QAbstractScrollAreaSc a) (QAbstractScrollArea x0 -> Int -> IO (Int)) where
setHandler _eobj _eid _handler
= do
funptr <- wrapSetHandler_QAbstractScrollArea6 setHandlerWrapper
stptr <- newStablePtr (Wrap _handler)
funptr_d <- wrapSetHandler_QAbstractScrollArea6_d setHandlerWrapper_d
withObjectPtr _eobj $ \cobj_eobj ->
withCWString _eid $ \cstr_eid ->
qtc_QAbstractScrollArea_setHandler6 cobj_eobj cstr_eid (toCFunPtr funptr) (castStablePtrToPtr stptr) (toCFunPtr funptr_d)
return()
where
setHandlerWrapper :: Ptr (TQAbstractScrollArea x0) -> CInt -> IO (CInt)
setHandlerWrapper x0 x1
= do x0obj <- qAbstractScrollAreaFromPtr x0
let x1int = fromCInt x1
let rv =
if (objectIsNull x0obj)
then return 0
else _handler x0obj x1int
rvf <- rv
return (toCInt rvf)
setHandlerWrapper_d :: Ptr fun -> Ptr () -> Ptr fun_d -> IO ()
setHandlerWrapper_d funptr stptr funptr_d
= do when (stptr/=ptrNull)
(freeStablePtr (castPtrToStablePtr stptr))
when (funptr/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr))
when (funptr_d/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr_d))
return ()
instance QheightForWidth_h (QAbstractScrollArea ()) ((Int)) where
heightForWidth_h x0 (x1)
= withIntResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QAbstractScrollArea_heightForWidth cobj_x0 (toCInt x1)
foreign import ccall "qtc_QAbstractScrollArea_heightForWidth" qtc_QAbstractScrollArea_heightForWidth :: Ptr (TQAbstractScrollArea a) -> CInt -> IO CInt
instance QheightForWidth_h (QAbstractScrollAreaSc a) ((Int)) where
heightForWidth_h x0 (x1)
= withIntResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QAbstractScrollArea_heightForWidth cobj_x0 (toCInt x1)
instance QhideEvent_h (QAbstractScrollArea ()) ((QHideEvent t1)) where
hideEvent_h x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QAbstractScrollArea_hideEvent cobj_x0 cobj_x1
foreign import ccall "qtc_QAbstractScrollArea_hideEvent" qtc_QAbstractScrollArea_hideEvent :: Ptr (TQAbstractScrollArea a) -> Ptr (TQHideEvent t1) -> IO ()
instance QhideEvent_h (QAbstractScrollAreaSc a) ((QHideEvent t1)) where
hideEvent_h x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QAbstractScrollArea_hideEvent cobj_x0 cobj_x1
instance QsetHandler (QAbstractScrollArea ()) (QAbstractScrollArea x0 -> InputMethodQuery -> IO (QVariant t0)) where
setHandler _eobj _eid _handler
= do
funptr <- wrapSetHandler_QAbstractScrollArea7 setHandlerWrapper
stptr <- newStablePtr (Wrap _handler)
funptr_d <- wrapSetHandler_QAbstractScrollArea7_d setHandlerWrapper_d
withObjectPtr _eobj $ \cobj_eobj ->
withCWString _eid $ \cstr_eid ->
qtc_QAbstractScrollArea_setHandler7 cobj_eobj cstr_eid (toCFunPtr funptr) (castStablePtrToPtr stptr) (toCFunPtr funptr_d)
return()
where
setHandlerWrapper :: Ptr (TQAbstractScrollArea x0) -> CLong -> IO (Ptr (TQVariant t0))
setHandlerWrapper x0 x1
= do x0obj <- qAbstractScrollAreaFromPtr x0
let x1enum = qEnum_fromInt $ fromCLong x1
let rv =
if (objectIsNull x0obj)
then return $ objectCast x0obj
else _handler x0obj x1enum
rvf <- rv
withObjectPtr rvf $ \cobj_rvf -> return (cobj_rvf)
setHandlerWrapper_d :: Ptr fun -> Ptr () -> Ptr fun_d -> IO ()
setHandlerWrapper_d funptr stptr funptr_d
= do when (stptr/=ptrNull)
(freeStablePtr (castPtrToStablePtr stptr))
when (funptr/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr))
when (funptr_d/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr_d))
return ()
foreign import ccall "qtc_QAbstractScrollArea_setHandler7" qtc_QAbstractScrollArea_setHandler7 :: Ptr (TQAbstractScrollArea a) -> CWString -> Ptr (Ptr (TQAbstractScrollArea x0) -> CLong -> IO (Ptr (TQVariant t0))) -> Ptr () -> Ptr (Ptr fun -> Ptr state -> Ptr fun_d -> IO ()) -> IO ()
foreign import ccall "wrapper" wrapSetHandler_QAbstractScrollArea7 :: (Ptr (TQAbstractScrollArea x0) -> CLong -> IO (Ptr (TQVariant t0))) -> IO (FunPtr (Ptr (TQAbstractScrollArea x0) -> CLong -> IO (Ptr (TQVariant t0))))
foreign import ccall "wrapper" wrapSetHandler_QAbstractScrollArea7_d :: (Ptr fun -> Ptr state -> Ptr fun_d -> IO ()) -> IO (FunPtr (Ptr fun -> Ptr state -> Ptr fun_d -> IO ()))
instance QsetHandler (QAbstractScrollAreaSc a) (QAbstractScrollArea x0 -> InputMethodQuery -> IO (QVariant t0)) where
setHandler _eobj _eid _handler
= do
funptr <- wrapSetHandler_QAbstractScrollArea7 setHandlerWrapper
stptr <- newStablePtr (Wrap _handler)
funptr_d <- wrapSetHandler_QAbstractScrollArea7_d setHandlerWrapper_d
withObjectPtr _eobj $ \cobj_eobj ->
withCWString _eid $ \cstr_eid ->
qtc_QAbstractScrollArea_setHandler7 cobj_eobj cstr_eid (toCFunPtr funptr) (castStablePtrToPtr stptr) (toCFunPtr funptr_d)
return()
where
setHandlerWrapper :: Ptr (TQAbstractScrollArea x0) -> CLong -> IO (Ptr (TQVariant t0))
setHandlerWrapper x0 x1
= do x0obj <- qAbstractScrollAreaFromPtr x0
let x1enum = qEnum_fromInt $ fromCLong x1
let rv =
if (objectIsNull x0obj)
then return $ objectCast x0obj
else _handler x0obj x1enum
rvf <- rv
withObjectPtr rvf $ \cobj_rvf -> return (cobj_rvf)
setHandlerWrapper_d :: Ptr fun -> Ptr () -> Ptr fun_d -> IO ()
setHandlerWrapper_d funptr stptr funptr_d
= do when (stptr/=ptrNull)
(freeStablePtr (castPtrToStablePtr stptr))
when (funptr/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr))
when (funptr_d/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr_d))
return ()
instance QinputMethodQuery_h (QAbstractScrollArea ()) ((InputMethodQuery)) where
inputMethodQuery_h x0 (x1)
= withQVariantResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QAbstractScrollArea_inputMethodQuery cobj_x0 (toCLong $ qEnum_toInt x1)
foreign import ccall "qtc_QAbstractScrollArea_inputMethodQuery" qtc_QAbstractScrollArea_inputMethodQuery :: Ptr (TQAbstractScrollArea a) -> CLong -> IO (Ptr (TQVariant ()))
instance QinputMethodQuery_h (QAbstractScrollAreaSc a) ((InputMethodQuery)) where
inputMethodQuery_h x0 (x1)
= withQVariantResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QAbstractScrollArea_inputMethodQuery cobj_x0 (toCLong $ qEnum_toInt x1)
instance QkeyReleaseEvent_h (QAbstractScrollArea ()) ((QKeyEvent t1)) where
keyReleaseEvent_h x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QAbstractScrollArea_keyReleaseEvent cobj_x0 cobj_x1
foreign import ccall "qtc_QAbstractScrollArea_keyReleaseEvent" qtc_QAbstractScrollArea_keyReleaseEvent :: Ptr (TQAbstractScrollArea a) -> Ptr (TQKeyEvent t1) -> IO ()
instance QkeyReleaseEvent_h (QAbstractScrollAreaSc a) ((QKeyEvent t1)) where
keyReleaseEvent_h x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QAbstractScrollArea_keyReleaseEvent cobj_x0 cobj_x1
instance QleaveEvent_h (QAbstractScrollArea ()) ((QEvent t1)) where
leaveEvent_h x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QAbstractScrollArea_leaveEvent cobj_x0 cobj_x1
foreign import ccall "qtc_QAbstractScrollArea_leaveEvent" qtc_QAbstractScrollArea_leaveEvent :: Ptr (TQAbstractScrollArea a) -> Ptr (TQEvent t1) -> IO ()
instance QleaveEvent_h (QAbstractScrollAreaSc a) ((QEvent t1)) where
leaveEvent_h x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QAbstractScrollArea_leaveEvent cobj_x0 cobj_x1
instance QmoveEvent_h (QAbstractScrollArea ()) ((QMoveEvent t1)) where
moveEvent_h x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QAbstractScrollArea_moveEvent cobj_x0 cobj_x1
foreign import ccall "qtc_QAbstractScrollArea_moveEvent" qtc_QAbstractScrollArea_moveEvent :: Ptr (TQAbstractScrollArea a) -> Ptr (TQMoveEvent t1) -> IO ()
instance QmoveEvent_h (QAbstractScrollAreaSc a) ((QMoveEvent t1)) where
moveEvent_h x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QAbstractScrollArea_moveEvent cobj_x0 cobj_x1
instance QsetHandler (QAbstractScrollArea ()) (QAbstractScrollArea x0 -> IO (QPaintEngine t0)) where
setHandler _eobj _eid _handler
= do
funptr <- wrapSetHandler_QAbstractScrollArea8 setHandlerWrapper
stptr <- newStablePtr (Wrap _handler)
funptr_d <- wrapSetHandler_QAbstractScrollArea8_d setHandlerWrapper_d
withObjectPtr _eobj $ \cobj_eobj ->
withCWString _eid $ \cstr_eid ->
qtc_QAbstractScrollArea_setHandler8 cobj_eobj cstr_eid (toCFunPtr funptr) (castStablePtrToPtr stptr) (toCFunPtr funptr_d)
return()
where
setHandlerWrapper :: Ptr (TQAbstractScrollArea x0) -> IO (Ptr (TQPaintEngine t0))
setHandlerWrapper x0
= do x0obj <- qAbstractScrollAreaFromPtr x0
let rv =
if (objectIsNull x0obj)
then return $ objectCast x0obj
else _handler x0obj
rvf <- rv
withObjectPtr rvf $ \cobj_rvf -> return (cobj_rvf)
setHandlerWrapper_d :: Ptr fun -> Ptr () -> Ptr fun_d -> IO ()
setHandlerWrapper_d funptr stptr funptr_d
= do when (stptr/=ptrNull)
(freeStablePtr (castPtrToStablePtr stptr))
when (funptr/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr))
when (funptr_d/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr_d))
return ()
foreign import ccall "qtc_QAbstractScrollArea_setHandler8" qtc_QAbstractScrollArea_setHandler8 :: Ptr (TQAbstractScrollArea a) -> CWString -> Ptr (Ptr (TQAbstractScrollArea x0) -> IO (Ptr (TQPaintEngine t0))) -> Ptr () -> Ptr (Ptr fun -> Ptr state -> Ptr fun_d -> IO ()) -> IO ()
foreign import ccall "wrapper" wrapSetHandler_QAbstractScrollArea8 :: (Ptr (TQAbstractScrollArea x0) -> IO (Ptr (TQPaintEngine t0))) -> IO (FunPtr (Ptr (TQAbstractScrollArea x0) -> IO (Ptr (TQPaintEngine t0))))
foreign import ccall "wrapper" wrapSetHandler_QAbstractScrollArea8_d :: (Ptr fun -> Ptr state -> Ptr fun_d -> IO ()) -> IO (FunPtr (Ptr fun -> Ptr state -> Ptr fun_d -> IO ()))
instance QsetHandler (QAbstractScrollAreaSc a) (QAbstractScrollArea x0 -> IO (QPaintEngine t0)) where
setHandler _eobj _eid _handler
= do
funptr <- wrapSetHandler_QAbstractScrollArea8 setHandlerWrapper
stptr <- newStablePtr (Wrap _handler)
funptr_d <- wrapSetHandler_QAbstractScrollArea8_d setHandlerWrapper_d
withObjectPtr _eobj $ \cobj_eobj ->
withCWString _eid $ \cstr_eid ->
qtc_QAbstractScrollArea_setHandler8 cobj_eobj cstr_eid (toCFunPtr funptr) (castStablePtrToPtr stptr) (toCFunPtr funptr_d)
return()
where
setHandlerWrapper :: Ptr (TQAbstractScrollArea x0) -> IO (Ptr (TQPaintEngine t0))
setHandlerWrapper x0
= do x0obj <- qAbstractScrollAreaFromPtr x0
let rv =
if (objectIsNull x0obj)
then return $ objectCast x0obj
else _handler x0obj
rvf <- rv
withObjectPtr rvf $ \cobj_rvf -> return (cobj_rvf)
setHandlerWrapper_d :: Ptr fun -> Ptr () -> Ptr fun_d -> IO ()
setHandlerWrapper_d funptr stptr funptr_d
= do when (stptr/=ptrNull)
(freeStablePtr (castPtrToStablePtr stptr))
when (funptr/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr))
when (funptr_d/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr_d))
return ()
instance QpaintEngine_h (QAbstractScrollArea ()) (()) where
paintEngine_h x0 ()
= withObjectRefResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QAbstractScrollArea_paintEngine cobj_x0
foreign import ccall "qtc_QAbstractScrollArea_paintEngine" qtc_QAbstractScrollArea_paintEngine :: Ptr (TQAbstractScrollArea a) -> IO (Ptr (TQPaintEngine ()))
instance QpaintEngine_h (QAbstractScrollAreaSc a) (()) where
paintEngine_h x0 ()
= withObjectRefResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QAbstractScrollArea_paintEngine cobj_x0
instance QsetHandler (QAbstractScrollArea ()) (QAbstractScrollArea x0 -> Bool -> IO ()) where
setHandler _eobj _eid _handler
= do
funptr <- wrapSetHandler_QAbstractScrollArea9 setHandlerWrapper
stptr <- newStablePtr (Wrap _handler)
funptr_d <- wrapSetHandler_QAbstractScrollArea9_d setHandlerWrapper_d
withObjectPtr _eobj $ \cobj_eobj ->
withCWString _eid $ \cstr_eid ->
qtc_QAbstractScrollArea_setHandler9 cobj_eobj cstr_eid (toCFunPtr funptr) (castStablePtrToPtr stptr) (toCFunPtr funptr_d)
return()
where
setHandlerWrapper :: Ptr (TQAbstractScrollArea x0) -> CBool -> IO ()
setHandlerWrapper x0 x1
= do x0obj <- qAbstractScrollAreaFromPtr x0
let x1bool = fromCBool x1
if (objectIsNull x0obj)
then return ()
else _handler x0obj x1bool
setHandlerWrapper_d :: Ptr fun -> Ptr () -> Ptr fun_d -> IO ()
setHandlerWrapper_d funptr stptr funptr_d
= do when (stptr/=ptrNull)
(freeStablePtr (castPtrToStablePtr stptr))
when (funptr/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr))
when (funptr_d/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr_d))
return ()
foreign import ccall "qtc_QAbstractScrollArea_setHandler9" qtc_QAbstractScrollArea_setHandler9 :: Ptr (TQAbstractScrollArea a) -> CWString -> Ptr (Ptr (TQAbstractScrollArea x0) -> CBool -> IO ()) -> Ptr () -> Ptr (Ptr fun -> Ptr state -> Ptr fun_d -> IO ()) -> IO ()
foreign import ccall "wrapper" wrapSetHandler_QAbstractScrollArea9 :: (Ptr (TQAbstractScrollArea x0) -> CBool -> IO ()) -> IO (FunPtr (Ptr (TQAbstractScrollArea x0) -> CBool -> IO ()))
foreign import ccall "wrapper" wrapSetHandler_QAbstractScrollArea9_d :: (Ptr fun -> Ptr state -> Ptr fun_d -> IO ()) -> IO (FunPtr (Ptr fun -> Ptr state -> Ptr fun_d -> IO ()))
instance QsetHandler (QAbstractScrollAreaSc a) (QAbstractScrollArea x0 -> Bool -> IO ()) where
setHandler _eobj _eid _handler
= do
funptr <- wrapSetHandler_QAbstractScrollArea9 setHandlerWrapper
stptr <- newStablePtr (Wrap _handler)
funptr_d <- wrapSetHandler_QAbstractScrollArea9_d setHandlerWrapper_d
withObjectPtr _eobj $ \cobj_eobj ->
withCWString _eid $ \cstr_eid ->
qtc_QAbstractScrollArea_setHandler9 cobj_eobj cstr_eid (toCFunPtr funptr) (castStablePtrToPtr stptr) (toCFunPtr funptr_d)
return()
where
setHandlerWrapper :: Ptr (TQAbstractScrollArea x0) -> CBool -> IO ()
setHandlerWrapper x0 x1
= do x0obj <- qAbstractScrollAreaFromPtr x0
let x1bool = fromCBool x1
if (objectIsNull x0obj)
then return ()
else _handler x0obj x1bool
setHandlerWrapper_d :: Ptr fun -> Ptr () -> Ptr fun_d -> IO ()
setHandlerWrapper_d funptr stptr funptr_d
= do when (stptr/=ptrNull)
(freeStablePtr (castPtrToStablePtr stptr))
when (funptr/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr))
when (funptr_d/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr_d))
return ()
instance QsetVisible_h (QAbstractScrollArea ()) ((Bool)) where
setVisible_h x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
qtc_QAbstractScrollArea_setVisible cobj_x0 (toCBool x1)
foreign import ccall "qtc_QAbstractScrollArea_setVisible" qtc_QAbstractScrollArea_setVisible :: Ptr (TQAbstractScrollArea a) -> CBool -> IO ()
instance QsetVisible_h (QAbstractScrollAreaSc a) ((Bool)) where
setVisible_h x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
qtc_QAbstractScrollArea_setVisible cobj_x0 (toCBool x1)
instance QshowEvent_h (QAbstractScrollArea ()) ((QShowEvent t1)) where
showEvent_h x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QAbstractScrollArea_showEvent cobj_x0 cobj_x1
foreign import ccall "qtc_QAbstractScrollArea_showEvent" qtc_QAbstractScrollArea_showEvent :: Ptr (TQAbstractScrollArea a) -> Ptr (TQShowEvent t1) -> IO ()
instance QshowEvent_h (QAbstractScrollAreaSc a) ((QShowEvent t1)) where
showEvent_h x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QAbstractScrollArea_showEvent cobj_x0 cobj_x1
instance QtabletEvent_h (QAbstractScrollArea ()) ((QTabletEvent t1)) where
tabletEvent_h x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QAbstractScrollArea_tabletEvent cobj_x0 cobj_x1
foreign import ccall "qtc_QAbstractScrollArea_tabletEvent" qtc_QAbstractScrollArea_tabletEvent :: Ptr (TQAbstractScrollArea a) -> Ptr (TQTabletEvent t1) -> IO ()
instance QtabletEvent_h (QAbstractScrollAreaSc a) ((QTabletEvent t1)) where
tabletEvent_h x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QAbstractScrollArea_tabletEvent cobj_x0 cobj_x1
instance QsetHandler (QAbstractScrollArea ()) (QAbstractScrollArea x0 -> QObject t1 -> QEvent t2 -> IO (Bool)) where
setHandler _eobj _eid _handler
= do
funptr <- wrapSetHandler_QAbstractScrollArea10 setHandlerWrapper
stptr <- newStablePtr (Wrap _handler)
funptr_d <- wrapSetHandler_QAbstractScrollArea10_d setHandlerWrapper_d
withObjectPtr _eobj $ \cobj_eobj ->
withCWString _eid $ \cstr_eid ->
qtc_QAbstractScrollArea_setHandler10 cobj_eobj cstr_eid (toCFunPtr funptr) (castStablePtrToPtr stptr) (toCFunPtr funptr_d)
return()
where
setHandlerWrapper :: Ptr (TQAbstractScrollArea x0) -> Ptr (TQObject t1) -> Ptr (TQEvent t2) -> IO (CBool)
setHandlerWrapper x0 x1 x2
= do x0obj <- qAbstractScrollAreaFromPtr x0
x1obj <- qObjectFromPtr x1
x2obj <- objectFromPtr_nf x2
let rv =
if (objectIsNull x0obj)
then return False
else _handler x0obj x1obj x2obj
rvf <- rv
return (toCBool rvf)
setHandlerWrapper_d :: Ptr fun -> Ptr () -> Ptr fun_d -> IO ()
setHandlerWrapper_d funptr stptr funptr_d
= do when (stptr/=ptrNull)
(freeStablePtr (castPtrToStablePtr stptr))
when (funptr/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr))
when (funptr_d/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr_d))
return ()
foreign import ccall "qtc_QAbstractScrollArea_setHandler10" qtc_QAbstractScrollArea_setHandler10 :: Ptr (TQAbstractScrollArea a) -> CWString -> Ptr (Ptr (TQAbstractScrollArea x0) -> Ptr (TQObject t1) -> Ptr (TQEvent t2) -> IO (CBool)) -> Ptr () -> Ptr (Ptr fun -> Ptr state -> Ptr fun_d -> IO ()) -> IO ()
foreign import ccall "wrapper" wrapSetHandler_QAbstractScrollArea10 :: (Ptr (TQAbstractScrollArea x0) -> Ptr (TQObject t1) -> Ptr (TQEvent t2) -> IO (CBool)) -> IO (FunPtr (Ptr (TQAbstractScrollArea x0) -> Ptr (TQObject t1) -> Ptr (TQEvent t2) -> IO (CBool)))
foreign import ccall "wrapper" wrapSetHandler_QAbstractScrollArea10_d :: (Ptr fun -> Ptr state -> Ptr fun_d -> IO ()) -> IO (FunPtr (Ptr fun -> Ptr state -> Ptr fun_d -> IO ()))
instance QsetHandler (QAbstractScrollAreaSc a) (QAbstractScrollArea x0 -> QObject t1 -> QEvent t2 -> IO (Bool)) where
setHandler _eobj _eid _handler
= do
funptr <- wrapSetHandler_QAbstractScrollArea10 setHandlerWrapper
stptr <- newStablePtr (Wrap _handler)
funptr_d <- wrapSetHandler_QAbstractScrollArea10_d setHandlerWrapper_d
withObjectPtr _eobj $ \cobj_eobj ->
withCWString _eid $ \cstr_eid ->
qtc_QAbstractScrollArea_setHandler10 cobj_eobj cstr_eid (toCFunPtr funptr) (castStablePtrToPtr stptr) (toCFunPtr funptr_d)
return()
where
setHandlerWrapper :: Ptr (TQAbstractScrollArea x0) -> Ptr (TQObject t1) -> Ptr (TQEvent t2) -> IO (CBool)
setHandlerWrapper x0 x1 x2
= do x0obj <- qAbstractScrollAreaFromPtr x0
x1obj <- qObjectFromPtr x1
x2obj <- objectFromPtr_nf x2
let rv =
if (objectIsNull x0obj)
then return False
else _handler x0obj x1obj x2obj
rvf <- rv
return (toCBool rvf)
setHandlerWrapper_d :: Ptr fun -> Ptr () -> Ptr fun_d -> IO ()
setHandlerWrapper_d funptr stptr funptr_d
= do when (stptr/=ptrNull)
(freeStablePtr (castPtrToStablePtr stptr))
when (funptr/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr))
when (funptr_d/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr_d))
return ()
instance QeventFilter_h (QAbstractScrollArea ()) ((QObject t1, QEvent t2)) where
eventFilter_h x0 (x1, x2)
= withBoolResult $
withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
withObjectPtr x2 $ \cobj_x2 ->
qtc_QAbstractScrollArea_eventFilter cobj_x0 cobj_x1 cobj_x2
foreign import ccall "qtc_QAbstractScrollArea_eventFilter" qtc_QAbstractScrollArea_eventFilter :: Ptr (TQAbstractScrollArea a) -> Ptr (TQObject t1) -> Ptr (TQEvent t2) -> IO CBool
instance QeventFilter_h (QAbstractScrollAreaSc a) ((QObject t1, QEvent t2)) where
eventFilter_h x0 (x1, x2)
= withBoolResult $
withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
withObjectPtr x2 $ \cobj_x2 ->
qtc_QAbstractScrollArea_eventFilter cobj_x0 cobj_x1 cobj_x2
|
uduki/hsQt
|
Qtc/Gui/QAbstractScrollArea_h.hs
|
bsd-2-clause
| 66,129 | 0 | 18 | 13,214 | 20,257 | 9,768 | 10,489 | -1 | -1 |
module Main where
import Client
import Network.HTTP.Client (newManager, defaultManagerSettings)
import Servant.Client
main :: IO ()
main = do
m <- newManager defaultManagerSettings
res <- runClientM (getTeam 86) (ClientEnv m baseUrl)
case res of
Left err -> putStrLn $ "Error: " ++ show err
Right object -> do
print object
|
julienXX/football-data-client
|
example/Main.hs
|
bsd-3-clause
| 345 | 0 | 12 | 71 | 119 | 59 | 60 | 12 | 2 |
{-|
Module : IRTS.JavaScript.Specialize
Description : The JavaScript specializer.
Copyright :
License : BSD3
Maintainer : The Idris Community.
-}
{-# LANGUAGE OverloadedStrings, PatternGuards #-}
module IRTS.JavaScript.Specialize
( SCtor
, STest
, SProj
, specialCased
, specialCall
, qualifyN
) where
import qualified Data.Map.Strict as Map
import qualified Data.Text as T
import Idris.Core.TT
import IRTS.JavaScript.AST
split :: Char -> String -> [String]
split c "" = [""]
split c (x:xs)
| c == x = "" : split c xs
| otherwise =
let ~(h:t) = split c xs
in ((x : h) : t)
qualify :: String -> Name -> Name
qualify "" n = n
qualify ns n = sNS n (reverse $ split '.' ns)
qualifyN :: String -> String -> Name
qualifyN ns n = qualify ns $ sUN n
-- special-cased constructors
type SCtor = [JsExpr] -> JsExpr
type STest = JsExpr -> JsExpr
type SProj = JsExpr -> Int -> JsExpr
constructorOptimizeDB :: Map.Map Name (SCtor, STest, SProj)
constructorOptimizeDB =
Map.fromList
[ item "Prelude.Bool" "True" (const $ JsBool True) trueTest cantProj
, item "Prelude.Bool" "False" (const $ JsBool False) falseTest cantProj
, item "Prelude.Interfaces" "LT" (const $ JsInt (0-1)) ltTest cantProj
, item "Prelude.Interfaces" "EQ" (const $ JsInt 0) eqTest cantProj
, item "Prelude.Interfaces" "GT" (const $ JsInt 1) gtTest cantProj
-- , item "Prelude.List" "::" cons fillList uncons
-- , item "Prelude.List" "Nil" nil emptyList cantProj
-- , item "Prelude.Maybe" "Just" (\[x] -> x) notNoneTest justProj
-- , item "Prelude.Maybe" "Nothing" (const $ JsUndefined) noneTest cantProj
]
-- constructors
where
trueTest = id
falseTest e = JsUniOp (T.pack "!") e
ltTest e = JsBinOp "<" e (JsInt 0)
eqTest e = JsBinOp "===" e (JsInt 0)
gtTest e = JsBinOp ">" e (JsInt 0)
-- projections
    cantProj x j = error $ "This type should not be projected"
item :: String
-> String
-> SCtor
-> STest
-> SProj
-> (Name, (SCtor, STest, SProj))
item ns n ctor test match = (qualifyN ns n, (ctor, test, match))
specialCased :: Name -> Maybe (SCtor, STest, SProj)
specialCased n = Map.lookup n constructorOptimizeDB
-- special functions
type SSig = (Int, [JsExpr] -> JsExpr)
callSpecializeDB :: Map.Map Name (SSig)
callSpecializeDB =
Map.fromList
[ qb "Eq" "Int" "==" "==="
, qb "Ord" "Int" "<" "<"
, qb "Ord" "Int" ">" ">"
, qb "Ord" "Int" "<=" "<="
, qb "Ord" "Int" ">=" ">="
, qb "Eq" "Double" "==" "==="
, qb "Ord" "Double" "<" "<"
, qb "Ord" "Double" ">" ">"
, qb "Ord" "Double" "<=" "<="
, qb "Ord" "Double" ">=" ">="
]
where
qb intf ty op jsop =
( qualify "Prelude.Interfaces" $
SN $
WhereN
0
(qualify "Prelude.Interfaces" $
SN $ ImplementationN (qualifyN "Prelude.Interfaces" intf) [ty])
(SN $ MethodN $ UN op)
, (2, \[x, y] -> JsBinOp jsop x y))
specialCall :: Name -> Maybe SSig
specialCall n = Map.lookup n callSpecializeDB
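-- A hedged usage sketch, not part of the original module: how a code
-- generator might consult 'specialCall' before falling back to a generic
-- application. 'emitGenericApp' is a hypothetical helper, not defined here.
--
-- emitApp :: Name -> [JsExpr] -> JsExpr
-- emitApp n args =
--   case specialCall n of
--     Just (arity, mk) | length args == arity -> mk args
--     _                                        -> emitGenericApp n args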
|
uuhan/Idris-dev
|
src/IRTS/JavaScript/Specialize.hs
|
bsd-3-clause
| 3,088 | 0 | 14 | 780 | 973 | 516 | 457 | 75 | 1 |
{-# LANGUAGE Trustworthy #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE RecordWildCards #-}
module Allegro.Font
( loadFont
, FontFlags (..)
, defaultFontFlags
-- * Fonts
, Font
, fontLineHeight
, fontAscent
, fontDescent
-- * Dimensions of text using this font
, textWidth
, textDimensions
-- * Drawing with the font
, drawText
, Color(..)
, Point
, drawJustifiedText
, Alignment(..)
-- * Exceptions
, FailedToLoadFont(..)
) where
import Allegro.Types
import Allegro.C.Types
import Allegro.C.Font
import qualified Control.Exception as X
import Control.Monad ( when )
import Data.Bits ( (.|.) )
import Data.Typeable ( Typeable )
import Foreign ( Ptr, nullPtr, alloca, peek
, ForeignPtr, newForeignPtr, withForeignPtr
)
import Foreign.C.Types(CInt)
import Foreign.C.String(withCString)
import System.IO.Unsafe(unsafeDupablePerformIO)
newtype Font = Font (ForeignPtr FONT)
withFontPtr :: Font -> (Ptr FONT -> IO a) -> IO a
withFontPtr (Font q) = withForeignPtr q
data FontFlags = FontFlags { noKerning, monochrome, noAutoHint :: Bool }
defaultFontFlags :: FontFlags
defaultFontFlags = FontFlags { noKerning = False
, monochrome = False
, noAutoHint = False }
loadFont :: FilePath
-> Int -- ^ Font size
-> IO Font
loadFont file size = loadFontWithFlags file size defaultFontFlags
loadFontWithFlags
:: FilePath
-> Int -- ^ Font size
-> FontFlags
-> IO Font
loadFontWithFlags file sz FontFlags { .. } =
do ptr <- withCString file $ \f -> al_load_font f (fromIntegral sz) fs
when (ptr == nullPtr) $ X.throwIO (FailedToLoadFont file)
Font `fmap` newForeignPtr al_destroy_font_addr ptr
where
fs = (if noKerning then ttf_no_kerning else 0) .|.
(if monochrome then ttf_monochrome else 0) .|.
(if noAutoHint then ttf_no_auto_hint else 0)
data FailedToLoadFont = FailedToLoadFont FilePath
deriving (Typeable,Show)
instance X.Exception FailedToLoadFont
-- Get some information about a font.
-- This assumes that a font won't be changed, hence it is pure.
fontInfo :: (Ptr FONT -> IO CInt) -> Font -> Int
fontInfo info f = fromIntegral $ unsafeDupablePerformIO $ withFontPtr f info
fontLineHeight :: Font -> Int
fontLineHeight = fontInfo al_get_font_line_height
fontAscent :: Font -> Int
fontAscent = fontInfo al_get_font_ascent
fontDescent :: Font -> Int
fontDescent = fontInfo al_get_font_descent
textWidth :: Font -> String -> Int
textWidth f x = fontInfo (\p -> withCString x (al_get_text_width p)) f
textDimensions :: Font -> String -> (Int,Int,Int,Int)
textDimensions f t =
unsafeDupablePerformIO $
alloca $ \px ->
alloca $ \py ->
alloca $ \pw ->
alloca $ \ph ->
withCString t $ \ps -> do withFontPtr f $ \pf ->
al_get_text_dimensions pf ps px py pw ph
x <- peek px
y <- peek py
w <- peek pw
h <- peek ph
return ( fromIntegral x
, fromIntegral y
, fromIntegral w
, fromIntegral h
)
data Alignment = AlignLeft | AlignCenter | AlignRight
deriving (Eq,Show)
alignFlags :: Alignment -> CInt
alignFlags al = case al of
AlignLeft -> align_left
AlignCenter -> align_center
AlignRight -> align_right
drawText :: Font -> Color -> Point -> Alignment -> String -> IO ()
drawText font Color { .. } (x,y) al txt =
withCString txt $ \sp ->
withFontPtr font $ \fp -> shal_draw_text fp (realToFrac cRed)
(realToFrac cGreen)
(realToFrac cBlue)
(realToFrac cAlpha)
(realToFrac x)
(realToFrac y)
(alignFlags al)
sp
drawJustifiedText :: Font -- ^ use this font
-> Color -- ^ text should have this color
-> Point -- ^ position
-> Float -- ^ right text boundary
-> Float -- ^ maximum space between words
-> Alignment -- ^ text alignement within boundary
-> String -- ^ text to draw
-> IO ()
drawJustifiedText font Color { .. } (x1,y) x2 diff al txt =
withCString txt $ \sp ->
withFontPtr font $ \fp -> shal_draw_justified_text fp
(realToFrac cRed)
(realToFrac cGreen)
(realToFrac cBlue)
(realToFrac cAlpha)
(realToFrac x1)
(realToFrac x2)
(realToFrac y)
(realToFrac diff)
(alignFlags al)
sp
|
yav/allegro
|
src/Allegro/Font.hs
|
bsd-3-clause
| 5,305 | 0 | 19 | 2,049 | 1,243 | 674 | 569 | 124 | 4 |
module LempelZiv where
import BinaryTree
import Bit(intWToBits, incrementInt, pruneZeroes)
data Chunk a = Chunk a Bool |
TerminalChunk a
deriving Show
parse n Leaf chunk bs = (newNode n, chunk, bs)
parse _ t@(Branch x _ _) _ [] = (t, TerminalChunk x, [])
parse n (Branch x l r) _ (b:bs) =
let chunk = Chunk x b
in if b
then let (r', chunk', bs') = parse n r chunk bs
in (Branch x l r', chunk', bs')
else let (l', chunk', bs') = parse n l chunk bs
in (Branch x l' r, chunk', bs')
completeParse p = completeParse' p 1 (newNode 0)
completeParse' _ _ _ [] = []
completeParse' p n tree bs =
let (tree', chunk, bs') = p n tree (TerminalChunk Nothing) bs
in chunk : completeParse' p (n+1) tree' bs'
bitsNeeded = bitsNeeded' 1 1 0
bitsNeeded' n m logM =
logM : if n < m
then bitsNeeded' (n+1) m logM
else bitsNeeded' (n+1) (2*m) (logM+1)
serialize = concatMap serialize' . zip bitsNeeded
serialize' (m, TerminalChunk (Just k)) = intWToBits m [] k
serialize' (m, Chunk (Just k) b) = intWToBits m [] k ++ [b]
serialize' _ = error "Chunk contained Nothing."
deserialize = deserialize' bitsNeeded
deserialize' (m:ms) bs =
let (k, bs') = splitAt m bs
in case bs' of
[] -> [TerminalChunk k]
(b:bs'') -> Chunk k b : deserialize' ms bs''
deserialize' [] _ = error "List of split sizes is too short."
translate _ bs Nothing = bs
translate tree bs (Just (TerminalChunk k)) =
translate tree bs $ safeGetValue tree k
translate tree bs (Just (Chunk k b)) =
translate tree (b:bs) $ safeGetValue tree k
{-----------------------------}
parse2 _ t chunk [] =
case t of
Branch x Leaf Leaf -> (t, TerminalChunk x, [])
Branch x (Branch _ _ _) (Branch _ _ _) -> (t, TerminalChunk x, [])
_ -> (t, chunk, [])
parse2 n Leaf chunk bs = (newNode n, chunk, bs)
parse2 n (Branch x l r) _ (b:bs) =
let chunk = case (l, r, b) of
(Branch _ _ _, Leaf, True) -> TerminalChunk x
(Leaf, Branch _ _ _, False) -> TerminalChunk x
_ -> Chunk x b
in if b
then let (r', chunk', bs') = parse2 n r chunk bs
in (Branch x l r', chunk', bs')
else let (l', chunk', bs') = parse2 n l chunk bs
in (Branch x l' r, chunk', bs')
deserialize2 = deserialize2' (append ([], Leaf) (False, False)) bitsNeeded
deserialize2' _ (_:_) [] = []
deserialize2' t (m:ms) bs =
let (k, bs') = splitAt m bs
k' = incrementInt k
inferedBit = inferOtherChild $ safeGetValue (snd t) k'
t' = append t (False, False)
in case (inferedBit, bs') of
(Just _, b:[]) -> [Chunk k b]
(Just iB, _) -> Chunk k iB : deserialize2' (markChild iB t' k') ms bs'
(Nothing, []) -> [TerminalChunk k]
(Nothing, b:bs'') -> Chunk k b : deserialize2' (markChild b t' k') ms bs''
deserialize2' _ [] _ = error "List of split sizes is too short."
markChild b (n, tree) k
= (n, modifyNode (markChild' b) tree (pruneZeroes k))
markChild' True (Just (b, _)) = Just (b, True)
markChild' False (Just (_, b)) = Just (True, b)
markChild' _ Nothing = error "Cannot mark the child of a leaf."
inferOtherChild (Just (True, False)) = Just True
inferOtherChild (Just (False, True)) = Just False
inferOtherChild _ = Nothing
{---------------------------------------------------------}
encodeLempelZiv = serialize . completeParse parse
decodeLempelZiv bits =
let chunks = deserialize bits
tree = listToTree chunks
in concatMap (translate tree [] . Just) chunks
encodeLempelZiv2 = serialize . completeParse parse2
decodeLempelZiv2 bits =
let chunks = deserialize2 bits
tree = listToTree chunks
in concatMap (translate tree [] . Just) chunks
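-- A hedged round-trip sketch, not part of the original module: a property one
-- might check with QuickCheck, assuming the encoder/decoder pair is intended
-- to be lossless on bit lists ([Bool]):
--
-- prop_roundTrip :: [Bool] -> Bool
-- prop_roundTrip bs = decodeLempelZiv (encodeLempelZiv bs) == bs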
|
cullina/Extractor
|
src/LempelZiv.hs
|
bsd-3-clause
| 4,016 | 0 | 13 | 1,226 | 1,701 | 876 | 825 | 88 | 6 |
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE CPP, ConstraintKinds, DeriveDataTypeable, FlexibleContexts, MultiWayIf, NamedFieldPuns,
OverloadedStrings, PackageImports, RankNTypes, RecordWildCards, ScopedTypeVariables,
TemplateHaskell, TupleSections #-}
-- | Run commands in Docker containers
module Stack.Docker
(cleanup
,CleanupOpts(..)
,CleanupAction(..)
,dockerCleanupCmdName
,dockerCmdName
,dockerHelpOptName
,dockerPullCmdName
,entrypoint
,preventInContainer
,pull
,reexecWithOptionalContainer
,reset
,reExecArgName
,StackDockerException(..)
) where
import Stack.Prelude
import Control.Monad.Writer (execWriter,runWriter,tell)
import qualified Crypto.Hash as Hash (Digest, MD5, hash)
import Data.Aeson.Extended (FromJSON(..),(.:),(.:?),(.!=),eitherDecode)
import Data.ByteString.Builder (stringUtf8,charUtf8,toLazyByteString)
import qualified Data.ByteString.Char8 as BS
import qualified Data.ByteString.Lazy.Char8 as LBS
import Data.Char (isSpace,toUpper,isAscii,isDigit)
import Data.Conduit.List (sinkNull)
import Data.List (dropWhileEnd,intercalate,isPrefixOf,isInfixOf)
import Data.List.Extra (trim, nubOrd)
import qualified Data.Map.Strict as Map
import Data.Ord (Down(..))
import Data.Streaming.Process (ProcessExitedUnsuccessfully(..))
import qualified Data.Text as T
import qualified Data.Text.Encoding as T
import Data.Time (UTCTime,LocalTime(..),diffDays,utcToLocalTime,getZonedTime,ZonedTime(..))
import Data.Version (showVersion)
import GHC.Exts (sortWith)
import Path
import Path.Extra (toFilePathNoTrailingSep)
import Path.IO hiding (canonicalizePath)
import qualified Paths_stack as Meta
import Stack.Config (getInContainer)
import Stack.Constants
import Stack.Constants.Config
import Stack.Docker.GlobalDB
import Stack.Types.PackageIndex
import Stack.Types.Runner
import Stack.Types.Version
import Stack.Types.Config
import Stack.Types.Docker
import Stack.Setup (ensureDockerStackExe)
import System.Directory (canonicalizePath,getHomeDirectory)
import System.Environment (getEnv,getEnvironment,getProgName,getArgs,getExecutablePath)
import System.Exit (exitSuccess, exitWith, ExitCode(..))
import qualified System.FilePath as FP
import System.IO (stderr,stdin,stdout,hIsTerminalDevice, hClose)
import System.IO.Error (isDoesNotExistError)
import System.IO.Unsafe (unsafePerformIO)
import qualified System.PosixCompat.User as User
import qualified System.PosixCompat.Files as Files
import System.Process (CreateProcess(..), StdStream(..), waitForProcess)
import System.Process.PagerEditor (editByteString)
import System.Process.Read
import System.Process.Run
import Text.Printf (printf)
#ifndef WINDOWS
import Control.Concurrent (threadDelay)
import System.Posix.Signals
import qualified System.Posix.User as PosixUser
#endif
-- | If Docker is enabled, re-runs the currently running OS command in a Docker container.
-- Otherwise, runs the inner action.
--
-- This takes an optional release action which should be taken IFF control is
-- transferring away from the current process to the intra-container one. The main use
-- for this is releasing a lock. After launching reexecution, the host process becomes
-- nothing but a manager for the call into docker and thus may not hold the lock.
reexecWithOptionalContainer
:: HasConfig env
=> Maybe (Path Abs Dir)
-> Maybe (RIO env ())
-> IO ()
-> Maybe (RIO env ())
-> Maybe (RIO env ())
-> RIO env ()
reexecWithOptionalContainer mprojectRoot =
execWithOptionalContainer mprojectRoot getCmdArgs
where
getCmdArgs docker envOverride imageInfo isRemoteDocker = do
config <- view configL
deUser <-
if fromMaybe (not isRemoteDocker) (dockerSetUser docker)
then liftIO $ do
duUid <- User.getEffectiveUserID
duGid <- User.getEffectiveGroupID
duGroups <- nubOrd <$> User.getGroups
duUmask <- Files.setFileCreationMask 0o022
-- Only way to get old umask seems to be to change it, so set it back afterward
_ <- Files.setFileCreationMask duUmask
return (Just DockerUser{..})
else return Nothing
args <-
fmap
(["--" ++ reExecArgName ++ "=" ++ showVersion Meta.version
,"--" ++ dockerEntrypointArgName
,show DockerEntrypoint{..}] ++)
(liftIO getArgs)
case dockerStackExe (configDocker config) of
Just DockerStackExeHost
| configPlatform config == dockerContainerPlatform -> do
exePath <- liftIO getExecutablePath
cmdArgs args exePath
| otherwise -> throwIO UnsupportedStackExeHostPlatformException
Just DockerStackExeImage -> do
progName <- liftIO getProgName
return (FP.takeBaseName progName, args, [], [])
Just (DockerStackExePath path) -> do
exePath <- liftIO $ canonicalizePath (toFilePath path)
cmdArgs args exePath
Just DockerStackExeDownload -> exeDownload args
Nothing
| configPlatform config == dockerContainerPlatform -> do
(exePath,exeTimestamp,misCompatible) <-
liftIO $
do exePath <- liftIO getExecutablePath
exeTimestamp <- resolveFile' exePath >>= getModificationTime
isKnown <-
liftIO $
getDockerImageExe
config
(iiId imageInfo)
exePath
exeTimestamp
return (exePath, exeTimestamp, isKnown)
case misCompatible of
Just True -> cmdArgs args exePath
Just False -> exeDownload args
Nothing -> do
e <-
try $
sinkProcessStderrStdout
Nothing
envOverride
"docker"
[ "run"
, "-v"
, exePath ++ ":" ++ "/tmp/stack"
, iiId imageInfo
, "/tmp/stack"
, "--version"]
sinkNull
sinkNull
let compatible =
case e of
Left (ProcessExitedUnsuccessfully _ _) ->
False
Right _ -> True
liftIO $
setDockerImageExe
config
(iiId imageInfo)
exePath
exeTimestamp
compatible
if compatible
then cmdArgs args exePath
else exeDownload args
Nothing -> exeDownload args
exeDownload args = do
exePath <- ensureDockerStackExe dockerContainerPlatform
cmdArgs args (toFilePath exePath)
cmdArgs args exePath = do
let mountPath = hostBinDir FP.</> FP.takeBaseName exePath
return (mountPath, args, [], [Mount exePath mountPath])
-- | If Docker is enabled, re-runs the OS command returned by the second argument in a
-- Docker container. Otherwise, runs the inner action.
--
-- This takes an optional release action just like `reexecWithOptionalContainer`.
execWithOptionalContainer
:: HasConfig env
=> Maybe (Path Abs Dir)
-> GetCmdArgs env
-> Maybe (RIO env ())
-> IO ()
-> Maybe (RIO env ())
-> Maybe (RIO env ())
-> RIO env ()
execWithOptionalContainer mprojectRoot getCmdArgs mbefore inner mafter mrelease =
do config <- view configL
inContainer <- getInContainer
isReExec <- view reExecL
if | inContainer && not isReExec && (isJust mbefore || isJust mafter) ->
throwIO OnlyOnHostException
| inContainer ->
liftIO (do inner
exitSuccess)
| not (dockerEnable (configDocker config)) ->
do fromMaybeAction mbefore
liftIO inner
fromMaybeAction mafter
liftIO exitSuccess
| otherwise ->
do fromMaybeAction mrelease
runContainerAndExit
getCmdArgs
mprojectRoot
(fromMaybeAction mbefore)
(fromMaybeAction mafter)
where
fromMaybeAction Nothing = return ()
fromMaybeAction (Just hook) = hook
-- | Error if running in a container.
preventInContainer :: MonadIO m => m () -> m ()
preventInContainer inner =
do inContainer <- getInContainer
if inContainer
then throwIO OnlyOnHostException
else inner
-- | Run a command in a new Docker container, then exit the process.
runContainerAndExit
:: HasConfig env
=> GetCmdArgs env
-> Maybe (Path Abs Dir) -- ^ Project root (maybe)
-> RIO env () -- ^ Action to run before
-> RIO env () -- ^ Action to run after
-> RIO env ()
runContainerAndExit getCmdArgs
mprojectRoot
before
after =
do config <- view configL
let docker = configDocker config
envOverride <- getEnvOverride (configPlatform config)
checkDockerVersion envOverride docker
(env,isStdinTerminal,isStderrTerminal,homeDir) <- liftIO $
(,,,)
<$> getEnvironment
<*> hIsTerminalDevice stdin
<*> hIsTerminalDevice stderr
<*> (parseAbsDir =<< getHomeDirectory)
isStdoutTerminal <- view terminalL
let dockerHost = lookup "DOCKER_HOST" env
dockerCertPath = lookup "DOCKER_CERT_PATH" env
bamboo = lookup "bamboo_buildKey" env
jenkins = lookup "JENKINS_HOME" env
msshAuthSock = lookup "SSH_AUTH_SOCK" env
muserEnv = lookup "USER" env
isRemoteDocker = maybe False (isPrefixOf "tcp://") dockerHost
image = dockerImage docker
when (isRemoteDocker &&
maybe False (isInfixOf "boot2docker") dockerCertPath)
(logWarn "Warning: Using boot2docker is NOT supported, and not likely to perform well.")
maybeImageInfo <- inspect envOverride image
imageInfo@Inspect{..} <- case maybeImageInfo of
Just ii -> return ii
Nothing
| dockerAutoPull docker ->
do pullImage envOverride docker image
mii2 <- inspect envOverride image
case mii2 of
Just ii2 -> return ii2
Nothing -> throwM (InspectFailedException image)
| otherwise -> throwM (NotPulledException image)
sandboxDir <- projectDockerSandboxDir projectRoot
let ImageConfig {..} = iiConfig
imageEnvVars = map (break (== '=')) icEnv
platformVariant = show $ hashRepoName image
stackRoot = configStackRoot config
sandboxHomeDir = sandboxDir </> homeDirName
isTerm = not (dockerDetach docker) &&
isStdinTerminal &&
isStdoutTerminal &&
isStderrTerminal
keepStdinOpen = not (dockerDetach docker) &&
-- Workaround for https://github.com/docker/docker/issues/12319
-- This is fixed in Docker 1.9.1, but will leave the workaround
-- in place for now, for users who haven't upgraded yet.
(isTerm || (isNothing bamboo && isNothing jenkins))
hostBinDirPath <- parseAbsDir hostBinDir
newPathEnv <- augmentPath
[ hostBinDirPath
, sandboxHomeDir </> $(mkRelDir ".local/bin")]
(T.pack <$> lookupImageEnv "PATH" imageEnvVars)
(cmnd,args,envVars,extraMount) <- getCmdArgs docker envOverride imageInfo isRemoteDocker
pwd <- getCurrentDir
liftIO
(do updateDockerImageLastUsed config iiId (toFilePath projectRoot)
mapM_ ensureDir [sandboxHomeDir, stackRoot])
-- Since $HOME is now mounted in the same place in the container we can
-- just symlink $HOME/.ssh to the right place for the stack docker user
let sshDir = homeDir </> sshRelDir
sshDirExists <- doesDirExist sshDir
sshSandboxDirExists <-
liftIO
(Files.fileExist
(toFilePathNoTrailingSep (sandboxHomeDir </> sshRelDir)))
when (sshDirExists && not sshSandboxDirExists)
(liftIO
(Files.createSymbolicLink
(toFilePathNoTrailingSep sshDir)
(toFilePathNoTrailingSep (sandboxHomeDir </> sshRelDir))))
containerID <- (trim . decodeUtf8) <$> readDockerProcess
envOverride
(Just projectRoot)
(concat
[["create"
,"--net=host"
,"-e",inContainerEnvVar ++ "=1"
,"-e",stackRootEnvVar ++ "=" ++ toFilePathNoTrailingSep stackRoot
,"-e",platformVariantEnvVar ++ "=dk" ++ platformVariant
,"-e","HOME=" ++ toFilePathNoTrailingSep sandboxHomeDir
,"-e","PATH=" ++ T.unpack newPathEnv
,"-e","PWD=" ++ toFilePathNoTrailingSep pwd
,"-v",toFilePathNoTrailingSep homeDir ++ ":" ++ toFilePathNoTrailingSep homeDir
,"-v",toFilePathNoTrailingSep stackRoot ++ ":" ++ toFilePathNoTrailingSep stackRoot
,"-v",toFilePathNoTrailingSep projectRoot ++ ":" ++ toFilePathNoTrailingSep projectRoot
,"-v",toFilePathNoTrailingSep sandboxHomeDir ++ ":" ++ toFilePathNoTrailingSep sandboxHomeDir
,"-w",toFilePathNoTrailingSep pwd]
,case muserEnv of
Nothing -> []
Just userEnv -> ["-e","USER=" ++ userEnv]
,case msshAuthSock of
Nothing -> []
Just sshAuthSock ->
["-e","SSH_AUTH_SOCK=" ++ sshAuthSock
,"-v",sshAuthSock ++ ":" ++ sshAuthSock]
-- Disable the deprecated entrypoint in FP Complete-generated images
,["--entrypoint=/usr/bin/env"
| isJust (lookupImageEnv oldSandboxIdEnvVar imageEnvVars) &&
(icEntrypoint == ["/usr/local/sbin/docker-entrypoint"] ||
icEntrypoint == ["/root/entrypoint.sh"])]
,concatMap (\(k,v) -> ["-e", k ++ "=" ++ v]) envVars
,concatMap mountArg (extraMount ++ dockerMount docker)
,concatMap (\nv -> ["-e", nv]) (dockerEnv docker)
,case dockerContainerName docker of
Just name -> ["--name=" ++ name]
Nothing -> []
,["-t" | isTerm]
,["-i" | keepStdinOpen]
,dockerRunArgs docker
,[image]
,[cmnd]
,args])
before
#ifndef WINDOWS
run <- askRunInIO
oldHandlers <- forM [sigINT,sigABRT,sigHUP,sigPIPE,sigTERM,sigUSR1,sigUSR2] $ \sig -> do
let sigHandler = run $ do
readProcessNull Nothing envOverride "docker"
["kill","--signal=" ++ show sig,containerID]
when (sig `elem` [sigTERM,sigABRT]) $ do
-- Give the container 30 seconds to exit gracefully, then send a sigKILL to force it
liftIO $ threadDelay 30000000
readProcessNull Nothing envOverride "docker" ["kill",containerID]
oldHandler <- liftIO $ installHandler sig (Catch sigHandler) Nothing
return (sig, oldHandler)
#endif
let cmd = Cmd Nothing
"docker"
envOverride
(concat [["start"]
,["-a" | not (dockerDetach docker)]
,["-i" | keepStdinOpen]
,[containerID]])
e <- finally
(try $ callProcess'
(\cp -> cp { delegate_ctlc = False })
cmd)
(do unless (dockerPersist docker || dockerDetach docker) $
catch
(readProcessNull Nothing envOverride "docker" ["rm","-f",containerID])
(\(_::ReadProcessException) -> return ())
#ifndef WINDOWS
forM_ oldHandlers $ \(sig,oldHandler) ->
liftIO $ installHandler sig oldHandler Nothing
#endif
)
case e of
Left (ProcessExitedUnsuccessfully _ ec) -> liftIO (exitWith ec)
Right () -> do after
liftIO exitSuccess
where
-- This is using a hash of the Docker repository (without tag or digest) to ensure
-- binaries/libraries aren't shared between Docker and host (or incompatible Docker images)
hashRepoName :: String -> Hash.Digest Hash.MD5
hashRepoName = Hash.hash . BS.pack . takeWhile (\c -> c /= ':' && c /= '@')
lookupImageEnv name vars =
case lookup name vars of
Just ('=':val) -> Just val
_ -> Nothing
mountArg (Mount host container) = ["-v",host ++ ":" ++ container]
projectRoot = fromMaybeProjectRoot mprojectRoot
sshRelDir = $(mkRelDir ".ssh/")
-- | Clean-up old docker images and containers.
cleanup :: HasConfig env => CleanupOpts -> RIO env ()
cleanup opts =
do config <- view configL
let docker = configDocker config
envOverride <- getEnvOverride (configPlatform config)
checkDockerVersion envOverride docker
let runDocker = readDockerProcess envOverride Nothing
imagesOut <- runDocker ["images","--no-trunc","-f","dangling=false"]
danglingImagesOut <- runDocker ["images","--no-trunc","-f","dangling=true"]
runningContainersOut <- runDocker ["ps","-a","--no-trunc","-f","status=running"]
restartingContainersOut <- runDocker ["ps","-a","--no-trunc","-f","status=restarting"]
exitedContainersOut <- runDocker ["ps","-a","--no-trunc","-f","status=exited"]
pausedContainersOut <- runDocker ["ps","-a","--no-trunc","-f","status=paused"]
let imageRepos = parseImagesOut imagesOut
danglingImageHashes = Map.keys (parseImagesOut danglingImagesOut)
runningContainers = parseContainersOut runningContainersOut ++
parseContainersOut restartingContainersOut
stoppedContainers = parseContainersOut exitedContainersOut ++
parseContainersOut pausedContainersOut
inspectMap <- inspects envOverride
(Map.keys imageRepos ++
danglingImageHashes ++
map fst stoppedContainers ++
map fst runningContainers)
(imagesLastUsed,curTime) <-
liftIO ((,) <$> getDockerImagesLastUsed config
<*> getZonedTime)
let planWriter = buildPlan curTime
imagesLastUsed
imageRepos
danglingImageHashes
stoppedContainers
runningContainers
inspectMap
plan = toLazyByteString (execWriter planWriter)
plan' <- case dcAction opts of
CleanupInteractive ->
liftIO (editByteString (intercalate "-" [stackProgName
,dockerCmdName
,dockerCleanupCmdName
,"plan"])
plan)
CleanupImmediate -> return plan
CleanupDryRun -> do liftIO (LBS.hPut stdout plan)
return LBS.empty
mapM_ (performPlanLine envOverride)
(reverse (filter filterPlanLine (lines (LBS.unpack plan'))))
allImageHashesOut <- runDocker ["images","-aq","--no-trunc"]
liftIO (pruneDockerImagesLastUsed config (lines (decodeUtf8 allImageHashesOut)))
where
filterPlanLine line =
case line of
c:_ | isSpace c -> False
_ -> True
performPlanLine envOverride line =
case filter (not . null) (words (takeWhile (/= '#') line)) of
[] -> return ()
(c:_):t:v:_ ->
do args <- if | toUpper c == 'R' && t == imageStr ->
do logInfo (concatT ["Removing image: '",v,"'"])
return ["rmi",v]
| toUpper c == 'R' && t == containerStr ->
do logInfo (concatT ["Removing container: '",v,"'"])
return ["rm","-f",v]
| otherwise -> throwM (InvalidCleanupCommandException line)
e <- try (readDockerProcess envOverride Nothing args)
case e of
Left ex@ProcessFailed{} ->
logError (concatT ["Could not remove: '",v,"': ", show ex])
Left e' -> throwM e'
Right _ -> return ()
_ -> throwM (InvalidCleanupCommandException line)
parseImagesOut = Map.fromListWith (++) . map parseImageRepo . drop 1 . lines . decodeUtf8
where parseImageRepo :: String -> (String, [String])
parseImageRepo line =
case words line of
repo:tag:hash:_
| repo == "<none>" -> (hash,[])
| tag == "<none>" -> (hash,[repo])
| otherwise -> (hash,[repo ++ ":" ++ tag])
_ -> impureThrow (InvalidImagesOutputException line)
parseContainersOut = map parseContainer . drop 1 . lines . decodeUtf8
where parseContainer line =
case words line of
hash:image:rest | last:_ <- reverse rest -> (hash,(image,last))
_ -> impureThrow (InvalidPSOutputException line)
buildPlan curTime
imagesLastUsed
imageRepos
danglingImageHashes
stoppedContainers
runningContainers
inspectMap =
do case dcAction opts of
CleanupInteractive ->
do buildStrLn
(concat
["# STACK DOCKER CLEANUP PLAN"
,"\n#"
,"\n# When you leave the editor, the lines in this plan will be processed."
,"\n#"
,"\n# Lines that begin with 'R' denote an image or container that will be."
,"\n# removed. You may change the first character to/from 'R' to remove/keep"
,"\n# and image or container that would otherwise be kept/removed."
,"\n#"
,"\n# To cancel the cleanup, delete all lines in this file."
,"\n#"
,"\n# By default, the following images/containers will be removed:"
,"\n#"])
buildDefault dcRemoveKnownImagesLastUsedDaysAgo "Known images last used"
buildDefault dcRemoveUnknownImagesCreatedDaysAgo "Unknown images created"
buildDefault dcRemoveDanglingImagesCreatedDaysAgo "Dangling images created"
buildDefault dcRemoveStoppedContainersCreatedDaysAgo "Stopped containers created"
buildDefault dcRemoveRunningContainersCreatedDaysAgo "Running containers created"
buildStrLn
(concat
["#"
,"\n# The default plan can be adjusted using command-line arguments."
,"\n# Run '" ++ unwords [stackProgName, dockerCmdName, dockerCleanupCmdName] ++
" --help' for details."
,"\n#"])
_ -> buildStrLn
(unlines
["# Lines that begin with 'R' denote an image or container that will be."
,"# removed."])
buildSection "KNOWN IMAGES (pulled/used by stack)"
imagesLastUsed
buildKnownImage
buildSection "UNKNOWN IMAGES (not managed by stack)"
(sortCreated (Map.toList (foldl' (\m (h,_) -> Map.delete h m)
imageRepos
imagesLastUsed)))
buildUnknownImage
buildSection "DANGLING IMAGES (no named references and not depended on by other images)"
(sortCreated (map (,()) danglingImageHashes))
buildDanglingImage
buildSection "STOPPED CONTAINERS"
(sortCreated stoppedContainers)
(buildContainer (dcRemoveStoppedContainersCreatedDaysAgo opts))
buildSection "RUNNING CONTAINERS"
(sortCreated runningContainers)
(buildContainer (dcRemoveRunningContainersCreatedDaysAgo opts))
where
buildDefault accessor description =
case accessor opts of
Just days -> buildStrLn ("# - " ++ description ++ " at least " ++ showDays days ++ ".")
Nothing -> return ()
sortCreated =
sortWith (\(_,_,x) -> Down x) .
mapMaybe (\(h,r) ->
case Map.lookup h inspectMap of
Nothing -> Nothing
Just ii -> Just (h,r,iiCreated ii))
buildSection sectionHead items itemBuilder =
do let (anyWrote,b) = runWriter (forM items itemBuilder)
when (or anyWrote) $
do buildSectionHead sectionHead
tell b
buildKnownImage (imageHash,lastUsedProjects) =
case Map.lookup imageHash imageRepos of
Just repos@(_:_) ->
do case lastUsedProjects of
(l,_):_ -> forM_ repos (buildImageTime (dcRemoveKnownImagesLastUsedDaysAgo opts) l)
_ -> forM_ repos buildKeepImage
forM_ lastUsedProjects buildProject
buildInspect imageHash
return True
_ -> return False
buildUnknownImage (hash, repos, created) =
case repos of
[] -> return False
_ -> do forM_ repos (buildImageTime (dcRemoveUnknownImagesCreatedDaysAgo opts) created)
buildInspect hash
return True
buildDanglingImage (hash, (), created) =
do buildImageTime (dcRemoveDanglingImagesCreatedDaysAgo opts) created hash
buildInspect hash
return True
buildContainer removeAge (hash,(image,name),created) =
do let disp = name ++ " (image: " ++ image ++ ")"
buildTime containerStr removeAge created disp
buildInspect hash
return True
buildProject (lastUsedTime, projectPath) =
buildInfo ("Last used " ++
showDaysAgo lastUsedTime ++
" in " ++
projectPath)
buildInspect hash =
case Map.lookup hash inspectMap of
Just Inspect{iiCreated,iiVirtualSize} ->
buildInfo ("Created " ++
showDaysAgo iiCreated ++
maybe ""
(\s -> " (size: " ++
printf "%g" (fromIntegral s / 1024.0 / 1024.0 :: Float) ++
"M)")
iiVirtualSize)
Nothing -> return ()
showDays days =
case days of
0 -> "today"
1 -> "yesterday"
n -> show n ++ " days ago"
showDaysAgo oldTime = showDays (daysAgo oldTime)
daysAgo oldTime =
let ZonedTime (LocalTime today _) zone = curTime
LocalTime oldDay _ = utcToLocalTime zone oldTime
in diffDays today oldDay
buildImageTime = buildTime imageStr
buildTime t removeAge time disp =
case removeAge of
Just d | daysAgo time >= d -> buildStrLn ("R " ++ t ++ " " ++ disp)
_ -> buildKeep t disp
buildKeep t d = buildStrLn (" " ++ t ++ " " ++ d)
buildKeepImage = buildKeep imageStr
buildSectionHead s = buildStrLn ("\n#\n# " ++ s ++ "\n#\n")
buildInfo = buildStrLn . (" # " ++)
buildStrLn l = do buildStr l
tell (charUtf8 '\n')
buildStr = tell . stringUtf8
imageStr = "image"
containerStr = "container"
-- | Inspect Docker image or container.
inspect :: (MonadUnliftIO m,MonadLogger m)
=> EnvOverride -> String -> m (Maybe Inspect)
inspect envOverride image =
do results <- inspects envOverride [image]
case Map.toList results of
[] -> return Nothing
[(_,i)] -> return (Just i)
_ -> throwIO (InvalidInspectOutputException "expect a single result")
-- | Inspect multiple Docker images and/or containers.
inspects :: (MonadUnliftIO m, MonadLogger m)
=> EnvOverride -> [String] -> m (Map String Inspect)
inspects _ [] = return Map.empty
inspects envOverride images =
do maybeInspectOut <-
try (readDockerProcess envOverride Nothing ("inspect" : images))
case maybeInspectOut of
Right inspectOut ->
-- filtering with 'isAscii' to workaround @docker inspect@ output containing invalid UTF-8
case eitherDecode (LBS.pack (filter isAscii (decodeUtf8 inspectOut))) of
Left msg -> throwIO (InvalidInspectOutputException msg)
Right results -> return (Map.fromList (map (\r -> (iiId r,r)) results))
Left (ProcessFailed _ _ _ err)
| any (`LBS.isPrefixOf` err) missingImagePrefixes -> return Map.empty
Left e -> throwIO e
where missingImagePrefixes = ["Error: No such image", "Error: No such object:"]
-- | Pull latest version of configured Docker image from registry.
pull :: HasConfig env => RIO env ()
pull =
do config <- view configL
let docker = configDocker config
envOverride <- getEnvOverride (configPlatform config)
checkDockerVersion envOverride docker
pullImage envOverride docker (dockerImage docker)
-- | Pull Docker image from registry.
pullImage :: (MonadLogger m,MonadIO m,MonadThrow m)
=> EnvOverride -> DockerOpts -> String -> m ()
pullImage envOverride docker image =
do logInfo (concatT ["Pulling image from registry: '",image,"'"])
when (dockerRegistryLogin docker)
(do logInfo "You may need to log in."
callProcess $ Cmd
Nothing
"docker"
envOverride
(concat
[["login"]
,maybe [] (\n -> ["--username=" ++ n]) (dockerRegistryUsername docker)
,maybe [] (\p -> ["--password=" ++ p]) (dockerRegistryPassword docker)
,[takeWhile (/= '/') image]]))
-- We redirect the stdout of the process to stderr so that the output
-- of @docker pull@ will not interfere with the output of other
-- commands when using --auto-docker-pull. See issue #2733.
let stdoutToStderr cp = cp
{ std_out = UseHandle stderr
, std_err = UseHandle stderr
, std_in = CreatePipe
}
(Just hin, _, _, ph) <- createProcess' "pullImage" stdoutToStderr $
Cmd Nothing "docker" envOverride ["pull",image]
liftIO (hClose hin)
ec <- liftIO (waitForProcess ph)
case ec of
ExitSuccess -> return ()
ExitFailure _ -> throwIO (PullFailedException image)
-- | Check docker version (throws exception if incorrect)
checkDockerVersion
:: (MonadUnliftIO m, MonadLogger m)
=> EnvOverride -> DockerOpts -> m ()
checkDockerVersion envOverride docker =
do dockerExists <- doesExecutableExist envOverride "docker"
unless dockerExists (throwIO DockerNotInstalledException)
dockerVersionOut <- readDockerProcess envOverride Nothing ["--version"]
case words (decodeUtf8 dockerVersionOut) of
(_:_:v:_) ->
case parseVersionFromString (stripVersion v) of
Just v'
| v' < minimumDockerVersion ->
throwIO (DockerTooOldException minimumDockerVersion v')
| v' `elem` prohibitedDockerVersions ->
throwIO (DockerVersionProhibitedException prohibitedDockerVersions v')
| not (v' `withinRange` dockerRequireDockerVersion docker) ->
throwIO (BadDockerVersionException (dockerRequireDockerVersion docker) v')
| otherwise ->
return ()
_ -> throwIO InvalidVersionOutputException
_ -> throwIO InvalidVersionOutputException
where minimumDockerVersion = $(mkVersion "1.6.0")
prohibitedDockerVersions = []
stripVersion v = takeWhile (/= '-') (dropWhileEnd (not . isDigit) v)
-- | Remove the project's Docker sandbox.
reset :: (MonadIO m, MonadReader env m, HasConfig env)
=> Maybe (Path Abs Dir) -> Bool -> m ()
reset maybeProjectRoot keepHome = do
dockerSandboxDir <- projectDockerSandboxDir projectRoot
liftIO (removeDirectoryContents
dockerSandboxDir
[homeDirName | keepHome]
[])
where projectRoot = fromMaybeProjectRoot maybeProjectRoot
-- | The Docker container "entrypoint": special actions performed when first entering
-- a container, such as switching the UID/GID to the "outside-Docker" user's.
entrypoint :: (MonadUnliftIO m, MonadLogger m, MonadThrow m)
=> Config -> DockerEntrypoint -> m ()
entrypoint config@Config{..} DockerEntrypoint{..} =
modifyMVar_ entrypointMVar $ \alreadyRan -> do
-- Only run the entrypoint once
unless alreadyRan $ do
envOverride <- getEnvOverride configPlatform
homeDir <- liftIO $ parseAbsDir =<< getEnv "HOME"
-- Get the UserEntry for the 'stack' user in the image, if it exists
estackUserEntry0 <- liftIO $ tryJust (guard . isDoesNotExistError) $
User.getUserEntryForName stackUserName
-- Switch UID/GID if needed, and update user's home directory
case deUser of
Nothing -> return ()
Just (DockerUser 0 _ _ _) -> return ()
Just du -> updateOrCreateStackUser envOverride estackUserEntry0 homeDir du
case estackUserEntry0 of
Left _ -> return ()
Right ue -> do
-- If the 'stack' user exists in the image, copy any build plans and package indices from
-- its original home directory to the host's stack root, to avoid needing to download them
origStackHomeDir <- liftIO $ parseAbsDir (User.homeDirectory ue)
let origStackRoot = origStackHomeDir </> $(mkRelDir ("." ++ stackProgName))
buildPlanDirExists <- doesDirExist (buildPlanDir origStackRoot)
when buildPlanDirExists $ do
(_, buildPlans) <- listDir (buildPlanDir origStackRoot)
forM_ buildPlans $ \srcBuildPlan -> do
let destBuildPlan = buildPlanDir configStackRoot </> filename srcBuildPlan
exists <- doesFileExist destBuildPlan
unless exists $ do
ensureDir (parent destBuildPlan)
copyFile srcBuildPlan destBuildPlan
forM_ configPackageIndices $ \pkgIdx -> do
msrcIndex <- flip runReaderT (config{configStackRoot = origStackRoot}) $ do
srcIndex <- configPackageIndex (indexName pkgIdx)
exists <- doesFileExist srcIndex
return $ if exists
then Just srcIndex
else Nothing
case msrcIndex of
Nothing -> return ()
Just srcIndex -> do
flip runReaderT config $ do
destIndex <- configPackageIndex (indexName pkgIdx)
exists <- doesFileExist destIndex
unless exists $ do
ensureDir (parent destIndex)
copyFile srcIndex destIndex
return True
where
updateOrCreateStackUser envOverride estackUserEntry homeDir DockerUser{..} = do
case estackUserEntry of
Left _ -> do
-- If no 'stack' user in image, create one with correct UID/GID and home directory
readProcessNull Nothing envOverride "groupadd"
["-o"
,"--gid",show duGid
,stackUserName]
readProcessNull Nothing envOverride "useradd"
["-oN"
,"--uid",show duUid
,"--gid",show duGid
,"--home",toFilePathNoTrailingSep homeDir
,stackUserName]
Right _ -> do
-- If there is already a 'stack' user in the image, adjust its UID/GID and home directory
readProcessNull Nothing envOverride "usermod"
["-o"
,"--uid",show duUid
,"--home",toFilePathNoTrailingSep homeDir
,stackUserName]
readProcessNull Nothing envOverride "groupmod"
["-o"
,"--gid",show duGid
,stackUserName]
forM_ duGroups $ \gid -> do
readProcessNull Nothing envOverride "groupadd"
["-o"
,"--gid",show gid
,"group" ++ show gid]
-- 'setuid' to the wanted UID and GID
liftIO $ do
User.setGroupID duGid
#ifndef WINDOWS
PosixUser.setGroups duGroups
#endif
User.setUserID duUid
_ <- Files.setFileCreationMask duUmask
return ()
stackUserName = "stack"::String
-- | MVar used to ensure the Docker entrypoint is performed exactly once
entrypointMVar :: MVar Bool
{-# NOINLINE entrypointMVar #-}
entrypointMVar = unsafePerformIO (newMVar False)
-- | Remove the contents of a directory, without removing the directory itself.
-- This is used instead of 'FS.removeTree' to clear bind-mounted directories, since
-- removing the root of the bind-mount won't work.
removeDirectoryContents :: Path Abs Dir -- ^ Directory to remove contents of
-> [Path Rel Dir] -- ^ Top-level directory names to exclude from removal
-> [Path Rel File] -- ^ Top-level file names to exclude from removal
-> IO ()
removeDirectoryContents path excludeDirs excludeFiles =
do isRootDir <- doesDirExist path
when isRootDir
(do (lsd,lsf) <- listDir path
forM_ lsd
(\d -> unless (dirname d `elem` excludeDirs)
(removeDirRecur d))
forM_ lsf
(\f -> unless (filename f `elem` excludeFiles)
(removeFile f)))
-- | Produce a strict 'S.ByteString' from the stdout of a
-- process. Throws a 'ReadProcessException' exception if the
-- process fails. Logs process's stderr using @logError@.
readDockerProcess
:: (MonadUnliftIO m, MonadLogger m)
=> EnvOverride -> Maybe (Path Abs Dir) -> [String] -> m BS.ByteString
readDockerProcess envOverride mpwd = readProcessStdout mpwd envOverride "docker"
-- | Name of home directory within docker sandbox.
homeDirName :: Path Rel Dir
homeDirName = $(mkRelDir "_home/")
-- | Directory where 'stack' executable is bind-mounted in Docker container
hostBinDir :: FilePath
hostBinDir = "/opt/host/bin"
-- | Convenience function to decode ByteString to String.
decodeUtf8 :: BS.ByteString -> String
decodeUtf8 bs = T.unpack (T.decodeUtf8 bs)
-- | Convenience function constructing message for @log*@.
concatT :: [String] -> Text
concatT = T.pack . concat
-- | Fail with friendly error if project root not set.
fromMaybeProjectRoot :: Maybe (Path Abs Dir) -> Path Abs Dir
fromMaybeProjectRoot = fromMaybe (impureThrow CannotDetermineProjectRootException)
-- | Environment variable that contained the old sandbox ID.
-- | Use of this variable is deprecated, and only used to detect old images.
oldSandboxIdEnvVar :: String
oldSandboxIdEnvVar = "DOCKER_SANDBOX_ID"
-- | Options for 'cleanup'.
data CleanupOpts = CleanupOpts
{ dcAction :: !CleanupAction
, dcRemoveKnownImagesLastUsedDaysAgo :: !(Maybe Integer)
, dcRemoveUnknownImagesCreatedDaysAgo :: !(Maybe Integer)
, dcRemoveDanglingImagesCreatedDaysAgo :: !(Maybe Integer)
, dcRemoveStoppedContainersCreatedDaysAgo :: !(Maybe Integer)
, dcRemoveRunningContainersCreatedDaysAgo :: !(Maybe Integer) }
deriving (Show)
-- | Cleanup action.
data CleanupAction = CleanupInteractive
| CleanupImmediate
| CleanupDryRun
deriving (Show)
-- | Parsed result of @docker inspect@.
data Inspect = Inspect
{iiConfig :: ImageConfig
,iiCreated :: UTCTime
,iiId :: String
,iiVirtualSize :: Maybe Integer}
deriving (Show)
-- | Parse @docker inspect@ output.
instance FromJSON Inspect where
parseJSON v =
do o <- parseJSON v
Inspect <$> o .: "Config"
<*> o .: "Created"
<*> o .: "Id"
<*> o .:? "VirtualSize"
-- | Parsed @Config@ section of @docker inspect@ output.
data ImageConfig = ImageConfig
{icEnv :: [String]
,icEntrypoint :: [String]}
deriving (Show)
-- | Parse @Config@ section of @docker inspect@ output.
instance FromJSON ImageConfig where
parseJSON v =
do o <- parseJSON v
ImageConfig
<$> fmap join (o .:? "Env") .!= []
<*> fmap join (o .:? "Entrypoint") .!= []
-- | Function to get command and arguments to run in Docker container
type GetCmdArgs env
= DockerOpts
-> EnvOverride
-> Inspect
-> Bool
-> RIO env (FilePath,[String],[(String,String)],[Mount])
|
MichielDerhaeg/stack
|
src/Stack/Docker.hs
|
bsd-3-clause
| 42,290 | 0 | 30 | 14,129 | 9,337 | 4,721 | 4,616 | 836 | 22 |
module Hackage.Twitter.Bot.Internal
(
) where
|
KevinCotrone/hackage-twitter-bot
|
src/Hackage/Twitter/Bot/Internal.hs
|
bsd-3-clause
| 54 | 0 | 3 | 13 | 11 | 8 | 3 | 2 | 0 |
module Message (
className ,
intfName ,
mtdName ,
paramKind ,
static ,
public ,
protected ,
private ,
eventHandler) where
mkName::String->String->String
mkName caption name = caption ++ " " ++ name
className, intfName, paramKind, mtdName::String->String
className = mkName "класс"
intfName = mkName "интерфейс"
mtdName = mkName "метод"
paramKind "importing" = "Импорт"
paramKind "exporting" = "Экспорт"
paramKind "changing" = "Изменение"
paramKind "returning" = "Возврат"
paramKind "tables" = "Таблица"
paramKind _ = ""
static, public, protected, private::String
static = "статический"
public = "общий"
protected = "защищённый"
private = "личный"
eventHandler::String->String->String
eventHandler evt cls = "обработчик события " ++ evt ++ " класса " ++ cls
|
Odomontois/abapdocu
|
src/Message.hs
|
bsd-3-clause
| 942 | 0 | 7 | 179 | 217 | 124 | 93 | 29 | 1 |
{-# LANGUAGE ExistentialQuantification, Rank2Types #-}
module Util(
forceList,
gzip, universeParentBi,
exitMessage, exitMessageImpure,
getContentsUTF8, wildcardMatch
) where
import System.Exit
import System.IO
import System.IO.Unsafe
import Unsafe.Coerce
import Data.Data
import Data.Generics.Uniplate.DataOnly
import System.FilePattern
import Data.List.Extra
---------------------------------------------------------------------
-- CONTROL.DEEPSEQ
forceList :: [a] -> [a]
forceList xs = length xs `seq` xs
---------------------------------------------------------------------
-- SYSTEM.IO
exitMessage :: String -> IO a
exitMessage msg = do
hPutStrLn stderr msg
exitWith $ ExitFailure 1
exitMessageImpure :: String -> a
exitMessageImpure = unsafePerformIO . exitMessage
getContentsUTF8 :: IO String
getContentsUTF8 = do
hSetEncoding stdin utf8
getContents
---------------------------------------------------------------------
-- DATA.GENERICS
data Box = forall a . Data a => Box a
gzip :: Data a => (forall b . Data b => b -> b -> c) -> a -> a -> Maybe [c]
gzip f x y | toConstr x /= toConstr y = Nothing
| otherwise = Just $ zipWith op (gmapQ Box x) (gmapQ Box y)
-- unsafeCoerce is safe because gmapQ on the same constr gives the same fields
-- in the same order
where op (Box x) (Box y) = f x (unsafeCoerce y)
---------------------------------------------------------------------
-- DATA.GENERICS.UNIPLATE.OPERATIONS
universeParent :: Data a => a -> [(Maybe a, a)]
universeParent x = (Nothing,x) : f x
where
f :: Data a => a -> [(Maybe a, a)]
f x = concat [(Just x, y) : f y | y <- children x]
universeParentBi :: (Data a, Data b) => a -> [(Maybe b, b)]
universeParentBi = concatMap universeParent . childrenBi
---------------------------------------------------------------------
-- SYSTEM.FILEPATTERN
-- | Returns true if the pattern matches the string. For example:
--
-- >>> let isSpec = wildcardMatch "**.*Spec"
-- >>> isSpec "Example"
-- False
-- >>> isSpec "ExampleSpec"
-- True
-- >>> isSpec "Namespaced.ExampleSpec"
-- True
-- >>> isSpec "Deeply.Nested.ExampleSpec"
-- True
--
-- See this issue for details: <https://github.com/ndmitchell/hlint/issues/402>.
wildcardMatch :: FilePattern -> String -> Bool
wildcardMatch p m = let f = replace "." "/" in f p ?== f m
|
ndmitchell/hlint
|
src/Util.hs
|
bsd-3-clause
| 2,387 | 0 | 11 | 433 | 599 | 323 | 276 | 39 | 1 |
-- |
-- Functions that are marked with the suffix 'R' retry automatically in case of
-- failure up to a certain number of times. However, they will return after
-- about 20 seconds in the worst case. Exceptions: 'letOrdersExecuteR' and
-- 'submitOrder'.
{-# LANGUAGE OverloadedStrings #-}
module Network.MtGoxAPI.HttpAPI
( getOrderCountR
, submitBtcBuyOrder
, submitBtcSellOrder
, getOrderResultR
, getWalletHistoryR
, getPrivateInfoR
, getBitcoinDepositAddressR
, withdrawBitcoins
, letOrdersExecuteR
, submitOrder
, OrderStats(..)
) where
import Control.Error
import Control.Monad
import Control.Monad.IO.Class
import Control.Watchdog
import Data.Aeson
import Data.Digest.Pure.SHA
import Network.Curl
import Network.HTTP.Base (urlEncodeVars)
import qualified Control.Arrow as A
import qualified Data.Attoparsec as AP
import qualified Data.ByteString as B
import qualified Data.ByteString.Base64 as B64
import qualified Data.ByteString.Char8 as B8
import qualified Data.ByteString.Lazy as BL
import qualified Data.ByteString.Lazy.Char8 as BL8
import qualified Data.HashMap.Strict as H
import qualified Data.Text as T
import Network.MtGoxAPI.Credentials
import Network.MtGoxAPI.CurlWrapper
import Network.MtGoxAPI.StreamAuthCommands
import Network.MtGoxAPI.Types
data HttpApiResult = HttpApiSuccess Value
| HttpApiFailure
deriving (Show)
data OrderStats = OrderStats { usdEarned :: Integer
, usdSpent :: Integer
, usdFee :: Integer
}
deriving (Show)
instance FromJSON HttpApiResult where
parseJSON (Object o) = case H.lookup "result" o of
Just "success" -> case H.lookup "return" o of
Just v -> return $ HttpApiSuccess v
Nothing -> return HttpApiFailure
Just _ -> return HttpApiFailure
Nothing -> return HttpApiFailure
parseJSON _ = return HttpApiFailure
mtGoxApi :: String
mtGoxApi = "https://mtgox.com/api/"
watchdogSettings :: WatchdogAction ()
watchdogSettings = do
setInitialDelay 250000 -- 250 ms
setMaximumRetries 6
-- will fail after:
-- 0.25 + 0.5 + 1 + 2 + 4 + 8 seconds = 15.75 seconds
parseReply :: FromJSON a => String -> Value -> a
parseReply method v =
case fromJSON v of
Success r -> r
Error _ -> error ("Unexpected result when calling method " ++ method)
robustApiCall :: Maybe WatchdogLogger-> IO (Either String b) -> IO (Either String b)
robustApiCall mLogger f = watchdog $ do
watchdogSettings
case mLogger of
Just logger -> setLoggingAction logger
Nothing -> return ()
watchImpatiently f
callApi :: CurlHandle -> MtGoxCredentials-> URLString-> [(String, String)]-> IO (Either String HttpApiResult)
callApi curlHandle mtGoxCred uri parameters = do
nonce <- getNonce
let parameters' = ("nonce", T.unpack nonce) : parameters
(headers, body) = compileRequest mtGoxCred parameters'
(status, payload) <- performCurlRequest curlHandle uri
[ CurlHttpHeaders headers
, CurlPostFields [body]
]
return $ case status of
CurlOK -> case AP.parseOnly json (B8.pack payload) of
Left err' -> Left $ "JSON parse error: " ++ err'
Right jsonV -> case fromJSON jsonV of
(Error err'') -> Left $ "API parse error: " ++ err''
(Success v) -> Right v :: Either String HttpApiResult
errMsg -> Left $ "Curl error: " ++ show errMsg
compileRequest :: MtGoxCredentials -> [(String, String)] -> ([String], String)
compileRequest credentials parameters =
let authSecretDecoded = mgcAuthSecretDecoded credentials
authKey = mgcAuthKey credentials
body = urlEncodeVars parameters
hmac = hmacSha512 (BL.fromChunks [authSecretDecoded]) (BL8.pack body)
hmacFormatted = B64.encode . foldl1 B.append
. BL.toChunks . bytestringDigest $ hmac
headers = [ "Rest-Key: " ++ B8.unpack authKey
, "Rest-Sign: " ++ B8.unpack hmacFormatted
]
in (headers, body)
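-- A rough illustration of compileRequest (values made up): for parameters
-- [("nonce","42")] the body is "nonce=42" and the headers have the shape
-- ["Rest-Key: <API key>", "Rest-Sign: <base64 of the HMAC-SHA512 over the body>"].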
getOrderCountR :: Maybe WatchdogLogger -> CurlHandle -> MtGoxCredentials -> IO (Either String OpenOrderCount)
getOrderCountR mLogger curlHandle mtGoxCreds = do
let uri = mtGoxApi ++ "1/generic/private/orders"
v <- robustApiCall mLogger $ callApi curlHandle mtGoxCreds uri []
return $ case v of
Left errMsg -> Left errMsg
Right HttpApiFailure -> Left "HttpApiFailure when doing getOrderCountR"
Right (HttpApiSuccess v') ->
Right (parseReply "getOrderCountR" v') :: Either String OpenOrderCount
submitBtcBuyOrder :: CurlHandle -> MtGoxCredentials -> Integer -> IO (Either String Order)
submitBtcBuyOrder curlHandle mtGoxCreds amount = do
let uri = mtGoxApi ++ "1/BTCEUR/private/order/add"
parameters = [ ("type", "bid")
, ("amount_int", show amount)
, ("prefer_fiat_fee", "1")
]
v <- callApi curlHandle mtGoxCreds uri parameters
return $ case v of
Left errMsg -> Left errMsg
Right HttpApiFailure -> Left "HttpApiFailure when doing submitBtcBuyOrder"
Right (HttpApiSuccess v') ->
Right (parseReply "submitBtcBuyOrder" v') :: Either String Order
submitBtcSellOrder :: CurlHandle -> MtGoxCredentials -> Integer -> IO (Either String Order)
submitBtcSellOrder curlHandle mtGoxCreds amount = do
let uri = mtGoxApi ++ "1/BTCEUR/private/order/add"
parameters = [ ("type", "ask")
, ("amount_int", show amount)
, ("prefer_fiat_fee", "1")
]
v <- callApi curlHandle mtGoxCreds uri parameters
return $ case v of
Left errMsg -> Left errMsg
Right HttpApiFailure -> Left "HttpApiFailure when doing submitBtcSellOrder"
Right (HttpApiSuccess v') ->
Right (parseReply "submitBtcSellOrder" v') :: Either String Order
getOrderResultR :: Maybe WatchdogLogger -> CurlHandle -> MtGoxCredentials -> OrderType -> OrderID -> IO (Either String OrderResult)
getOrderResultR mLogger curlHandle mtGoxCreds orderType orderID = do
let uri = mtGoxApi ++ "1/generic/private/order/result"
parameters = [ ("type", case orderType of
OrderTypeBuyBTC -> "bid"
OrderTypeSellBTC -> "ask")
, ("order", T.unpack (oid orderID))
]
v <- robustApiCall mLogger $ callApi curlHandle mtGoxCreds uri parameters
return $ case v of
Left errMsg -> Left errMsg
Right HttpApiFailure -> Left "HttpApiFailure when doing getOrderResultR"
Right (HttpApiSuccess v') ->
Right (parseReply "getOrderResultR" v') :: Either String OrderResult
getWalletHistoryR :: Maybe WatchdogLogger -> CurlHandle -> MtGoxCredentials -> TradeID -> IO (Either String WalletHistory)
getWalletHistoryR mLogger curlHandle mtGoxCreds tradeID = do
let uri = mtGoxApi ++ "1/generic/private/wallet/history"
parameters = [ ("currency", "EUR")
, ("trade_id", T.unpack (tid tradeID))
]
v <- robustApiCall mLogger $ callApi curlHandle mtGoxCreds uri parameters
return $ case v of
Left errMsg -> Left errMsg
Right HttpApiFailure -> Left "HttpApiFailure when doing getWalletHistoryR"
Right (HttpApiSuccess v') ->
Right (parseReply "getWalletHistoryR" v') :: Either String WalletHistory
getPrivateInfoR :: Maybe WatchdogLogger -> CurlHandle -> MtGoxCredentials -> IO (Either String PrivateInfo)
getPrivateInfoR mLogger curlHandle mtGoxCreds = do
let uri = mtGoxApi ++ "1/generic/private/info"
v <- robustApiCall mLogger $ callApi curlHandle mtGoxCreds uri []
return $ case v of
Left errMsg -> Left errMsg
Right HttpApiFailure -> Left "HttpApiFailure when doing getPrivateInfoR"
Right (HttpApiSuccess v') ->
Right (parseReply "getPrivateInfoR" v') :: Either String PrivateInfo
getBitcoinDepositAddressR :: Maybe WatchdogLogger -> CurlHandle -> MtGoxCredentials -> IO (Either String BitcoinDepositAddress)
getBitcoinDepositAddressR mLogger curlHandle mtGoxCreds = do
let uri = mtGoxApi ++ "1/generic/bitcoin/address"
v <- robustApiCall mLogger $ callApi curlHandle mtGoxCreds uri []
return $ case v of
Left errMsg -> Left errMsg
Right HttpApiFailure -> Left "HttpApiFailure when doing getBitcoinDepositAddressR"
Right (HttpApiSuccess v') ->
Right (parseReply "getBitcoinDepositAddressR" v') :: Either String BitcoinDepositAddress
withdrawBitcoins :: CurlHandle -> MtGoxCredentials -> BitcoinAddress -> Integer -> IO (Either String WithdrawResult)
withdrawBitcoins curlHandle mtGoxCreds (BitcoinAddress addr) amount = do
let uri = mtGoxApi ++ "1/generic/bitcoin/send_simple"
parameters = [ ("address", T.unpack addr)
, ("amount_int", show amount)
]
v <- callApi curlHandle mtGoxCreds uri parameters
return $ case v of
Left errMsg -> Left errMsg
Right HttpApiFailure -> Left "HttpApiFailure when doing withdrawBitcoins"
Right (HttpApiSuccess v') ->
Right (parseReply "withdrawBitcoins" v') :: Either String WithdrawResult
-- | Will not return until all orders have been executed. It will give up after
-- about 3 minutes if there are persistent errors or still-open orders.
letOrdersExecuteR :: Maybe WatchdogLogger -> CurlHandle -> MtGoxCredentials -> IO (Either String ())
letOrdersExecuteR mLogger curlHandle mtGoxCreds =
watchdog $ do
watchdogSettings
setLoggingAction silentLogger {- no logging -}
watchImpatiently task
where
task = do
orderCount <- getOrderCountR mLogger curlHandle mtGoxCreds
return $ case orderCount of
Left errMsg -> Left errMsg
Right (OpenOrderCount count) ->
if count > 0
then Left "still outstanding orders"
else Right ()
processWalletHistories :: [WalletHistory] -> OrderStats
processWalletHistories histories =
let entries = concatMap whEntries histories
amounts = map (weType A.&&& weAmount) entries
usdEarnedL = filter ((USDEarned ==) . fst) amounts
usdSpentL = filter ((USDSpent ==) . fst) amounts
usdFeeL = filter ((USDFee ==) . fst) amounts
in OrderStats { usdEarned = sum (map snd usdEarnedL)
, usdSpent = sum (map snd usdSpentL)
, usdFee = sum (map snd usdFeeL)
}
-- | Submit an order and return 'OrderStats'. In case of some non-critical
-- errors, things are re-tried automatically, but if API errors happen or network
-- errors occur during critical phases (like placing the order), a 'Left' with
-- the error is returned. Should not block longer than about 3 minutes.
submitOrder :: Maybe WatchdogLogger -> CurlHandle -> MtGoxCredentials -> OrderType -> Integer -> IO (Either String OrderStats)
submitOrder mLogger curlHandle mtGoxCreds orderType amount = runEitherT $ do
-- step 1: make sure network connection is present
-- and no orders are pending
EitherT $ letOrdersExecuteR mLogger curlHandle mtGoxCreds
-- step 2: submit order
order <- EitherT $ case orderType of
OrderTypeBuyBTC ->
submitBtcBuyOrder curlHandle mtGoxCreds amount
OrderTypeSellBTC ->
submitBtcSellOrder curlHandle mtGoxCreds amount
-- step 3: wait for order to complete
r <- liftIO $ letOrdersExecuteR mLogger curlHandle mtGoxCreds
case r of
Left errMsg -> left $ "Warning: After submitting order the call"
++ " to letOrdersExecuteR failed ("
++ errMsg ++ ")"
Right _ -> return ()
-- step 4: get trade ids
let orderID = oOrderID order
orderResult <- EitherT $ getOrderResultR mLogger curlHandle mtGoxCreds
orderType orderID
let tradeIDs = orTradeIDs orderResult
-- step 5: collect wallet entries for all trade ids
histories <- forM tradeIDs $ \tradeID -> EitherT (getWalletHistory tradeID)
return $ processWalletHistories histories
where
getWalletHistory = getWalletHistoryR mLogger curlHandle mtGoxCreds
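-- A hypothetical usage sketch (not part of the original module); 'curl' and
-- 'creds' are assumed to come from the CurlWrapper and Credentials modules
-- imported above, and the amount is the integer 'amount_int' the API expects:
--
-- > result <- submitOrder Nothing curl creds OrderTypeBuyBTC 100000000
-- > case result of
-- >   Left err    -> putStrLn ("order failed: " ++ err)
-- >   Right stats -> print (usdEarned stats, usdSpent stats, usdFee stats)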
|
javgh/mtgoxapi
|
Network/MtGoxAPI/HttpAPI.hs
|
bsd-3-clause
| 12,678 | 0 | 18 | 3,409 | 2,973 | 1,497 | 1,476 | 224 | 4 |
module MixFix.Lexer
( Token(..)
, LexicalCategory (..)
, TokenPos
, tokenize
, tokenizer
, isTkSymbol
, isTkName
, isTkOpen
, isTkClose
, isTkNumber
) where
import Text.ParserCombinators.Parsec hiding (token, tokens)
import Control.Applicative ((<*), (*>), (<$>), (<*>), (<$))
import qualified Text.Parsec.Prim as N
import Text.Parsec.Pos
import Control.Monad.Identity
import Data.Char
-- | Lexical Category
data LexicalCategory = LEFT_PAREN
| RIGHT_PAREN
| OPEN
| CLOSE
| SYMBOL
| NAME
| NUMBER
| EOF
deriving (Show, Eq)
-- | The token data
data Token = Token { lexCategory :: LexicalCategory, lexValue :: String }
deriving (Show,Eq)
-- | Token with position
type TokenPos = (Token, SourcePos)
-- | True if the token is in the symbol lexical category
isTkSymbol :: Token -> Bool
isTkSymbol tk = lexCategory tk == SYMBOL
isTkOpen :: Token -> Bool
isTkOpen tk = lexCategory tk == OPEN
isTkClose :: Token -> Bool
isTkClose tk = lexCategory tk == CLOSE
isTkName :: Token -> Bool
isTkName tk = lexCategory tk == NAME
isTkNumber :: Token -> Bool
isTkNumber tk = lexCategory tk == NUMBER
isCategory :: GeneralCategory -> Char -> Bool
isCategory cat c = generalCategory c == cat
open :: Parser Token
open = Token OPEN . return <$> satisfy (isCategory OpenPunctuation)
close :: Parser Token
close = Token CLOSE . return <$> satisfy (isCategory ClosePunctuation)
symbol :: Parser Token
symbol = Token SYMBOL <$>
many1 (satisfy (\c -> isSymbol c || isCategory OtherPunctuation c
|| isCategory DashPunctuation c))
number :: Parser Token
number = Token NUMBER <$> ((++) <$> many1 digit <*>
option "" ((:) <$> char '.' <*> many1 digit))
name :: Parser Token
name = Token NAME <$> ((:) <$> letter <*> many alphaNum)
reserved :: Char -> LexicalCategory -> Parser Token
reserved s tk = Token tk . return <$> char s
punctuator = [
('(',LEFT_PAREN)
,(')',RIGHT_PAREN)
]
end :: Parser Token
end = Token EOF "" <$ eof
token :: Parser Token
token = choice $ [ reserved c tk | (c,tk) <- punctuator ]
++[name,number,open,close,symbol]
parsePos :: Parser Token -> Parser TokenPos
parsePos p = flip (,) <$> getPosition <*> p
tokens :: Parser [TokenPos]
tokens = spaces *> many (parsePos token <* spaces)
tokenize :: SourceName -> String -> Either ParseError [TokenPos]
tokenize = runParser (do { xs <- tokens ; x <- parsePos end; return (xs++[x]) }) ()
tokenizer :: SourceName -> String -> [Either ParseError TokenPos]
tokenizer src str = loop initialState
where runParser = runPT' src (spaces *> parsePos (token <|> end))
initialState = (State str (initialPos src) ())
loop st = case runIdentity $ runParser st of
Right (tkp@(Token EOF _,_),_) -> [Right tkp]
Right (x,st') -> Right x:loop st'
Left err -> [Left err]
-- runPT' :: (Stream s m t)
-- => SourceName -> ParsecT s u m a -> u -> s -> m (Either ParseError (a,State s u))
runPT' src p st0
= do res <- N.runParsecT p st0
r <- parserReply res
case r of
N.Ok x st _ -> return (Right (x,st))
N.Error err -> return (Left err)
where
parserReply res
= case res of
N.Consumed r -> r
N.Empty r -> r
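-- A rough usage sketch (not part of the original module); the token list shown
-- is what the grammar above suggests, with source positions elided:
--
-- > tokenize "<test>" "1 + 2"
-- > -- Right [(Token NUMBER "1",_),(Token SYMBOL "+",_),(Token NUMBER "2",_),(Token EOF "",_)]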
|
pachopepe/mfxparser
|
src/MixFix/Lexer.hs
|
bsd-3-clause
| 3,608 | 0 | 14 | 1,106 | 1,173 | 627 | 546 | 88 | 3 |
module BowlingGame.KataSpec (spec) where
import Test.Hspec
import BowlingGame.Kata (score, startGame, roll, Game)
spec :: Spec
spec =
describe "Bowling Game" $ do
it "processes gutter game" $
score (rollMany 20 0 startGame) `shouldBe` 0
it "processes all ones" $
score (rollMany 20 1 startGame) `shouldBe` 20
it "processes one spare" $
score (rollSpare $ roll 3 $ rollMany 17 0 startGame) `shouldBe` 16
it "processes one strike" $
score (rollStrike $ roll 4 $ roll 3 $ rollMany 16 0 startGame) `shouldBe` 24
it "processes perfect game" $
score (rollMany 12 10 startGame) `shouldBe` 300
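-- A quick arithmetic note on the expected scores above (standard ten-pin
-- scoring assumed): the spare frame is worth 10 + 3 (the next roll) and that 3
-- also counts in its own frame, giving 13 + 3 = 16; the strike frame is worth
-- 10 + 3 + 4 and those two rolls also count in their own frame, 17 + 7 = 24.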
rollMany :: Int -> Int -> Game -> Game
rollMany times pin game
| times == 0 = game
| otherwise = rollMany (times - 1) pin (roll pin game)
rollSpare :: Game -> Game
rollSpare = roll 4 . roll 6
rollStrike :: Game -> Game
rollStrike = roll 10
|
Alex-Diez/haskell-tdd-kata
|
BowlingGameKata/BowlingGameDay07/test/BowlingGame/KataSpec.hs
|
bsd-3-clause
| 945 | 0 | 15 | 265 | 337 | 169 | 168 | 24 | 1 |
{-# LANGUAGE QuasiQuotes #-}
{-# LANGUAGE RankNTypes #-}
module Options
( CLIOptions (..)
, getOptions
, PrintMode(..)
, UOM(..)
) where
import Universum
import qualified NeatInterpolation as N
import Options.Applicative (Parser, eitherReader, execParser, flag,
flag', footerDoc, fullDesc, header, help, helper, info,
long, metavar, option, progDesc, short, strOption, switch)
import Text.PrettyPrint.ANSI.Leijen (Doc)
import qualified Pos.Client.CLI as CLI
data PrintMode =
Human
-- ^ Render using `Buildable` instances.
| AsciiTable
-- ^ Render the data as an ASCII Table.
| CSV
-- ^ Render the data in CSV format.
-- Unit of measure in use
data UOM = Bytes
| KB
| MB
| GB
| Adaptive
data CLIOptions = CLIOptions
{ dbPath :: !FilePath
-- ^ Path to the DB to analyse.
, uom :: UOM
, printMode :: PrintMode
, incremental :: !Bool
      -- ^ Whether or not to render the report
      -- incrementally (i.e. one row at a time).
, commonArgs :: !CLI.CommonArgs
}
optionsParser :: Parser CLIOptions
optionsParser = CLIOptions <$> parseDbPath
<*> parseUOM
<*> (fromMaybe AsciiTable <$> parsePrintMode)
<*> parseIncremental
<*> CLI.commonArgsParser
parseDbPath :: Parser FilePath
parseDbPath = strOption (long "db" <> metavar "FILEPATH"
<> help "Location of the database where the blockchain is stored."
)
parsePrintMode :: Parser (Maybe PrintMode)
parsePrintMode = optional (option (eitherReader readPrintModeE)
(long "print-mode" <> metavar "[human|csv|table]"
                             <> help "Select the desired rendering mode: one of 'human', 'csv' or 'table'."
)
)
where
readPrintModeE :: String -> Either String PrintMode
readPrintModeE "human" = Right Human
readPrintModeE "table" = Right AsciiTable
readPrintModeE "csv" = Right CSV
readPrintModeE _ = Right AsciiTable -- A sensible default, for now.
parseUOM :: Parser UOM
parseUOM = (parseKB <|> parseMB <|> parseGB <|> parseAdaptive)
-- If the parser above fails, default to bytes (even if not passed from the CLI).
<|> parseBytes
parseBytes :: Parser UOM
parseBytes = flag Bytes Bytes (short 'b' <> help "Display block counts in bytes (B).")
parseKB :: Parser UOM
parseKB = flag' KB (short 'k' <> help "Display block counts in kilobytes (KB).")
parseMB :: Parser UOM
parseMB = flag' MB (short 'm' <> help "Display block counts in megabytes (MB).")
parseGB :: Parser UOM
parseGB = flag' GB (short 'g' <> help "Display block counts in gigabytes (GB).")
parseAdaptive :: Parser UOM
parseAdaptive = flag' Adaptive (short 'a' <> help "Display block counts using an adaptive multiplier.")
parseIncremental :: Parser Bool
parseIncremental = switch (short 'i' <> long "incremental" <> help incrementalHelp)
getOptions :: IO CLIOptions
getOptions = execParser programInfo
where
programInfo = info (helper <*> optionsParser) $
fullDesc <> progDesc "Analyze a blockchain and spit out useful metrics."
<> header "Cardano SL blockchain generator"
<> footerDoc usageExample
usageExample :: Maybe Doc
usageExample = (Just . fromString @Doc . toString @Text) [N.text|
Command example:
cardano-blockchain-analyser --db /path/to/existing/db
|]
incrementalHelp :: String
incrementalHelp = toString [N.text| Run in incremental mode. In this mode, table output will
    be disabled and rendered as a .csv, as it is not possible to generate nice-looking tables
    whilst reading the blockchain one block at a time. You almost always want to use this
    mode for huge blockchains, as it's much more memory efficient.
|]
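-- A hypothetical invocation combining the flags defined above (the binary name
-- is taken from the usage example; the common Cardano arguments may require
-- additional options not shown here):
--
-- > cardano-blockchain-analyser --db /path/to/existing/db --print-mode csv -m --incremental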
|
input-output-hk/pos-haskell-prototype
|
tools/src/blockchain-analyser/Options.hs
|
mit
| 4,065 | 0 | 13 | 1,173 | 750 | 408 | 342 | -1 | -1 |
--
--
--
-----------------
-- Exercise 11.2.
-----------------
--
--
--
module E'11''2 where
import Prelude hiding ( id )
id :: a -> a
id x = x
f :: Int -> Bool -- Example "f":
f 0 = False
f 1 = True
f _ = undefined
-- Behaviour and type instance of "(id . f)" :
------------------------------------------------
--
-- "(id . f)" means first apply "f", then apply "id". The application
-- of "f" results in a ":: Bool". Therefore the instance of the most general
-- type of "id" has to be "Bool -> Bool". "(id . f)" returns a function and its
-- effect is the same as if we would just use "f".
--
-- The most important fact here is: function application binds more tightly
-- than function composition.
-- GHCi> (id . f) 0
-- False
-- Behaviour and type instance of "(f . id)" :
------------------------------------------------
--
-- "(f . id)" means first apply "id", then apply "f". Because "f"
-- needs an argument of type ":: Int", the expression is only correctly typed,
-- if the inner function results in an integer. Therefore the instance
-- of the most general type of "id" has to be "Int -> Int". (f . id) returns
-- a function and its effect is the same as if we would just use "f".
--
-- The most important fact here is: function application binds more tightly
-- than function composition.
-- GHCi> (f . id) 0
-- False
-- Behaviour and type instance of "id f" :
--------------------------------------------
--
-- It is just a function application.
--
-- The identity of a function is the function itself.
-- Not just in mainstream mathematics / year 2014,
-- in Haskell too:
--
-- Proposition: ( id f ) = f
--
--
-- Proof: ( id f )
-- | id
-- = ( f )
-- = f
--
--
-- The function "id" is applied to the function "f". The result is the function "f".
-- "id" is used with the type instance "(Int -> Bool) -> (Int -> Bool)".
-- GHCi> (id f) 0
-- False
-- What type does "f" have if "f id" is properly typed?
-----------------------------------------------------------
--
-- The question for us is, what most general type does "f" need
-- so that "f id" is valid too and doesn't result in a type error.
--
-- (I think the answers will be based on the assumptions a reader
-- makes at this time. I don't know the type inference algorithm yet.
-- At this moment I'm limited to using basic rules, like the binding power
-- of function application in contrast to function composition and
-- an investigation of what types the functions have that are in-
-- volved in some expression.)
--
-- "f" is applied to "id". That means "f" is a higher order
-- function and expects a function argument. But this is not all.
-- My answer to this question is: it depends. The type of "f"
-- does not depend only on the argument it is applied to; it's also important
-- how "f" uses its argument(s). This is why we have to examine the
-- definition of "f" as well and look out for argument bindings if we want
-- to infer the type correctly. We have several possibilities:
--
--
-- Case 1: "f" doesn't use its argument (the id function isn't applied to anything).
--
-- The type of "f" is "a -> b".
--
--
-- Case 2: "f" uses its argument (the id function is applied to something).
--
-- The type of "f" is "(a -> b) -> b".
--
--
-- Case 3: "f" uses its argument and the argument type is restricted.
--
-- The type of "f" is "(a -> a) -> b".
--
--
-- Case 4: "f" uses its argument and the argument and the result types are restricted.
--
-- The type of "f" is "(a -> a) -> a".
--
--
-- Case 1 and 2 are the most/more general ones.
--
--
-- Question: Did I miss cases? My mind says that partial application of "f"
-- would extend my cases infinitely, but luckily partial application wasn't
-- introduced at this time in the book. Anything else?
-- GHCi experiments:
-- GHCi> f id
--
-- <interactive>:20:3:
-- Couldn't match expected type `Int' with actual type `a0 -> a0'
-- Probable cause: `id' is applied to too few arguments
-- In the first argument of `f', namely `id'
-- In the expression: f id
{- GHCi>
let f :: (a -> a) -> IO () ; f _ = print "f"
f id
-}
-- "f"
{- GHCi>
let f a = undefined
let t = f id
:t f
-}
-- f :: t -> t1
{- GHCi>
let f a = a undefined
let t = f id
:t f
-}
-- f :: (t1 -> t) -> t
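{- GHCi (an added sketch for the restricted cases 3 and 4 above; not part of
   the original exercise):
     let f g = g (0 :: Int)
     :t f
-}
-- f :: (Int -> t) -> t
-- (so "f id" type-checks here and has type Int)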
-- Question: I'm curious: what is "(id . 1)"? Or in general: "(f . (l :: 'Const'))".
-- I typed it into GHCi (let d = id . 1) which told me that it is a valid
-- definition with an inferred type "d :: Num (a -> c) => a -> c". Why?
-- Could this definition make sense in any situations? Is it even applicable?
|
pascal-knodel/haskell-craft
|
_/links/E'11''2.hs
|
mit
| 4,714 | 0 | 5 | 1,115 | 186 | 158 | 28 | 8 | 1 |
type Reader a x = a -> x
|
hmemcpy/milewski-ctfp-pdf
|
src/content/2.5/code/haskell/snippet01.hs
|
gpl-3.0
| 24 | 0 | 5 | 7 | 13 | 8 | 5 | 1 | 0 |
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-matches #-}
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- |
-- Module : Network.AWS.RDS.DescribeDBClusterParameters
-- Copyright : (c) 2013-2015 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Returns the detailed parameter list for a particular DB cluster
-- parameter group.
--
-- For more information on Amazon Aurora, see
-- <http://docs.aws.amazon.com/AmazonRDS/latest/UserGuide/CHAP_Aurora.html Aurora on Amazon RDS>
-- in the /Amazon RDS User Guide./
--
-- /See:/ <http://docs.aws.amazon.com/AmazonRDS/latest/APIReference/API_DescribeDBClusterParameters.html AWS API Reference> for DescribeDBClusterParameters.
module Network.AWS.RDS.DescribeDBClusterParameters
(
-- * Creating a Request
describeDBClusterParameters
, DescribeDBClusterParameters
-- * Request Lenses
, ddcpFilters
, ddcpMarker
, ddcpMaxRecords
, ddcpSource
, ddcpDBClusterParameterGroupName
-- * Destructuring the Response
, describeDBClusterParametersResponse
, DescribeDBClusterParametersResponse
-- * Response Lenses
, ddcprsMarker
, ddcprsParameters
, ddcprsResponseStatus
) where
import Network.AWS.Prelude
import Network.AWS.RDS.Types
import Network.AWS.RDS.Types.Product
import Network.AWS.Request
import Network.AWS.Response
-- |
--
-- /See:/ 'describeDBClusterParameters' smart constructor.
data DescribeDBClusterParameters = DescribeDBClusterParameters'
{ _ddcpFilters :: !(Maybe [Filter])
, _ddcpMarker :: !(Maybe Text)
, _ddcpMaxRecords :: !(Maybe Int)
, _ddcpSource :: !(Maybe Text)
, _ddcpDBClusterParameterGroupName :: !Text
} deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Creates a value of 'DescribeDBClusterParameters' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'ddcpFilters'
--
-- * 'ddcpMarker'
--
-- * 'ddcpMaxRecords'
--
-- * 'ddcpSource'
--
-- * 'ddcpDBClusterParameterGroupName'
describeDBClusterParameters
:: Text -- ^ 'ddcpDBClusterParameterGroupName'
-> DescribeDBClusterParameters
describeDBClusterParameters pDBClusterParameterGroupName_ =
DescribeDBClusterParameters'
{ _ddcpFilters = Nothing
, _ddcpMarker = Nothing
, _ddcpMaxRecords = Nothing
, _ddcpSource = Nothing
, _ddcpDBClusterParameterGroupName = pDBClusterParameterGroupName_
}
-- | This parameter is not currently supported.
ddcpFilters :: Lens' DescribeDBClusterParameters [Filter]
ddcpFilters = lens _ddcpFilters (\ s a -> s{_ddcpFilters = a}) . _Default . _Coerce;
-- | An optional pagination token provided by a previous
-- 'DescribeDBClusterParameters' request. If this parameter is specified,
-- the response includes only records beyond the marker, up to the value
-- specified by 'MaxRecords'.
ddcpMarker :: Lens' DescribeDBClusterParameters (Maybe Text)
ddcpMarker = lens _ddcpMarker (\ s a -> s{_ddcpMarker = a});
-- | The maximum number of records to include in the response. If more
-- records exist than the specified 'MaxRecords' value, a pagination token
-- called a marker is included in the response so that the remaining
-- results can be retrieved.
--
-- Default: 100
--
-- Constraints: Minimum 20, maximum 100.
ddcpMaxRecords :: Lens' DescribeDBClusterParameters (Maybe Int)
ddcpMaxRecords = lens _ddcpMaxRecords (\ s a -> s{_ddcpMaxRecords = a});
-- | A value that indicates to return only parameters for a specific source.
-- Parameter sources can be 'engine', 'service', or 'customer'.
ddcpSource :: Lens' DescribeDBClusterParameters (Maybe Text)
ddcpSource = lens _ddcpSource (\ s a -> s{_ddcpSource = a});
-- | The name of a specific DB cluster parameter group to return parameter
-- details for.
--
-- Constraints:
--
-- - Must be 1 to 255 alphanumeric characters
-- - First character must be a letter
-- - Cannot end with a hyphen or contain two consecutive hyphens
ddcpDBClusterParameterGroupName :: Lens' DescribeDBClusterParameters Text
ddcpDBClusterParameterGroupName = lens _ddcpDBClusterParameterGroupName (\ s a -> s{_ddcpDBClusterParameterGroupName = a});
instance AWSRequest DescribeDBClusterParameters where
type Rs DescribeDBClusterParameters =
DescribeDBClusterParametersResponse
request = postQuery rDS
response
= receiveXMLWrapper
"DescribeDBClusterParametersResult"
(\ s h x ->
DescribeDBClusterParametersResponse' <$>
(x .@? "Marker") <*>
(x .@? "Parameters" .!@ mempty >>=
may (parseXMLList "Parameter"))
<*> (pure (fromEnum s)))
instance ToHeaders DescribeDBClusterParameters where
toHeaders = const mempty
instance ToPath DescribeDBClusterParameters where
toPath = const "/"
instance ToQuery DescribeDBClusterParameters where
toQuery DescribeDBClusterParameters'{..}
= mconcat
["Action" =:
("DescribeDBClusterParameters" :: ByteString),
"Version" =: ("2014-10-31" :: ByteString),
"Filters" =:
toQuery (toQueryList "Filter" <$> _ddcpFilters),
"Marker" =: _ddcpMarker,
"MaxRecords" =: _ddcpMaxRecords,
"Source" =: _ddcpSource,
"DBClusterParameterGroupName" =:
_ddcpDBClusterParameterGroupName]
-- | Provides details about a DB cluster parameter group including the
-- parameters in the DB cluster parameter group.
--
-- /See:/ 'describeDBClusterParametersResponse' smart constructor.
data DescribeDBClusterParametersResponse = DescribeDBClusterParametersResponse'
{ _ddcprsMarker :: !(Maybe Text)
, _ddcprsParameters :: !(Maybe [Parameter])
, _ddcprsResponseStatus :: !Int
} deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Creates a value of 'DescribeDBClusterParametersResponse' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'ddcprsMarker'
--
-- * 'ddcprsParameters'
--
-- * 'ddcprsResponseStatus'
describeDBClusterParametersResponse
:: Int -- ^ 'ddcprsResponseStatus'
-> DescribeDBClusterParametersResponse
describeDBClusterParametersResponse pResponseStatus_ =
DescribeDBClusterParametersResponse'
{ _ddcprsMarker = Nothing
, _ddcprsParameters = Nothing
, _ddcprsResponseStatus = pResponseStatus_
}
-- | An optional pagination token provided by a previous
-- DescribeDBClusterParameters request. If this parameter is specified, the
-- response includes only records beyond the marker, up to the value
-- specified by 'MaxRecords' .
ddcprsMarker :: Lens' DescribeDBClusterParametersResponse (Maybe Text)
ddcprsMarker = lens _ddcprsMarker (\ s a -> s{_ddcprsMarker = a});
-- | Provides a list of parameters for the DB cluster parameter group.
ddcprsParameters :: Lens' DescribeDBClusterParametersResponse [Parameter]
ddcprsParameters = lens _ddcprsParameters (\ s a -> s{_ddcprsParameters = a}) . _Default . _Coerce;
-- | The response status code.
ddcprsResponseStatus :: Lens' DescribeDBClusterParametersResponse Int
ddcprsResponseStatus = lens _ddcprsResponseStatus (\ s a -> s{_ddcprsResponseStatus = a});
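-- A hypothetical usage sketch (not part of the generated module); it assumes an
-- amazonka 'Env' built elsewhere and the usual 'runResourceT'/'runAWS'/'send'
-- driver from the same library generation, plus lens operators for reading the
-- response:
--
-- > rsp <- runResourceT . runAWS env $
-- >          send (describeDBClusterParameters "my-cluster-params")
-- > print (rsp ^. ddcprsParameters)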
|
fmapfmapfmap/amazonka
|
amazonka-rds/gen/Network/AWS/RDS/DescribeDBClusterParameters.hs
|
mpl-2.0
| 7,904 | 0 | 15 | 1,619 | 1,015 | 610 | 405 | 119 | 1 |
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-matches #-}
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- |
-- Module : Network.AWS.KMS.UpdateKeyDescription
-- Copyright : (c) 2013-2015 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Updates the description of a key.
--
-- /See:/ <http://docs.aws.amazon.com/kms/latest/APIReference/API_UpdateKeyDescription.html AWS API Reference> for UpdateKeyDescription.
module Network.AWS.KMS.UpdateKeyDescription
(
-- * Creating a Request
updateKeyDescription
, UpdateKeyDescription
-- * Request Lenses
, ukdKeyId
, ukdDescription
-- * Destructuring the Response
, updateKeyDescriptionResponse
, UpdateKeyDescriptionResponse
) where
import Network.AWS.KMS.Types
import Network.AWS.KMS.Types.Product
import Network.AWS.Prelude
import Network.AWS.Request
import Network.AWS.Response
-- | /See:/ 'updateKeyDescription' smart constructor.
data UpdateKeyDescription = UpdateKeyDescription'
{ _ukdKeyId :: !Text
, _ukdDescription :: !Text
} deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Creates a value of 'UpdateKeyDescription' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'ukdKeyId'
--
-- * 'ukdDescription'
updateKeyDescription
:: Text -- ^ 'ukdKeyId'
-> Text -- ^ 'ukdDescription'
-> UpdateKeyDescription
updateKeyDescription pKeyId_ pDescription_ =
UpdateKeyDescription'
{ _ukdKeyId = pKeyId_
, _ukdDescription = pDescription_
}
-- | A unique identifier for the customer master key. This value can be a
-- globally unique identifier or the fully specified ARN to a key.
--
-- - Key ARN Example -
-- arn:aws:kms:us-east-1:123456789012:key\/12345678-1234-1234-1234-123456789012
-- - Globally Unique Key ID Example -
-- 12345678-1234-1234-1234-123456789012
ukdKeyId :: Lens' UpdateKeyDescription Text
ukdKeyId = lens _ukdKeyId (\ s a -> s{_ukdKeyId = a});
-- | New description for the key.
ukdDescription :: Lens' UpdateKeyDescription Text
ukdDescription = lens _ukdDescription (\ s a -> s{_ukdDescription = a});
instance AWSRequest UpdateKeyDescription where
type Rs UpdateKeyDescription =
UpdateKeyDescriptionResponse
request = postJSON kMS
response = receiveNull UpdateKeyDescriptionResponse'
instance ToHeaders UpdateKeyDescription where
toHeaders
= const
(mconcat
["X-Amz-Target" =#
("TrentService.UpdateKeyDescription" :: ByteString),
"Content-Type" =#
("application/x-amz-json-1.1" :: ByteString)])
instance ToJSON UpdateKeyDescription where
toJSON UpdateKeyDescription'{..}
= object
(catMaybes
[Just ("KeyId" .= _ukdKeyId),
Just ("Description" .= _ukdDescription)])
instance ToPath UpdateKeyDescription where
toPath = const "/"
instance ToQuery UpdateKeyDescription where
toQuery = const mempty
-- | /See:/ 'updateKeyDescriptionResponse' smart constructor.
data UpdateKeyDescriptionResponse =
UpdateKeyDescriptionResponse'
deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Creates a value of 'UpdateKeyDescriptionResponse' with the minimum fields required to make a request.
--
updateKeyDescriptionResponse
:: UpdateKeyDescriptionResponse
updateKeyDescriptionResponse = UpdateKeyDescriptionResponse'
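-- A hypothetical usage sketch (not part of the generated module); it assumes an
-- amazonka 'Env' built elsewhere and the usual 'runResourceT'/'runAWS'/'send'
-- driver from the same library generation, with the example key ID from the
-- documentation above:
--
-- > _ <- runResourceT . runAWS env $
-- >        send (updateKeyDescription "12345678-1234-1234-1234-123456789012" "New description")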
|
fmapfmapfmap/amazonka
|
amazonka-kms/gen/Network/AWS/KMS/UpdateKeyDescription.hs
|
mpl-2.0
| 3,987 | 0 | 12 | 850 | 484 | 291 | 193 | 70 | 1 |
{-
(c) The AQUA Project, Glasgow University, 1994-1998
\section[TysPrim]{Wired-in knowledge about primitive types}
-}
{-# LANGUAGE CPP #-}
-- | This module defines TyCons that can't be expressed in Haskell.
-- They are all, therefore, wired-in TyCons. C.f module TysWiredIn
module TysPrim(
mkPrimTyConName, -- For implicit parameters in TysWiredIn only
mkTemplateKindVars, mkTemplateTyVars, mkTemplateTyVarsFrom,
mkTemplateKiTyVars,
mkTemplateTyConBinders, mkTemplateKindTyConBinders,
mkTemplateAnonTyConBinders,
alphaTyVars, alphaTyVar, betaTyVar, gammaTyVar, deltaTyVar,
alphaTys, alphaTy, betaTy, gammaTy, deltaTy,
runtimeRep1TyVar, runtimeRep2TyVar, runtimeRep1Ty, runtimeRep2Ty,
openAlphaTy, openBetaTy, openAlphaTyVar, openBetaTyVar,
-- Kind constructors...
tYPETyConName, unliftedTypeKindTyConName,
-- Kinds
tYPE,
funTyCon, funTyConName,
primTyCons,
charPrimTyCon, charPrimTy, charPrimTyConName,
intPrimTyCon, intPrimTy, intPrimTyConName,
wordPrimTyCon, wordPrimTy, wordPrimTyConName,
addrPrimTyCon, addrPrimTy, addrPrimTyConName,
floatPrimTyCon, floatPrimTy, floatPrimTyConName,
doublePrimTyCon, doublePrimTy, doublePrimTyConName,
voidPrimTyCon, voidPrimTy,
statePrimTyCon, mkStatePrimTy,
realWorldTyCon, realWorldTy, realWorldStatePrimTy,
proxyPrimTyCon, mkProxyPrimTy,
arrayPrimTyCon, mkArrayPrimTy,
byteArrayPrimTyCon, byteArrayPrimTy,
arrayArrayPrimTyCon, mkArrayArrayPrimTy,
smallArrayPrimTyCon, mkSmallArrayPrimTy,
mutableArrayPrimTyCon, mkMutableArrayPrimTy,
mutableByteArrayPrimTyCon, mkMutableByteArrayPrimTy,
mutableArrayArrayPrimTyCon, mkMutableArrayArrayPrimTy,
smallMutableArrayPrimTyCon, mkSmallMutableArrayPrimTy,
mutVarPrimTyCon, mkMutVarPrimTy,
mVarPrimTyCon, mkMVarPrimTy,
tVarPrimTyCon, mkTVarPrimTy,
stablePtrPrimTyCon, mkStablePtrPrimTy,
stableNamePrimTyCon, mkStableNamePrimTy,
compactPrimTyCon, compactPrimTy,
bcoPrimTyCon, bcoPrimTy,
weakPrimTyCon, mkWeakPrimTy,
threadIdPrimTyCon, threadIdPrimTy,
int32PrimTyCon, int32PrimTy,
word32PrimTyCon, word32PrimTy,
int64PrimTyCon, int64PrimTy,
word64PrimTyCon, word64PrimTy,
eqPrimTyCon, -- ty1 ~# ty2
eqReprPrimTyCon, -- ty1 ~R# ty2 (at role Representational)
eqPhantPrimTyCon, -- ty1 ~P# ty2 (at role Phantom)
-- * SIMD
#include "primop-vector-tys-exports.hs-incl"
) where
#include "HsVersions.h"
import {-# SOURCE #-} TysWiredIn
( runtimeRepTy, liftedTypeKind
, vecRepDataConTyCon, ptrRepUnliftedDataConTyCon
, voidRepDataConTy, intRepDataConTy
, wordRepDataConTy, int64RepDataConTy, word64RepDataConTy, addrRepDataConTy
, floatRepDataConTy, doubleRepDataConTy
, vec2DataConTy, vec4DataConTy, vec8DataConTy, vec16DataConTy, vec32DataConTy
, vec64DataConTy
, int8ElemRepDataConTy, int16ElemRepDataConTy, int32ElemRepDataConTy
, int64ElemRepDataConTy, word8ElemRepDataConTy, word16ElemRepDataConTy
, word32ElemRepDataConTy, word64ElemRepDataConTy, floatElemRepDataConTy
, doubleElemRepDataConTy )
import Var ( TyVar, mkTyVar )
import Name
import TyCon
import SrcLoc
import Unique
import PrelNames
import FastString
import Outputable
import TyCoRep -- Doesn't need special access, but this is easier to avoid
-- import loops which show up if you import Type instead
import Data.Char
{-
************************************************************************
* *
\subsection{Primitive type constructors}
* *
************************************************************************
-}
primTyCons :: [TyCon]
primTyCons
= [ addrPrimTyCon
, arrayPrimTyCon
, byteArrayPrimTyCon
, arrayArrayPrimTyCon
, smallArrayPrimTyCon
, charPrimTyCon
, doublePrimTyCon
, floatPrimTyCon
, intPrimTyCon
, int32PrimTyCon
, int64PrimTyCon
, bcoPrimTyCon
, weakPrimTyCon
, mutableArrayPrimTyCon
, mutableByteArrayPrimTyCon
, mutableArrayArrayPrimTyCon
, smallMutableArrayPrimTyCon
, mVarPrimTyCon
, tVarPrimTyCon
, mutVarPrimTyCon
, realWorldTyCon
, stablePtrPrimTyCon
, stableNamePrimTyCon
, compactPrimTyCon
, statePrimTyCon
, voidPrimTyCon
, proxyPrimTyCon
, threadIdPrimTyCon
, wordPrimTyCon
, word32PrimTyCon
, word64PrimTyCon
, eqPrimTyCon
, eqReprPrimTyCon
, eqPhantPrimTyCon
, unliftedTypeKindTyCon
, tYPETyCon
#include "primop-vector-tycons.hs-incl"
]
mkPrimTc :: FastString -> Unique -> TyCon -> Name
mkPrimTc fs unique tycon
= mkWiredInName gHC_PRIM (mkTcOccFS fs)
unique
(ATyCon tycon) -- Relevant TyCon
UserSyntax
mkBuiltInPrimTc :: FastString -> Unique -> TyCon -> Name
mkBuiltInPrimTc fs unique tycon
= mkWiredInName gHC_PRIM (mkTcOccFS fs)
unique
(ATyCon tycon) -- Relevant TyCon
BuiltInSyntax
charPrimTyConName, intPrimTyConName, int32PrimTyConName, int64PrimTyConName, wordPrimTyConName, word32PrimTyConName, word64PrimTyConName, addrPrimTyConName, floatPrimTyConName, doublePrimTyConName, statePrimTyConName, proxyPrimTyConName, realWorldTyConName, arrayPrimTyConName, arrayArrayPrimTyConName, smallArrayPrimTyConName, byteArrayPrimTyConName, mutableArrayPrimTyConName, mutableByteArrayPrimTyConName, mutableArrayArrayPrimTyConName, smallMutableArrayPrimTyConName, mutVarPrimTyConName, mVarPrimTyConName, tVarPrimTyConName, stablePtrPrimTyConName, stableNamePrimTyConName, compactPrimTyConName, bcoPrimTyConName, weakPrimTyConName, threadIdPrimTyConName, eqPrimTyConName, eqReprPrimTyConName, eqPhantPrimTyConName, voidPrimTyConName :: Name
charPrimTyConName = mkPrimTc (fsLit "Char#") charPrimTyConKey charPrimTyCon
intPrimTyConName = mkPrimTc (fsLit "Int#") intPrimTyConKey intPrimTyCon
int32PrimTyConName = mkPrimTc (fsLit "Int32#") int32PrimTyConKey int32PrimTyCon
int64PrimTyConName = mkPrimTc (fsLit "Int64#") int64PrimTyConKey int64PrimTyCon
wordPrimTyConName = mkPrimTc (fsLit "Word#") wordPrimTyConKey wordPrimTyCon
word32PrimTyConName = mkPrimTc (fsLit "Word32#") word32PrimTyConKey word32PrimTyCon
word64PrimTyConName = mkPrimTc (fsLit "Word64#") word64PrimTyConKey word64PrimTyCon
addrPrimTyConName = mkPrimTc (fsLit "Addr#") addrPrimTyConKey addrPrimTyCon
floatPrimTyConName = mkPrimTc (fsLit "Float#") floatPrimTyConKey floatPrimTyCon
doublePrimTyConName = mkPrimTc (fsLit "Double#") doublePrimTyConKey doublePrimTyCon
statePrimTyConName = mkPrimTc (fsLit "State#") statePrimTyConKey statePrimTyCon
voidPrimTyConName = mkPrimTc (fsLit "Void#") voidPrimTyConKey voidPrimTyCon
proxyPrimTyConName = mkPrimTc (fsLit "Proxy#") proxyPrimTyConKey proxyPrimTyCon
eqPrimTyConName = mkPrimTc (fsLit "~#") eqPrimTyConKey eqPrimTyCon
eqReprPrimTyConName = mkBuiltInPrimTc (fsLit "~R#") eqReprPrimTyConKey eqReprPrimTyCon
eqPhantPrimTyConName = mkBuiltInPrimTc (fsLit "~P#") eqPhantPrimTyConKey eqPhantPrimTyCon
realWorldTyConName = mkPrimTc (fsLit "RealWorld") realWorldTyConKey realWorldTyCon
arrayPrimTyConName = mkPrimTc (fsLit "Array#") arrayPrimTyConKey arrayPrimTyCon
byteArrayPrimTyConName = mkPrimTc (fsLit "ByteArray#") byteArrayPrimTyConKey byteArrayPrimTyCon
arrayArrayPrimTyConName = mkPrimTc (fsLit "ArrayArray#") arrayArrayPrimTyConKey arrayArrayPrimTyCon
smallArrayPrimTyConName = mkPrimTc (fsLit "SmallArray#") smallArrayPrimTyConKey smallArrayPrimTyCon
mutableArrayPrimTyConName = mkPrimTc (fsLit "MutableArray#") mutableArrayPrimTyConKey mutableArrayPrimTyCon
mutableByteArrayPrimTyConName = mkPrimTc (fsLit "MutableByteArray#") mutableByteArrayPrimTyConKey mutableByteArrayPrimTyCon
mutableArrayArrayPrimTyConName= mkPrimTc (fsLit "MutableArrayArray#") mutableArrayArrayPrimTyConKey mutableArrayArrayPrimTyCon
smallMutableArrayPrimTyConName= mkPrimTc (fsLit "SmallMutableArray#") smallMutableArrayPrimTyConKey smallMutableArrayPrimTyCon
mutVarPrimTyConName = mkPrimTc (fsLit "MutVar#") mutVarPrimTyConKey mutVarPrimTyCon
mVarPrimTyConName = mkPrimTc (fsLit "MVar#") mVarPrimTyConKey mVarPrimTyCon
tVarPrimTyConName = mkPrimTc (fsLit "TVar#") tVarPrimTyConKey tVarPrimTyCon
stablePtrPrimTyConName = mkPrimTc (fsLit "StablePtr#") stablePtrPrimTyConKey stablePtrPrimTyCon
stableNamePrimTyConName = mkPrimTc (fsLit "StableName#") stableNamePrimTyConKey stableNamePrimTyCon
compactPrimTyConName = mkPrimTc (fsLit "Compact#") compactPrimTyConKey compactPrimTyCon
bcoPrimTyConName = mkPrimTc (fsLit "BCO#") bcoPrimTyConKey bcoPrimTyCon
weakPrimTyConName = mkPrimTc (fsLit "Weak#") weakPrimTyConKey weakPrimTyCon
threadIdPrimTyConName = mkPrimTc (fsLit "ThreadId#") threadIdPrimTyConKey threadIdPrimTyCon
{-
************************************************************************
* *
\subsection{Support code}
* *
************************************************************************
alphaTyVars is a list of type variables for use in templates:
        ["a", "b", ..., "z", "t26", "t27", ... ]
-}
mkTemplateKindVars :: [Kind] -> [TyVar]
-- k0 with unique (mkAlphaTyVarUnique 0)
-- k1 with unique (mkAlphaTyVarUnique 1)
-- ... etc
mkTemplateKindVars kinds
= [ mkTyVar name kind
| (kind, u) <- kinds `zip` [0..]
, let occ = mkTyVarOccFS (mkFastString ('k' : show u))
name = mkInternalName (mkAlphaTyVarUnique u) occ noSrcSpan
]
mkTemplateTyVarsFrom :: Int -> [Kind] -> [TyVar]
-- a with unique (mkAlphaTyVarUnique n)
-- b with unique (mkAlphaTyVarUnique n+1)
-- ... etc
-- Typically called as
--     mkTemplateTyVarsFrom (length kv_bndrs) kinds
-- where kv_bndrs are the kind-level binders of a TyCon
mkTemplateTyVarsFrom n kinds
= [ mkTyVar name kind
| (kind, index) <- zip kinds [0..],
let ch_ord = index + ord 'a'
name_str | ch_ord <= ord 'z' = [chr ch_ord]
| otherwise = 't':show index
uniq = mkAlphaTyVarUnique (index + n)
name = mkInternalName uniq occ noSrcSpan
occ = mkTyVarOccFS (mkFastString name_str)
]
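-- For example (reading the guards above): the generated names run "a", "b",
-- ..., "z" for the first 26 kinds and then "t26", "t27", ..., with the uniques
-- starting at mkAlphaTyVarUnique n.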
mkTemplateTyVars :: [Kind] -> [TyVar]
mkTemplateTyVars = mkTemplateTyVarsFrom 1
mkTemplateTyConBinders
:: [Kind] -- [k1, .., kn] Kinds of kind-forall'd vars
-> ([Kind] -> [Kind]) -- Arg is [kv1:k1, ..., kvn:kn]
-- same length as first arg
-- Result is anon arg kinds
-> [TyConBinder]
mkTemplateTyConBinders kind_var_kinds mk_anon_arg_kinds
= kv_bndrs ++ tv_bndrs
where
kv_bndrs = mkTemplateKindTyConBinders kind_var_kinds
anon_kinds = mk_anon_arg_kinds (mkTyVarTys (binderVars kv_bndrs))
tv_bndrs = mkTemplateAnonTyConBindersFrom (length kv_bndrs) anon_kinds
mkTemplateKiTyVars
:: [Kind] -- [k1, .., kn] Kinds of kind-forall'd vars
-> ([Kind] -> [Kind]) -- Arg is [kv1:k1, ..., kvn:kn]
-- same length as first arg
-- Result is anon arg kinds [ak1, .., akm]
-> [TyVar] -- [kv1:k1, ..., kvn:kn, av1:ak1, ..., avm:akm]
-- Example: if you want the tyvars for
-- forall (r:RuntimeRep) (a:TYPE r) (b:*). blah
--      call mkTemplateKiTyVars [RuntimeRep] (\[r]. [TYPE r, *])
mkTemplateKiTyVars kind_var_kinds mk_arg_kinds
= kv_bndrs ++ tv_bndrs
where
kv_bndrs = mkTemplateKindVars kind_var_kinds
anon_kinds = mk_arg_kinds (mkTyVarTys kv_bndrs)
tv_bndrs = mkTemplateTyVarsFrom (length kv_bndrs) anon_kinds
mkTemplateKindTyConBinders :: [Kind] -> [TyConBinder]
-- Makes named, Specified binders
mkTemplateKindTyConBinders kinds = [mkNamedTyConBinder Specified tv | tv <- mkTemplateKindVars kinds]
mkTemplateAnonTyConBinders :: [Kind] -> [TyConBinder]
mkTemplateAnonTyConBinders kinds = map mkAnonTyConBinder (mkTemplateTyVars kinds)
mkTemplateAnonTyConBindersFrom :: Int -> [Kind] -> [TyConBinder]
mkTemplateAnonTyConBindersFrom n kinds = map mkAnonTyConBinder (mkTemplateTyVarsFrom n kinds)
alphaTyVars :: [TyVar]
alphaTyVars = mkTemplateTyVars $ repeat liftedTypeKind
alphaTyVar, betaTyVar, gammaTyVar, deltaTyVar :: TyVar
(alphaTyVar:betaTyVar:gammaTyVar:deltaTyVar:_) = alphaTyVars
alphaTys :: [Type]
alphaTys = mkTyVarTys alphaTyVars
alphaTy, betaTy, gammaTy, deltaTy :: Type
(alphaTy:betaTy:gammaTy:deltaTy:_) = alphaTys
runtimeRep1TyVar, runtimeRep2TyVar :: TyVar
(runtimeRep1TyVar : runtimeRep2TyVar : _)
= drop 16 (mkTemplateTyVars (repeat runtimeRepTy)) -- selects 'q','r'
runtimeRep1Ty, runtimeRep2Ty :: Type
runtimeRep1Ty = mkTyVarTy runtimeRep1TyVar
runtimeRep2Ty = mkTyVarTy runtimeRep2TyVar
openAlphaTyVar, openBetaTyVar :: TyVar
[openAlphaTyVar,openBetaTyVar]
= mkTemplateTyVars [tYPE runtimeRep1Ty, tYPE runtimeRep2Ty]
openAlphaTy, openBetaTy :: Type
openAlphaTy = mkTyVarTy openAlphaTyVar
openBetaTy = mkTyVarTy openBetaTyVar
{-
************************************************************************
* *
FunTyCon
* *
************************************************************************
-}
funTyConName :: Name
funTyConName = mkPrimTyConName (fsLit "(->)") funTyConKey funTyCon
funTyCon :: TyCon
funTyCon = mkFunTyCon funTyConName tc_bndrs tc_rep_nm
where
tc_bndrs = mkTemplateAnonTyConBinders [liftedTypeKind, liftedTypeKind]
-- You might think that (->) should have type (?? -> ? -> *), and you'd be right
-- But if we do that we get kind errors when saying
-- instance Control.Arrow (->)
-- because the expected kind is (*->*->*). The trouble is that the
-- expected/actual stuff in the unifier does not go contra-variant, whereas
-- the kind sub-typing does. Sigh. It really only matters if you use (->) in
-- a prefix way, thus: (->) Int# Int#. And this is unusual.
-- because they are never in scope in the source
tc_rep_nm = mkPrelTyConRepName funTyConName
{-
************************************************************************
* *
Kinds
* *
************************************************************************
Note [TYPE and RuntimeRep]
~~~~~~~~~~~~~~~~~~~~~~~~~~
All types that classify values have a kind of the form (TYPE rr), where
data RuntimeRep -- Defined in ghc-prim:GHC.Types
= PtrRepLifted
| PtrRepUnlifted
| IntRep
| FloatRep
.. etc ..
rr :: RuntimeRep
TYPE :: RuntimeRep -> TYPE 'PtrRepLifted -- Built in
So for example:
Int :: TYPE 'PtrRepLifted
Array# Int :: TYPE 'PtrRepUnlifted
Int# :: TYPE 'IntRep
Float# :: TYPE 'FloatRep
Maybe :: TYPE 'PtrRepLifted -> TYPE 'PtrRepLifted
We abbreviate '*' specially:
type * = TYPE 'PtrRepLifted
The 'rr' parameter tells us how the value is represented at runtime.
Generally speaking, you can't be polymorphic in 'rr'. E.g
f :: forall (rr:RuntimeRep) (a:TYPE rr). a -> [a]
f = /\(rr:RuntimeRep) (a:rr) \(a:rr). ...
This is no good: we could not generate code for 'f', because the
calling convention for 'f' varies depending on whether the argument is
an Int, Int#, or Float#. (You could imagine generating specialised
code, one for each instantiation of 'rr', but we don't do that.)
Certain functions CAN be runtime-rep-polymorphic, because the code
generator never has to manipulate a value of type 'a :: TYPE rr'.
* error :: forall (rr:RuntimeRep) (a:TYPE rr). String -> a
Code generator never has to manipulate the return value.
* unsafeCoerce#, defined in MkId.unsafeCoerceId:
Always inlined to be a no-op
unsafeCoerce# :: forall (r1 :: RuntimeRep) (r2 :: RuntimeRep)
(a :: TYPE r1) (b :: TYPE r2).
a -> b
* Unboxed tuples, and unboxed sums, defined in TysWiredIn
Always inlined, and hence specialised to the call site
(#,#) :: forall (r1 :: RuntimeRep) (r2 :: RuntimeRep)
(a :: TYPE r1) (b :: TYPE r2).
a -> b -> TYPE 'UnboxedTupleRep
See Note [Unboxed tuple kinds]
Note [Unboxed tuple kinds]
~~~~~~~~~~~~~~~~~~~~~~~~~~
What kind does (# Int, Float# #) have?
The "right" answer would be
TYPE ('UnboxedTupleRep [PtrRepLifted, FloatRep])
Currently we do not do this. We just have
(# Int, Float# #) :: TYPE 'UnboxedTupleRep
which does not tell us exactly how it is represented.
Note [PrimRep and kindPrimRep]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
As part of its source code, in TyCon, GHC has
data PrimRep = PtrRep | IntRep | FloatRep | ...etc...
Notice that
* RuntimeRep is part of the syntax tree of the program being compiled
(defined in a library: ghc-prim:GHC.Types)
* PrimRep is part of GHC's source code.
(defined in TyCon)
We need to get from one to the other; that is what kindPrimRep does.
Suppose we have a value
(v :: t) where (t :: k)
Given this kind
k = TyConApp "TYPE" [rep]
GHC needs to be able to figure out how 'v' is represented at runtime.
It expects 'rep' to be of the form
TyConApp rr_dc args
where 'rr_dc' is a promoted data constructor from RuntimeRep. So
now we need to go from 'rr_dc' to the corresponding PrimRep. We store this
PrimRep in the promoted data constructor itself: see TyCon.promDcRepInfo.
-}
tYPETyCon, unliftedTypeKindTyCon :: TyCon
tYPETyConName, unliftedTypeKindTyConName :: Name
tYPETyCon = mkKindTyCon tYPETyConName
(mkTemplateAnonTyConBinders [runtimeRepTy])
liftedTypeKind
[Nominal]
(mkPrelTyConRepName tYPETyConName)
-- See Note [TYPE and RuntimeRep]
-- NB: unlifted is wired in because there is no way to parse it in
-- Haskell. That's the only reason for wiring it in.
unliftedTypeKindTyCon = mkSynonymTyCon unliftedTypeKindTyConName
[] liftedTypeKind []
(tYPE (TyConApp ptrRepUnliftedDataConTyCon []))
True -- no foralls
True -- family free
--------------------------
-- ... and now their names
-- If you edit these, you may need to update the GHC formalism
-- See Note [GHC Formalism] in coreSyn/CoreLint.hs
tYPETyConName = mkPrimTyConName (fsLit "TYPE") tYPETyConKey tYPETyCon
unliftedTypeKindTyConName = mkPrimTyConName (fsLit "#") unliftedTypeKindTyConKey unliftedTypeKindTyCon
mkPrimTyConName :: FastString -> Unique -> TyCon -> Name
mkPrimTyConName = mkPrimTcName BuiltInSyntax
-- All of the super kinds and kinds are defined in Prim,
-- and use BuiltInSyntax, because they are never in scope in the source
mkPrimTcName :: BuiltInSyntax -> FastString -> Unique -> TyCon -> Name
mkPrimTcName built_in_syntax occ key tycon
= mkWiredInName gHC_PRIM (mkTcOccFS occ) key (ATyCon tycon) built_in_syntax
-----------------------------
-- | Given a RuntimeRep, applies TYPE to it.
-- see Note [TYPE and RuntimeRep]
tYPE :: Type -> Type
tYPE rr = TyConApp tYPETyCon [rr]
{-
************************************************************************
* *
\subsection[TysPrim-basic]{Basic primitive types (@Char#@, @Int#@, etc.)}
* *
************************************************************************
-}
-- only used herein
pcPrimTyCon :: Name -> [Role] -> PrimRep -> TyCon
pcPrimTyCon name roles rep
= mkPrimTyCon name binders result_kind roles
where
binders = mkTemplateAnonTyConBinders (map (const liftedTypeKind) roles)
result_kind = tYPE rr
rr = case rep of
VoidRep -> voidRepDataConTy
PtrRep -> TyConApp ptrRepUnliftedDataConTyCon []
IntRep -> intRepDataConTy
WordRep -> wordRepDataConTy
Int64Rep -> int64RepDataConTy
Word64Rep -> word64RepDataConTy
AddrRep -> addrRepDataConTy
FloatRep -> floatRepDataConTy
DoubleRep -> doubleRepDataConTy
VecRep n elem -> TyConApp vecRepDataConTyCon [n', elem']
where
n' = case n of
2 -> vec2DataConTy
4 -> vec4DataConTy
8 -> vec8DataConTy
16 -> vec16DataConTy
32 -> vec32DataConTy
64 -> vec64DataConTy
_ -> pprPanic "Disallowed VecCount" (ppr n)
elem' = case elem of
Int8ElemRep -> int8ElemRepDataConTy
Int16ElemRep -> int16ElemRepDataConTy
Int32ElemRep -> int32ElemRepDataConTy
Int64ElemRep -> int64ElemRepDataConTy
Word8ElemRep -> word8ElemRepDataConTy
Word16ElemRep -> word16ElemRepDataConTy
Word32ElemRep -> word32ElemRepDataConTy
Word64ElemRep -> word64ElemRepDataConTy
FloatElemRep -> floatElemRepDataConTy
DoubleElemRep -> doubleElemRepDataConTy
pcPrimTyCon0 :: Name -> PrimRep -> TyCon
pcPrimTyCon0 name rep
= pcPrimTyCon name [] rep
charPrimTy :: Type
charPrimTy = mkTyConTy charPrimTyCon
charPrimTyCon :: TyCon
charPrimTyCon = pcPrimTyCon0 charPrimTyConName WordRep
intPrimTy :: Type
intPrimTy = mkTyConTy intPrimTyCon
intPrimTyCon :: TyCon
intPrimTyCon = pcPrimTyCon0 intPrimTyConName IntRep
int32PrimTy :: Type
int32PrimTy = mkTyConTy int32PrimTyCon
int32PrimTyCon :: TyCon
int32PrimTyCon = pcPrimTyCon0 int32PrimTyConName IntRep
int64PrimTy :: Type
int64PrimTy = mkTyConTy int64PrimTyCon
int64PrimTyCon :: TyCon
int64PrimTyCon = pcPrimTyCon0 int64PrimTyConName Int64Rep
wordPrimTy :: Type
wordPrimTy = mkTyConTy wordPrimTyCon
wordPrimTyCon :: TyCon
wordPrimTyCon = pcPrimTyCon0 wordPrimTyConName WordRep
word32PrimTy :: Type
word32PrimTy = mkTyConTy word32PrimTyCon
word32PrimTyCon :: TyCon
word32PrimTyCon = pcPrimTyCon0 word32PrimTyConName WordRep
word64PrimTy :: Type
word64PrimTy = mkTyConTy word64PrimTyCon
word64PrimTyCon :: TyCon
word64PrimTyCon = pcPrimTyCon0 word64PrimTyConName Word64Rep
addrPrimTy :: Type
addrPrimTy = mkTyConTy addrPrimTyCon
addrPrimTyCon :: TyCon
addrPrimTyCon = pcPrimTyCon0 addrPrimTyConName AddrRep
floatPrimTy :: Type
floatPrimTy = mkTyConTy floatPrimTyCon
floatPrimTyCon :: TyCon
floatPrimTyCon = pcPrimTyCon0 floatPrimTyConName FloatRep
doublePrimTy :: Type
doublePrimTy = mkTyConTy doublePrimTyCon
doublePrimTyCon :: TyCon
doublePrimTyCon = pcPrimTyCon0 doublePrimTyConName DoubleRep
{-
************************************************************************
* *
\subsection[TysPrim-state]{The @State#@ type (and @_RealWorld@ types)}
* *
************************************************************************
Note [The equality types story]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
GHC sports a veritable menagerie of equality types:
Hetero? Levity Result Role Defining module
------------------------------------------------------------
~# hetero unlifted # nominal GHC.Prim
~~ hetero lifted Constraint nominal GHC.Types
~ homo lifted Constraint nominal Data.Type.Equality
:~: homo lifted * nominal Data.Type.Equality
~R# hetero unlifted # repr GHC.Prim
Coercible homo lifted Constraint repr GHC.Types
Coercion homo lifted * repr Data.Type.Coercion
~P# hetero unlifted phantom GHC.Prim
Recall that "hetero" means the equality can relate types of different
kinds. Knowing that (t1 ~# t2) or (t1 ~R# t2) or even that (t1 ~P# t2)
also means that (k1 ~# k2), where (t1 :: k1) and (t2 :: k2).
To produce less confusion for end users, when not dumping and without
-fprint-equality-relations, each of these groups is printed as the bottommost
listed equality. That is, (~#) and (~~) are both rendered as (~) in
error messages, and (~R#) is rendered as Coercible.
Let's take these one at a time:
--------------------------
(~#) :: forall k1 k2. k1 -> k2 -> #
--------------------------
This is The Type Of Equality in GHC. It classifies nominal coercions.
This type is used in the solver for recording equality constraints.
It responds "yes" to Type.isEqPred and classifies as an EqPred in
Type.classifyPredType.
All wanted constraints of this type are built with coercion holes.
(See Note [Coercion holes] in TyCoRep.) But see also
Note [Deferred errors for coercion holes] in TcErrors to see how
equality constraints are deferred.
Within GHC, ~# is called eqPrimTyCon, and it is defined in TysPrim.
--------------------------
(~~) :: forall k1 k2. k1 -> k2 -> Constraint
--------------------------
This is (almost) an ordinary class, defined as if by
class a ~# b => a ~~ b
instance a ~# b => a ~~ b
Here's what's unusual about it:
* We can't actually declare it that way because we don't have syntax for ~#.
And ~# isn't a constraint, so even if we could write it, it wouldn't kind
check.
* Users cannot write instances of it.
* It is "naturally coherent". This means that the solver won't hesitate to
solve a goal of type (a ~~ b) even if there is, say (Int ~~ c) in the
context. (Normally, it waits to learn more, just in case the given
influences what happens next.) This is quite like having
IncoherentInstances enabled.
* It always terminates. That is, in the UndecidableInstances checks, we
don't worry if a (~~) constraint is too big, as we know that solving
equality terminates.
On the other hand, this behaves just like any class w.r.t. eager superclass
unpacking in the solver. So a lifted equality given quickly becomes an unlifted
equality given. This is good, because the solver knows all about unlifted
equalities. There is some special-casing in TcInteract.matchClassInst to
pretend that there is an instance of this class, as we can't write the instance
in Haskell.
Within GHC, ~~ is called heqTyCon, and it is defined in TysWiredIn.
--------------------------
(~) :: forall k. k -> k -> Constraint
--------------------------
This is defined in Data.Type.Equality:
class a ~~ b => (a :: k) ~ (b :: k)
instance a ~~ b => a ~ b
This is even more so an ordinary class than (~~), with the following exceptions:
* Users cannot write instances of it.
* It is "naturally coherent". (See (~~).)
* (~) is magical syntax, as ~ is a reserved symbol. It cannot be exported
or imported.
* It always terminates.
Within GHC, ~ is called eqTyCon, and it is defined in PrelNames. Note that
it is *not* wired in.
--------------------------
(:~:) :: forall k. k -> k -> *
--------------------------
This is a perfectly ordinary GADT, wrapping (~). It is not defined within
GHC at all.
--------------------------
(~R#) :: forall k1 k2. k1 -> k2 -> #
--------------------------
This is the representational analogue of ~#. It is the type of representational
equalities that the solver works on. All wanted constraints of this type are
built with coercion holes.
Within GHC, ~R# is called eqReprPrimTyCon, and it is defined in TysPrim.
--------------------------
Coercible :: forall k. k -> k -> Constraint
--------------------------
This is quite like (~~) in the way it's defined and treated within GHC, but
it's homogeneous. Homogeneity helps with type inference (as GHC can solve one
kind from the other) and, in my (Richard's) estimation, will be more intuitive
for users.
An alternative design included HCoercible (like (~~)) and Coercible (like (~)).
One annoyance was that we want `coerce :: Coercible a b => a -> b`, and
we need the type of coerce to be fully wired-in. So the HCoercible/Coercible
split required that both types be fully wired-in. Instead of doing this,
I just got rid of HCoercible, as I'm not sure who would use it, anyway.
Within GHC, Coercible is called coercibleTyCon, and it is defined in
TysWiredIn.
--------------------------
Coercion :: forall k. k -> k -> *
--------------------------
This is a perfectly ordinary GADT, wrapping Coercible. It is not defined
within GHC at all.
--------------------------
(~P#) :: forall k1 k2. k1 -> k2 -> #
--------------------------
This is the phantom analogue of ~# and it is barely used at all.
(The solver has no idea about this one.) Here is the motivation:
data Phant a = MkPhant
type role Phant phantom
Phant <Int, Bool>_P :: Phant Int ~P# Phant Bool
We just need to have something to put on that last line. You probably
don't need to worry about it.
Note [The State# TyCon]
~~~~~~~~~~~~~~~~~~~~~~~
State# is the primitive, unlifted type of states. It has one type parameter,
thus
State# RealWorld
or
State# s
where s is a type variable. The only purpose of the type parameter is to
keep different state threads separate. It is represented by nothing at all.
The type parameter to State# is intended to keep distinct state threads separate.
Even though this parameter is not used in the definition of State#, it is
given role Nominal to enforce its intended use.
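For context, a sketch of how State# RealWorld surfaces at the source level;
the real definition lives in GHC.Types:
  newtype IO a = IO (State# RealWorld -> (# State# RealWorld, a #))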
-}
mkStatePrimTy :: Type -> Type
mkStatePrimTy ty = TyConApp statePrimTyCon [ty]
statePrimTyCon :: TyCon -- See Note [The State# TyCon]
statePrimTyCon = pcPrimTyCon statePrimTyConName [Nominal] VoidRep
{-
RealWorld is deeply magical. It is *primitive*, but it is not
*unlifted* (hence ptrArg). We never manipulate values of type
RealWorld; it's only used in the type system, to parameterise State#.
-}
realWorldTyCon :: TyCon
realWorldTyCon = mkLiftedPrimTyCon realWorldTyConName [] liftedTypeKind []
realWorldTy :: Type
realWorldTy = mkTyConTy realWorldTyCon
realWorldStatePrimTy :: Type
realWorldStatePrimTy = mkStatePrimTy realWorldTy -- State# RealWorld
-- Note: the ``state-pairing'' types are not truly primitive,
-- so they are defined in \tr{TysWiredIn.hs}, not here.
voidPrimTy :: Type
voidPrimTy = TyConApp voidPrimTyCon []
voidPrimTyCon :: TyCon
voidPrimTyCon = pcPrimTyCon voidPrimTyConName [] VoidRep
mkProxyPrimTy :: Type -> Type -> Type
mkProxyPrimTy k ty = TyConApp proxyPrimTyCon [k, ty]
proxyPrimTyCon :: TyCon
proxyPrimTyCon = mkPrimTyCon proxyPrimTyConName binders res_kind [Nominal,Nominal]
where
-- Kind: forall k. k -> Void#
binders = mkTemplateTyConBinders [liftedTypeKind] (\ks-> ks)
res_kind = tYPE voidRepDataConTy
{- *********************************************************************
* *
Primitive equality constraints
See Note [The equality types story]
* *
********************************************************************* -}
eqPrimTyCon :: TyCon -- The representation type for equality predicates
-- See Note [The equality types story]
eqPrimTyCon = mkPrimTyCon eqPrimTyConName binders res_kind roles
where
-- Kind :: forall k1 k2. k1 -> k2 -> Void#
binders = mkTemplateTyConBinders [liftedTypeKind, liftedTypeKind] (\ks -> ks)
res_kind = tYPE voidRepDataConTy
roles = [Nominal, Nominal, Nominal, Nominal]
-- like eqPrimTyCon, but the type for *Representational* coercions
-- this should only ever appear as the type of a covar. Its role is
-- interpreted in coercionRole
eqReprPrimTyCon :: TyCon -- See Note [The equality types story]
eqReprPrimTyCon = mkPrimTyCon eqReprPrimTyConName binders res_kind roles
where
-- Kind :: forall k1 k2. k1 -> k2 -> Void#
binders = mkTemplateTyConBinders [liftedTypeKind, liftedTypeKind] (\ks -> ks)
res_kind = tYPE voidRepDataConTy
roles = [Nominal, Nominal, Representational, Representational]
-- like eqPrimTyCon, but the type for *Phantom* coercions.
-- This is only used to make higher-order equalities. Nothing
-- should ever actually have this type!
eqPhantPrimTyCon :: TyCon
eqPhantPrimTyCon = mkPrimTyCon eqPhantPrimTyConName binders res_kind roles
where
-- Kind :: forall k1 k2. k1 -> k2 -> Void#
binders = mkTemplateTyConBinders [liftedTypeKind, liftedTypeKind] (\ks -> ks)
res_kind = tYPE voidRepDataConTy
roles = [Nominal, Nominal, Phantom, Phantom]
{- *********************************************************************
* *
The primitive array types
* *
********************************************************************* -}
arrayPrimTyCon, mutableArrayPrimTyCon, mutableByteArrayPrimTyCon,
byteArrayPrimTyCon, arrayArrayPrimTyCon, mutableArrayArrayPrimTyCon,
smallArrayPrimTyCon, smallMutableArrayPrimTyCon :: TyCon
arrayPrimTyCon = pcPrimTyCon arrayPrimTyConName [Representational] PtrRep
mutableArrayPrimTyCon = pcPrimTyCon mutableArrayPrimTyConName [Nominal, Representational] PtrRep
mutableByteArrayPrimTyCon = pcPrimTyCon mutableByteArrayPrimTyConName [Nominal] PtrRep
byteArrayPrimTyCon = pcPrimTyCon0 byteArrayPrimTyConName PtrRep
arrayArrayPrimTyCon = pcPrimTyCon0 arrayArrayPrimTyConName PtrRep
mutableArrayArrayPrimTyCon = pcPrimTyCon mutableArrayArrayPrimTyConName [Nominal] PtrRep
smallArrayPrimTyCon = pcPrimTyCon smallArrayPrimTyConName [Representational] PtrRep
smallMutableArrayPrimTyCon = pcPrimTyCon smallMutableArrayPrimTyConName [Nominal, Representational] PtrRep
mkArrayPrimTy :: Type -> Type
mkArrayPrimTy elt = TyConApp arrayPrimTyCon [elt]
byteArrayPrimTy :: Type
byteArrayPrimTy = mkTyConTy byteArrayPrimTyCon
mkArrayArrayPrimTy :: Type
mkArrayArrayPrimTy = mkTyConTy arrayArrayPrimTyCon
mkSmallArrayPrimTy :: Type -> Type
mkSmallArrayPrimTy elt = TyConApp smallArrayPrimTyCon [elt]
mkMutableArrayPrimTy :: Type -> Type -> Type
mkMutableArrayPrimTy s elt = TyConApp mutableArrayPrimTyCon [s, elt]
mkMutableByteArrayPrimTy :: Type -> Type
mkMutableByteArrayPrimTy s = TyConApp mutableByteArrayPrimTyCon [s]
mkMutableArrayArrayPrimTy :: Type -> Type
mkMutableArrayArrayPrimTy s = TyConApp mutableArrayArrayPrimTyCon [s]
mkSmallMutableArrayPrimTy :: Type -> Type -> Type
mkSmallMutableArrayPrimTy s elt = TyConApp smallMutableArrayPrimTyCon [s, elt]
{- *********************************************************************
* *
The mutable variable type
* *
********************************************************************* -}
mutVarPrimTyCon :: TyCon
mutVarPrimTyCon = pcPrimTyCon mutVarPrimTyConName [Nominal, Representational] PtrRep
mkMutVarPrimTy :: Type -> Type -> Type
mkMutVarPrimTy s elt = TyConApp mutVarPrimTyCon [s, elt]
{-
************************************************************************
* *
\subsection[TysPrim-synch-var]{The synchronizing variable type}
* *
************************************************************************
-}
mVarPrimTyCon :: TyCon
mVarPrimTyCon = pcPrimTyCon mVarPrimTyConName [Nominal, Representational] PtrRep
mkMVarPrimTy :: Type -> Type -> Type
mkMVarPrimTy s elt = TyConApp mVarPrimTyCon [s, elt]
{-
************************************************************************
* *
\subsection[TysPrim-stm-var]{The transactional variable type}
* *
************************************************************************
-}
tVarPrimTyCon :: TyCon
tVarPrimTyCon = pcPrimTyCon tVarPrimTyConName [Nominal, Representational] PtrRep
mkTVarPrimTy :: Type -> Type -> Type
mkTVarPrimTy s elt = TyConApp tVarPrimTyCon [s, elt]
{-
************************************************************************
* *
\subsection[TysPrim-stable-ptrs]{The stable-pointer type}
* *
************************************************************************
-}
stablePtrPrimTyCon :: TyCon
stablePtrPrimTyCon = pcPrimTyCon stablePtrPrimTyConName [Representational] AddrRep
mkStablePtrPrimTy :: Type -> Type
mkStablePtrPrimTy ty = TyConApp stablePtrPrimTyCon [ty]
{-
************************************************************************
* *
\subsection[TysPrim-stable-names]{The stable-name type}
* *
************************************************************************
-}
stableNamePrimTyCon :: TyCon
stableNamePrimTyCon = pcPrimTyCon stableNamePrimTyConName [Representational] PtrRep
mkStableNamePrimTy :: Type -> Type
mkStableNamePrimTy ty = TyConApp stableNamePrimTyCon [ty]
{-
************************************************************************
* *
\subsection[TysPrim-compact-nfdata]{The Compact NFData (CNF) type}
* *
************************************************************************
-}
compactPrimTyCon :: TyCon
compactPrimTyCon = pcPrimTyCon0 compactPrimTyConName PtrRep
compactPrimTy :: Type
compactPrimTy = mkTyConTy compactPrimTyCon
{-
************************************************************************
* *
\subsection[TysPrim-BCOs]{The ``bytecode object'' type}
* *
************************************************************************
-}
bcoPrimTy :: Type
bcoPrimTy = mkTyConTy bcoPrimTyCon
bcoPrimTyCon :: TyCon
bcoPrimTyCon = pcPrimTyCon0 bcoPrimTyConName PtrRep
{-
************************************************************************
* *
\subsection[TysPrim-Weak]{The ``weak pointer'' type}
* *
************************************************************************
-}
weakPrimTyCon :: TyCon
weakPrimTyCon = pcPrimTyCon weakPrimTyConName [Representational] PtrRep
mkWeakPrimTy :: Type -> Type
mkWeakPrimTy v = TyConApp weakPrimTyCon [v]
{-
************************************************************************
* *
\subsection[TysPrim-thread-ids]{The ``thread id'' type}
* *
************************************************************************
A thread id is represented by a pointer to the TSO itself, to ensure
that they are always unique and we can always find the TSO for a given
thread id. However, this has the unfortunate consequence that a
ThreadId# for a given thread is treated as a root by the garbage
collector and can keep TSOs around for too long.
Hence the programmer API for thread manipulation uses a weak pointer
to the thread id internally.
-}
threadIdPrimTy :: Type
threadIdPrimTy = mkTyConTy threadIdPrimTyCon
threadIdPrimTyCon :: TyCon
threadIdPrimTyCon = pcPrimTyCon0 threadIdPrimTyConName PtrRep
{-
************************************************************************
* *
\subsection{SIMD vector types}
* *
************************************************************************
-}
#include "primop-vector-tys.hs-incl"
|
olsner/ghc
|
compiler/prelude/TysPrim.hs
|
bsd-3-clause
| 41,501 | 0 | 17 | 10,364 | 4,319 | 2,441 | 1,878 | 421 | 25 |
-- TODO: check why this is needed:
{-# OPTIONS_GHC -fsimpl-tick-factor=150 #-}
module Distribution.Server.Features.LegacyRedirects (
legacyRedirectsFeature
) where
import Distribution.Server.Framework
import Distribution.Server.Features.Upload
import Distribution.Package
( PackageIdentifier(..), packageName, PackageId )
import Distribution.Text
( display, simpleParse )
import Data.Version ( Version (..) )
import qualified System.FilePath.Posix as Posix (joinPath, splitExtension)
-- | A feature to provide redirection for URLs that existed in the first
-- incarnation of the hackage server.
--
legacyRedirectsFeature :: UploadFeature -> HackageFeature
legacyRedirectsFeature upload = (emptyHackageFeature "legacy") {
-- get rid of trailing resource and manually create a mapping?
featureResources =
[ (resourceAt "/..") {
resourceGet = [("", \_ -> serveLegacyGets)]
, resourcePost = [("", \_ -> serveLegacyPosts upload)]
}
]
, featureState = []
}
-- | Support for the old URL scheme from the first version of hackage.
--
-- | POST for package upload, particularly for cabal-install compatibility.
--
-- "check" no longer exists; it's now "candidates", and probably
-- provides too different functionality to redirect
serveLegacyPosts :: UploadFeature -> ServerPartE Response
serveLegacyPosts upload = msum
[ dir "packages" $ msum
[ dir "upload" $ movedUpload
--, postedMove "check" "/check"
]
, dir "cgi-bin" $ dir "hackage-scripts" $ msum
[ dir "protected" $ dir "upload-pkg" $ movedUpload
--, postedMove "check" "/check"
]
, dir "upload" movedUpload
]
where
-- We assume we don't need to serve a fancy HTML response
movedUpload :: ServerPartE Response
movedUpload = nullDir >> do
upResult <- uploadPackage upload
ok $ toResponse $ unlines $ uploadWarnings upResult
-- | GETs, both for cabal-install to use, and for links scattered throughout the web.
serveLegacyGets :: ServerPartE Response
serveLegacyGets = msum
[ simpleMove "00-index.tar.gz" "/packages/index.tar.gz"
, simpleMove "00-index.tar" "/packages/index.tar"
, dir "packages" $ msum
[ dir "archive" $ serveArchiveTree
, simpleMove "hackage.html" "/"
, simpleMove "00-index.tar.gz" "/packages/index.tar.gz"
--also search.html, advancedsearch.html, accounts.html, and admin.html
]
, dir "cgi-bin" $ dir "hackage-scripts" $ msum
[ dir "package" $ path $ \packageId -> method GET >> nullDir >>
(movedPermanently ("/package/" ++ display (packageId :: PackageId)) $
toResponse "")
]
, dir "package" $ path $ \fileName -> method GET >> nullDir >>
case Posix.splitExtension fileName of
(fileName', ".gz") -> case Posix.splitExtension fileName' of
(packageStr, ".tar") -> case simpleParse packageStr of
Just pkgid ->
movedPermanently (packageTarball pkgid) $ toResponse ""
_ -> mzero
_ -> mzero
_ -> mzero
, simpleMove "recent" "/packages/recent"
, simpleMove "recent.html" "/packages/recent.html"
, simpleMove "recent.rss" "/packages/recent.rss"
]
-- Some of the old-style paths may contain a version number
-- or the text 'latest'. We represent the path '$pkgName/latest'
-- as a package id of '$pkgName' in the new url schemes.
data VersionOrLatest
= V Version
| Latest
instance FromReqURI VersionOrLatest where
fromReqURI "latest" = Just Latest
fromReqURI str = V <$> fromReqURI str
volToVersion :: VersionOrLatest -> Version
volToVersion Latest = Version [] []
volToVersion (V v) = v
serveArchiveTree :: ServerPartE Response
serveArchiveTree = msum
[ simpleMove "pkg-list.html" "/packages/"
, dir "package" $ path $ \fileName -> method GET >> nullDir >>
case Posix.splitExtension fileName of
(fileName', ".gz") -> case Posix.splitExtension fileName' of
(packageStr, ".tar") -> case simpleParse packageStr of
Just pkgid ->
movedPermanently (packageTarball pkgid) $ toResponse ""
_ -> mzero
_ -> mzero
_ -> mzero
, simpleMove "00-index.tar.gz" "/packages/index.tar.gz"
, simpleMove "recent.html" "/packages/recent"
, simpleMove "recent.rss" "/packages/recent.rss"
, simpleMove "00-hoogle.tar.gz" "/packages/hoogle.tar.gz"
, path $ \name -> do
msum
[ path $ \version ->
let pkgid = PackageIdentifier {pkgName = name, pkgVersion = volToVersion version}
in msum
[ simpleMove (display pkgid ++ ".tar.gz") (packageTarball pkgid)
, simpleMove (display name ++ ".cabal") (cabalPath pkgid)
, dir "doc" $ dir "html" $ remainingPath $ \paths ->
let doc = Posix.joinPath paths
in simpleMoveTo (docPath pkgid doc)
]
]
]
where
docPath pkgid file = "/package/" ++ display pkgid ++ "/" ++ "docs/" ++ file
cabalPath pkgid = "/package/" ++ display pkgid ++ "/"
++ display (packageName pkgid) ++ ".cabal"
packageTarball :: PackageId -> String
packageTarball pkgid = "/package/" ++ display pkgid
++ "/" ++ display pkgid ++ ".tar.gz"
-- HTTP 301 is suitable for permanently redirecting pages
simpleMove :: String -> String -> ServerPartE Response
simpleMove from to = dir from $ simpleMoveTo to
simpleMoveTo :: String -> ServerPartE Response
simpleMoveTo to = method GET >> nullDir >> movedPermanently to (toResponse "")
|
agrafix/hackage-server
|
Distribution/Server/Features/LegacyRedirects.hs
|
bsd-3-clause
| 5,541 | 0 | 26 | 1,279 | 1,248 | 647 | 601 | 98 | 4 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE CPP #-}
module Main where
import Control.Monad (void)
import Data.Maybe (fromMaybe)
#if !(MIN_VERSION_base(4,11,0))
import Data.Monoid
#endif
import qualified Graphics.Vty as V
import Lens.Micro ((^.))
import qualified Brick.AttrMap as A
import qualified Brick.Main as M
import Brick.Types (Widget)
import qualified Brick.Types as T
import Brick.Util (fg, on)
import qualified Brick.Widgets.Border as B
import qualified Brick.Widgets.Center as C
import Brick.Widgets.Core (hLimit, str, vBox, vLimit, withAttr, (<+>))
import qualified Brick.Widgets.List as L
import qualified Data.Vector as Vec
drawUI :: (Show a) => L.List () a -> [Widget ()]
drawUI l = [ui]
where
label = str "Item " <+> cur <+> str " of " <+> total
cur = case l^.(L.listSelectedL) of
Nothing -> str "-"
Just i -> str (show (i + 1))
total = str $ show $ Vec.length $ l^.(L.listElementsL)
box = B.borderWithLabel label $
hLimit 25 $
vLimit 15 $
L.renderList listDrawElement True l
ui = C.vCenter $ vBox [ C.hCenter box
, str " "
, C.hCenter $ str "Press +/- to add/remove list elements."
, C.hCenter $ str "Press Esc to exit."
]
appEvent :: L.List () Char -> T.BrickEvent () e -> T.EventM () (T.Next (L.List () Char))
appEvent l (T.VtyEvent e) =
case e of
V.EvKey (V.KChar '+') [] ->
let el = nextElement (L.listElements l)
pos = Vec.length $ l^.(L.listElementsL)
in M.continue $ L.listInsert pos el l
V.EvKey (V.KChar '-') [] ->
case l^.(L.listSelectedL) of
Nothing -> M.continue l
Just i -> M.continue $ L.listRemove i l
V.EvKey V.KEsc [] -> M.halt l
ev -> M.continue =<< (L.handleListEventVi L.handleListEvent) ev l
where
nextElement :: Vec.Vector Char -> Char
nextElement v = fromMaybe '?' $ Vec.find (flip Vec.notElem v) (Vec.fromList ['a' .. 'z'])
appEvent l _ = M.continue l
listDrawElement :: (Show a) => Bool -> a -> Widget ()
listDrawElement sel a =
let selStr s = if sel
then withAttr customAttr (str $ "<" <> s <> ">")
else str s
in C.hCenter $ str "Item " <+> (selStr $ show a)
initialState :: L.List () Char
initialState = L.list () (Vec.fromList ['a','b','c']) 1
customAttr :: A.AttrName
customAttr = L.listSelectedAttr <> "custom"
theMap :: A.AttrMap
theMap = A.attrMap V.defAttr
[ (L.listAttr, V.white `on` V.blue)
, (L.listSelectedAttr, V.blue `on` V.white)
, (customAttr, fg V.cyan)
]
theApp :: M.App (L.List () Char) e ()
theApp =
M.App { M.appDraw = drawUI
, M.appChooseCursor = M.showFirstCursor
, M.appHandleEvent = appEvent
, M.appStartEvent = return
, M.appAttrMap = const theMap
}
main :: IO ()
main = void $ M.defaultMain theApp initialState
|
sjakobi/brick
|
programs/ListViDemo.hs
|
bsd-3-clause
| 3,114 | 0 | 15 | 951 | 1,115 | 598 | 517 | 73 | 5 |
{-
Copyright (C) 2006-2010 John MacFarlane <[email protected]>
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
-}
{- |
Module : Text.Pandoc.Writers.Docbook
Copyright : Copyright (C) 2006-2010 John MacFarlane
License : GNU GPL, version 2 or above
Maintainer : John MacFarlane <[email protected]>
Stability : alpha
Portability : portable
Conversion of 'Pandoc' documents to Docbook XML.
-}
module Text.Pandoc.Writers.Docbook ( writeDocbook) where
import Text.Pandoc.Definition
import Text.Pandoc.XML
import Text.Pandoc.Shared
import Text.Pandoc.Templates (renderTemplate)
import Text.Pandoc.Readers.TeXMath
import Data.List ( isPrefixOf, intercalate, isSuffixOf )
import Data.Char ( toLower )
import Text.Pandoc.Highlighting ( languages, languagesByExtension )
import Text.Pandoc.Pretty
import Text.TeXMath
import qualified Text.XML.Light as Xml
import Data.Generics (everywhere, mkT)
-- | Convert list of authors to a docbook <author> section
authorToDocbook :: WriterOptions -> [Inline] -> Doc
authorToDocbook opts name' =
let name = render Nothing $ inlinesToDocbook opts name'
in if ',' `elem` name
then -- last name first
let (lastname, rest) = break (==',') name
firstname = removeLeadingSpace rest in
inTagsSimple "firstname" (text $ escapeStringForXML firstname) <>
inTagsSimple "surname" (text $ escapeStringForXML lastname)
else -- last name last
let namewords = words name
lengthname = length namewords
(firstname, lastname) = case lengthname of
0 -> ("","")
1 -> ("", name)
n -> (intercalate " " (take (n-1) namewords), last namewords)
in inTagsSimple "firstname" (text $ escapeStringForXML firstname) $$
inTagsSimple "surname" (text $ escapeStringForXML lastname)
-- | Convert Pandoc document to string in Docbook format.
writeDocbook :: WriterOptions -> Pandoc -> String
writeDocbook opts (Pandoc (Meta tit auths dat) blocks) =
let title = inlinesToDocbook opts tit
authors = map (authorToDocbook opts) auths
date = inlinesToDocbook opts dat
elements = hierarchicalize blocks
colwidth = if writerWrapText opts
then Just $ writerColumns opts
else Nothing
render' = render colwidth
opts' = if "/book>" `isSuffixOf`
(removeTrailingSpace $ writerTemplate opts)
then opts{ writerChapters = True }
else opts
startLvl = if writerChapters opts' then 0 else 1
main = render' $ vcat (map (elementToDocbook opts' startLvl) elements)
context = writerVariables opts ++
[ ("body", main)
, ("title", render' title)
, ("date", render' date) ] ++
[ ("author", render' a) | a <- authors ] ++
[ ("mathml", "yes") | case writerHTMLMathMethod opts of
MathML _ -> True
_ -> False ]
in if writerStandalone opts
then renderTemplate context $ writerTemplate opts
else main
-- | Convert an Element to Docbook.
elementToDocbook :: WriterOptions -> Int -> Element -> Doc
elementToDocbook opts _ (Blk block) = blockToDocbook opts block
elementToDocbook opts lvl (Sec _ _num id' title elements) =
-- Docbook doesn't allow sections with no content, so insert some if needed
let elements' = if null elements
then [Blk (Para [])]
else elements
tag = case lvl of
n | n == 0 -> "chapter"
| n >= 1 && n <= 5 -> "sect" ++ show n
| otherwise -> "simplesect"
in inTags True tag [("id",id')] $
inTagsSimple "title" (inlinesToDocbook opts title) $$
vcat (map (elementToDocbook opts (lvl + 1)) elements')
-- | Convert a list of Pandoc blocks to Docbook.
blocksToDocbook :: WriterOptions -> [Block] -> Doc
blocksToDocbook opts = vcat . map (blockToDocbook opts)
-- | Auxiliary function to convert Plain block to Para.
plainToPara :: Block -> Block
plainToPara (Plain x) = Para x
plainToPara x = x
-- | Convert a list of pairs of terms and definitions into a list of
-- Docbook varlistentrys.
deflistItemsToDocbook :: WriterOptions -> [([Inline],[[Block]])] -> Doc
deflistItemsToDocbook opts items =
vcat $ map (\(term, defs) -> deflistItemToDocbook opts term defs) items
-- | Convert a term and a list of blocks into a Docbook varlistentry.
deflistItemToDocbook :: WriterOptions -> [Inline] -> [[Block]] -> Doc
deflistItemToDocbook opts term defs =
let def' = concatMap (map plainToPara) defs
in inTagsIndented "varlistentry" $
inTagsIndented "term" (inlinesToDocbook opts term) $$
inTagsIndented "listitem" (blocksToDocbook opts def')
-- | Convert a list of lists of blocks to a list of Docbook list items.
listItemsToDocbook :: WriterOptions -> [[Block]] -> Doc
listItemsToDocbook opts items = vcat $ map (listItemToDocbook opts) items
-- | Convert a list of blocks into a Docbook list item.
listItemToDocbook :: WriterOptions -> [Block] -> Doc
listItemToDocbook opts item =
inTagsIndented "listitem" $ blocksToDocbook opts $ map plainToPara item
-- | Convert a Pandoc block element to Docbook.
blockToDocbook :: WriterOptions -> Block -> Doc
blockToDocbook _ Null = empty
blockToDocbook _ (Header _ _) = empty -- should not occur after hierarchicalize
blockToDocbook opts (Plain lst) = inlinesToDocbook opts lst
blockToDocbook opts (Para [Image txt (src,_)]) =
let capt = inlinesToDocbook opts txt
in inTagsIndented "figure" $
inTagsSimple "title" capt $$
(inTagsIndented "mediaobject" $
(inTagsIndented "imageobject"
(selfClosingTag "imagedata" [("fileref",src)])) $$
inTagsSimple "textobject" (inTagsSimple "phrase" capt))
blockToDocbook opts (Para lst) =
inTagsIndented "para" $ inlinesToDocbook opts lst
blockToDocbook opts (BlockQuote blocks) =
inTagsIndented "blockquote" $ blocksToDocbook opts blocks
blockToDocbook _ (CodeBlock (_,classes,_) str) =
text ("<programlisting" ++ lang ++ ">") <> cr <>
flush (text (escapeStringForXML str) <> cr <> text "</programlisting>")
where lang = if null langs
then ""
else " language=\"" ++ escapeStringForXML (head langs) ++
"\""
isLang l = map toLower l `elem` map (map toLower) languages
langsFrom s = if isLang s
then [s]
else languagesByExtension . map toLower $ s
langs = concatMap langsFrom classes
blockToDocbook opts (BulletList lst) =
inTagsIndented "itemizedlist" $ listItemsToDocbook opts lst
blockToDocbook _ (OrderedList _ []) = empty
blockToDocbook opts (OrderedList (start, numstyle, _) (first:rest)) =
let attribs = case numstyle of
DefaultStyle -> []
Decimal -> [("numeration", "arabic")]
Example -> [("numeration", "arabic")]
UpperAlpha -> [("numeration", "upperalpha")]
LowerAlpha -> [("numeration", "loweralpha")]
UpperRoman -> [("numeration", "upperroman")]
LowerRoman -> [("numeration", "lowerroman")]
items = if start == 1
then listItemsToDocbook opts (first:rest)
else (inTags True "listitem" [("override",show start)]
(blocksToDocbook opts $ map plainToPara first)) $$
listItemsToDocbook opts rest
in inTags True "orderedlist" attribs items
blockToDocbook opts (DefinitionList lst) =
inTagsIndented "variablelist" $ deflistItemsToDocbook opts lst
blockToDocbook _ (RawBlock "docbook" str) = text str -- raw XML block
-- we allow html for compatibility with earlier versions of pandoc
blockToDocbook _ (RawBlock "html" str) = text str -- raw XML block
blockToDocbook _ (RawBlock _ _) = empty
blockToDocbook _ HorizontalRule = empty -- not semantic
blockToDocbook opts (Table caption aligns widths headers rows) =
let captionDoc = if null caption
then empty
else inTagsIndented "title"
(inlinesToDocbook opts caption)
tableType = if isEmpty captionDoc then "informaltable" else "table"
percent w = show (truncate (100*w) :: Integer) ++ "*"
coltags = vcat $ zipWith (\w al -> selfClosingTag "colspec"
([("colwidth", percent w) | w > 0] ++
[("align", alignmentToString al)])) widths aligns
head' = if all null headers
then empty
else inTagsIndented "thead" $
tableRowToDocbook opts headers
body' = inTagsIndented "tbody" $
vcat $ map (tableRowToDocbook opts) rows
in inTagsIndented tableType $ captionDoc $$
(inTags True "tgroup" [("cols", show (length headers))] $
coltags $$ head' $$ body')
alignmentToString :: Alignment -> [Char]
alignmentToString alignment = case alignment of
AlignLeft -> "left"
AlignRight -> "right"
AlignCenter -> "center"
AlignDefault -> "left"
tableRowToDocbook :: WriterOptions
-> [[Block]]
-> Doc
tableRowToDocbook opts cols =
inTagsIndented "row" $ vcat $ map (tableItemToDocbook opts) cols
tableItemToDocbook :: WriterOptions
-> [Block]
-> Doc
tableItemToDocbook opts item =
inTags True "entry" [] $ vcat $ map (blockToDocbook opts) item
-- | Convert a list of inline elements to Docbook.
inlinesToDocbook :: WriterOptions -> [Inline] -> Doc
inlinesToDocbook opts lst = hcat $ map (inlineToDocbook opts) lst
-- | Convert an inline element to Docbook.
inlineToDocbook :: WriterOptions -> Inline -> Doc
inlineToDocbook _ (Str str) = text $ escapeStringForXML str
inlineToDocbook opts (Emph lst) =
inTagsSimple "emphasis" $ inlinesToDocbook opts lst
inlineToDocbook opts (Strong lst) =
inTags False "emphasis" [("role", "strong")] $ inlinesToDocbook opts lst
inlineToDocbook opts (Strikeout lst) =
inTags False "emphasis" [("role", "strikethrough")] $
inlinesToDocbook opts lst
inlineToDocbook opts (Superscript lst) =
inTagsSimple "superscript" $ inlinesToDocbook opts lst
inlineToDocbook opts (Subscript lst) =
inTagsSimple "subscript" $ inlinesToDocbook opts lst
inlineToDocbook opts (SmallCaps lst) =
inTags False "emphasis" [("role", "smallcaps")] $
inlinesToDocbook opts lst
inlineToDocbook opts (Quoted _ lst) =
inTagsSimple "quote" $ inlinesToDocbook opts lst
inlineToDocbook opts (Cite _ lst) =
inlinesToDocbook opts lst
inlineToDocbook _ (Code _ str) =
inTagsSimple "literal" $ text (escapeStringForXML str)
inlineToDocbook opts (Math t str)
| isMathML (writerHTMLMathMethod opts) =
case texMathToMathML dt str of
Right r -> inTagsSimple tagtype
$ text $ Xml.ppcElement conf
$ fixNS
$ removeAttr r
Left _ -> inlinesToDocbook opts
$ readTeXMath str
| otherwise = inlinesToDocbook opts $ readTeXMath str
where (dt, tagtype) = case t of
InlineMath -> (DisplayInline,"inlineequation")
DisplayMath -> (DisplayBlock,"informalequation")
conf = Xml.useShortEmptyTags (const False) Xml.defaultConfigPP
removeAttr e = e{ Xml.elAttribs = [] }
fixNS' qname = qname{ Xml.qPrefix = Just "mml" }
fixNS = everywhere (mkT fixNS')
inlineToDocbook _ (RawInline f x) | f == "html" || f == "docbook" = text x
| otherwise = empty
inlineToDocbook _ LineBreak = inTagsSimple "literallayout" empty
inlineToDocbook _ Space = space
inlineToDocbook opts (Link txt (src, _)) =
if isPrefixOf "mailto:" src
then let src' = drop 7 src
emailLink = inTagsSimple "email" $ text $
escapeStringForXML $ src'
in case txt of
[Code _ s] | s == src' -> emailLink
_ -> inlinesToDocbook opts txt <+>
char '(' <> emailLink <> char ')'
else (if isPrefixOf "#" src
then inTags False "link" [("linkend", drop 1 src)]
else inTags False "ulink" [("url", src)]) $
inlinesToDocbook opts txt
inlineToDocbook _ (Image _ (src, tit)) =
let titleDoc = if null tit
then empty
else inTagsIndented "objectinfo" $
inTagsIndented "title" (text $ escapeStringForXML tit)
in inTagsIndented "inlinemediaobject" $ inTagsIndented "imageobject" $
titleDoc $$ selfClosingTag "imagedata" [("fileref", src)]
inlineToDocbook opts (Note contents) =
inTagsIndented "footnote" $ blocksToDocbook opts contents
isMathML :: HTMLMathMethod -> Bool
isMathML (MathML _) = True
isMathML _ = False
|
beni55/pandoc
|
src/Text/Pandoc/Writers/Docbook.hs
|
gpl-2.0
| 14,067 | 0 | 20 | 4,062 | 3,554 | 1,827 | 1,727 | 245 | 13 |
module Control.Search.Combinator.Failure (failure) where
import Control.Search.Language
import Control.Search.GeneratorInfo
import Control.Search.Generator
import Control.Monatron.Monatron hiding (Abort, L, state, cont)
import Control.Monatron.IdT
failLoop uid _super =
commentEval $ Eval { structs = ([],[])
, treeState_ = []
, evalState_ = []
, pushLeftH = \_ -> return Skip
, pushRightH = \_ -> return Skip
, nextSameH = \_ -> return Skip
, nextDiffH = \_ -> return Skip
, bodyH = \i -> cachedAbort i
, addH = \_ -> return Skip
, failH = \i -> cachedAbort i
, returnH = \i -> cachedAbort i
-- , continue = \_ -> return true
, tryH = \i -> cachedAbort i
, startTryH = \i -> cachedAbort i
, tryLH = \_ -> return Skip
, intArraysE = []
, intVarsE = []
, boolArraysE = []
, deleteH = \i -> cachedAbort i
, initH = \_ -> return $ {- DebugOutput $ "fail" ++ show uid >>> -} Skip
, toString = "fail" ++ show uid
, canBranch = return False
, complete = const $ return false
}
failure :: Search
failure =
Search { mkeval = \super -> get >>= \uid -> return (failLoop uid super)
, runsearch = runIdT
}
|
neothemachine/monadiccp
|
src/Control/Search/Combinator/Failure.hs
|
bsd-3-clause
| 1,631 | 0 | 12 | 740 | 399 | 236 | 163 | 33 | 1 |
-- |
-- Module: Statistics
-- Description: Basic bounded statistics
-- Copyright: (c) 2011 National Institute of Aerospace / Galois, Inc.
--
-- Basic bounded statistics. In the following, a bound @n@ is given stating
-- the number of periods over which to compute the statistic (@n == 1@ computes
-- it only over the current period).
{-# LANGUAGE NoImplicitPrelude #-}
module Copilot.Library.Statistics
( max, min, sum, mean, meanNow ) where
import Copilot.Language
import Copilot.Library.Utils
-- | Summation.
sum :: ( Typed a, Num a, Eq a ) => Int -> Stream a -> Stream a
sum n s = nfoldl1 n (+) s
-- | Maximum value.
max :: ( Typed a, Ord a ) => Int -> Stream a -> Stream a
max n s = nfoldl1 n largest s
where largest = \ x y -> mux ( x >= y ) x y
-- | Minimum value.
min :: ( Typed a, Ord a ) => Int -> Stream a -> Stream a
min n s = nfoldl1 n smallest s
where smallest = \ x y -> mux ( x <= y ) x y
-- | Mean value. @n@ must not overflow
-- for word size @a@ for streams over which the computation is performed.
mean :: ( Typed a, Eq a, Fractional a ) => Int -> Stream a -> Stream a
mean n s = ( sum n s ) / ( fromIntegral n )
-- | Mean value over the current set of streams passed in.
meanNow :: ( Typed a, Integral a ) => [ Stream a ] -> Stream a
meanNow [] =
badUsage "list of arguments to meanNow must be nonempty"
meanNow ls = ( foldl1 (+) ls ) `div` ( fromIntegral $ length ls )
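-- A usage sketch (@temps@ is an assumed stream of fractional samples):
--
-- > avgTemp  = mean 5 temps   -- mean over the last 5 periods
-- > peakTemp = max 5 temps    -- largest sample within the last 5 periods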
|
fredyr/copilot-libraries
|
src/Copilot/Library/Statistics.hs
|
bsd-3-clause
| 1,412 | 0 | 10 | 324 | 415 | 224 | 191 | 19 | 1 |
{-
(c) The University of Glasgow 2006
(c) The GRASP/AQUA Project, Glasgow University, 1992-1998
-}
module VarEnv (
-- * Var, Id and TyVar environments (maps)
VarEnv, IdEnv, TyVarEnv, CoVarEnv,
-- ** Manipulating these environments
emptyVarEnv, unitVarEnv, mkVarEnv,
elemVarEnv, varEnvElts, varEnvKeys,
extendVarEnv, extendVarEnv_C, extendVarEnv_Acc, extendVarEnvList,
plusVarEnv, plusVarEnv_C, plusVarEnv_CD, alterVarEnv,
delVarEnvList, delVarEnv,
minusVarEnv, intersectsVarEnv,
lookupVarEnv, lookupVarEnv_NF, lookupWithDefaultVarEnv,
mapVarEnv, zipVarEnv,
modifyVarEnv, modifyVarEnv_Directly,
isEmptyVarEnv, foldVarEnv,
elemVarEnvByKey, lookupVarEnv_Directly,
filterVarEnv, filterVarEnv_Directly, restrictVarEnv,
partitionVarEnv,
-- * Deterministic Var environments (maps)
DVarEnv,
-- ** Manipulating these environments
emptyDVarEnv,
extendDVarEnv,
lookupDVarEnv,
foldDVarEnv,
-- * The InScopeSet type
InScopeSet,
-- ** Operations on InScopeSets
emptyInScopeSet, mkInScopeSet, delInScopeSet,
extendInScopeSet, extendInScopeSetList, extendInScopeSetSet,
getInScopeVars, lookupInScope, lookupInScope_Directly,
unionInScope, elemInScopeSet, uniqAway,
-- * The RnEnv2 type
RnEnv2,
-- ** Operations on RnEnv2s
mkRnEnv2, rnBndr2, rnBndrs2,
rnOccL, rnOccR, inRnEnvL, inRnEnvR, rnOccL_maybe, rnOccR_maybe,
rnBndrL, rnBndrR, nukeRnEnvL, nukeRnEnvR,
delBndrL, delBndrR, delBndrsL, delBndrsR,
addRnInScopeSet,
rnEtaL, rnEtaR,
rnInScope, rnInScopeSet, lookupRnInScope,
-- * TidyEnv and its operation
TidyEnv,
emptyTidyEnv
) where
import OccName
import Var
import VarSet
import UniqFM
import UniqDFM
import Unique
import Util
import Maybes
import Outputable
import StaticFlags
import FastString
{-
************************************************************************
* *
In-scope sets
* *
************************************************************************
-}
-- | A set of variables that are in scope at some point
data InScopeSet = InScope (VarEnv Var) {-# UNPACK #-} !Int
-- The (VarEnv Var) is just a VarSet. But we write it like
-- this to remind ourselves that you can look up a Var in
-- the InScopeSet. Typically the InScopeSet contains the
-- canonical version of the variable (e.g. with an informative
-- unfolding), so this lookup is useful.
--
-- INVARIANT: the VarEnv maps (the Unique of) a variable to
-- a variable with the same Unique. (This was not
-- the case in the past, when we had a grievous hack
-- mapping var1 to var2.)
--
-- The Int is a kind of hash-value used by uniqAway
-- For example, it might be the size of the set
-- INVARIANT: it's not zero; we use it as a multiplier in uniqAway
instance Outputable InScopeSet where
ppr (InScope s _) = ptext (sLit "InScope") <+> ppr s
emptyInScopeSet :: InScopeSet
emptyInScopeSet = InScope emptyVarSet 1
getInScopeVars :: InScopeSet -> VarEnv Var
getInScopeVars (InScope vs _) = vs
mkInScopeSet :: VarEnv Var -> InScopeSet
mkInScopeSet in_scope = InScope in_scope 1
extendInScopeSet :: InScopeSet -> Var -> InScopeSet
extendInScopeSet (InScope in_scope n) v = InScope (extendVarEnv in_scope v v) (n + 1)
extendInScopeSetList :: InScopeSet -> [Var] -> InScopeSet
extendInScopeSetList (InScope in_scope n) vs
= InScope (foldl (\s v -> extendVarEnv s v v) in_scope vs)
(n + length vs)
extendInScopeSetSet :: InScopeSet -> VarEnv Var -> InScopeSet
extendInScopeSetSet (InScope in_scope n) vs
= InScope (in_scope `plusVarEnv` vs) (n + sizeUFM vs)
delInScopeSet :: InScopeSet -> Var -> InScopeSet
delInScopeSet (InScope in_scope n) v = InScope (in_scope `delVarEnv` v) n
elemInScopeSet :: Var -> InScopeSet -> Bool
elemInScopeSet v (InScope in_scope _) = v `elemVarEnv` in_scope
-- | Look up a variable the 'InScopeSet'. This lets you map from
-- the variable's identity (unique) to its full value.
lookupInScope :: InScopeSet -> Var -> Maybe Var
lookupInScope (InScope in_scope _) v = lookupVarEnv in_scope v
lookupInScope_Directly :: InScopeSet -> Unique -> Maybe Var
lookupInScope_Directly (InScope in_scope _) uniq
= lookupVarEnv_Directly in_scope uniq
unionInScope :: InScopeSet -> InScopeSet -> InScopeSet
unionInScope (InScope s1 _) (InScope s2 n2)
= InScope (s1 `plusVarEnv` s2) n2
-- | @uniqAway in_scope v@ finds a unique that is not used in the
-- in-scope set, and gives that to v.
uniqAway :: InScopeSet -> Var -> Var
-- It starts with v's current unique, of course, in the hope that it won't
-- have to change, and thereafter uses a combination of that and the hash-code
-- found in the in-scope set.
uniqAway in_scope var
| var `elemInScopeSet` in_scope = uniqAway' in_scope var -- Make a new one
| otherwise = var -- Nothing to do
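-- A typical use is cloning a binder during substitution (a sketch):
--
-- > let b'        = uniqAway in_scope b
-- >     in_scope' = extendInScopeSet in_scope b'
--
-- so that the clone cannot clash with anything already in scope.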
uniqAway' :: InScopeSet -> Var -> Var
-- This one *always* makes up a new variable
uniqAway' (InScope set n) var
= try 1
where
orig_unique = getUnique var
try k
| debugIsOn && (k > 1000)
= pprPanic "uniqAway loop:" (ppr k <+> text "tries" <+> ppr var <+> int n)
| uniq `elemVarSetByKey` set = try (k + 1)
| debugIsOn && opt_PprStyle_Debug && (k > 3)
= pprTrace "uniqAway:" (ppr k <+> text "tries" <+> ppr var <+> int n)
setVarUnique var uniq
| otherwise = setVarUnique var uniq
where
uniq = deriveUnique orig_unique (n * k)
{-
************************************************************************
* *
Dual renaming
* *
************************************************************************
-}
-- | When we are comparing (or matching) types or terms, we are faced with
-- \"going under\" corresponding binders. E.g. when comparing:
--
-- > \x. e1 ~ \y. e2
--
-- Basically we want to rename [@x@ -> @y@] or [@y@ -> @x@], but there are lots of
-- things we must be careful of. In particular, @x@ might be free in @e2@, or
-- y in @e1@. So the idea is that we come up with a fresh binder that is free
-- in neither, and rename @x@ and @y@ respectively. That means we must maintain:
--
-- 1. A renaming for the left-hand expression
--
-- 2. A renaming for the right-hand expressions
--
-- 3. An in-scope set
--
-- Furthermore, when matching, we want to be able to have an 'occurs check',
-- to prevent:
--
-- > \x. f ~ \y. y
--
-- matching with [@f@ -> @y@]. So for each expression we want to know that set of
-- locally-bound variables. That is precisely the domain of the mappings 1.
-- and 2., but we must ensure that we always extend the mappings as we go in.
--
-- All of this information is bundled up in the 'RnEnv2'
data RnEnv2
= RV2 { envL :: VarEnv Var -- Renaming for Left term
, envR :: VarEnv Var -- Renaming for Right term
, in_scope :: InScopeSet } -- In scope in left or right terms
-- The renamings envL and envR are *guaranteed* to contain a binding
-- for every variable bound as we go into the term, even if it is not
-- renamed. That way we can ask what variables are locally bound
-- (inRnEnvL, inRnEnvR)
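-- A usage sketch (illustrative): to compare @\x. x@ against @\y. y@ one would
-- start from @mkRnEnv2 in_scope@, call @rnBndr2 env x y@ when going under the
-- two binders (mapping both to one fresh variable), and then compare the
-- bodies, renaming occurrences with 'rnOccL' and 'rnOccR'.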
mkRnEnv2 :: InScopeSet -> RnEnv2
mkRnEnv2 vars = RV2 { envL = emptyVarEnv
, envR = emptyVarEnv
, in_scope = vars }
addRnInScopeSet :: RnEnv2 -> VarEnv Var -> RnEnv2
addRnInScopeSet env vs
| isEmptyVarEnv vs = env
| otherwise = env { in_scope = extendInScopeSetSet (in_scope env) vs }
rnInScope :: Var -> RnEnv2 -> Bool
rnInScope x env = x `elemInScopeSet` in_scope env
rnInScopeSet :: RnEnv2 -> InScopeSet
rnInScopeSet = in_scope
rnBndrs2 :: RnEnv2 -> [Var] -> [Var] -> RnEnv2
-- ^ Applies 'rnBndr2' to several variables: the two variable lists must be of equal length
rnBndrs2 env bsL bsR = foldl2 rnBndr2 env bsL bsR
rnBndr2 :: RnEnv2 -> Var -> Var -> RnEnv2
-- ^ @rnBndr2 env bL bR@ goes under a binder @bL@ in the Left term,
-- and binder @bR@ in the Right term.
-- It finds a new binder, @new_b@,
-- and returns an environment mapping @bL -> new_b@ and @bR -> new_b@
rnBndr2 (RV2 { envL = envL, envR = envR, in_scope = in_scope }) bL bR
= RV2 { envL = extendVarEnv envL bL new_b -- See Note
, envR = extendVarEnv envR bR new_b -- [Rebinding]
, in_scope = extendInScopeSet in_scope new_b }
where
-- Find a new binder not in scope in either term
new_b | not (bL `elemInScopeSet` in_scope) = bL
| not (bR `elemInScopeSet` in_scope) = bR
| otherwise = uniqAway' in_scope bL
-- Note [Rebinding]
-- If the new var is the same as the old one, note that
-- the extendVarEnv *deletes* any current renaming
-- E.g. (\x. \x. ...) ~ (\y. \z. ...)
--
-- Inside \x \y { [x->y], [y->y], {y} }
-- \x \z { [x->x], [y->y, z->x], {y,x} }
rnBndrL :: RnEnv2 -> Var -> (RnEnv2, Var)
-- ^ Similar to 'rnBndr2' but used when there's a binder on the left
-- side only.
rnBndrL (RV2 { envL = envL, envR = envR, in_scope = in_scope }) bL
= (RV2 { envL = extendVarEnv envL bL new_b
, envR = envR
, in_scope = extendInScopeSet in_scope new_b }, new_b)
where
new_b = uniqAway in_scope bL
rnBndrR :: RnEnv2 -> Var -> (RnEnv2, Var)
-- ^ Similar to 'rnBndr2' but used when there's a binder on the right
-- side only.
rnBndrR (RV2 { envL = envL, envR = envR, in_scope = in_scope }) bR
= (RV2 { envR = extendVarEnv envR bR new_b
, envL = envL
, in_scope = extendInScopeSet in_scope new_b }, new_b)
where
new_b = uniqAway in_scope bR
rnEtaL :: RnEnv2 -> Var -> (RnEnv2, Var)
-- ^ Similar to 'rnBndrL' but used for eta expansion
-- See Note [Eta expansion]
rnEtaL (RV2 { envL = envL, envR = envR, in_scope = in_scope }) bL
= (RV2 { envL = extendVarEnv envL bL new_b
, envR = extendVarEnv envR new_b new_b -- Note [Eta expansion]
, in_scope = extendInScopeSet in_scope new_b }, new_b)
where
new_b = uniqAway in_scope bL
rnEtaR :: RnEnv2 -> Var -> (RnEnv2, Var)
-- ^ Similar to 'rnBndr2' but used for eta expansion
-- See Note [Eta expansion]
rnEtaR (RV2 { envL = envL, envR = envR, in_scope = in_scope }) bR
= (RV2 { envL = extendVarEnv envL new_b new_b -- Note [Eta expansion]
, envR = extendVarEnv envR bR new_b
, in_scope = extendInScopeSet in_scope new_b }, new_b)
where
new_b = uniqAway in_scope bR
delBndrL, delBndrR :: RnEnv2 -> Var -> RnEnv2
delBndrL rn@(RV2 { envL = env, in_scope = in_scope }) v
= rn { envL = env `delVarEnv` v, in_scope = in_scope `extendInScopeSet` v }
delBndrR rn@(RV2 { envR = env, in_scope = in_scope }) v
= rn { envR = env `delVarEnv` v, in_scope = in_scope `extendInScopeSet` v }
delBndrsL, delBndrsR :: RnEnv2 -> [Var] -> RnEnv2
delBndrsL rn@(RV2 { envL = env, in_scope = in_scope }) v
= rn { envL = env `delVarEnvList` v, in_scope = in_scope `extendInScopeSetList` v }
delBndrsR rn@(RV2 { envR = env, in_scope = in_scope }) v
= rn { envR = env `delVarEnvList` v, in_scope = in_scope `extendInScopeSetList` v }
rnOccL, rnOccR :: RnEnv2 -> Var -> Var
-- ^ Look up the renaming of an occurrence in the left or right term
rnOccL (RV2 { envL = env }) v = lookupVarEnv env v `orElse` v
rnOccR (RV2 { envR = env }) v = lookupVarEnv env v `orElse` v
rnOccL_maybe, rnOccR_maybe :: RnEnv2 -> Var -> Maybe Var
-- ^ Look up the renaming of an occurrence in the left or right term
rnOccL_maybe (RV2 { envL = env }) v = lookupVarEnv env v
rnOccR_maybe (RV2 { envR = env }) v = lookupVarEnv env v
inRnEnvL, inRnEnvR :: RnEnv2 -> Var -> Bool
-- ^ Tells whether a variable is locally bound
inRnEnvL (RV2 { envL = env }) v = v `elemVarEnv` env
inRnEnvR (RV2 { envR = env }) v = v `elemVarEnv` env
lookupRnInScope :: RnEnv2 -> Var -> Var
lookupRnInScope env v = lookupInScope (in_scope env) v `orElse` v
nukeRnEnvL, nukeRnEnvR :: RnEnv2 -> RnEnv2
-- ^ Wipe the left or right side renaming
nukeRnEnvL env = env { envL = emptyVarEnv }
nukeRnEnvR env = env { envR = emptyVarEnv }
{-
Note [Eta expansion]
~~~~~~~~~~~~~~~~~~~~
When matching
(\x.M) ~ N
we rename x to x', where x' is not in scope in
either term. Then we want to behave as if we'd seen
(\x'.M) ~ (\x'.N x')
Since x' isn't in scope in N, the form (\x'. N x') doesn't
capture any variables in N. But we must nevertheless extend
the envR with a binding [x' -> x'], to support the occurs check.
For example, if we don't do this, we can get silly matches like
forall a. (\y.a) ~ v
succeeding with [a -> v y], which is bogus of course.
************************************************************************
* *
Tidying
* *
************************************************************************
-}
-- | When tidying up print names, we keep a mapping of in-scope occ-names
-- (the 'TidyOccEnv') and a Var-to-Var of the current renamings
type TidyEnv = (TidyOccEnv, VarEnv Var)
emptyTidyEnv :: TidyEnv
emptyTidyEnv = (emptyTidyOccEnv, emptyVarEnv)
{-
************************************************************************
* *
\subsection{@VarEnv@s}
* *
************************************************************************
-}
type VarEnv elt = UniqFM elt
type IdEnv elt = VarEnv elt
type TyVarEnv elt = VarEnv elt
type CoVarEnv elt = VarEnv elt
emptyVarEnv :: VarEnv a
mkVarEnv :: [(Var, a)] -> VarEnv a
zipVarEnv :: [Var] -> [a] -> VarEnv a
unitVarEnv :: Var -> a -> VarEnv a
alterVarEnv :: (Maybe a -> Maybe a) -> VarEnv a -> Var -> VarEnv a
extendVarEnv :: VarEnv a -> Var -> a -> VarEnv a
extendVarEnv_C :: (a->a->a) -> VarEnv a -> Var -> a -> VarEnv a
extendVarEnv_Acc :: (a->b->b) -> (a->b) -> VarEnv b -> Var -> a -> VarEnv b
plusVarEnv :: VarEnv a -> VarEnv a -> VarEnv a
extendVarEnvList :: VarEnv a -> [(Var, a)] -> VarEnv a
lookupVarEnv_Directly :: VarEnv a -> Unique -> Maybe a
filterVarEnv_Directly :: (Unique -> a -> Bool) -> VarEnv a -> VarEnv a
partitionVarEnv :: (a -> Bool) -> VarEnv a -> (VarEnv a, VarEnv a)
restrictVarEnv :: VarEnv a -> VarSet -> VarEnv a
delVarEnvList :: VarEnv a -> [Var] -> VarEnv a
delVarEnv :: VarEnv a -> Var -> VarEnv a
minusVarEnv :: VarEnv a -> VarEnv b -> VarEnv a
intersectsVarEnv :: VarEnv a -> VarEnv a -> Bool
plusVarEnv_C :: (a -> a -> a) -> VarEnv a -> VarEnv a -> VarEnv a
plusVarEnv_CD :: (a -> a -> a) -> VarEnv a -> a -> VarEnv a -> a -> VarEnv a
mapVarEnv :: (a -> b) -> VarEnv a -> VarEnv b
modifyVarEnv :: (a -> a) -> VarEnv a -> Var -> VarEnv a
varEnvElts :: VarEnv a -> [a]
varEnvKeys :: VarEnv a -> [Unique]
isEmptyVarEnv :: VarEnv a -> Bool
lookupVarEnv :: VarEnv a -> Var -> Maybe a
filterVarEnv :: (a -> Bool) -> VarEnv a -> VarEnv a
lookupVarEnv_NF :: VarEnv a -> Var -> a
lookupWithDefaultVarEnv :: VarEnv a -> a -> Var -> a
elemVarEnv :: Var -> VarEnv a -> Bool
elemVarEnvByKey :: Unique -> VarEnv a -> Bool
foldVarEnv :: (a -> b -> b) -> b -> VarEnv a -> b
elemVarEnv = elemUFM
elemVarEnvByKey = elemUFM_Directly
alterVarEnv = alterUFM
extendVarEnv = addToUFM
extendVarEnv_C = addToUFM_C
extendVarEnv_Acc = addToUFM_Acc
extendVarEnvList = addListToUFM
plusVarEnv_C = plusUFM_C
plusVarEnv_CD = plusUFM_CD
delVarEnvList = delListFromUFM
delVarEnv = delFromUFM
minusVarEnv = minusUFM
intersectsVarEnv e1 e2 = not (isEmptyVarEnv (e1 `intersectUFM` e2))
plusVarEnv = plusUFM
lookupVarEnv = lookupUFM
filterVarEnv = filterUFM
lookupWithDefaultVarEnv = lookupWithDefaultUFM
mapVarEnv = mapUFM
mkVarEnv = listToUFM
emptyVarEnv = emptyUFM
varEnvElts = eltsUFM
varEnvKeys = keysUFM
unitVarEnv = unitUFM
isEmptyVarEnv = isNullUFM
foldVarEnv = foldUFM
lookupVarEnv_Directly = lookupUFM_Directly
filterVarEnv_Directly = filterUFM_Directly
partitionVarEnv = partitionUFM
restrictVarEnv env vs = filterVarEnv_Directly keep env
where
keep u _ = u `elemVarSetByKey` vs
zipVarEnv tyvars tys = mkVarEnv (zipEqual "zipVarEnv" tyvars tys)
lookupVarEnv_NF env id = case lookupVarEnv env id of
Just xx -> xx
Nothing -> panic "lookupVarEnv_NF: Nothing"
{-
@modifyVarEnv@: Look up a thing in the VarEnv,
then mash it with the modify function, and put it back.
-}
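-- For example (illustrative): @modifyVarEnv (+1) env v@ bumps the entry for
-- @v@ when one is present, and returns @env@ unchanged when it is not.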
modifyVarEnv mangle_fn env key
= case (lookupVarEnv env key) of
Nothing -> env
Just xx -> extendVarEnv env key (mangle_fn xx)
modifyVarEnv_Directly :: (a -> a) -> UniqFM a -> Unique -> UniqFM a
modifyVarEnv_Directly mangle_fn env key
= case (lookupUFM_Directly env key) of
Nothing -> env
Just xx -> addToUFM_Directly env key (mangle_fn xx)
-- Deterministic VarEnv
-- See Note [Deterministic UniqFM] in UniqDFM for explanation why we need
-- DVarEnv.
type DVarEnv elt = UniqDFM elt
emptyDVarEnv :: DVarEnv a
emptyDVarEnv = emptyUDFM
extendDVarEnv :: DVarEnv a -> Var -> a -> DVarEnv a
extendDVarEnv = addToUDFM
lookupDVarEnv :: DVarEnv a -> Var -> Maybe a
lookupDVarEnv = lookupUDFM
foldDVarEnv :: (a -> b -> b) -> b -> DVarEnv a -> b
foldDVarEnv = foldUDFM
|
AlexanderPankiv/ghc
|
compiler/basicTypes/VarEnv.hs
|
bsd-3-clause
| 18,260 | 0 | 13 | 4,915 | 3,899 | 2,158 | 1,741 | 258 | 2 |
module Hasql.Postgres.PTI where
import Hasql.Postgres.Prelude hiding (bool)
import qualified Database.PostgreSQL.LibPQ as PQ
-- | A PostgreSQL type info
data PTI = PTI { ptiOID :: !OID, ptiArrayOID :: !(Maybe OID) }
-- | A Word32 and a LibPQ representation of an OID
data OID = OID { oidWord32 :: !Word32, oidPQ :: !PQ.Oid }
mkOID :: Word32 -> OID
mkOID x =
OID x ((PQ.Oid . fromIntegral) x)
mkPTI :: Word32 -> Maybe Word32 -> PTI
mkPTI oid arrayOID =
PTI (mkOID oid) (fmap mkOID arrayOID)
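-- For example (matching the constants below): @int4 = mkPTI 23 (Just 1007)@
-- pairs the OID of int4 with the OID of its array type.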
-- * Constants
-------------------------
abstime = mkPTI 702 (Just 1023)
aclitem = mkPTI 1033 (Just 1034)
bit = mkPTI 1560 (Just 1561)
bool = mkPTI 16 (Just 1000)
box = mkPTI 603 (Just 1020)
bpchar = mkPTI 1042 (Just 1014)
bytea = mkPTI 17 (Just 1001)
char = mkPTI 18 (Just 1002)
cid = mkPTI 29 (Just 1012)
cidr = mkPTI 650 (Just 651)
circle = mkPTI 718 (Just 719)
cstring = mkPTI 2275 (Just 1263)
date = mkPTI 1082 (Just 1182)
daterange = mkPTI 3912 (Just 3913)
float4 = mkPTI 700 (Just 1021)
float8 = mkPTI 701 (Just 1022)
gtsvector = mkPTI 3642 (Just 3644)
inet = mkPTI 869 (Just 1041)
int2 = mkPTI 21 (Just 1005)
int2vector = mkPTI 22 (Just 1006)
int4 = mkPTI 23 (Just 1007)
int4range = mkPTI 3904 (Just 3905)
int8 = mkPTI 20 (Just 1016)
int8range = mkPTI 3926 (Just 3927)
interval = mkPTI 1186 (Just 1187)
json = mkPTI 114 (Just 199)
line = mkPTI 628 (Just 629)
lseg = mkPTI 601 (Just 1018)
macaddr = mkPTI 829 (Just 1040)
money = mkPTI 790 (Just 791)
name = mkPTI 19 (Just 1003)
numeric = mkPTI 1700 (Just 1231)
numrange = mkPTI 3906 (Just 3907)
oid = mkPTI 26 (Just 1028)
oidvector = mkPTI 30 (Just 1013)
path = mkPTI 602 (Just 1019)
point = mkPTI 600 (Just 1017)
polygon = mkPTI 604 (Just 1027)
record = mkPTI 2249 (Just 2287)
refcursor = mkPTI 1790 (Just 2201)
regclass = mkPTI 2205 (Just 2210)
regconfig = mkPTI 3734 (Just 3735)
regdictionary = mkPTI 3769 (Just 3770)
regoper = mkPTI 2203 (Just 2208)
regoperator = mkPTI 2204 (Just 2209)
regproc = mkPTI 24 (Just 1008)
regprocedure = mkPTI 2202 (Just 2207)
regtype = mkPTI 2206 (Just 2211)
reltime = mkPTI 703 (Just 1024)
text = mkPTI 25 (Just 1009)
tid = mkPTI 27 (Just 1010)
time = mkPTI 1083 (Just 1183)
timestamp = mkPTI 1114 (Just 1115)
timestamptz = mkPTI 1184 (Just 1185)
timetz = mkPTI 1266 (Just 1270)
tinterval = mkPTI 704 (Just 1025)
tsquery = mkPTI 3615 (Just 3645)
tsrange = mkPTI 3908 (Just 3909)
tstzrange = mkPTI 3910 (Just 3911)
tsvector = mkPTI 3614 (Just 3643)
txid_snapshot = mkPTI 2970 (Just 2949)
unknown = mkPTI 705 Nothing
uuid = mkPTI 2950 (Just 2951)
varbit = mkPTI 1562 (Just 1563)
varchar = mkPTI 1043 (Just 1015)
void = mkPTI 2278 Nothing
xid = mkPTI 28 (Just 1011)
xml = mkPTI 142 (Just 143)
|
nikita-volkov/hasql-postgres
|
library/Hasql/Postgres/PTI.hs
|
mit
| 3,319 | 0 | 11 | 1,143 | 1,244 | 633 | 611 | 87 | 1 |
{-# LANGUAGE Trustworthy #-}
{-# LANGUAGE CPP
, ForeignFunctionInterface
, MagicHash
, UnboxedTuples
, ScopedTypeVariables
#-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-----------------------------------------------------------------------------
-- |
-- Module : Control.Concurrent
-- Copyright : (c) The University of Glasgow 2001
-- License : BSD-style (see the file libraries/base/LICENSE)
--
-- Maintainer : [email protected]
-- Stability : experimental
-- Portability : non-portable (concurrency)
--
-- A common interface to a collection of useful concurrency
-- abstractions.
--
-----------------------------------------------------------------------------
module Control.Concurrent (
-- * Concurrent Haskell
-- $conc_intro
-- * Basic concurrency operations
ThreadId,
#ifdef __GLASGOW_HASKELL__
myThreadId,
#endif
forkIO,
#ifdef __GLASGOW_HASKELL__
forkIOWithUnmask,
killThread,
throwTo,
#endif
-- ** Threads with affinity
forkOn,
forkOnWithUnmask,
getNumCapabilities,
threadCapability,
-- * Scheduling
-- $conc_scheduling
yield, -- :: IO ()
-- ** Blocking
-- $blocking
#ifdef __GLASGOW_HASKELL__
-- ** Waiting
threadDelay, -- :: Int -> IO ()
threadWaitRead, -- :: Int -> IO ()
threadWaitWrite, -- :: Int -> IO ()
#endif
-- * Communication abstractions
module Control.Concurrent.MVar,
module Control.Concurrent.Chan,
module Control.Concurrent.QSem,
module Control.Concurrent.QSemN,
module Control.Concurrent.SampleVar,
-- * Merging of streams
#ifndef __HUGS__
mergeIO, -- :: [a] -> [a] -> IO [a]
nmergeIO, -- :: [[a]] -> IO [a]
#endif
-- $merge
#ifdef __GLASGOW_HASKELL__
-- * Bound Threads
-- $boundthreads
rtsSupportsBoundThreads,
forkOS,
isCurrentThreadBound,
runInBoundThread,
runInUnboundThread,
#endif
-- * GHC's implementation of concurrency
-- |This section describes features specific to GHC's
-- implementation of Concurrent Haskell.
-- ** Haskell threads and Operating System threads
-- $osthreads
-- ** Terminating the program
-- $termination
-- ** Pre-emption
-- $preemption
-- * Deprecated functions
forkIOUnmasked
) where
import Prelude
import Control.Exception.Base as Exception
#ifdef __GLASGOW_HASKELL__
import GHC.Exception
import GHC.Conc hiding (threadWaitRead, threadWaitWrite)
import qualified GHC.Conc
import GHC.IO ( IO(..), unsafeInterleaveIO, unsafeUnmask )
import GHC.IORef ( newIORef, readIORef, writeIORef )
import GHC.Base
import System.Posix.Types ( Fd )
import Foreign.StablePtr
import Foreign.C.Types
import Control.Monad ( when )
#ifdef mingw32_HOST_OS
import Foreign.C
import System.IO
#endif
#endif
#ifdef __HUGS__
import Hugs.ConcBase
#endif
import Control.Concurrent.MVar
import Control.Concurrent.Chan
import Control.Concurrent.QSem
import Control.Concurrent.QSemN
import Control.Concurrent.SampleVar
#ifdef __HUGS__
type ThreadId = ()
#endif
{- $conc_intro
The concurrency extension for Haskell is described in the paper
/Concurrent Haskell/
<http://www.haskell.org/ghc/docs/papers/concurrent-haskell.ps.gz>.
Concurrency is \"lightweight\", which means that both thread creation
and context switching overheads are extremely low. Scheduling of
Haskell threads is done internally in the Haskell runtime system, and
doesn't make use of any operating system-supplied thread packages.
However, if you want to interact with a foreign library that expects your
program to use the operating system-supplied thread package, you can do so
by using 'forkOS' instead of 'forkIO'.
Haskell threads can communicate via 'MVar's, a kind of synchronised
mutable variable (see "Control.Concurrent.MVar"). Several common
concurrency abstractions can be built from 'MVar's, and these are
provided by the "Control.Concurrent" library.
In GHC, threads may also communicate via exceptions.
-}
{- $conc_scheduling
Scheduling may be either pre-emptive or co-operative,
depending on the implementation of Concurrent Haskell (see below
for information related to specific compilers). In a co-operative
system, context switches only occur when you use one of the
primitives defined in this module. This means that programs such
as:
> main = forkIO (write 'a') >> write 'b'
> where write c = putChar c >> write c
will print either @aaaaaaaaaaaaaa...@ or @bbbbbbbbbbbb...@,
instead of some random interleaving of @a@s and @b@s. In
practice, cooperative multitasking is sufficient for writing
simple graphical user interfaces.
-}
{- $blocking
Different Haskell implementations have different characteristics with
regard to which operations block /all/ threads.
Using GHC without the @-threaded@ option, all foreign calls will block
all other Haskell threads in the system, although I\/O operations will
not. With the @-threaded@ option, only foreign calls with the @unsafe@
attribute will block all other threads.
Using Hugs, all I\/O operations and foreign calls will block all other
Haskell threads.
-}
#ifndef __HUGS__
max_buff_size :: Int
max_buff_size = 1
mergeIO :: [a] -> [a] -> IO [a]
nmergeIO :: [[a]] -> IO [a]
-- $merge
-- The 'mergeIO' and 'nmergeIO' functions fork one thread for each
-- input list that concurrently evaluates that list; the results are
-- merged into a single output list.
--
-- Note: Hugs does not provide these functions, since they require
-- preemptive multitasking.
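-- A usage sketch (the exact interleaving is nondeterministic):
--
-- > do xs <- mergeIO [1,3,5] [2,4,6]
-- >    print xs    -- all six elements, in some interleaved order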
mergeIO ls rs
= newEmptyMVar >>= \ tail_node ->
newMVar tail_node >>= \ tail_list ->
newQSem max_buff_size >>= \ e ->
newMVar 2 >>= \ branches_running ->
let
buff = (tail_list,e)
in
forkIO (suckIO branches_running buff ls) >>
forkIO (suckIO branches_running buff rs) >>
takeMVar tail_node >>= \ val ->
signalQSem e >>
return val
type Buffer a
= (MVar (MVar [a]), QSem)
suckIO :: MVar Int -> Buffer a -> [a] -> IO ()
suckIO branches_running buff@(tail_list,e) vs
= case vs of
[] -> takeMVar branches_running >>= \ val ->
if val == 1 then
takeMVar tail_list >>= \ node ->
putMVar node [] >>
putMVar tail_list node
else
putMVar branches_running (val-1)
(x:xs) ->
waitQSem e >>
takeMVar tail_list >>= \ node ->
newEmptyMVar >>= \ next_node ->
unsafeInterleaveIO (
takeMVar next_node >>= \ y ->
signalQSem e >>
return y) >>= \ next_node_val ->
putMVar node (x:next_node_val) >>
putMVar tail_list next_node >>
suckIO branches_running buff xs
nmergeIO lss
= let
len = length lss
in
newEmptyMVar >>= \ tail_node ->
newMVar tail_node >>= \ tail_list ->
newQSem max_buff_size >>= \ e ->
newMVar len >>= \ branches_running ->
let
buff = (tail_list,e)
in
mapIO (\ x -> forkIO (suckIO branches_running buff x)) lss >>
takeMVar tail_node >>= \ val ->
signalQSem e >>
return val
where
mapIO f xs = sequence (map f xs)
#endif /* __HUGS__ */
#ifdef __GLASGOW_HASKELL__
-- ---------------------------------------------------------------------------
-- Bound Threads
{- $boundthreads
#boundthreads#
Support for multiple operating system threads and bound threads as described
below is currently only available in the GHC runtime system if you use the
/-threaded/ option when linking.
Other Haskell systems do not currently support multiple operating system threads.
A bound thread is a Haskell thread that is /bound/ to an operating system
thread. While the bound thread is still scheduled by the Haskell run-time
system, the operating system thread takes care of all the foreign calls made
by the bound thread.
To a foreign library, the bound thread will look exactly like an ordinary
operating system thread created using OS functions like @pthread_create@
or @CreateThread@.
Bound threads can be created using the 'forkOS' function below. All foreign
exported functions are run in a bound thread (bound to the OS thread that
called the function). Also, the @main@ action of every Haskell program is
run in a bound thread.
Why do we need this? Because if a foreign library is called from a thread
created using 'forkIO', it won't have access to any /thread-local state/ -
state variables that have specific values for each OS thread
(see POSIX's @pthread_key_create@ or Win32's @TlsAlloc@). Therefore, some
libraries (OpenGL, for example) will not work from a thread created using
'forkIO'. They work fine in threads created using 'forkOS' or when called
from @main@ or from a @foreign export@.
In terms of performance, 'forkOS' (aka bound) threads are much more
expensive than 'forkIO' (aka unbound) threads, because a 'forkOS'
thread is tied to a particular OS thread, whereas a 'forkIO' thread
can be run by any OS thread. Context-switching between a 'forkOS'
thread and a 'forkIO' thread is many times more expensive than between
two 'forkIO' threads.
Note in particular that the main program thread (the thread running
@Main.main@) is always a bound thread, so for good concurrency
performance you should ensure that the main thread is not doing
repeated communication with other threads in the system. Typically
this means forking subthreads to do the work using 'forkIO', and
waiting for the results in the main thread.
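A minimal sketch of that pattern, where @doWork@ stands in for the real
workload:
> main = do
>   done <- newEmptyMVar
>   _ <- forkIO (doWork >> putMVar done ())
>   takeMVar done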
-}
-- | 'True' if bound threads are supported.
-- If @rtsSupportsBoundThreads@ is 'False', 'isCurrentThreadBound'
-- will always return 'False' and both 'forkOS' and 'runInBoundThread' will
-- fail.
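--
-- A typical guard, sketched with a placeholder @realMain@:
--
-- > main = if rtsSupportsBoundThreads
-- >           then runInBoundThread realMain
-- >           else realMain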
foreign import ccall rtsSupportsBoundThreads :: Bool
{- |
Like 'forkIO', this sparks off a new thread to run the 'IO'
computation passed as the first argument, and returns the 'ThreadId'
of the newly created thread.
However, 'forkOS' creates a /bound/ thread, which is necessary if you
need to call foreign (non-Haskell) libraries that make use of
thread-local state, such as OpenGL (see "Control.Concurrent#boundthreads").
Using 'forkOS' instead of 'forkIO' makes no difference at all to the
scheduling behaviour of the Haskell runtime system. It is a common
misconception that you need to use 'forkOS' instead of 'forkIO' to
avoid blocking all the Haskell threads when making a foreign call;
this isn't the case. To allow foreign calls to be made without
blocking all the Haskell threads (with GHC), it is only necessary to
use the @-threaded@ option when linking your program, and to make sure
the foreign import is not marked @unsafe@.
-}
forkOS :: IO () -> IO ThreadId
foreign export ccall forkOS_entry
:: StablePtr (IO ()) -> IO ()
foreign import ccall "forkOS_entry" forkOS_entry_reimported
:: StablePtr (IO ()) -> IO ()
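-- C-callable entry point used by forkOS_createThread: dereference the
-- stable pointer and run the wrapped action.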
forkOS_entry :: StablePtr (IO ()) -> IO ()
forkOS_entry stableAction = do
action <- deRefStablePtr stableAction
action
foreign import ccall forkOS_createThread
:: StablePtr (IO ()) -> IO CInt
failNonThreaded :: IO a
failNonThreaded = fail $ "RTS doesn't support multiple OS threads "
++"(use ghc -threaded when linking)"
forkOS action0
| rtsSupportsBoundThreads = do
mv <- newEmptyMVar
b <- Exception.getMaskingState
let
-- async exceptions are masked in the child if they are masked
-- in the parent, as for forkIO (see #1048). forkOS_createThread
-- creates a thread with exceptions masked by default.
action1 = case b of
Unmasked -> unsafeUnmask action0
MaskedInterruptible -> action0
MaskedUninterruptible -> uninterruptibleMask_ action0
action_plus = Exception.catch action1 childHandler
entry <- newStablePtr (myThreadId >>= putMVar mv >> action_plus)
err <- forkOS_createThread entry
when (err /= 0) $ fail "Cannot create OS thread."
tid <- takeMVar mv
freeStablePtr entry
return tid
| otherwise = failNonThreaded
-- | Returns 'True' if the calling thread is /bound/, that is, if it is
-- safe to use foreign libraries that rely on thread-local state from the
-- calling thread.
isCurrentThreadBound :: IO Bool
isCurrentThreadBound = IO $ \ s# ->
case isCurrentThreadBound# s# of
(# s2#, flg #) -> (# s2#, not (flg ==# 0#) #)
{- |
Run the 'IO' computation passed as the first argument. If the calling thread
is not /bound/, a bound thread is created temporarily. @runInBoundThread@
doesn't finish until the 'IO' computation finishes.
You can wrap a series of foreign function calls that rely on thread-local state
with @runInBoundThread@ so that you can use them without knowing whether the
current thread is /bound/.
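A sketch, where @c_query@ stands for some foreign call that relies on
thread-local state:
> lookupValue key = runInBoundThread (c_query key)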
-}
runInBoundThread :: IO a -> IO a
runInBoundThread action
| rtsSupportsBoundThreads = do
bound <- isCurrentThreadBound
if bound
then action
else do
ref <- newIORef undefined
let action_plus = Exception.try action >>= writeIORef ref
bracket (newStablePtr action_plus)
freeStablePtr
(\cEntry -> forkOS_entry_reimported cEntry >> readIORef ref) >>=
unsafeResult
| otherwise = failNonThreaded
{- |
Run the 'IO' computation passed as the first argument. If the calling thread
is /bound/, an unbound thread is created temporarily using 'forkIO'.
@runInUnboundThread@ doesn't finish until the 'IO' computation finishes.
Use this function /only/ in the rare case that you have actually observed a
performance loss due to the use of bound threads. A program that
doesn't need its main thread to be bound and makes /heavy/ use of concurrency
(e.g. a web server) might want to wrap its @main@ action in
@runInUnboundThread@.
Note that exceptions which are thrown to the current thread are thrown in turn
to the thread that is executing the given computation. This ensures there's
always a way of killing the forked thread.
-}
runInUnboundThread :: IO a -> IO a
runInUnboundThread action = do
bound <- isCurrentThreadBound
if bound
then do
mv <- newEmptyMVar
mask $ \restore -> do
tid <- forkIO $ Exception.try (restore action) >>= putMVar mv
let wait = takeMVar mv `Exception.catch` \(e :: SomeException) ->
Exception.throwTo tid e >> wait
wait >>= unsafeResult
else action
unsafeResult :: Either SomeException a -> IO a
unsafeResult = either Exception.throwIO return
#endif /* __GLASGOW_HASKELL__ */
#ifdef __GLASGOW_HASKELL__
-- ---------------------------------------------------------------------------
-- threadWaitRead/threadWaitWrite
-- | Block the current thread until data is available to read on the
-- given file descriptor (GHC only).
--
-- This will throw an 'IOError' if the file descriptor was closed
-- while this thread was blocked. To safely close a file descriptor
-- that has been used with 'threadWaitRead', use
-- 'GHC.Conc.closeFdWith'.
threadWaitRead :: Fd -> IO ()
threadWaitRead fd
#ifdef mingw32_HOST_OS
-- we have no IO manager implementing threadWaitRead on Windows.
-- fdReady does the right thing, but we have to call it in a
-- separate thread, otherwise threadWaitRead won't be interruptible,
-- and this only works with -threaded.
| threaded = withThread (waitFd fd 0)
| otherwise = case fd of
0 -> do _ <- hWaitForInput stdin (-1)
return ()
-- hWaitForInput does work properly, but we can only
-- do this for stdin since we know its FD.
_ -> error "threadWaitRead requires -threaded on Windows, or use System.IO.hWaitForInput"
#else
= GHC.Conc.threadWaitRead fd
#endif
-- | Block the current thread until data can be written to the
-- given file descriptor (GHC only).
--
-- This will throw an 'IOError' if the file descriptor was closed
-- while this thread was blocked. To safely close a file descriptor
-- that has been used with 'threadWaitWrite', use
-- 'GHC.Conc.closeFdWith'.
threadWaitWrite :: Fd -> IO ()
threadWaitWrite fd
#ifdef mingw32_HOST_OS
| threaded = withThread (waitFd fd 1)
| otherwise = error "threadWaitWrite requires -threaded on Windows"
#else
= GHC.Conc.threadWaitWrite fd
#endif
#ifdef mingw32_HOST_OS
foreign import ccall unsafe "rtsSupportsBoundThreads" threaded :: Bool
withThread :: IO a -> IO a
withThread io = do
m <- newEmptyMVar
_ <- mask_ $ forkIO $ try io >>= putMVar m
x <- takeMVar m
case x of
Right a -> return a
Left e -> throwIO (e :: IOException)
waitFd :: Fd -> CInt -> IO ()
waitFd fd write = do
throwErrnoIfMinus1_ "fdReady" $
fdReady (fromIntegral fd) write iNFINITE 0
iNFINITE :: CInt
iNFINITE = 0xFFFFFFFF -- urgh
foreign import ccall safe "fdReady"
fdReady :: CInt -> CInt -> CInt -> CInt -> IO CInt
#endif
-- ---------------------------------------------------------------------------
-- More docs
{- $osthreads
#osthreads# In GHC, threads created by 'forkIO' are lightweight threads, and
are managed entirely by the GHC runtime. Typically Haskell
threads are an order of magnitude or two more efficient (in
terms of both time and space) than operating system threads.
The downside of having lightweight threads is that only one can
run at a time, so if one thread blocks in a foreign call, for
example, the other threads cannot continue. The GHC runtime
works around this by making use of full OS threads where
necessary. When the program is built with the @-threaded@
option (to link against the multithreaded version of the
runtime), a thread making a @safe@ foreign call will not block
the other threads in the system; another OS thread will take
over running Haskell threads until the original call returns.
The runtime maintains a pool of these /worker/ threads so that
multiple Haskell threads can be involved in external calls
simultaneously.
The "System.IO" library manages multiplexing in its own way. On
Windows systems it uses @safe@ foreign calls to ensure that
threads doing I\/O operations don't block the whole runtime,
whereas on Unix systems all the currently blocked I\/O requests
are managed by a single thread (the /IO manager thread/) using
a mechanism such as @epoll@ or @kqueue@, depending on what is
provided by the host operating system.
The runtime will run a Haskell thread using any of the available
worker OS threads. If you need control over which particular OS
thread is used to run a given Haskell thread, perhaps because
you need to call a foreign library that uses OS-thread-local
state, then you need bound threads (see "Control.Concurrent#boundthreads").
If you don't use the @-threaded@ option, then the runtime does
not make use of multiple OS threads. Foreign calls will block
all other running Haskell threads until the call returns. The
"System.IO" library still does multiplexing, so there can be multiple
threads doing I\/O, and this is handled internally by the runtime using
@select@.
-}
{- $termination
In a standalone GHC program, only the main thread is
required to terminate in order for the process to terminate.
Thus all other forked threads will simply terminate at the same
time as the main thread (the terminology for this kind of
behaviour is \"daemonic threads\").
If you want the program to wait for child threads to
finish before exiting, you need to program this yourself. A
simple mechanism is to have each child thread write to an
'MVar' when it completes, and have the main
thread wait on all the 'MVar's before
exiting:
> myForkIO :: IO () -> IO (MVar ())
> myForkIO io = do
> mvar <- newEmptyMVar
> forkIO (io `finally` putMVar mvar ())
> return mvar
Note that we use 'finally' from the
"Control.Exception" module to make sure that the
'MVar' is written to even if the thread dies or
is killed for some reason.
A better method is to keep a global list of all child
threads which we should wait for at the end of the program:
> children :: MVar [MVar ()]
> children = unsafePerformIO (newMVar [])
>
> waitForChildren :: IO ()
> waitForChildren = do
> cs <- takeMVar children
> case cs of
> [] -> return ()
> m:ms -> do
> putMVar children ms
> takeMVar m
> waitForChildren
>
> forkChild :: IO () -> IO ThreadId
> forkChild io = do
> mvar <- newEmptyMVar
> childs <- takeMVar children
> putMVar children (mvar:childs)
> forkIO (io `finally` putMVar mvar ())
>
> main =
> later waitForChildren $
> ...
The main thread principle also applies to calls to Haskell from
outside, using @foreign export@. When the @foreign export@ed
function is invoked, it starts a new main thread, and it returns
when this main thread terminates. If the call causes new
threads to be forked, they may remain in the system after the
@foreign export@ed function has returned.
-}
{- $preemption
GHC implements pre-emptive multitasking: the execution of
threads is interleaved in a random fashion. More specifically,
a thread may be pre-empted whenever it allocates some memory,
which unfortunately means that tight loops which do no
allocation tend to lock out other threads (this only seems to
happen with pathological benchmark-style code, however).
The rescheduling timer runs on a 20ms granularity by
default, but this may be altered using the
@-i\<n\>@ RTS option. After a rescheduling
\"tick\" the running thread is pre-empted as soon as
possible.
One final note: the
@aaaa@ @bbbb@ example may not
work too well on GHC (see Scheduling, above), due
to the locking on a 'System.IO.Handle'. Only one thread
may hold the lock on a 'System.IO.Handle' at any one
time, so if a reschedule happens while a thread is holding the
lock, the other thread won't be able to run. The upshot is that
the switch from @aaaa@ to
@bbbb@ happens infrequently. It can be
improved by lowering the reschedule tick period. We also have a
patch that causes a reschedule whenever a thread waiting on a
lock is woken up, but we haven't found it to be useful for anything
other than this example :-)
-}
#endif /* __GLASGOW_HASKELL__ */
|
mightymoose/liquidhaskell
|
benchmarks/base-4.5.1.0/Control/Concurrent.hs
|
bsd-3-clause
| 23,468 | 0 | 24 | 5,877 | 2,061 | 1,115 | 946 | 82 | 3 |
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<helpset version="2.0" xml:lang="ru-RU">
<title>AJAX Spider | ZAP Extensions</title>
<maps>
<homeID>top</homeID>
<mapref location="map.jhm"/>
</maps>
<view>
<name>TOC</name>
<label>Contents</label>
<type>org.zaproxy.zap.extension.help.ZapTocView</type>
<data>toc.xml</data>
</view>
<view>
<name>Index</name>
<label>Индекс</label>
<type>javax.help.IndexView</type>
<data>index.xml</data>
</view>
<view>
<name>Search</name>
<label>Поиск</label>
<type>javax.help.SearchView</type>
<data engine="com.sun.java.help.search.DefaultSearchEngine">
JavaHelpSearch
</data>
</view>
<view>
<name>Favorites</name>
<label>Favorites</label>
<type>javax.help.FavoritesView</type>
</view>
</helpset>
|
ccgreen13/zap-extensions
|
src/org/zaproxy/zap/extension/spiderAjax/resources/help_ru_RU/helpset_ru_RU.hs
|
apache-2.0
| 985 | 80 | 66 | 160 | 435 | 219 | 216 | -1 | -1 |
module Main where
import DynFlags
import Control.Monad
import Data.List
-- Verify bogus flags aren't printed on flagsForCompletion and
-- allNonDeprecatedFlags:
-- * -fwarn-
-- * -fno-warn-
--
-- Should print nothing
main :: IO ()
main = mapM_ print $ fwarnFlags (flagsForCompletion True) ++ nonDepFwarnFlags
-- Get flags beginning with -fwarn- and -fno-warn-
fwarnFlags :: [String] -> [String]
fwarnFlags = filter isFwarn
where isFwarn flag = any (flip isPrefixOf $ flag) ["-fwarn-", "-fno-warn"]
-- Get suggested flags for -fwarn-, -fno-warn-
nonDepFwarnFlags :: [String]
nonDepFwarnFlags = filter isFwarn allNonDeprecatedFlags
where isFwarn "-fwarn-" = True
isFwarn "-fno-warn-" = True
isFwarn _ = False
|
ezyang/ghc
|
testsuite/tests/ghc-api/T12099.hs
|
bsd-3-clause
| 748 | 0 | 10 | 142 | 158 | 88 | 70 | 14 | 3 |
{-# LANGUAGE MagicHash #-}
-- !!! simple tests of primitive arrays
--
module Main ( main ) where
import GHC.Exts
import Data.Char ( chr )
import Control.Monad.ST
import Data.Array.ST
import Data.Array.Unboxed
import Data.Ratio
main = putStr
(test_chars ++ "\n" ++
test_ints ++ "\n" ++
test_addrs ++ "\n" ++
test_floats ++ "\n" ++
test_doubles ++ "\n" ++
test_ptrs ++ "\n")
-- Arr# Char# -------------------------------------------
-- (main effort is in packString#)
test_chars :: String
test_chars
= let arr# = f 1000
in
shows (lookup_range arr# 42# 416#) "\n"
where
f :: Int -> UArray Int Char
f size@(I# size#)
= runST (
-- allocate an array of the specified size
newArray_ (0, (size-1)) >>= \ arr# ->
-- fill in all elements; elem i has "i" put in it
fill_in arr# 0# (size# -# 1#) >>
-- freeze the puppy:
freeze arr#
)
fill_in :: STUArray s Int Char -> Int# -> Int# -> ST s ()
fill_in arr_in# first# last#
= if isTrue# (first# ># last#)
then return ()
else writeArray arr_in# (I# first#) ((chr (I# first#))) >>
fill_in arr_in# (first# +# 1#) last#
lookup_range :: UArray Int Char -> Int# -> Int# -> [Char]
lookup_range arr from# to#
= if isTrue# (from# ># to#)
then []
else (arr ! (I# from#))
: (lookup_range arr (from# +# 1#) to#)
-- Arr# Int# -------------------------------------------
test_ints :: String
test_ints
= let arr# = f 1000
in
shows (lookup_range arr# 42# 416#) "\n"
where
f :: Int -> UArray Int Int
f size@(I# size#)
= runST (
-- allocate an array of the specified size
newArray_ (0, (size-1)) >>= \ arr# ->
-- fill in all elements; elem i has i^2 put in it
fill_in arr# 0# (size# -# 1#) >>
-- freeze the puppy:
freeze arr#
)
fill_in :: STUArray s Int Int -> Int# -> Int# -> ST s ()
fill_in arr_in# first# last#
= if isTrue# (first# ># last#)
then return ()
else writeArray arr_in# (I# first#) (I# (first# *# first#)) >>
fill_in arr_in# (first# +# 1#) last#
lookup_range :: UArray Int Int -> Int# -> Int# -> [Int]
lookup_range arr from# to#
= if isTrue# (from# ># to#)
then []
else (arr ! (I# from#))
: (lookup_range arr (from# +# 1#) to#)
-- Arr# Addr# -------------------------------------------
test_addrs :: String
test_addrs
= let arr# = f 1000
in
shows (lookup_range arr# 42# 416#) "\n"
where
f :: Int -> UArray Int (Ptr ())
f size@(I# size#)
= runST (
-- allocate an array of the specified size
newArray_ (0, (size-1)) >>= \ arr# ->
-- fill in all elements; elem i has i^2 put in it
fill_in arr# 0# (size# -# 1#) >>
-- freeze the puppy:
freeze arr#
)
fill_in :: STUArray s Int (Ptr ()) -> Int# -> Int# -> ST s ()
fill_in arr_in# first# last#
= if isTrue# (first# ># last#)
then return ()
else writeArray arr_in# (I# first#)
(Ptr (int2Addr# (first# *# first#))) >>
fill_in arr_in# (first# +# 1#) last#
lookup_range :: UArray Int (Ptr ()) -> Int# -> Int# -> [ Int ]
lookup_range arr from# to#
= let
a2i (Ptr a#) = I# (addr2Int# a#)
in
if isTrue# (from# ># to#)
then []
else (a2i (arr ! (I# from#)))
: (lookup_range arr (from# +# 1#) to#)
-- Arr# Float# -------------------------------------------
test_floats :: String
test_floats
= let arr# = f 1000
in
shows (lookup_range arr# 42# 416#) "\n"
where
f :: Int -> UArray Int Float
f size@(I# size#)
= runST (
-- allocate an array of the specified size
newArray_ (0, (size-1)) >>= \ arr# ->
-- fill in all elements; elem i has "i * pi" put in it
fill_in arr# 0# (size# -# 1#) >>
-- freeze the puppy:
freeze arr#
)
fill_in :: STUArray s Int Float -> Int# -> Int# -> ST s ()
fill_in arr_in# first# last#
= if isTrue# (first# ># last#)
then return ()
{- else let e = ((fromIntegral (I# first#)) * pi)
in trace (show e) $ writeFloatArray arr_in# (I# first#) e >>
fill_in arr_in# (first# +# 1#) last#
-}
else writeArray arr_in# (I# first#) ((fromIntegral (I# first#)) * pi) >>
fill_in arr_in# (first# +# 1#) last#
lookup_range :: UArray Int Float -> Int# -> Int# -> [Float]
lookup_range arr from# to#
= if isTrue# (from# ># to#)
then []
else (arr ! (I# from#))
: (lookup_range arr (from# +# 1#) to#)
-- Arr# Double# -------------------------------------------
test_doubles :: String
test_doubles
= let arr# = f 1000
in
shows (lookup_range arr# 42# 416#) "\n"
where
f :: Int -> UArray Int Double
f size@(I# size#)
= runST (
-- allocate an array of the specified size
newArray_ (0, (size-1)) >>= \ arr# ->
-- fill in all elements; elem i has "i * pi" put in it
fill_in arr# 0# (size# -# 1#) >>
-- freeze the puppy:
freeze arr#
)
fill_in :: STUArray s Int Double -> Int# -> Int# -> ST s ()
fill_in arr_in# first# last#
= if isTrue# (first# ># last#)
then return ()
else writeArray arr_in# (I# first#) ((fromIntegral (I# first#)) * pi) >>
fill_in arr_in# (first# +# 1#) last#
lookup_range :: UArray Int Double -> Int# -> Int# -> [Double]
lookup_range arr from# to#
= if isTrue# (from# ># to#)
then []
else (arr ! (I# from#))
: (lookup_range arr (from# +# 1#) to#)
-- Arr# (Ratio Int) (ptrs) ---------------------------------
-- just like Int# test
test_ptrs :: String
test_ptrs
= let arr# = f 1000
in
shows (lookup_range arr# 42 416) "\n"
where
f :: Int -> Array Int (Ratio Int)
f size
= runST (
newArray (1, size) (3 % 5) >>= \ arr# ->
-- don't fill in the whole thing
fill_in arr# 1 400 >>
freeze arr#
)
fill_in :: STArray s Int (Ratio Int) -> Int -> Int -> ST s ()
fill_in arr_in# first last
= if (first > last)
then return ()
else writeArray arr_in# first (fromIntegral (first * first)) >>
fill_in arr_in# (first + 1) last
lookup_range :: Array Int (Ratio Int) -> Int -> Int -> [Ratio Int]
lookup_range array from too
= if (from > too)
then []
else (array ! from) : (lookup_range array (from + 1) too)
|
ezyang/ghc
|
testsuite/tests/codeGen/should_run/cgrun026.hs
|
bsd-3-clause
| 6,941 | 0 | 17 | 2,401 | 2,148 | 1,114 | 1,034 | 149 | 3 |
{-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE Strict #-}
{-# LANGUAGE Trustworthy #-}
{-# LANGUAGE TupleSections #-}
{-# LANGUAGE TypeFamilies #-}
module Futhark.CodeGen.ImpGen
( -- * Entry Points
compileProg,
-- * Pluggable Compiler
OpCompiler,
ExpCompiler,
CopyCompiler,
StmsCompiler,
AllocCompiler,
Operations (..),
defaultOperations,
MemLoc (..),
sliceMemLoc,
MemEntry (..),
ScalarEntry (..),
-- * Monadic Compiler Interface
ImpM,
localDefaultSpace,
askFunction,
newVNameForFun,
nameForFun,
askEnv,
localEnv,
localOps,
VTable,
getVTable,
localVTable,
subImpM,
subImpM_,
emit,
emitFunction,
hasFunction,
collect,
collect',
comment,
VarEntry (..),
ArrayEntry (..),
-- * Lookups
lookupVar,
lookupArray,
lookupMemory,
lookupAcc,
-- * Building Blocks
TV,
mkTV,
tvSize,
tvExp,
tvVar,
ToExp (..),
compileAlloc,
everythingVolatile,
compileBody,
compileBody',
compileLoopBody,
defCompileStms,
compileStms,
compileExp,
defCompileExp,
fullyIndexArray,
fullyIndexArray',
copy,
copyDWIM,
copyDWIMFix,
copyElementWise,
typeSize,
inBounds,
isMapTransposeCopy,
-- * Constructing code.
dLParams,
dFParams,
addLoopVar,
dScope,
dArray,
dPrim,
dPrimVol,
dPrim_,
dPrimV_,
dPrimV,
dPrimVE,
dIndexSpace,
dIndexSpace',
sFor,
sWhile,
sComment,
sIf,
sWhen,
sUnless,
sOp,
sDeclareMem,
sAlloc,
sAlloc_,
sArray,
sArrayInMem,
sAllocArray,
sAllocArrayPerm,
sStaticArray,
sWrite,
sUpdate,
sLoopNest,
(<--),
(<~~),
function,
warn,
module Language.Futhark.Warnings,
)
where
import Control.Monad.Reader
import Control.Monad.State
import Control.Monad.Writer
import Control.Parallel.Strategies
import Data.Bifunctor (first)
import qualified Data.DList as DL
import Data.Either
import Data.List (find)
import Data.List.NonEmpty (NonEmpty (..))
import qualified Data.Map.Strict as M
import Data.Maybe
import qualified Data.Set as S
import Data.String
import Futhark.CodeGen.ImpCode
( Bytes,
Count,
Elements,
bytes,
elements,
withElemType,
)
import qualified Futhark.CodeGen.ImpCode as Imp
import Futhark.CodeGen.ImpGen.Transpose
import Futhark.Construct hiding (ToExp (..))
import Futhark.IR.Mem
import qualified Futhark.IR.Mem.IxFun as IxFun
import Futhark.IR.SOACS (SOACS)
import Futhark.Util
import Futhark.Util.IntegralExp
import Futhark.Util.Loc (noLoc)
import Language.Futhark.Warnings
import Prelude hiding (quot)
-- | How to compile an t'Op'.
type OpCompiler rep r op = Pat (LetDec rep) -> Op rep -> ImpM rep r op ()
-- | How to compile some 'Stms'.
type StmsCompiler rep r op = Names -> Stms rep -> ImpM rep r op () -> ImpM rep r op ()
-- | How to compile an 'Exp'.
type ExpCompiler rep r op = Pat (LetDec rep) -> Exp rep -> ImpM rep r op ()
type CopyCompiler rep r op =
PrimType ->
MemLoc ->
MemLoc ->
ImpM rep r op ()
-- | An alternate way of compiling an allocation.
type AllocCompiler rep r op = VName -> Count Bytes (Imp.TExp Int64) -> ImpM rep r op ()
data Operations rep r op = Operations
{ opsExpCompiler :: ExpCompiler rep r op,
opsOpCompiler :: OpCompiler rep r op,
opsStmsCompiler :: StmsCompiler rep r op,
opsCopyCompiler :: CopyCompiler rep r op,
opsAllocCompilers :: M.Map Space (AllocCompiler rep r op)
}
-- | An operations set where the expression compiler is simply
-- 'defCompileExp'.
defaultOperations ::
(Mem rep inner, FreeIn op) =>
OpCompiler rep r op ->
Operations rep r op
defaultOperations opc =
Operations
{ opsExpCompiler = defCompileExp,
opsOpCompiler = opc,
opsStmsCompiler = defCompileStms,
opsCopyCompiler = defaultCopy,
opsAllocCompilers = mempty
}
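-- A backend would typically start from 'defaultOperations' and override
-- only the pieces it needs; the compilers named in this sketch are
-- hypothetical:
--
-- > myOps = (defaultOperations myOpCompiler)
-- >           { opsCopyCompiler = myCopyCompiler }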
-- | When an array is declared, this is where it is stored.
data MemLoc = MemLoc
{ memLocName :: VName,
memLocShape :: [Imp.DimSize],
memLocIxFun :: IxFun.IxFun (Imp.TExp Int64)
}
deriving (Eq, Show)
sliceMemLoc :: MemLoc -> Slice (Imp.TExp Int64) -> MemLoc
sliceMemLoc (MemLoc mem shape ixfun) slice =
MemLoc mem shape $ IxFun.slice ixfun slice
flatSliceMemLoc :: MemLoc -> FlatSlice (Imp.TExp Int64) -> MemLoc
flatSliceMemLoc (MemLoc mem shape ixfun) slice =
MemLoc mem shape $ IxFun.flatSlice ixfun slice
data ArrayEntry = ArrayEntry
{ entryArrayLoc :: MemLoc,
entryArrayElemType :: PrimType
}
deriving (Show)
entryArrayShape :: ArrayEntry -> [Imp.DimSize]
entryArrayShape = memLocShape . entryArrayLoc
newtype MemEntry = MemEntry {entryMemSpace :: Imp.Space}
deriving (Show)
newtype ScalarEntry = ScalarEntry
{ entryScalarType :: PrimType
}
deriving (Show)
-- | Every non-scalar variable must be associated with an entry.
data VarEntry rep
= ArrayVar (Maybe (Exp rep)) ArrayEntry
| ScalarVar (Maybe (Exp rep)) ScalarEntry
| MemVar (Maybe (Exp rep)) MemEntry
| AccVar (Maybe (Exp rep)) (VName, Shape, [Type])
deriving (Show)
data ValueDestination
= ScalarDestination VName
| MemoryDestination VName
  | -- | The 'MemLoc' is 'Just' if a copy is
-- required. If it is 'Nothing', then a
-- copy/assignment of a memory block somewhere
-- takes care of this array.
ArrayDestination (Maybe MemLoc)
deriving (Show)
data Env rep r op = Env
{ envExpCompiler :: ExpCompiler rep r op,
envStmsCompiler :: StmsCompiler rep r op,
envOpCompiler :: OpCompiler rep r op,
envCopyCompiler :: CopyCompiler rep r op,
envAllocCompilers :: M.Map Space (AllocCompiler rep r op),
envDefaultSpace :: Imp.Space,
envVolatility :: Imp.Volatility,
-- | User-extensible environment.
envEnv :: r,
-- | Name of the function we are compiling, if any.
envFunction :: Maybe Name,
-- | The set of attributes that are active on the enclosing
-- statements (including the one we are currently compiling).
envAttrs :: Attrs
}
newEnv :: r -> Operations rep r op -> Imp.Space -> Env rep r op
newEnv r ops ds =
Env
{ envExpCompiler = opsExpCompiler ops,
envStmsCompiler = opsStmsCompiler ops,
envOpCompiler = opsOpCompiler ops,
envCopyCompiler = opsCopyCompiler ops,
envAllocCompilers = mempty,
envDefaultSpace = ds,
envVolatility = Imp.Nonvolatile,
envEnv = r,
envFunction = Nothing,
envAttrs = mempty
}
-- | The symbol table used during compilation.
type VTable rep = M.Map VName (VarEntry rep)
data ImpState rep r op = ImpState
{ stateVTable :: VTable rep,
stateFunctions :: Imp.Functions op,
stateCode :: Imp.Code op,
stateWarnings :: Warnings,
-- | Maps the arrays backing each accumulator to their
-- update function and neutral elements. This works
-- because an array name can only become part of a single
-- accumulator throughout its lifetime. If the arrays
    -- backing an accumulator are not in this mapping, the
-- accumulator is scatter-like.
stateAccs :: M.Map VName ([VName], Maybe (Lambda rep, [SubExp])),
stateNameSource :: VNameSource
}
newState :: VNameSource -> ImpState rep r op
newState = ImpState mempty mempty mempty mempty mempty
newtype ImpM rep r op a
= ImpM (ReaderT (Env rep r op) (State (ImpState rep r op)) a)
deriving
( Functor,
Applicative,
Monad,
MonadState (ImpState rep r op),
MonadReader (Env rep r op)
)
instance MonadFreshNames (ImpM rep r op) where
getNameSource = gets stateNameSource
putNameSource src = modify $ \s -> s {stateNameSource = src}
-- Cannot be a KernelsMem scope because the index functions have
-- the wrong leaves (VName instead of Imp.Exp).
instance HasScope SOACS (ImpM rep r op) where
askScope = gets $ M.map (LetName . entryType) . stateVTable
where
entryType (MemVar _ memEntry) =
Mem (entryMemSpace memEntry)
entryType (ArrayVar _ arrayEntry) =
Array
(entryArrayElemType arrayEntry)
(Shape $ entryArrayShape arrayEntry)
NoUniqueness
entryType (ScalarVar _ scalarEntry) =
Prim $ entryScalarType scalarEntry
entryType (AccVar _ (acc, ispace, ts)) =
Acc acc ispace ts NoUniqueness
runImpM ::
ImpM rep r op a ->
r ->
Operations rep r op ->
Imp.Space ->
ImpState rep r op ->
(a, ImpState rep r op)
runImpM (ImpM m) r ops space = runState (runReaderT m $ newEnv r ops space)
subImpM_ ::
r' ->
Operations rep r' op' ->
ImpM rep r' op' a ->
ImpM rep r op (Imp.Code op')
subImpM_ r ops m = snd <$> subImpM r ops m
subImpM ::
r' ->
Operations rep r' op' ->
ImpM rep r' op' a ->
ImpM rep r op (a, Imp.Code op')
subImpM r ops (ImpM m) = do
env <- ask
s <- get
let env' =
env
{ envExpCompiler = opsExpCompiler ops,
envStmsCompiler = opsStmsCompiler ops,
envCopyCompiler = opsCopyCompiler ops,
envOpCompiler = opsOpCompiler ops,
envAllocCompilers = opsAllocCompilers ops,
envEnv = r
}
s' =
ImpState
{ stateVTable = stateVTable s,
stateFunctions = mempty,
stateCode = mempty,
stateNameSource = stateNameSource s,
stateWarnings = mempty,
stateAccs = stateAccs s
}
(x, s'') = runState (runReaderT m env') s'
putNameSource $ stateNameSource s''
warnings $ stateWarnings s''
pure (x, stateCode s'')
-- | Execute a code generation action, returning the code that was
-- emitted.
collect :: ImpM rep r op () -> ImpM rep r op (Imp.Code op)
collect = fmap snd . collect'
collect' :: ImpM rep r op a -> ImpM rep r op (a, Imp.Code op)
collect' m = do
prev_code <- gets stateCode
modify $ \s -> s {stateCode = mempty}
x <- m
new_code <- gets stateCode
modify $ \s -> s {stateCode = prev_code}
pure (x, new_code)
-- | Execute a code generation action, wrapping the generated code
-- within a 'Imp.Comment' with the given description.
comment :: String -> ImpM rep r op () -> ImpM rep r op ()
comment desc m = do
code <- collect m
emit $ Imp.Comment desc code
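-- A usage sketch, assuming @i@ is a 'TV' counter declared elsewhere:
--
-- > comment "reset loop counter" $ i <-- 0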
-- | Emit some generated imperative code.
emit :: Imp.Code op -> ImpM rep r op ()
emit code = modify $ \s -> s {stateCode = stateCode s <> code}
warnings :: Warnings -> ImpM rep r op ()
warnings ws = modify $ \s -> s {stateWarnings = ws <> stateWarnings s}
-- | Emit a warning about something the user should be aware of.
warn :: Located loc => loc -> [loc] -> String -> ImpM rep r op ()
warn loc locs problem =
warnings $ singleWarning' (srclocOf loc) (map srclocOf locs) (fromString problem)
-- | Emit a function in the generated code.
emitFunction :: Name -> Imp.Function op -> ImpM rep r op ()
emitFunction fname fun = do
Imp.Functions fs <- gets stateFunctions
modify $ \s -> s {stateFunctions = Imp.Functions $ (fname, fun) : fs}
-- | Check if a function of a given name exists.
hasFunction :: Name -> ImpM rep r op Bool
hasFunction fname = gets $ \s ->
let Imp.Functions fs = stateFunctions s
in isJust $ lookup fname fs
constsVTable :: Mem rep inner => Stms rep -> VTable rep
constsVTable = foldMap stmVtable
where
stmVtable (Let pat _ e) =
foldMap (peVtable e) $ patElems pat
peVtable e (PatElem name dec) =
M.singleton name $ memBoundToVarEntry (Just e) $ letDecMem dec
compileProg ::
(Mem rep inner, FreeIn op, MonadFreshNames m) =>
r ->
Operations rep r op ->
Imp.Space ->
Prog rep ->
m (Warnings, Imp.Definitions op)
compileProg r ops space (Prog consts funs) =
modifyNameSource $ \src ->
let (_, ss) =
unzip $ parMap rpar (compileFunDef' src) funs
free_in_funs =
freeIn $ mconcat $ map stateFunctions ss
(consts', s') =
runImpM (compileConsts free_in_funs consts) r ops space $
combineStates ss
in ( ( stateWarnings s',
Imp.Definitions consts' (stateFunctions s')
),
stateNameSource s'
)
where
compileFunDef' src fdef =
runImpM
(compileFunDef fdef)
r
ops
space
(newState src) {stateVTable = constsVTable consts}
combineStates ss =
let Imp.Functions funs' = mconcat $ map stateFunctions ss
src = mconcat (map stateNameSource ss)
in (newState src)
{ stateFunctions =
Imp.Functions $ M.toList $ M.fromList funs',
stateWarnings =
mconcat $ map stateWarnings ss
}
compileConsts :: Names -> Stms rep -> ImpM rep r op (Imp.Constants op)
compileConsts used_consts stms = do
code <- collect $ compileStms used_consts stms $ pure ()
pure $ uncurry Imp.Constants $ first DL.toList $ extract code
where
-- Fish out those top-level declarations in the constant
-- initialisation code that are free in the functions.
extract (x Imp.:>>: y) =
extract x <> extract y
extract (Imp.DeclareMem name space)
| name `nameIn` used_consts =
( DL.singleton $ Imp.MemParam name space,
mempty
)
extract (Imp.DeclareScalar name _ t)
| name `nameIn` used_consts =
( DL.singleton $ Imp.ScalarParam name t,
mempty
)
extract s =
(mempty, s)
compileInParam ::
Mem rep inner =>
FParam rep ->
ImpM rep r op (Either Imp.Param ArrayDecl)
compileInParam fparam = case paramDec fparam of
MemPrim bt ->
pure $ Left $ Imp.ScalarParam name bt
MemMem space ->
pure $ Left $ Imp.MemParam name space
MemArray bt shape _ (ArrayIn mem ixfun) ->
pure $ Right $ ArrayDecl name bt $ MemLoc mem (shapeDims shape) ixfun
MemAcc {} ->
error "Functions may not have accumulator parameters."
where
name = paramName fparam
data ArrayDecl = ArrayDecl VName PrimType MemLoc
compileInParams ::
Mem rep inner =>
[FParam rep] ->
[EntryParam] ->
ImpM rep r op ([Imp.Param], [ArrayDecl], [(Name, Imp.ExternalValue)])
compileInParams params eparams = do
let (ctx_params, val_params) =
splitAt (length params - sum (map (entryPointSize . entryParamType) eparams)) params
(inparams, arrayds) <- partitionEithers <$> mapM compileInParam (ctx_params ++ val_params)
let findArray x = find (isArrayDecl x) arrayds
summaries = M.fromList $ mapMaybe memSummary params
where
memSummary param
| MemMem space <- paramDec param =
Just (paramName param, space)
| otherwise =
Nothing
findMemInfo :: VName -> Maybe Space
findMemInfo = flip M.lookup summaries
mkValueDesc fparam signedness =
case (findArray $ paramName fparam, paramType fparam) of
(Just (ArrayDecl _ bt (MemLoc mem shape _)), _) -> do
memspace <- findMemInfo mem
Just $ Imp.ArrayValue mem memspace bt signedness shape
(_, Prim bt) ->
Just $ Imp.ScalarValue bt signedness $ paramName fparam
_ ->
Nothing
mkExts (EntryParam v (TypeOpaque u desc n) : epts) fparams =
let (fparams', rest) = splitAt n fparams
in ( v,
Imp.OpaqueValue
u
desc
(mapMaybe (`mkValueDesc` Imp.TypeDirect) fparams')
) :
mkExts epts rest
mkExts (EntryParam v (TypeUnsigned u) : epts) (fparam : fparams) =
maybeToList ((v,) . Imp.TransparentValue u <$> mkValueDesc fparam Imp.TypeUnsigned)
++ mkExts epts fparams
mkExts (EntryParam v (TypeDirect u) : epts) (fparam : fparams) =
maybeToList ((v,) . Imp.TransparentValue u <$> mkValueDesc fparam Imp.TypeDirect)
++ mkExts epts fparams
mkExts _ _ = []
pure (inparams, arrayds, mkExts eparams val_params)
where
isArrayDecl x (ArrayDecl y _ _) = x == y
compileOutParam ::
FunReturns -> ImpM rep r op (Maybe Imp.Param, ValueDestination)
compileOutParam (MemPrim t) = do
name <- newVName "prim_out"
pure (Just $ Imp.ScalarParam name t, ScalarDestination name)
compileOutParam (MemMem space) = do
name <- newVName "mem_out"
pure (Just $ Imp.MemParam name space, MemoryDestination name)
compileOutParam MemArray {} =
pure (Nothing, ArrayDestination Nothing)
compileOutParam MemAcc {} =
error "Functions may not return accumulators."
compileExternalValues ::
Mem rep inner =>
[RetType rep] ->
[EntryPointType] ->
[Maybe Imp.Param] ->
ImpM rep r op [Imp.ExternalValue]
compileExternalValues orig_rts orig_epts maybe_params = do
let (ctx_rts, val_rts) =
splitAt (length orig_rts - sum (map entryPointSize orig_epts)) orig_rts
let nthOut i = case maybeNth i maybe_params of
Just (Just p) -> Imp.paramName p
Just Nothing -> error $ "Output " ++ show i ++ " not a param."
Nothing -> error $ "Param " ++ show i ++ " does not exist."
mkValueDesc _ signedness (MemArray t shape _ ret) = do
(mem, space) <-
case ret of
ReturnsNewBlock space j _ixfun ->
pure (nthOut j, space)
ReturnsInBlock mem _ixfun -> do
space <- entryMemSpace <$> lookupMemory mem
pure (mem, space)
pure $ Imp.ArrayValue mem space t signedness $ map f $ shapeDims shape
where
f (Free v) = v
f (Ext i) = Var $ nthOut i
mkValueDesc i signedness (MemPrim bt) =
pure $ Imp.ScalarValue bt signedness $ nthOut i
mkValueDesc _ _ MemAcc {} =
error "mkValueDesc: unexpected MemAcc output."
mkValueDesc _ _ MemMem {} =
error "mkValueDesc: unexpected MemMem output."
mkExts i (TypeOpaque u desc n : epts) rets = do
let (rets', rest) = splitAt n rets
vds <- zipWithM (`mkValueDesc` Imp.TypeDirect) [i ..] rets'
(Imp.OpaqueValue u desc vds :) <$> mkExts (i + n) epts rest
mkExts i (TypeUnsigned u : epts) (ret : rets) = do
vd <- mkValueDesc i Imp.TypeUnsigned ret
(Imp.TransparentValue u vd :) <$> mkExts (i + 1) epts rets
mkExts i (TypeDirect u : epts) (ret : rets) = do
vd <- mkValueDesc i Imp.TypeDirect ret
(Imp.TransparentValue u vd :) <$> mkExts (i + 1) epts rets
mkExts _ _ _ = pure []
mkExts (length ctx_rts) orig_epts val_rts
compileOutParams ::
Mem rep inner =>
[RetType rep] ->
Maybe [EntryPointType] ->
ImpM rep r op ([Imp.ExternalValue], [Imp.Param], [ValueDestination])
compileOutParams orig_rts maybe_orig_epts = do
(maybe_params, dests) <- unzip <$> mapM compileOutParam orig_rts
evs <- case maybe_orig_epts of
Just orig_epts -> compileExternalValues orig_rts orig_epts maybe_params
Nothing -> pure []
pure (evs, catMaybes maybe_params, dests)
compileFunDef ::
Mem rep inner =>
FunDef rep ->
ImpM rep r op ()
compileFunDef (FunDef entry _ fname rettype params body) =
local (\env -> env {envFunction = name_entry `mplus` Just fname}) $ do
((outparams, inparams, results, args), body') <- collect' compile
emitFunction fname $ Imp.Function name_entry outparams inparams body' results args
where
(name_entry, params_entry, ret_entry) = case entry of
Nothing ->
( Nothing,
replicate (length params) (EntryParam "" $ TypeDirect mempty),
Nothing
)
Just (x, y, z) -> (Just x, y, Just z)
compile = do
(inparams, arrayds, args) <- compileInParams params params_entry
(results, outparams, dests) <- compileOutParams rettype ret_entry
addFParams params
addArrays arrayds
let Body _ stms ses = body
compileStms (freeIn ses) stms $
forM_ (zip dests ses) $ \(d, SubExpRes _ se) -> copyDWIMDest d [] se []
pure (outparams, inparams, results, args)
compileBody :: Pat (LetDec rep) -> Body rep -> ImpM rep r op ()
compileBody pat (Body _ stms ses) = do
dests <- destinationFromPat pat
compileStms (freeIn ses) stms $
forM_ (zip dests ses) $ \(d, SubExpRes _ se) -> copyDWIMDest d [] se []
compileBody' :: [Param dec] -> Body rep -> ImpM rep r op ()
compileBody' params (Body _ stms ses) =
compileStms (freeIn ses) stms $
forM_ (zip params ses) $ \(param, SubExpRes _ se) -> copyDWIM (paramName param) [] se []
compileLoopBody :: Typed dec => [Param dec] -> Body rep -> ImpM rep r op ()
compileLoopBody mergeparams (Body _ stms ses) = do
-- We cannot write the results to the merge parameters immediately,
-- as some of the results may actually *be* merge parameters, and
-- would thus be clobbered. Therefore, we first copy to new
-- variables mirroring the merge parameters, and then copy this
-- buffer to the merge parameters. This is efficient, because the
-- operations are all scalar operations.
tmpnames <- mapM (newVName . (++ "_tmp") . baseString . paramName) mergeparams
compileStms (freeIn ses) stms $ do
copy_to_merge_params <- forM (zip3 mergeparams tmpnames ses) $ \(p, tmp, SubExpRes _ se) ->
case typeOf p of
Prim pt -> do
emit $ Imp.DeclareScalar tmp Imp.Nonvolatile pt
emit $ Imp.SetScalar tmp $ toExp' pt se
pure $ emit $ Imp.SetScalar (paramName p) $ Imp.var tmp pt
Mem space | Var v <- se -> do
emit $ Imp.DeclareMem tmp space
emit $ Imp.SetMem tmp v space
pure $ emit $ Imp.SetMem (paramName p) tmp space
_ -> pure $ pure ()
sequence_ copy_to_merge_params
compileStms :: Names -> Stms rep -> ImpM rep r op () -> ImpM rep r op ()
compileStms alive_after_stms all_stms m = do
cb <- asks envStmsCompiler
cb alive_after_stms all_stms m
defCompileStms ::
(Mem rep inner, FreeIn op) =>
Names ->
Stms rep ->
ImpM rep r op () ->
ImpM rep r op ()
defCompileStms alive_after_stms all_stms m =
-- We keep track of any memory blocks produced by the statements,
-- and after the last time that memory block is used, we insert a
-- Free. This is very conservative, but can cut down on lifetimes
-- in some cases.
void $ compileStms' mempty $ stmsToList all_stms
where
compileStms' allocs (Let pat aux e : bs) = do
dVars (Just e) (patElems pat)
e_code <-
localAttrs (stmAuxAttrs aux) $
collect $ compileExp pat e
(live_after, bs_code) <- collect' $ compileStms' (patternAllocs pat <> allocs) bs
let dies_here v =
not (v `nameIn` live_after)
&& v `nameIn` freeIn e_code
to_free = S.filter (dies_here . fst) allocs
emit e_code
mapM_ (emit . uncurry Imp.Free) to_free
emit bs_code
pure $ freeIn e_code <> live_after
compileStms' _ [] = do
code <- collect m
emit code
pure $ freeIn code <> alive_after_stms
patternAllocs = S.fromList . mapMaybe isMemPatElem . patElems
isMemPatElem pe = case patElemType pe of
Mem space -> Just (patElemName pe, space)
_ -> Nothing
compileExp :: Pat (LetDec rep) -> Exp rep -> ImpM rep r op ()
compileExp pat e = do
ec <- asks envExpCompiler
ec pat e
defCompileExp ::
(Mem rep inner) =>
Pat (LetDec rep) ->
Exp rep ->
ImpM rep r op ()
defCompileExp pat (If cond tbranch fbranch _) =
sIf (toBoolExp cond) (compileBody pat tbranch) (compileBody pat fbranch)
defCompileExp pat (Apply fname args _ _) = do
dest <- destinationFromPat pat
targets <- funcallTargets dest
args' <- catMaybes <$> mapM compileArg args
emit $ Imp.Call targets fname args'
where
compileArg (se, _) = do
t <- subExpType se
case (se, t) of
(_, Prim pt) -> pure $ Just $ Imp.ExpArg $ toExp' pt se
(Var v, Mem {}) -> pure $ Just $ Imp.MemArg v
_ -> pure Nothing
defCompileExp pat (BasicOp op) = defCompileBasicOp pat op
defCompileExp pat (DoLoop merge form body) = do
attrs <- askAttrs
when ("unroll" `inAttrs` attrs) $
warn (noLoc :: SrcLoc) [] "#[unroll] on loop with unknown number of iterations." -- FIXME: no location.
dFParams params
forM_ merge $ \(p, se) ->
when ((== 0) $ arrayRank $ paramType p) $
copyDWIM (paramName p) [] se []
let doBody = compileLoopBody params body
case form of
ForLoop i _ bound loopvars -> do
let setLoopParam (p, a)
| Prim _ <- paramType p =
copyDWIM (paramName p) [] (Var a) [DimFix $ Imp.le64 i]
| otherwise =
pure ()
bound' <- toExp bound
dLParams $ map fst loopvars
sFor' i bound' $
mapM_ setLoopParam loopvars >> doBody
WhileLoop cond ->
sWhile (TPrimExp $ Imp.var cond Bool) doBody
pat_dests <- destinationFromPat pat
forM_ (zip pat_dests $ map (Var . paramName . fst) merge) $ \(d, r) ->
copyDWIMDest d [] r []
where
params = map fst merge
defCompileExp pat (WithAcc inputs lam) = do
dLParams $ lambdaParams lam
forM_ (zip inputs $ lambdaParams lam) $ \((_, arrs, op), p) ->
modify $ \s ->
s {stateAccs = M.insert (paramName p) (arrs, op) $ stateAccs s}
compileStms mempty (bodyStms $ lambdaBody lam) $ do
let nonacc_res = drop num_accs (bodyResult (lambdaBody lam))
nonacc_pat_names = takeLast (length nonacc_res) (patNames pat)
forM_ (zip nonacc_pat_names nonacc_res) $ \(v, SubExpRes _ se) ->
copyDWIM v [] se []
where
num_accs = length inputs
defCompileExp pat (Op op) = do
opc <- asks envOpCompiler
opc pat op
tracePrim :: String -> PrimType -> SubExp -> ImpM rep r op ()
tracePrim s t se =
emit . Imp.TracePrint $
ErrorMsg [ErrorString (s <> ": "), ErrorVal t (toExp' t se), ErrorString "\n"]
traceArray :: String -> PrimType -> Shape -> SubExp -> ImpM rep r op ()
traceArray s t shape se = do
emit . Imp.TracePrint $ ErrorMsg [ErrorString (s <> ": ")]
sLoopNest shape $ \is -> do
arr_elem <- dPrim "arr_elem" t
copyDWIMFix (tvVar arr_elem) [] se is
emit . Imp.TracePrint $ ErrorMsg [ErrorVal t (untyped (tvExp arr_elem)), " "]
emit . Imp.TracePrint $ ErrorMsg ["\n"]
defCompileBasicOp ::
Mem rep inner =>
Pat (LetDec rep) ->
BasicOp ->
ImpM rep r op ()
defCompileBasicOp (Pat [pe]) (SubExp se) =
copyDWIM (patElemName pe) [] se []
defCompileBasicOp (Pat [pe]) (Opaque op se) = do
copyDWIM (patElemName pe) [] se []
case op of
OpaqueNil -> pure ()
OpaqueTrace s -> comment ("Trace: " <> s) $ do
se_t <- subExpType se
case se_t of
Prim t -> tracePrim s t se
Array t shape _ -> traceArray s t shape se
_ ->
warn [mempty :: SrcLoc] mempty $
s ++ ": cannot trace value of this (core) type: " <> pretty se_t
defCompileBasicOp (Pat [pe]) (UnOp op e) = do
e' <- toExp e
patElemName pe <~~ Imp.UnOpExp op e'
defCompileBasicOp (Pat [pe]) (ConvOp conv e) = do
e' <- toExp e
patElemName pe <~~ Imp.ConvOpExp conv e'
defCompileBasicOp (Pat [pe]) (BinOp bop x y) = do
x' <- toExp x
y' <- toExp y
patElemName pe <~~ Imp.BinOpExp bop x' y'
defCompileBasicOp (Pat [pe]) (CmpOp bop x y) = do
x' <- toExp x
y' <- toExp y
patElemName pe <~~ Imp.CmpOpExp bop x' y'
defCompileBasicOp _ (Assert e msg loc) = do
e' <- toExp e
msg' <- traverse toExp msg
emit $ Imp.Assert e' msg' loc
attrs <- askAttrs
when (AttrComp "warn" ["safety_checks"] `inAttrs` attrs) $
uncurry warn loc "Safety check required at run-time."
defCompileBasicOp (Pat [pe]) (Index src slice)
| Just idxs <- sliceIndices slice =
copyDWIM (patElemName pe) [] (Var src) $ map (DimFix . toInt64Exp) idxs
defCompileBasicOp _ Index {} =
pure ()
defCompileBasicOp (Pat [pe]) (Update safety _ slice se) =
case safety of
Unsafe -> write
Safe -> sWhen (inBounds slice' dims) write
where
slice' = fmap toInt64Exp slice
dims = map toInt64Exp $ arrayDims $ patElemType pe
write = sUpdate (patElemName pe) slice' se
defCompileBasicOp _ FlatIndex {} =
pure ()
defCompileBasicOp (Pat [pe]) (FlatUpdate _ slice v) = do
pe_loc <- entryArrayLoc <$> lookupArray (patElemName pe)
v_loc <- entryArrayLoc <$> lookupArray v
copy (elemType (patElemType pe)) (flatSliceMemLoc pe_loc slice') v_loc
where
slice' = fmap toInt64Exp slice
defCompileBasicOp (Pat [pe]) (Replicate (Shape ds) se)
| Acc {} <- patElemType pe = pure ()
| otherwise = do
ds' <- mapM toExp ds
is <- replicateM (length ds) (newVName "i")
copy_elem <- collect $ copyDWIM (patElemName pe) (map (DimFix . Imp.le64) is) se []
emit $ foldl (.) id (zipWith Imp.For is ds') copy_elem
defCompileBasicOp _ Scratch {} =
pure ()
defCompileBasicOp (Pat [pe]) (Iota n e s it) = do
e' <- toExp e
s' <- toExp s
sFor "i" (toInt64Exp n) $ \i -> do
let i' = sExt it $ untyped i
x <-
dPrimV "x" . TPrimExp $
BinOpExp (Add it OverflowUndef) e' $
BinOpExp (Mul it OverflowUndef) i' s'
copyDWIM (patElemName pe) [DimFix i] (Var (tvVar x)) []
defCompileBasicOp (Pat [pe]) (Copy src) =
copyDWIM (patElemName pe) [] (Var src) []
defCompileBasicOp (Pat [pe]) (Manifest _ src) =
copyDWIM (patElemName pe) [] (Var src) []
defCompileBasicOp (Pat [pe]) (Concat i (x :| ys) _) = do
offs_glb <- dPrimV "tmp_offs" 0
forM_ (x : ys) $ \y -> do
y_dims <- arrayDims <$> lookupType y
let rows = case drop i y_dims of
[] -> error $ "defCompileBasicOp Concat: empty array shape for " ++ pretty y
r : _ -> toInt64Exp r
skip_dims = take i y_dims
sliceAllDim d = DimSlice 0 d 1
skip_slices = map (sliceAllDim . toInt64Exp) skip_dims
destslice = skip_slices ++ [DimSlice (tvExp offs_glb) rows 1]
copyDWIM (patElemName pe) destslice (Var y) []
offs_glb <-- tvExp offs_glb + rows
defCompileBasicOp (Pat [pe]) (ArrayLit es _)
| Just vs@(v : _) <- mapM isLiteral es = do
dest_mem <- entryArrayLoc <$> lookupArray (patElemName pe)
dest_space <- entryMemSpace <$> lookupMemory (memLocName dest_mem)
let t = primValueType v
static_array <- newVNameForFun "static_array"
emit $ Imp.DeclareArray static_array dest_space t $ Imp.ArrayValues vs
let static_src =
MemLoc static_array [intConst Int64 $ fromIntegral $ length es] $
IxFun.iota [fromIntegral $ length es]
entry = MemVar Nothing $ MemEntry dest_space
addVar static_array entry
copy t dest_mem static_src
| otherwise =
forM_ (zip [0 ..] es) $ \(i, e) ->
copyDWIM (patElemName pe) [DimFix $ fromInteger i] e []
where
isLiteral (Constant v) = Just v
isLiteral _ = Nothing
defCompileBasicOp _ Rearrange {} =
pure ()
defCompileBasicOp _ Rotate {} =
pure ()
defCompileBasicOp _ Reshape {} =
pure ()
defCompileBasicOp _ (UpdateAcc acc is vs) = sComment "UpdateAcc" $ do
-- We are abusing the comment mechanism to wrap the operator in
-- braces when we end up generating code. This is necessary because
-- we might otherwise end up declaring lambda parameters (if any)
-- multiple times, as they are duplicated every time we do an
-- UpdateAcc for the same accumulator.
let is' = map toInt64Exp is
-- We need to figure out whether we are updating a scatter-like
-- accumulator or a generalised reduction. This also binds the
-- index parameters.
(_, _, arrs, dims, op) <- lookupAcc acc is'
sWhen (inBounds (Slice (map DimFix is')) dims) $
case op of
Nothing ->
-- Scatter-like.
forM_ (zip arrs vs) $ \(arr, v) -> copyDWIMFix arr is' v []
Just lam -> do
-- Generalised reduction.
dLParams $ lambdaParams lam
let (x_params, y_params) =
splitAt (length vs) $ map paramName $ lambdaParams lam
forM_ (zip x_params arrs) $ \(xp, arr) ->
copyDWIMFix xp [] (Var arr) is'
forM_ (zip y_params vs) $ \(yp, v) ->
copyDWIM yp [] v []
compileStms mempty (bodyStms $ lambdaBody lam) $
forM_ (zip arrs (bodyResult (lambdaBody lam))) $ \(arr, SubExpRes _ se) ->
copyDWIMFix arr is' se []
defCompileBasicOp pat e =
error $
"ImpGen.defCompileBasicOp: Invalid pattern\n "
++ pretty pat
++ "\nfor expression\n "
++ pretty e
-- | Note: a hack to be used only for functions.
addArrays :: [ArrayDecl] -> ImpM rep r op ()
addArrays = mapM_ addArray
where
addArray (ArrayDecl name bt location) =
addVar name $
ArrayVar
Nothing
ArrayEntry
{ entryArrayLoc = location,
entryArrayElemType = bt
}
-- | Like 'dFParams', but does not create new declarations.
-- Note: a hack to be used only for functions.
addFParams :: Mem rep inner => [FParam rep] -> ImpM rep r op ()
addFParams = mapM_ addFParam
where
addFParam fparam =
addVar (paramName fparam) $
memBoundToVarEntry Nothing $ noUniquenessReturns $ paramDec fparam
-- | Another hack.
addLoopVar :: VName -> IntType -> ImpM rep r op ()
addLoopVar i it = addVar i $ ScalarVar Nothing $ ScalarEntry $ IntType it
dVars ::
Mem rep inner =>
Maybe (Exp rep) ->
[PatElem (LetDec rep)] ->
ImpM rep r op ()
dVars e = mapM_ dVar
where
dVar = dScope e . scopeOfPatElem
dFParams :: Mem rep inner => [FParam rep] -> ImpM rep r op ()
dFParams = dScope Nothing . scopeOfFParams
dLParams :: Mem rep inner => [LParam rep] -> ImpM rep r op ()
dLParams = dScope Nothing . scopeOfLParams
dPrimVol :: String -> PrimType -> Imp.TExp t -> ImpM rep r op (TV t)
dPrimVol name t e = do
name' <- newVName name
emit $ Imp.DeclareScalar name' Imp.Volatile t
addVar name' $ ScalarVar Nothing $ ScalarEntry t
name' <~~ untyped e
pure $ TV name' t
dPrim_ :: VName -> PrimType -> ImpM rep r op ()
dPrim_ name t = do
emit $ Imp.DeclareScalar name Imp.Nonvolatile t
addVar name $ ScalarVar Nothing $ ScalarEntry t
-- | The return type is polymorphic, so there is no guarantee it
-- actually matches the 'PrimType', but at least we have to use it
-- consistently.
dPrim :: String -> PrimType -> ImpM rep r op (TV t)
dPrim name t = do
name' <- newVName name
dPrim_ name' t
pure $ TV name' t
dPrimV_ :: VName -> Imp.TExp t -> ImpM rep r op ()
dPrimV_ name e = do
dPrim_ name t
TV name t <-- e
where
t = primExpType $ untyped e
dPrimV :: String -> Imp.TExp t -> ImpM rep r op (TV t)
dPrimV name e = do
name' <- dPrim name $ primExpType $ untyped e
name' <-- e
pure name'
dPrimVE :: String -> Imp.TExp t -> ImpM rep r op (Imp.TExp t)
dPrimVE name e = do
name' <- dPrim name $ primExpType $ untyped e
name' <-- e
pure $ tvExp name'
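-- A small sketch of how these helpers combine inside some 'ImpM' action:
--
-- > j <- dPrimV "j" (0 :: Imp.TExp Int64)
-- > j1 <- dPrimVE "j_plus_one" (tvExp j + 1)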
memBoundToVarEntry ::
Maybe (Exp rep) ->
MemBound NoUniqueness ->
VarEntry rep
memBoundToVarEntry e (MemPrim bt) =
ScalarVar e ScalarEntry {entryScalarType = bt}
memBoundToVarEntry e (MemMem space) =
MemVar e $ MemEntry space
memBoundToVarEntry e (MemAcc acc ispace ts _) =
AccVar e (acc, ispace, ts)
memBoundToVarEntry e (MemArray bt shape _ (ArrayIn mem ixfun)) =
let location = MemLoc mem (shapeDims shape) ixfun
in ArrayVar
e
ArrayEntry
{ entryArrayLoc = location,
entryArrayElemType = bt
}
infoDec ::
Mem rep inner =>
NameInfo rep ->
MemInfo SubExp NoUniqueness MemBind
infoDec (LetName dec) = letDecMem dec
infoDec (FParamName dec) = noUniquenessReturns dec
infoDec (LParamName dec) = dec
infoDec (IndexName it) = MemPrim $ IntType it
dInfo ::
Mem rep inner =>
Maybe (Exp rep) ->
VName ->
NameInfo rep ->
ImpM rep r op ()
dInfo e name info = do
let entry = memBoundToVarEntry e $ infoDec info
case entry of
MemVar _ entry' ->
emit $ Imp.DeclareMem name $ entryMemSpace entry'
ScalarVar _ entry' ->
emit $ Imp.DeclareScalar name Imp.Nonvolatile $ entryScalarType entry'
ArrayVar _ _ ->
pure ()
AccVar {} ->
pure ()
addVar name entry
dScope ::
Mem rep inner =>
Maybe (Exp rep) ->
Scope rep ->
ImpM rep r op ()
dScope e = mapM_ (uncurry $ dInfo e) . M.toList
dArray :: VName -> PrimType -> ShapeBase SubExp -> VName -> IxFun -> ImpM rep r op ()
dArray name pt shape mem ixfun =
addVar name $ ArrayVar Nothing $ ArrayEntry location pt
where
location =
MemLoc mem (shapeDims shape) ixfun
everythingVolatile :: ImpM rep r op a -> ImpM rep r op a
everythingVolatile = local $ \env -> env {envVolatility = Imp.Volatile}
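-- A sketch of forcing volatile accesses in a compiled copy; @dst@ and
-- @src@ are hypothetical array names:
--
-- > everythingVolatile $ copyDWIMFix dst [] (Var src) []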
-- | Remove the array targets.
funcallTargets :: [ValueDestination] -> ImpM rep r op [VName]
funcallTargets dests =
concat <$> mapM funcallTarget dests
where
funcallTarget (ScalarDestination name) =
pure [name]
funcallTarget (ArrayDestination _) =
pure []
funcallTarget (MemoryDestination name) =
pure [name]
-- | A typed variable, which we can turn into a typed expression, or
-- use as the target for an assignment. This is used to aid in type
-- safety when doing code generation, by keeping the types straight.
-- It is still easy to cheat when you need to.
data TV t = TV VName PrimType
-- | Create a typed variable from a name and a dynamic type. Note
-- that there is no guarantee that the dynamic type corresponds to the
-- inferred static type, but the latter will at least have to be used
-- consistently.
mkTV :: VName -> PrimType -> TV t
mkTV = TV
-- | Convert a typed variable to a size (a SubExp).
tvSize :: TV t -> Imp.DimSize
tvSize = Var . tvVar
-- | Convert a typed variable to a similarly typed expression.
tvExp :: TV t -> Imp.TExp t
tvExp (TV v t) = Imp.TPrimExp $ Imp.var v t
-- | Extract the underlying variable name from a typed variable.
tvVar :: TV t -> VName
tvVar (TV v _) = v
-- | Compile things to 'Imp.Exp'.
class ToExp a where
  -- | Compile to an 'Imp.Exp', where the type (which must still be a
  -- primitive) is deduced monadically.
toExp :: a -> ImpM rep r op Imp.Exp
-- | Compile where we know the type in advance.
toExp' :: PrimType -> a -> Imp.Exp
toInt64Exp :: a -> Imp.TExp Int64
toInt64Exp = TPrimExp . toExp' int64
toBoolExp :: a -> Imp.TExp Bool
toBoolExp = TPrimExp . toExp' Bool
instance ToExp SubExp where
toExp (Constant v) =
pure $ Imp.ValueExp v
toExp (Var v) =
lookupVar v >>= \case
ScalarVar _ (ScalarEntry pt) ->
pure $ Imp.var v pt
_ -> error $ "toExp SubExp: SubExp is not a primitive type: " ++ pretty v
toExp' _ (Constant v) = Imp.ValueExp v
toExp' t (Var v) = Imp.var v t
instance ToExp (PrimExp VName) where
toExp = pure
toExp' _ = id
addVar :: VName -> VarEntry rep -> ImpM rep r op ()
addVar name entry =
modify $ \s -> s {stateVTable = M.insert name entry $ stateVTable s}
localDefaultSpace :: Imp.Space -> ImpM rep r op a -> ImpM rep r op a
localDefaultSpace space = local (\env -> env {envDefaultSpace = space})
askFunction :: ImpM rep r op (Maybe Name)
askFunction = asks envFunction
-- | Generate a 'VName', prefixed with 'askFunction' if it exists.
newVNameForFun :: String -> ImpM rep r op VName
newVNameForFun s = do
fname <- fmap nameToString <$> askFunction
newVName $ maybe "" (++ ".") fname ++ s
-- | Generate a 'Name', prefixed with 'askFunction' if it exists.
nameForFun :: String -> ImpM rep r op Name
nameForFun s = do
fname <- askFunction
pure $ maybe "" (<> ".") fname <> nameFromString s
askEnv :: ImpM rep r op r
askEnv = asks envEnv
localEnv :: (r -> r) -> ImpM rep r op a -> ImpM rep r op a
localEnv f = local $ \env -> env {envEnv = f $ envEnv env}
-- | The active attributes, including those for the statement
-- currently being compiled.
askAttrs :: ImpM rep r op Attrs
askAttrs = asks envAttrs
-- | Add more attributes to what is returned by 'askAttrs'.
localAttrs :: Attrs -> ImpM rep r op a -> ImpM rep r op a
localAttrs attrs = local $ \env -> env {envAttrs = attrs <> envAttrs env}
localOps :: Operations rep r op -> ImpM rep r op a -> ImpM rep r op a
localOps ops = local $ \env ->
env
{ envExpCompiler = opsExpCompiler ops,
envStmsCompiler = opsStmsCompiler ops,
envCopyCompiler = opsCopyCompiler ops,
envOpCompiler = opsOpCompiler ops,
envAllocCompilers = opsAllocCompilers ops
}
-- | Get the current symbol table.
getVTable :: ImpM rep r op (VTable rep)
getVTable = gets stateVTable
putVTable :: VTable rep -> ImpM rep r op ()
putVTable vtable = modify $ \s -> s {stateVTable = vtable}
-- | Run an action with a modified symbol table. All changes to the
-- symbol table will be reverted once the action is done!
localVTable :: (VTable rep -> VTable rep) -> ImpM rep r op a -> ImpM rep r op a
localVTable f m = do
old_vtable <- getVTable
putVTable $ f old_vtable
a <- m
putVTable old_vtable
pure a
lookupVar :: VName -> ImpM rep r op (VarEntry rep)
lookupVar name = do
res <- gets $ M.lookup name . stateVTable
case res of
Just entry -> pure entry
_ -> error $ "Unknown variable: " ++ pretty name
lookupArray :: VName -> ImpM rep r op ArrayEntry
lookupArray name = do
res <- lookupVar name
case res of
ArrayVar _ entry -> pure entry
_ -> error $ "ImpGen.lookupArray: not an array: " ++ pretty name
lookupMemory :: VName -> ImpM rep r op MemEntry
lookupMemory name = do
res <- lookupVar name
case res of
MemVar _ entry -> pure entry
_ -> error $ "Unknown memory block: " ++ pretty name
lookupArraySpace :: VName -> ImpM rep r op Space
lookupArraySpace =
fmap entryMemSpace . lookupMemory
<=< fmap (memLocName . entryArrayLoc) . lookupArray
-- | In the case of a histogram-like accumulator, also sets the index
-- parameters.
lookupAcc ::
VName ->
[Imp.TExp Int64] ->
ImpM rep r op (VName, Space, [VName], [Imp.TExp Int64], Maybe (Lambda rep))
lookupAcc name is = do
res <- lookupVar name
case res of
AccVar _ (acc, ispace, _) -> do
acc' <- gets $ M.lookup acc . stateAccs
case acc' of
Just ([], _) ->
error $ "Accumulator with no arrays: " ++ pretty name
Just (arrs@(arr : _), Just (op, _)) -> do
space <- lookupArraySpace arr
let (i_params, ps) = splitAt (length is) $ lambdaParams op
zipWithM_ dPrimV_ (map paramName i_params) is
return
( acc,
space,
arrs,
map toInt64Exp (shapeDims ispace),
Just op {lambdaParams = ps}
)
Just (arrs@(arr : _), Nothing) -> do
space <- lookupArraySpace arr
pure (acc, space, arrs, map toInt64Exp (shapeDims ispace), Nothing)
Nothing ->
error $ "ImpGen.lookupAcc: unlisted accumulator: " ++ pretty name
_ -> error $ "ImpGen.lookupAcc: not an accumulator: " ++ pretty name
destinationFromPat :: Pat (LetDec rep) -> ImpM rep r op [ValueDestination]
destinationFromPat = mapM inspect . patElems
where
inspect pe = do
let name = patElemName pe
entry <- lookupVar name
case entry of
ArrayVar _ (ArrayEntry MemLoc {} _) ->
pure $ ArrayDestination Nothing
MemVar {} ->
pure $ MemoryDestination name
ScalarVar {} ->
pure $ ScalarDestination name
AccVar {} ->
pure $ ArrayDestination Nothing
fullyIndexArray ::
VName ->
[Imp.TExp Int64] ->
ImpM rep r op (VName, Imp.Space, Count Elements (Imp.TExp Int64))
fullyIndexArray name indices = do
arr <- lookupArray name
fullyIndexArray' (entryArrayLoc arr) indices
fullyIndexArray' ::
MemLoc ->
[Imp.TExp Int64] ->
ImpM rep r op (VName, Imp.Space, Count Elements (Imp.TExp Int64))
fullyIndexArray' (MemLoc mem _ ixfun) indices = do
space <- entryMemSpace <$> lookupMemory mem
return
( mem,
space,
elements $ IxFun.index ixfun indices
)
-- More complicated read/write operations that use index functions.
copy :: CopyCompiler rep r op
copy
bt
dst@(MemLoc dst_name _ dst_ixfn@(IxFun.IxFun dst_lmads@(dst_lmad :| _) _ _))
src@(MemLoc src_name _ src_ixfn@(IxFun.IxFun src_lmads@(src_lmad :| _) _ _)) = do
-- If we can statically determine that the two index-functions
-- are equivalent, don't do anything
unless (dst_name == src_name && dst_ixfn `IxFun.equivalent` src_ixfn) $
-- It's also possible that we can dynamically determine that the two
-- index-functions are equivalent.
sUnless
( fromBool (dst_name == src_name && length dst_lmads == 1 && length src_lmads == 1)
.&&. IxFun.dynamicEqualsLMAD dst_lmad src_lmad
)
$ do
-- If none of the above is true, actually do the copy
cc <- asks envCopyCompiler
cc bt dst src
-- | Is this copy really a mapping with transpose?
isMapTransposeCopy ::
PrimType ->
MemLoc ->
MemLoc ->
Maybe
( Imp.TExp Int64,
Imp.TExp Int64,
Imp.TExp Int64,
Imp.TExp Int64,
Imp.TExp Int64
)
isMapTransposeCopy bt (MemLoc _ _ destIxFun) (MemLoc _ _ srcIxFun)
| Just (dest_offset, perm_and_destshape) <- IxFun.rearrangeWithOffset destIxFun bt_size,
(perm, destshape) <- unzip perm_and_destshape,
Just src_offset <- IxFun.linearWithOffset srcIxFun bt_size,
Just (r1, r2, _) <- isMapTranspose perm =
isOk destshape swap r1 r2 dest_offset src_offset
| Just dest_offset <- IxFun.linearWithOffset destIxFun bt_size,
Just (src_offset, perm_and_srcshape) <- IxFun.rearrangeWithOffset srcIxFun bt_size,
(perm, srcshape) <- unzip perm_and_srcshape,
Just (r1, r2, _) <- isMapTranspose perm =
isOk srcshape id r1 r2 dest_offset src_offset
| otherwise =
Nothing
where
bt_size = primByteSize bt
swap (x, y) = (y, x)
isOk shape f r1 r2 dest_offset src_offset = do
let (num_arrays, size_x, size_y) = getSizes shape f r1 r2
return
( dest_offset,
src_offset,
num_arrays,
size_x,
size_y
)
getSizes shape f r1 r2 =
let (mapped, notmapped) = splitAt r1 shape
(pretrans, posttrans) = f $ splitAt r2 notmapped
in (product mapped, product pretrans, product posttrans)
mapTransposeName :: PrimType -> String
mapTransposeName bt = "map_transpose_" ++ pretty bt
mapTransposeForType :: PrimType -> ImpM rep r op Name
mapTransposeForType bt = do
let fname = nameFromString $ "builtin#" <> mapTransposeName bt
exists <- hasFunction fname
unless exists $ emitFunction fname $ mapTransposeFunction fname bt
pure fname
-- | Use an 'Imp.Copy' if possible, otherwise 'copyElementWise'.
defaultCopy :: CopyCompiler rep r op
defaultCopy pt dest src
| Just (destoffset, srcoffset, num_arrays, size_x, size_y) <-
isMapTransposeCopy pt dest src = do
fname <- mapTransposeForType pt
emit $
Imp.Call
[]
fname
$ transposeArgs
pt
destmem
(bytes destoffset)
srcmem
(bytes srcoffset)
num_arrays
size_x
size_y
| Just destoffset <-
IxFun.linearWithOffset dest_ixfun pt_size,
Just srcoffset <-
IxFun.linearWithOffset src_ixfun pt_size = do
srcspace <- entryMemSpace <$> lookupMemory srcmem
destspace <- entryMemSpace <$> lookupMemory destmem
if isScalarSpace srcspace || isScalarSpace destspace
then copyElementWise pt dest src
else
emit $
Imp.Copy
destmem
(bytes destoffset)
destspace
srcmem
(bytes srcoffset)
srcspace
$ num_elems `withElemType` pt
| otherwise =
copyElementWise pt dest src
where
pt_size = primByteSize pt
num_elems = Imp.elements $ product $ IxFun.shape $ memLocIxFun src
MemLoc destmem _ dest_ixfun = dest
MemLoc srcmem _ src_ixfun = src
isScalarSpace ScalarSpace {} = True
isScalarSpace _ = False
copyElementWise :: CopyCompiler rep r op
copyElementWise bt dest src = do
let bounds = IxFun.shape $ memLocIxFun src
is <- replicateM (length bounds) (newVName "i")
let ivars = map Imp.le64 is
(destmem, destspace, destidx) <- fullyIndexArray' dest ivars
(srcmem, srcspace, srcidx) <- fullyIndexArray' src ivars
vol <- asks envVolatility
tmp <- newVName "tmp"
emit $
foldl (.) id (zipWith Imp.For is $ map untyped bounds) $
mconcat
[ Imp.DeclareScalar tmp vol bt,
Imp.Read tmp srcmem srcidx bt srcspace vol,
Imp.Write destmem destidx bt destspace vol $ Imp.var tmp bt
]
-- | Copy from here to there; both destination and source may be
-- indexed.
copyArrayDWIM ::
PrimType ->
MemLoc ->
[DimIndex (Imp.TExp Int64)] ->
MemLoc ->
[DimIndex (Imp.TExp Int64)] ->
ImpM rep r op (Imp.Code op)
copyArrayDWIM
bt
destlocation@(MemLoc _ destshape _)
destslice
srclocation@(MemLoc _ srcshape _)
srcslice
| Just destis <- mapM dimFix destslice,
Just srcis <- mapM dimFix srcslice,
length srcis == length srcshape,
length destis == length destshape = do
(targetmem, destspace, targetoffset) <-
fullyIndexArray' destlocation destis
(srcmem, srcspace, srcoffset) <-
fullyIndexArray' srclocation srcis
vol <- asks envVolatility
collect $ do
tmp <- tvVar <$> dPrim "tmp" bt
emit $ Imp.Read tmp srcmem srcoffset bt srcspace vol
emit $ Imp.Write targetmem targetoffset bt destspace vol $ Imp.var tmp bt
| otherwise = do
let destslice' = fullSliceNum (map toInt64Exp destshape) destslice
srcslice' = fullSliceNum (map toInt64Exp srcshape) srcslice
destrank = length $ sliceDims destslice'
srcrank = length $ sliceDims srcslice'
destlocation' = sliceMemLoc destlocation destslice'
srclocation' = sliceMemLoc srclocation srcslice'
if destrank /= srcrank
then
error $
"copyArrayDWIM: cannot copy to "
++ pretty (memLocName destlocation)
++ " from "
++ pretty (memLocName srclocation)
++ " because ranks do not match ("
++ pretty destrank
++ " vs "
++ pretty srcrank
++ ")"
else
if destlocation' == srclocation'
then pure mempty -- Copy would be no-op.
else collect $ copy bt destlocation' srclocation'
-- | Like 'copyDWIM', but the target is a 'ValueDestination'
-- instead of a variable name.
copyDWIMDest ::
ValueDestination ->
[DimIndex (Imp.TExp Int64)] ->
SubExp ->
[DimIndex (Imp.TExp Int64)] ->
ImpM rep r op ()
copyDWIMDest _ _ (Constant v) (_ : _) =
error $
unwords ["copyDWIMDest: constant source", pretty v, "cannot be indexed."]
copyDWIMDest pat dest_slice (Constant v) [] =
case mapM dimFix dest_slice of
Nothing ->
error $
unwords ["copyDWIMDest: constant source", pretty v, "with slice destination."]
Just dest_is ->
case pat of
ScalarDestination name ->
emit $ Imp.SetScalar name $ Imp.ValueExp v
MemoryDestination {} ->
error $
unwords ["copyDWIMDest: constant source", pretty v, "cannot be written to memory destination."]
ArrayDestination (Just dest_loc) -> do
(dest_mem, dest_space, dest_i) <-
fullyIndexArray' dest_loc dest_is
vol <- asks envVolatility
emit $ Imp.Write dest_mem dest_i bt dest_space vol $ Imp.ValueExp v
ArrayDestination Nothing ->
error "copyDWIMDest: ArrayDestination Nothing"
where
bt = primValueType v
copyDWIMDest dest dest_slice (Var src) src_slice = do
src_entry <- lookupVar src
case (dest, src_entry) of
(MemoryDestination mem, MemVar _ (MemEntry space)) ->
emit $ Imp.SetMem mem src space
(MemoryDestination {}, _) ->
error $
unwords ["copyDWIMDest: cannot write", pretty src, "to memory destination."]
(_, MemVar {}) ->
error $
unwords ["copyDWIMDest: source", pretty src, "is a memory block."]
(_, ScalarVar _ (ScalarEntry _))
| not $ null src_slice ->
error $
unwords ["copyDWIMDest: prim-typed source", pretty src, "with slice", pretty src_slice]
(ScalarDestination name, _)
| not $ null dest_slice ->
error $
unwords ["copyDWIMDest: prim-typed target", pretty name, "with slice", pretty dest_slice]
(ScalarDestination name, ScalarVar _ (ScalarEntry pt)) ->
emit $ Imp.SetScalar name $ Imp.var src pt
(ScalarDestination name, ArrayVar _ arr)
| Just src_is <- mapM dimFix src_slice,
length src_slice == length (entryArrayShape arr) -> do
let bt = entryArrayElemType arr
(mem, space, i) <-
fullyIndexArray' (entryArrayLoc arr) src_is
vol <- asks envVolatility
emit $ Imp.Read name mem i bt space vol
| otherwise ->
error $
unwords
[ "copyDWIMDest: prim-typed target",
pretty name,
"and array-typed source",
pretty src,
"of shape",
pretty (entryArrayShape arr),
"sliced with",
pretty src_slice
]
(ArrayDestination (Just dest_loc), ArrayVar _ src_arr) -> do
let src_loc = entryArrayLoc src_arr
bt = entryArrayElemType src_arr
emit =<< copyArrayDWIM bt dest_loc dest_slice src_loc src_slice
(ArrayDestination (Just dest_loc), ScalarVar _ (ScalarEntry bt))
| Just dest_is <- mapM dimFix dest_slice,
length dest_is == length (memLocShape dest_loc) -> do
(dest_mem, dest_space, dest_i) <- fullyIndexArray' dest_loc dest_is
vol <- asks envVolatility
emit $ Imp.Write dest_mem dest_i bt dest_space vol (Imp.var src bt)
| otherwise ->
error $
unwords
[ "copyDWIMDest: array-typed target and prim-typed source",
pretty src,
"with slice",
pretty dest_slice
]
(ArrayDestination Nothing, _) ->
pure () -- Nothing to do; something else set some memory
-- somewhere.
(_, AccVar {}) ->
pure () -- Nothing to do; accumulators are phantoms.
-- | Copy from here to there; both destination and source may be
-- indexed. If so, they had better be arrays of enough dimensions.
-- This function will generally just Do What I Mean, and Do The Right
-- Thing. Both destination and source must be in scope.
copyDWIM ::
VName ->
[DimIndex (Imp.TExp Int64)] ->
SubExp ->
[DimIndex (Imp.TExp Int64)] ->
ImpM rep r op ()
copyDWIM dest dest_slice src src_slice = do
dest_entry <- lookupVar dest
let dest_target =
case dest_entry of
ScalarVar _ _ ->
ScalarDestination dest
ArrayVar _ (ArrayEntry (MemLoc mem shape ixfun) _) ->
ArrayDestination $ Just $ MemLoc mem shape ixfun
MemVar _ _ ->
MemoryDestination dest
AccVar {} ->
-- Does not matter; accumulators are phantoms.
ArrayDestination Nothing
copyDWIMDest dest_target dest_slice src src_slice
-- | As 'copyDWIM', but implicitly 'DimFix'es the indexes.
copyDWIMFix ::
VName ->
[Imp.TExp Int64] ->
SubExp ->
[Imp.TExp Int64] ->
ImpM rep r op ()
copyDWIMFix dest dest_is src src_is =
copyDWIM dest (map DimFix dest_is) src (map DimFix src_is)
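-- | Editor's illustrative sketch (not part of the original module): copy one
-- element from array @src@ to array @dest@ at the same flat index, using the
-- fixed-index wrapper above.  All names are hypothetical.
copyOneElem :: VName -> VName -> Imp.TExp Int64 -> ImpM rep r op ()
copyOneElem dest src i = copyDWIMFix dest [i] (Var src) [i]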
-- | @compileAlloc pat size space@ allocates @size@ bytes of memory in
-- @space@, writing the result to @pat@, which must contain a single
-- memory-typed element.
compileAlloc ::
Mem rep inner => Pat (LetDec rep) -> SubExp -> Space -> ImpM rep r op ()
compileAlloc (Pat [mem]) e space = do
let e' = Imp.bytes $ toInt64Exp e
allocator <- asks $ M.lookup space . envAllocCompilers
case allocator of
Nothing -> emit $ Imp.Allocate (patElemName mem) e' space
Just allocator' -> allocator' (patElemName mem) e'
compileAlloc pat _ _ =
error $ "compileAlloc: Invalid pattern: " ++ pretty pat
-- | The number of bytes needed to represent the array in a
-- straightforward contiguous format, as a t'Int64' expression.
typeSize :: Type -> Count Bytes (Imp.TExp Int64)
typeSize t =
Imp.bytes $ primByteSize (elemType t) * product (map toInt64Exp (arrayDims t))
-- | Is this indexing in-bounds for an array of the given shape? This
-- is useful for things like scatter, which ignores out-of-bounds
-- writes.
inBounds :: Slice (Imp.TExp Int64) -> [Imp.TExp Int64] -> Imp.TExp Bool
inBounds (Slice slice) dims =
let condInBounds (DimFix i) d =
0 .<=. i .&&. i .<. d
condInBounds (DimSlice i n s) d =
0 .<=. i .&&. i + (n - 1) * s .<. d
in foldl1 (.&&.) $ zipWith condInBounds slice dims
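-- | Editor's illustrative sketch (not part of the original module): guard a
-- scatter-style write with the bounds check above, using 'sWhen' and 'sWrite'
-- defined later in this module.  All names are hypothetical.
writeIfInBounds :: VName -> Imp.TExp Int64 -> Imp.TExp Int64 -> Imp.Exp -> ImpM rep r op ()
writeIfInBounds arr i n v =
  sWhen (inBounds (Slice [DimFix i]) [n]) $ sWrite arr [i] v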
--- Building blocks for constructing code.
sFor' :: VName -> Imp.Exp -> ImpM rep r op () -> ImpM rep r op ()
sFor' i bound body = do
let it = case primExpType bound of
IntType bound_t -> bound_t
t -> error $ "sFor': bound " ++ pretty bound ++ " is of type " ++ pretty t
addLoopVar i it
body' <- collect body
emit $ Imp.For i bound body'
sFor :: String -> Imp.TExp t -> (Imp.TExp t -> ImpM rep r op ()) -> ImpM rep r op ()
sFor i bound body = do
i' <- newVName i
sFor' i' (untyped bound) $
body $ TPrimExp $ Imp.var i' $ primExpType $ untyped bound
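-- | Editor's illustrative sketch (not part of the original module): zero the
-- first @n@ elements of a (hypothetical) array @arr@ with the structured loop
-- combinator above and 'sWrite' from further down in this module.
zeroFirstN :: VName -> Imp.TExp Int64 -> ImpM rep r op ()
zeroFirstN arr n =
  sFor "i" n $ \i ->
    sWrite arr [i] $ untyped (0 :: Imp.TExp Int64)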
sWhile :: Imp.TExp Bool -> ImpM rep r op () -> ImpM rep r op ()
sWhile cond body = do
body' <- collect body
emit $ Imp.While cond body'
sComment :: String -> ImpM rep r op () -> ImpM rep r op ()
sComment s code = do
code' <- collect code
emit $ Imp.Comment s code'
sIf :: Imp.TExp Bool -> ImpM rep r op () -> ImpM rep r op () -> ImpM rep r op ()
sIf cond tbranch fbranch = do
tbranch' <- collect tbranch
fbranch' <- collect fbranch
-- Avoid generating branch if the condition is known statically.
emit $
if cond == true
then tbranch'
else
if cond == false
then fbranch'
else Imp.If cond tbranch' fbranch'
sWhen :: Imp.TExp Bool -> ImpM rep r op () -> ImpM rep r op ()
sWhen cond tbranch = sIf cond tbranch (return ())
sUnless :: Imp.TExp Bool -> ImpM rep r op () -> ImpM rep r op ()
sUnless cond = sIf cond (return ())
sOp :: op -> ImpM rep r op ()
sOp = emit . Imp.Op
sDeclareMem :: String -> Space -> ImpM rep r op VName
sDeclareMem name space = do
name' <- newVName name
emit $ Imp.DeclareMem name' space
addVar name' $ MemVar Nothing $ MemEntry space
pure name'
sAlloc_ :: VName -> Count Bytes (Imp.TExp Int64) -> Space -> ImpM rep r op ()
sAlloc_ name' size' space = do
allocator <- asks $ M.lookup space . envAllocCompilers
case allocator of
Nothing -> emit $ Imp.Allocate name' size' space
Just allocator' -> allocator' name' size'
sAlloc :: String -> Count Bytes (Imp.TExp Int64) -> Space -> ImpM rep r op VName
sAlloc name size space = do
name' <- sDeclareMem name space
sAlloc_ name' size space
pure name'
sArray :: String -> PrimType -> ShapeBase SubExp -> VName -> IxFun -> ImpM rep r op VName
sArray name bt shape mem ixfun = do
name' <- newVName name
dArray name' bt shape mem ixfun
pure name'
-- | Declare an array in row-major order in the given memory block.
sArrayInMem :: String -> PrimType -> ShapeBase SubExp -> VName -> ImpM rep r op VName
sArrayInMem name pt shape mem =
sArray name pt shape mem $
IxFun.iota $ map (isInt64 . primExpFromSubExp int64) $ shapeDims shape
-- | Like 'sAllocArray', but permute the in-memory representation of the indices as specified.
sAllocArrayPerm :: String -> PrimType -> ShapeBase SubExp -> Space -> [Int] -> ImpM rep r op VName
sAllocArrayPerm name pt shape space perm = do
let permuted_dims = rearrangeShape perm $ shapeDims shape
mem <- sAlloc (name ++ "_mem") (typeSize (Array pt shape NoUniqueness)) space
let iota_ixfun = IxFun.iota $ map (isInt64 . primExpFromSubExp int64) permuted_dims
sArray name pt shape mem $
IxFun.permute iota_ixfun $ rearrangeInverse perm
-- | Uses linear/iota index function.
sAllocArray :: String -> PrimType -> ShapeBase SubExp -> Space -> ImpM rep r op VName
sAllocArray name pt shape space =
sAllocArrayPerm name pt shape space [0 .. shapeRank shape - 1]
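-- | Editor's illustrative sketch (not part of the original module): allocate a
-- one-dimensional scratch array of @n@ 64-bit integers and zero its first
-- element.  Assumes 'DefaultSpace' and 'int64' are in scope, as elsewhere in
-- this module; all names are hypothetical.
allocZeroHead :: SubExp -> ImpM rep r op VName
allocZeroHead n = do
  arr <- sAllocArray "scratch" int64 (Shape [n]) DefaultSpace
  sWrite arr [0] $ untyped (0 :: Imp.TExp Int64)
  pure arr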
-- | Uses linear/iota index function.
sStaticArray :: String -> Space -> PrimType -> Imp.ArrayContents -> ImpM rep r op VName
sStaticArray name space pt vs = do
let num_elems = case vs of
Imp.ArrayValues vs' -> length vs'
Imp.ArrayZeros n -> fromIntegral n
shape = Shape [intConst Int64 $ toInteger num_elems]
mem <- newVNameForFun $ name ++ "_mem"
emit $ Imp.DeclareArray mem space pt vs
addVar mem $ MemVar Nothing $ MemEntry space
sArray name pt shape mem $ IxFun.iota [fromIntegral num_elems]
sWrite :: VName -> [Imp.TExp Int64] -> Imp.Exp -> ImpM rep r op ()
sWrite arr is v = do
(mem, space, offset) <- fullyIndexArray arr is
vol <- asks envVolatility
emit $ Imp.Write mem offset (primExpType v) space vol v
sUpdate :: VName -> Slice (Imp.TExp Int64) -> SubExp -> ImpM rep r op ()
sUpdate arr slice v = copyDWIM arr (unSlice slice) v []
sLoopNest ::
Shape ->
([Imp.TExp Int64] -> ImpM rep r op ()) ->
ImpM rep r op ()
sLoopNest = sLoopNest' [] . shapeDims
where
sLoopNest' is [] f = f $ reverse is
sLoopNest' is (d : ds) f =
sFor "nest_i" (toInt64Exp d) $ \i -> sLoopNest' (i : is) ds f
-- | Untyped assignment.
(<~~) :: VName -> Imp.Exp -> ImpM rep r op ()
x <~~ e = emit $ Imp.SetScalar x e
infixl 3 <~~
-- | Typed assignment.
(<--) :: TV t -> Imp.TExp t -> ImpM rep r op ()
TV x _ <-- e = emit $ Imp.SetScalar x $ untyped e
infixl 3 <--
-- | Construct an ad-hoc function that does not
-- correspond to any of the IR functions in the input program.
function ::
Name ->
[Imp.Param] ->
[Imp.Param] ->
ImpM rep r op () ->
ImpM rep r op ()
function fname outputs inputs m = local newFunction $ do
body <- collect $ do
mapM_ addParam $ outputs ++ inputs
m
emitFunction fname $ Imp.Function Nothing outputs inputs body [] []
where
addParam (Imp.MemParam name space) =
addVar name $ MemVar Nothing $ MemEntry space
addParam (Imp.ScalarParam name bt) =
addVar name $ ScalarVar Nothing $ ScalarEntry bt
newFunction env = env {envFunction = Just fname}
dSlices :: [Imp.TExp Int64] -> ImpM rep r op [Imp.TExp Int64]
dSlices = fmap (drop 1 . snd) . dSlices'
where
dSlices' [] = pure (1, [1])
dSlices' (n : ns) = do
(prod, ns') <- dSlices' ns
n' <- dPrimVE "slice" $ n * prod
pure (n', n' : ns')
-- | @dIndexSpace vs_ds i@ computes indices into an array whose
-- dimensions are the second components of @vs_ds@, given the flat
-- index @i@, and binds each index to the corresponding 'VName'.  One
-- index is produced per entry of @vs_ds@.
dIndexSpace ::
[(VName, Imp.TExp Int64)] ->
Imp.TExp Int64 ->
ImpM rep r op ()
dIndexSpace vs_ds j = do
slices <- dSlices (map snd vs_ds)
loop (zip (map fst vs_ds) slices) j
where
loop ((v, size) : rest) i = do
dPrimV_ v (i `quot` size)
i' <- dPrimVE "remnant" $ i - Imp.le64 v * size
loop rest i'
loop _ _ = pure ()
-- | Like 'dIndexSpace', but invent some new names for the indexes
-- based on the given template.
dIndexSpace' ::
String ->
[Imp.TExp Int64] ->
Imp.TExp Int64 ->
ImpM rep r op [Imp.TExp Int64]
dIndexSpace' desc ds j = do
ivs <- replicateM (length ds) (newVName desc)
dIndexSpace (zip ivs ds) j
pure $ map Imp.le64 ivs
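-- | Editor's illustrative sketch (not part of the original module): recover a
-- (row, column) pair from a flat index into a hypothetical @rows * cols@
-- array using 'dIndexSpace'' above.
rowColFromFlat ::
  Imp.TExp Int64 ->
  Imp.TExp Int64 ->
  Imp.TExp Int64 ->
  ImpM rep r op [Imp.TExp Int64]
rowColFromFlat rows cols flat = dIndexSpace' "idx" [rows, cols] flat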
|
diku-dk/futhark
|
src/Futhark/CodeGen/ImpGen.hs
|
isc
| 63,424 | 0 | 23 | 16,348 | 21,219 | 10,527 | 10,692 | -1 | -1 |
-- FizzBuzz in Haskell
module Main where
main :: IO ()
main = printAll $ map fizzBuzz [1..100]
where
printAll [] = return ()
printAll (x:xs) = putStrLn x >> printAll xs
fizzBuzz :: Integer -> String
fizzBuzz n | n `mod` 15 == 0 = "fizzbuzz"
| n `mod` 5 == 0 = "buzz"
| n `mod` 3 == 0 = "fizz"
| otherwise = show n
|
qjcg/CO2Aldrin
|
app/src/fizzbuzz.hs
|
mit
| 380 | 0 | 9 | 134 | 159 | 81 | 78 | 10 | 2 |
module Spelling where
{- A few explicit Prelude imports -}
import Prelude ()
import GHC.Base ((.))
import GHC.Num (Num, (+))
import GHC.Types (Int)
import Data.Tuple (fst, snd)
import System.IO (IO)
import Data.Eq (Eq, (==))
import Data.Functor (fmap)
import Data.Ord (Ord, Ordering(GT),
comparing) -- not in Prelude
import Data.List ((++), zip,
inits, tails) -- not in Prelude
import Control.Monad (return,
(<=<)) -- not in Prelude
{- Other imports -}
import Data.String (String, words)
import Data.Char (isAlpha, toLower)
import Data.Text (unpack)
import Data.Text.IO (readFile)
import qualified Data.Map.Strict as M
import Data.Maybe (mapMaybe)
import Data.Monoid ((<>))
import Data.Foldable (Foldable, foldl')
import Paths_Norvigs_Spelling_Corrector (getDataFileName)
import First (First(MkFirst), getFirst)
type TrainingDict = M.Map String Int
transpose :: [a] -> [a]
transpose [] = []
transpose [_] = []
transpose (a:b:xs) = b:a:xs
delete :: [a] -> [a]
delete [] = []
delete (_:xs) = xs
insert :: [a] -> [a] -> [[a]]
insert letters word =
do -- List
l <- letters
return (l:word)
replace :: [a] -> [a] -> [[a]]
replace _ [] = []
replace letters (_:xs) = insert letters xs
allEditors :: [a] -> [[a] -> [[a]]]
allEditors letters = [return . transpose, replace letters, return . delete, insert letters]
splits :: [a] -> [([a],[a])]
splits =
do -- Reader
iw <- inits
tw <- tails
return (zip iw tw)
editsOnceWith :: [[a]->[[a]]] -> [a] -> [[a]]
editsOnceWith editors word = do -- List
(begin,end) <- splits word
editor <- editors
endedit <- editor end
return (begin ++ endedit)
inDict :: (Eq k, Ord k) => M.Map k v -> [k] -> [(k,v)]
inDict dict = mapMaybe myLookup
where
myLookup w = -- returns either Nothing or Just (w,f) if w is found in dict (f is the frequency)
-- (,) w <$> M.lookup w dict
do -- Maybe
f <- M.lookup w dict
return (w,f)
allChoices :: ([a] -> [(a,b)]) -> (a -> [a]) -> a -> [(a,b)]
allChoices inDict' edits1 word = getFirst possibilities
where
possibilities =
mkFirst return
<> mkFirst edits1
<> mkFirst (edits1 <=< edits1)
mkFirst edit = (MkFirst . inDict' . edit) word
{- Missing function from the Data.Foldable package: maximumBy with default for
empty list -}
maxByOrDefault :: (Foldable t) => (a -> a -> Ordering) -> a -> t a -> a
maxByOrDefault comp def list = foldl' max' def list
where
max' e e' = if comp e' e == GT then e' else e
{- Choose word with best score -}
chooseBest :: (Ord k, Ord v, Num v) => k -> [(k,v)] -> k
chooseBest nothing choices' = fst bestPair
where bestPair = maxByOrDefault (comparing snd) (nothing, 0) choices'
{- Getting the training dictionary -}
nWords :: IO TrainingDict
nWords = do -- IO
fileName <- getDataFileName "big.txt"
ws <- readFile fileName
return ((train . lowerWords . unpack) ws)
lowerWords :: String -> [String]
lowerWords = words . fmap normalize
where normalize c = if isAlpha c then toLower c else ' '
train :: (Ord k, Num v) => [k] -> M.Map k v
train trainWords = foldl' increment M.empty trainWords
where
increment dict x = M.insertWith (+) x 1 dict
{- Piecing all together -}
alphabet :: String
alphabet = ['a' .. 'z']
correct :: TrainingDict -> String -> String -> String
correct dict notfound word = chooseBest notfound choices
where
choices = allChoices (inDict dict) (editsOnceWith (allEditors alphabet)) word
ioCorrect :: String -> IO String
ioCorrect w = do -- IO
d <- nWords
return (correct d "" w)
|
olivierverdier/Norvigs-Spelling-Corrector
|
src/Spelling.hs
|
mit
| 3,647 | 0 | 12 | 843 | 1,428 | 788 | 640 | 94 | 2 |
{-# LANGUAGE CPP #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
module Hpack.Render.Dsl (
-- * AST
Element (..)
, Value (..)
-- * Render
, RenderSettings (..)
, CommaStyle (..)
, defaultRenderSettings
, Alignment (..)
, Nesting
, render
-- * Utils
, sortFieldsBy
#ifdef TEST
, Lines (..)
, renderValue
, addSortKey
#endif
) where
import Imports
data Value =
Literal String
| CommaSeparatedList [String]
| LineSeparatedList [String]
| WordList [String]
deriving (Eq, Show)
data Element = Stanza String [Element] | Group Element Element | Field String Value | Verbatim String
deriving (Eq, Show)
data Lines = SingleLine String | MultipleLines [String]
deriving (Eq, Show)
data CommaStyle = LeadingCommas | TrailingCommas
deriving (Eq, Show)
newtype Nesting = Nesting Int
deriving (Eq, Show, Num, Enum)
newtype Alignment = Alignment Int
deriving (Eq, Show, Num)
data RenderSettings = RenderSettings {
renderSettingsIndentation :: Int
, renderSettingsFieldAlignment :: Alignment
, renderSettingsCommaStyle :: CommaStyle
} deriving (Eq, Show)
defaultRenderSettings :: RenderSettings
defaultRenderSettings = RenderSettings 2 0 LeadingCommas
render :: RenderSettings -> Nesting -> Element -> [String]
render settings nesting (Stanza name elements) = indent settings nesting name : renderElements settings (succ nesting) elements
render settings nesting (Group a b) = render settings nesting a ++ render settings nesting b
render settings nesting (Field name value) = renderField settings nesting name value
render settings nesting (Verbatim str) = map (indent settings nesting) (lines str)
renderElements :: RenderSettings -> Nesting -> [Element] -> [String]
renderElements settings nesting = concatMap (render settings nesting)
renderField :: RenderSettings -> Nesting -> String -> Value -> [String]
renderField settings@RenderSettings{..} nesting name value = case renderValue settings value of
SingleLine "" -> []
SingleLine x -> [indent settings nesting (name ++ ": " ++ padding ++ x)]
MultipleLines [] -> []
MultipleLines xs -> (indent settings nesting name ++ ":") : map (indent settings $ succ nesting) xs
where
Alignment fieldAlignment = renderSettingsFieldAlignment
padding = replicate (fieldAlignment - length name - 2) ' '
renderValue :: RenderSettings -> Value -> Lines
renderValue RenderSettings{..} v = case v of
Literal s -> SingleLine s
WordList ws -> SingleLine $ unwords ws
LineSeparatedList xs -> renderLineSeparatedList renderSettingsCommaStyle xs
CommaSeparatedList xs -> renderCommaSeparatedList renderSettingsCommaStyle xs
renderLineSeparatedList :: CommaStyle -> [String] -> Lines
renderLineSeparatedList style = MultipleLines . map (padding ++)
where
padding = case style of
LeadingCommas -> " "
TrailingCommas -> ""
renderCommaSeparatedList :: CommaStyle -> [String] -> Lines
renderCommaSeparatedList style = MultipleLines . case style of
LeadingCommas -> map renderLeadingComma . zip (True : repeat False)
TrailingCommas -> map renderTrailingComma . reverse . zip (True : repeat False) . reverse
where
renderLeadingComma :: (Bool, String) -> String
renderLeadingComma (isFirst, x)
| isFirst = " " ++ x
| otherwise = ", " ++ x
renderTrailingComma :: (Bool, String) -> String
renderTrailingComma (isLast, x)
| isLast = x
| otherwise = x ++ ","
instance IsString Value where
fromString = Literal
indent :: RenderSettings -> Nesting -> String -> String
indent RenderSettings{..} (Nesting nesting) s = replicate (nesting * renderSettingsIndentation) ' ' ++ s
sortFieldsBy :: [String] -> [Element] -> [Element]
sortFieldsBy existingFieldOrder =
map snd
. sortOn fst
. addSortKey
. map (\a -> (existingIndex a, a))
where
existingIndex :: Element -> Maybe Int
existingIndex (Field name _) = name `elemIndex` existingFieldOrder
existingIndex _ = Nothing
addSortKey :: [(Maybe Int, a)] -> [((Int, Int), a)]
addSortKey = go (-1) . zip [0..]
where
go :: Int -> [(Int, (Maybe Int, a))] -> [((Int, Int), a)]
go n xs = case xs of
[] -> []
(x, (Just y, a)) : ys -> ((y, x), a) : go y ys
(x, (Nothing, a)) : ys -> ((n, x), a) : go n ys
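-- Editor's note (not part of the original module): a small usage sketch of
-- 'render' with 'defaultRenderSettings', shown as a doctest-style comment.
-- The expected output is what the definitions above should produce.
--
-- >>> render defaultRenderSettings 0 (Stanza "library" [Field "exposed-modules" (WordList ["Foo", "Bar"])])
-- ["library","  exposed-modules: Foo Bar"]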
|
sol/hpack
|
src/Hpack/Render/Dsl.hs
|
mit
| 4,283 | 0 | 14 | 822 | 1,471 | 788 | 683 | 95 | 4 |
{-|
Module: Treb.BadRegex
Description: Unfortunate Regex Evaluation on Data.Text
Copyright: Travis Whitaker 2015
License: MIT
Maintainer: [email protected]
Stability: Provisional
Portability: POSIX
Please fix this.
-}
module Treb.BadRegex where
import qualified Data.Text as T
import Text.Regex.TDFA
-- | Evaluate a regular expression over 'T.Text' by unpacking both the
-- subject and the pattern to 'String' and matching with '=~'.
badRegexBool :: T.Text -> T.Text -> Bool
badRegexBool x e = T.unpack x =~ T.unpack e
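-- Editor's note (not part of the original module): example usage, shown as a
-- doctest-style comment.
--
-- >>> badRegexBool (T.pack "hello") (T.pack "^h.*o$")
-- True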
|
MadSciGuys/trebuchet
|
src/Treb/BadRegex.hs
|
mit
| 473 | 0 | 7 | 82 | 63 | 36 | 27 | 5 | 1 |
-- | Safe temporary file implementation.
--
-- Safe in the sense that OS facilities to create a temporary file are used and
-- it is assumed that the OS is doing it right.
--
{-# LANGUAGE ForeignFunctionInterface #-}
module System.Temporary
( withTemporaryFile )
where
import System.IO
import Data.Monoid
import Foreign.C.Types
import Foreign.C.String
import Foreign.C.Error
import Foreign.Ptr
import Control.Exception
import Control.Monad
import System.Posix.IO
import System.Posix.Files
import System.Posix.Types
foreign import ccall unsafe mkstemp :: Ptr CChar -> IO CInt
foreign import ccall unsafe get_tempdir :: IO (Ptr CChar)
withTemporaryFile :: (Handle -> FilePath -> IO a) -> IO a
withTemporaryFile action = mask $ \restore -> do
tmp_dir_name <- peekCString =<< get_tempdir
withCString (tmp_dir_name <> "/hs-interfaceXXXXXX") $ \tmpname -> do
result <- mkstemp tmpname
when (result == -1) $ throwErrno "withTemporaryFile"
str <- peekCString tmpname
handle <- fdToHandle (Fd result)
flip finally (removeLink str >> hClose handle) $ restore $
action handle str
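-- | Editor's illustrative sketch (not part of the original module): run an
-- action against a scratch file.  Note that the file is unlinked as soon as
-- 'withTemporaryFile' returns, so the reported path no longer exists by then.
exampleScratch :: IO FilePath
exampleScratch = withTemporaryFile $ \h path -> do
  hPutStrLn h "scratch data"
  pure path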
|
Noeda/dynamically-loaded-haskell
|
System/Temporary.hs
|
mit
| 1,144 | 0 | 18 | 227 | 289 | 152 | 137 | 26 | 1 |
module Renderable where
class Renderable a where
render :: a -> IO ()
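-- Editor's illustrative sketch (not part of the original module): a minimal
-- instance of the class above, just to show its shape.  'Marker' is a
-- hypothetical type.
data Marker = Marker

instance Renderable Marker where
  render _ = putStrLn "rendering a Marker"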
|
MichaelBaker/opengl-haskell
|
src/Renderable.hs
|
mit
| 73 | 0 | 9 | 16 | 27 | 14 | 13 | 3 | 0 |