code
stringlengths 5
1.03M
| repo_name
stringlengths 5
90
| path
stringlengths 4
158
| license
stringclasses 15
values | size
int64 5
1.03M
| n_ast_errors
int64 0
53.9k
| ast_max_depth
int64 2
4.17k
| n_whitespaces
int64 0
365k
| n_ast_nodes
int64 3
317k
| n_ast_terminals
int64 1
171k
| n_ast_nonterminals
int64 1
146k
| loc
int64 -1
37.3k
| cycloplexity
int64 -1
1.31k
|
---|---|---|---|---|---|---|---|---|---|---|---|---|
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
module Kubernetes.V1.PersistentVolume where
import GHC.Generics
import Data.Text
import Kubernetes.V1.ObjectMeta
import Kubernetes.V1.PersistentVolumeSpec
import Kubernetes.V1.PersistentVolumeStatus
import qualified Data.Aeson
-- | PersistentVolume (PV) is a storage resource provisioned by an administrator. It is analogous to a node. More info: http://releases.k8s.io/HEAD/docs/user-guide/persistent-volumes.md
-- Fields mirror the Kubernetes v1 API object; 'Nothing' means the field
-- was absent from the JSON document.
data PersistentVolume = PersistentVolume
  { kind :: Maybe Text -- ^ Kind is a string value representing the REST resource this object represents. Servers may infer this from the endpoint the client submits requests to. Cannot be updated. In CamelCase. More info: http://releases.k8s.io/HEAD/docs/devel/api-conventions.md#types-kinds
  , apiVersion :: Maybe Text -- ^ APIVersion defines the versioned schema of this representation of an object. Servers should convert recognized schemas to the latest internal value, and may reject unrecognized values. More info: http://releases.k8s.io/HEAD/docs/devel/api-conventions.md#resources
  , metadata :: Maybe ObjectMeta -- ^ Standard object's metadata. More info: http://releases.k8s.io/HEAD/docs/devel/api-conventions.md#metadata
  , spec :: Maybe PersistentVolumeSpec -- ^ Spec defines a specification of a persistent volume owned by the cluster. Provisioned by an administrator. More info: http://releases.k8s.io/HEAD/docs/user-guide/persistent-volumes.md#persistent-volumes
  , status :: Maybe PersistentVolumeStatus -- ^ Status represents the current information/status for the persistent volume. Populated by the system. Read-only. More info: http://releases.k8s.io/HEAD/docs/user-guide/persistent-volumes.md#persistent-volumes
  } deriving (Show, Eq, Generic)

-- Both instances use the 'Generic'-derived defaults, so the JSON keys
-- match the record field names above exactly.
instance Data.Aeson.FromJSON PersistentVolume
instance Data.Aeson.ToJSON PersistentVolume
| minhdoboi/deprecated-openshift-haskell-api | kubernetes/lib/Kubernetes/V1/PersistentVolume.hs | apache-2.0 | 2,004 | 0 | 9 | 243 | 141 | 86 | 55 | 21 | 0 |
module EdictDB where
import System.IO
import qualified Data.Text as DT
import Data.Text.Encoding
import qualified Data.ByteString.Char8 as C
-- | A dictionary entry: a string paired with a single 'Char'.
-- NOTE(review): the meaning of the 'Char' component is not evident from
-- this file — confirm against callers before relying on it.
-- This local alias hides the implicitly-imported 'Prelude.Word'.
type Word = (String, Char)

-- | Look up an entry in the dictionary.  Not yet implemented.
dbLookup :: String -> Maybe Word
dbLookup = undefined

-- | Extract a line from an I/O action.  Not yet implemented.
returnLine :: IO String -> String
returnLine = undefined
-- | Read the whole @edict@ dictionary file (treated as Latin-1) into a
-- 'String'.
--
-- Bug fix: 'hGetContents' reads lazily, and the previous version called
-- 'hClose' before the contents were ever demanded, so callers could see a
-- truncated (typically empty) result.  We now force the fully decoded
-- text (via 'DT.length') before closing the handle.
getDict :: IO String
getDict = do
  h <- openFile "edict" ReadMode
  hSetEncoding h latin1
  raw <- hGetContents h
  let decoded = decodeLatin1 (C.pack raw)
  -- Force the entire contents before closing the handle; otherwise the
  -- lazily-read string would be cut off at whatever was already buffered.
  DT.length decoded `seq` hClose h
  return (DT.unpack decoded)
| MarkMcCaskey/Refcon | EdictDB.hs | apache-2.0 | 468 | 0 | 12 | 92 | 157 | 83 | 74 | 18 | 1 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
module Store (Id, Url, FileType, genId, genPut, put, get) where
import Control.Monad.Trans.AWS
(sinkBody, runResourceT, runAWST, send, presignURL, newEnv
,Env, Seconds, toBody, RqBody, Region(..), Credentials(..))
import Network.AWS.S3
(getObject, putObject, gorsBody, PutObjectResponse
,BucketName(..), ObjectKey(..))
import Control.Monad.Trans
(liftIO)
import Control.Lens
(view)
import Data.Conduit.Binary
(sinkLbs)
import Data.Time
(getCurrentTime)
import Data.ByteString
(ByteString)
import Data.Text
(pack)
import qualified Data.ByteString.Char8 as BS
import qualified Data.ByteString.Lazy.Char8 as BSL
import Data.UUID
(UUID)
import System.Random
(randomIO)
import Config
(Domain)
import User
(Upload(..), FileType, FileName, Token)
import Network.S3
(S3Keys(..), S3Request(..), S3Method(S3PUT), generateS3URL, signedRequest)
import System.Environment as Sys
-- | Identifier for a stored object: a random 'UUID' (see 'genId').
type Id
  = UUID

-- | A (pre-signed) request URL, as a plain 'String'.
type Url
  = String
-- | Generate a fresh random 'UUID' to identify an upload.
genId :: IO Id
genId = randomIO
-- | Build a pre-signed S3 PUT URL for a client-side upload.
--
-- Reads AWS credentials from the @MS_AWS_ID@ / @MS_AWS_KEY@ environment
-- variables ('Sys.getEnv' throws 'IOError' if either is unset) and signs
-- a PUT against the @\<domain\>-uploads@ bucket for the upload's file
-- name, valid for 'expiry' seconds.
genPut :: Domain -> Upload -> IO Url
genPut domain Upload{..} =
  do
    credentials <- (S3Keys . BS.pack) <$> Sys.getEnv "MS_AWS_ID" <*> (BS.pack <$> Sys.getEnv "MS_AWS_KEY")
    let request = S3Request S3PUT (BS.pack fileType) (BS.pack $ domain ++ "-uploads") (BS.pack fileName) expiry
    BS.unpack . signedRequest <$> generateS3URL credentials request
-- | Serialise a value with 'show' and store it under the given id in the
-- domain's metadata bucket (see 'metaBucket' / 'key').
put :: Show a => Domain -> Id -> a -> IO PutObjectResponse
put domain id object =
  do
    env <- awsEnv
    let req = send $ putObject (metaBucket domain) (key id) (body object)
    runResourceT . runAWST env $ req
-- | Fetch the object stored under the given id and parse it back with
-- 'read' (the inverse of 'put', which stored it with 'show').
--
-- NOTE(review): 'read' is partial — a malformed or truncated stored
-- object makes this throw at the use site; consider 'readMaybe'.
get :: Read a => Domain -> Id -> IO a
get domain id =
  do
    env <- awsEnv
    let req = send $ getObject (metaBucket domain) (key id)
    -- This local 'body' (the response bytes) shadows the top-level
    -- 'body' helper defined below.
    body <- runResourceT . runAWST env $
      do
        resp <- req
        sinkBody (view gorsBody resp) sinkLbs
    return . read . BSL.unpack $ body
-- | AWS environment pinned to us-east-1, with credentials discovered via
-- the standard chain (environment, config file, instance profile).
awsEnv :: IO Env
awsEnv = newEnv NorthVirginia Discover
-- | Name of the bucket that stores upload metadata for a domain.
metaBucket :: Domain -> BucketName
metaBucket domain =
  BucketName (pack (domain ++ "/media-server/uploads/meta"))
-- | Object key derived from the upload id's textual ('show') form.
key :: Store.Id -> ObjectKey
key = ObjectKey . pack . show
-- | Request body built from a value's 'show' representation.
body :: Show a => a -> RqBody
body object = toBody (show object)
-- | Lifetime of a pre-signed URL, in seconds: 30 minutes.
expiry :: Integer
expiry = 30 * 60
| svanderbleek/media-server | src/Store.hs | bsd-3-clause | 2,245 | 0 | 14 | 437 | 797 | 441 | 356 | 75 | 1 |
-- |Type aliases used throughout the crypto-api modules.
module Crypto.Types where
import Data.ByteString as B
import Data.ByteString.Lazy as L
-- |The length of a field (usually a ByteString) in bits
type BitLength = Int

-- |The length of a field in bytes.
type ByteLength = Int
| ekmett/crypto-api | Crypto/Types.hs | bsd-3-clause | 283 | 0 | 4 | 49 | 36 | 26 | 10 | 5 | 0 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
module Web.RTBBidder.Types.Request.Video (Video(..)) where
import qualified Data.Aeson as AESON
import Data.Aeson ((.=), (.:), (.:?), (.!=))
import qualified Data.Text as TX
import Web.RTBBidder.Types.Request.Banner (Banner(..))
-- | An OpenRTB @Video@ impression object.
-- 'Maybe' fields are optional attributes; list-valued fields default to
-- @[]@ when absent (see the 'AESON.FromJSON' instance in this module).
data Video = Video
  { videoMimes :: [TX.Text] -- ^ Supported content MIME types; the only key required when parsing.
  , videoMinduration :: Maybe Int
  , videoMaxduration :: Maybe Int
  , videoProtocols :: [Int]
  , videoProtocol :: Maybe Int -- DEPRECATED
  , videoW :: Maybe Int
  , videoH :: Maybe Int
  , videoStartdelay :: Maybe Int
  , videoPlacement :: Maybe Int
  , videoLinearity :: Maybe Int
  , videoSkip :: Maybe Int
  , videoSkipmin :: Int -- ^ Defaults to 0 on parse.
  , videoSkipafter :: Int -- ^ Defaults to 0 on parse.
  , videoSequence :: Maybe Int
  , videoBattr :: [Int]
  , videoMaxextendeded :: Maybe Int -- ^ Serialised under the JSON key @maxextended@.
  , videoMinbitrate :: Maybe Int
  , videoMaxbitrate :: Maybe Int
  , videoBoxingallowed :: Int -- ^ Defaults to 1 on parse.
  , videoPlaybackmethod :: [Int]
  , videoPlaybackend :: Maybe Int
  , videoDelivery :: [Int]
  , videoPos :: Maybe Int
  , videoCompanionad :: [Banner]
  , videoApi :: [Int]
  , videoCompaniontype :: [Int]
  , videoExt :: Maybe AESON.Value
  } deriving (Show, Eq)
-- | Decode from a JSON @video@ object.  @mimes@ is the only required
-- key; list-valued attributes default to @[]@, @skipmin@ and
-- @skipafter@ default to 0, and @boxingallowed@ defaults to 1.
instance AESON.FromJSON Video where
  parseJSON = AESON.withObject "video" $ \o -> do
    videoMimes <- o .: "mimes"
    videoMinduration <- o .:? "minduration"
    videoMaxduration <- o .:? "maxduration"
    videoProtocols <- o .:? "protocols" .!= []
    videoProtocol <- o .:? "protocol"
    videoW <- o .:? "w"
    videoH <- o .:? "h"
    videoStartdelay <- o .:? "startdelay"
    videoPlacement <- o .:? "placement"
    videoLinearity <- o .:? "linearity"
    videoSkip <- o .:? "skip"
    videoSkipmin <- o .:? "skipmin" .!= 0
    videoSkipafter <- o .:? "skipafter" .!= 0
    videoSequence <- o .:? "sequence"
    videoBattr <- o .:? "battr" .!= []
    videoMaxextendeded <- o .:? "maxextended"
    videoMinbitrate <- o .:? "minbitrate"
    videoMaxbitrate <- o .:? "maxbitrate"
    videoBoxingallowed <- o .:? "boxingallowed" .!= 1
    videoPlaybackmethod <- o .:? "playbackmethod" .!= []
    videoPlaybackend <- o .:? "playbackend"
    videoDelivery <- o .:? "delivery" .!= []
    videoPos <- o .:? "pos"
    videoCompanionad <- o .:? "companionad" .!= []
    videoApi <- o .:? "api" .!= []
    videoCompaniontype <- o .:? "companiontype" .!= []
    videoExt <- o .:? "ext"
    return Video{..}
-- | Encode to a JSON object.  NOTE(review): every key is always emitted,
-- so 'Nothing' fields serialise as @null@ rather than being omitted.
instance AESON.ToJSON Video where
  toJSON Video{..} = AESON.object
    [ "mimes" .= videoMimes
    , "minduration" .= videoMinduration
    , "maxduration" .= videoMaxduration
    , "protocols" .= videoProtocols
    , "protocol" .= videoProtocol
    , "w" .= videoW
    , "h" .= videoH
    , "startdelay" .= videoStartdelay
    , "placement" .= videoPlacement
    , "linearity" .= videoLinearity
    , "skip" .= videoSkip
    , "skipmin" .= videoSkipmin
    , "skipafter" .= videoSkipafter
    , "sequence" .= videoSequence
    , "battr" .= videoBattr
    , "maxextended" .= videoMaxextendeded
    , "minbitrate" .= videoMinbitrate
    , "maxbitrate" .= videoMaxbitrate
    , "boxingallowed" .= videoBoxingallowed
    , "playbackmethod" .= videoPlaybackmethod
    , "playbackend" .= videoPlaybackend
    , "delivery" .= videoDelivery
    , "pos" .= videoPos
    , "companionad" .= videoCompanionad
    , "api" .= videoApi
    , "companiontype" .= videoCompaniontype
    , "ext" .= videoExt
    ]
| hiratara/hs-rtb-bidder | src/Web/RTBBidder/Types/Request/Video.hs | bsd-3-clause | 3,366 | 0 | 12 | 743 | 948 | 517 | 431 | 95 | 0 |
{-
(c) The AQUA Project, Glasgow University, 1993-1998
\section[Simplify]{The main module of the simplifier}
-}
{-# LANGUAGE CPP #-}
module Simplify ( simplTopBinds, simplExpr, simplRules ) where
#include "HsVersions.h"
import DynFlags
import SimplMonad
import Type hiding ( substTy, substTyVar, extendTvSubst, extendCvSubst )
import SimplEnv
import SimplUtils
import FamInstEnv ( FamInstEnv )
import Literal ( litIsLifted ) --, mkMachInt ) -- temporalily commented out. See #8326
import Id
import MkId ( seqId, voidPrimId )
import MkCore ( mkImpossibleExpr, castBottomExpr )
import IdInfo
import Name ( Name, mkSystemVarName, isExternalName, getOccFS )
import Coercion hiding ( substCo, substCoVar )
import OptCoercion ( optCoercion )
import FamInstEnv ( topNormaliseType_maybe )
import DataCon ( DataCon, dataConWorkId, dataConRepStrictness
, isMarkedStrict, dataConRepArgTys ) --, dataConTyCon, dataConTag, fIRST_TAG )
--import TyCon ( isEnumerationTyCon ) -- temporalily commented out. See #8326
import CoreMonad ( Tick(..), SimplifierMode(..) )
import CoreSyn
import Demand ( StrictSig(..), dmdTypeDepth, isStrictDmd )
import PprCore ( pprCoreExpr )
import CoreUnfold
import CoreUtils
import CoreArity
--import PrimOp ( tagToEnumKey ) -- temporalily commented out. See #8326
import Rules ( mkRuleInfo, lookupRule, getRules )
import TysPrim ( voidPrimTy ) --, intPrimTy ) -- temporalily commented out. See #8326
import BasicTypes ( TopLevelFlag(..), isTopLevel, RecFlag(..) )
import MonadUtils ( foldlM, mapAccumLM, liftIO )
import Maybes ( orElse )
--import Unique ( hasKey ) -- temporalily commented out. See #8326
import Control.Monad
import Outputable
import FastString
import Pair
import Util
import ErrUtils
{-
The guts of the simplifier is in this module, but the driver loop for
the simplifier is in SimplCore.hs.
-----------------------------------------
*** IMPORTANT NOTE ***
-----------------------------------------
The simplifier used to guarantee that the output had no shadowing, but
it does not do so any more. (Actually, it never did!) The reason is
documented with simplifyArgs.
-----------------------------------------
*** IMPORTANT NOTE ***
-----------------------------------------
Many parts of the simplifier return a bunch of "floats" as well as an
expression. This is wrapped as a datatype SimplUtils.FloatsWith.
All "floats" are let-binds, not case-binds, but some non-rec lets may
be unlifted (with RHS ok-for-speculation).
-----------------------------------------
ORGANISATION OF FUNCTIONS
-----------------------------------------
simplTopBinds
- simplify all top-level binders
- for NonRec, call simplRecOrTopPair
- for Rec, call simplRecBind
------------------------------
simplExpr (applied lambda) ==> simplNonRecBind
simplExpr (Let (NonRec ...) ..) ==> simplNonRecBind
simplExpr (Let (Rec ...) ..) ==> simplify binders; simplRecBind
------------------------------
simplRecBind [binders already simplfied]
- use simplRecOrTopPair on each pair in turn
simplRecOrTopPair [binder already simplified]
Used for: recursive bindings (top level and nested)
top-level non-recursive bindings
Returns:
- check for PreInlineUnconditionally
- simplLazyBind
simplNonRecBind
Used for: non-top-level non-recursive bindings
beta reductions (which amount to the same thing)
Because it can deal with strict arts, it takes a
"thing-inside" and returns an expression
- check for PreInlineUnconditionally
- simplify binder, including its IdInfo
- if strict binding
simplStrictArg
mkAtomicArgs
completeNonRecX
else
simplLazyBind
addFloats
simplNonRecX: [given a *simplified* RHS, but an *unsimplified* binder]
Used for: binding case-binder and constr args in a known-constructor case
- check for PreInLineUnconditionally
- simplify binder
- completeNonRecX
------------------------------
simplLazyBind: [binder already simplified, RHS not]
Used for: recursive bindings (top level and nested)
top-level non-recursive bindings
non-top-level, but *lazy* non-recursive bindings
[must not be strict or unboxed]
Returns floats + an augmented environment, not an expression
- substituteIdInfo and add result to in-scope
[so that rules are available in rec rhs]
- simplify rhs
- mkAtomicArgs
- float if exposes constructor or PAP
- completeBind
completeNonRecX: [binder and rhs both simplified]
- if the the thing needs case binding (unlifted and not ok-for-spec)
build a Case
else
completeBind
addFloats
completeBind: [given a simplified RHS]
[used for both rec and non-rec bindings, top level and not]
- try PostInlineUnconditionally
- add unfolding [this is the only place we add an unfolding]
- add arity
Right hand sides and arguments
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
In many ways we want to treat
(a) the right hand side of a let(rec), and
(b) a function argument
in the same way. But not always! In particular, we would
like to leave these arguments exactly as they are, so they
will match a RULE more easily.
f (g x, h x)
g (+ x)
It's harder to make the rule match if we ANF-ise the constructor,
or eta-expand the PAP:
f (let { a = g x; b = h x } in (a,b))
g (\y. + x y)
On the other hand if we see the let-defns
p = (g x, h x)
q = + x
then we *do* want to ANF-ise and eta-expand, so that p and q
can be safely inlined.
Even floating lets out is a bit dubious. For let RHS's we float lets
out if that exposes a value, so that the value can be inlined more vigorously.
For example
r = let x = e in (x,x)
Here, if we float the let out we'll expose a nice constructor. We did experiments
that showed this to be a generally good thing. But it was a bad thing to float
lets out unconditionally, because that meant they got allocated more often.
For function arguments, there's less reason to expose a constructor (it won't
get inlined). Just possibly it might make a rule match, but I'm pretty skeptical.
So for the moment we don't float lets out of function arguments either.
Eta expansion
~~~~~~~~~~~~~~
For eta expansion, we want to catch things like
case e of (a,b) -> \x -> case a of (p,q) -> \y -> r
If the \x was on the RHS of a let, we'd eta expand to bring the two
lambdas together. And in general that's a good thing to do. Perhaps
we should eta expand wherever we find a (value) lambda? Then the eta
expansion at a let RHS can concentrate solely on the PAP case.
************************************************************************
* *
\subsection{Bindings}
* *
************************************************************************
-}
-- | Simplify all top-level bindings in order, returning an environment
-- whose floats are the simplified program.
simplTopBinds :: SimplEnv -> [InBind] -> SimplM SimplEnv
simplTopBinds env0 binds0
  = do  { -- Put all the top-level binders into scope at the start
          -- so that if a transformation rule has unexpectedly brought
          -- anything into scope, then we don't get a complaint about that.
          -- It's rather as if the top-level binders were imported.
          -- See note [Glomming] in OccurAnal.
        ; env1 <- simplRecBndrs env0 (bindersOfBinds binds0)
        ; env2 <- simpl_binds env1 binds0
        ; freeTick SimplifierDone
        ; return env2 }
  where
        -- We need to track the zapped top-level binders, because
        -- they should have their fragile IdInfo zapped (notably occurrence info)
        -- That's why we run down binds and bndrs' simultaneously.
        --
        simpl_binds :: SimplEnv -> [InBind] -> SimplM SimplEnv
        simpl_binds env []           = return env
        simpl_binds env (bind:binds) = do { env' <- simpl_bind env bind
                                          ; simpl_binds env' binds }

        simpl_bind env (Rec pairs)  = simplRecBind env TopLevel pairs
        simpl_bind env (NonRec b r) = do { (env', b') <- addBndrRules env b (lookupRecBndr env b)
                                         ; simplRecOrTopPair env' TopLevel NonRecursive b b' r }
{-
************************************************************************
* *
\subsection{Lazy bindings}
* *
************************************************************************
simplRecBind is used for
* recursive bindings only
-}
-- | Simplify a recursive binding group (top level or nested).  The
-- binders have already been simplified; see the module-header notes.
simplRecBind :: SimplEnv -> TopLevelFlag
             -> [(InId, InExpr)]
             -> SimplM SimplEnv
simplRecBind env0 top_lvl pairs0
  = do  { (env_with_info, triples) <- mapAccumLM add_rules env0 pairs0
        ; env1 <- go (zapFloats env_with_info) triples
        ; return (env0 `addRecFloats` env1) }
        -- addFloats adds the floats from env1,
        -- _and_ updates env0 with the in-scope set from env1
  where
    add_rules :: SimplEnv -> (InBndr,InExpr) -> SimplM (SimplEnv, (InBndr, OutBndr, InExpr))
        -- Add the (substituted) rules to the binder
    add_rules env (bndr, rhs)
        = do { (env', bndr') <- addBndrRules env bndr (lookupRecBndr env bndr)
             ; return (env', (bndr, bndr', rhs)) }

    go env [] = return env

    go env ((old_bndr, new_bndr, rhs) : pairs)
        = do { env' <- simplRecOrTopPair env top_lvl Recursive old_bndr new_bndr rhs
             ; go env' pairs }
{-
simplOrTopPair is used for
* recursive bindings (whether top level or not)
* top-level non-recursive bindings
It assumes the binder has already been simplified, but not its IdInfo.
-}
-- | Simplify one (binder, rhs) pair from a recursive group or a
-- top-level non-recursive binding.  The binder is already simplified,
-- but not its IdInfo.
simplRecOrTopPair :: SimplEnv
                  -> TopLevelFlag -> RecFlag
                  -> InId -> OutBndr -> InExpr  -- Binder and rhs
                  -> SimplM SimplEnv            -- Returns an env that includes the binding
simplRecOrTopPair env top_lvl is_rec old_bndr new_bndr rhs
  = do { dflags <- getDynFlags
       ; trace_bind dflags $
         if preInlineUnconditionally dflags env top_lvl old_bndr rhs
                    -- Check for unconditional inline
         then do tick (PreInlineUnconditionally old_bndr)
                 return (extendIdSubst env old_bndr (mkContEx env rhs))
         else simplLazyBind env top_lvl is_rec old_bndr new_bndr rhs env }
  where
    trace_bind dflags thing_inside
      | not (dopt Opt_D_verbose_core2core dflags)
      = thing_inside
      | otherwise
      = pprTrace "SimplBind" (ppr old_bndr) thing_inside
        -- trace_bind emits a trace for each top-level binding, which
        -- helps to locate the tracing for inlining and rule firing
{-
simplLazyBind is used for
* [simplRecOrTopPair] recursive bindings (whether top level or not)
* [simplRecOrTopPair] top-level non-recursive bindings
* [simplNonRecE] non-top-level *lazy* non-recursive bindings
Nota bene:
1. It assumes that the binder is *already* simplified,
and is in scope, and its IdInfo too, except unfolding
2. It assumes that the binder type is lifted.
3. It does not check for pre-inline-unconditionally;
that should have been done already.
-}
-- | Simplify a lazy (non-strict, lifted) binding: simplify the RHS,
-- ANF-ise it, float lets out of it when beneficial (type-abstracting
-- first if the RHS had leading type lambdas), then complete the binding.
simplLazyBind :: SimplEnv
              -> TopLevelFlag -> RecFlag
              -> InId -> OutId          -- Binder, both pre-and post simpl
                                        -- The OutId has IdInfo, except arity, unfolding
              -> InExpr -> SimplEnv     -- The RHS and its environment
              -> SimplM SimplEnv
-- Precondition: rhs obeys the let/app invariant
simplLazyBind env top_lvl is_rec bndr bndr1 rhs rhs_se
  = -- pprTrace "simplLazyBind" ((ppr bndr <+> ppr bndr1) $$ ppr rhs $$ ppr (seIdSubst rhs_se)) $
    do  { let rhs_env     = rhs_se `setInScope` env
              (tvs, body) = case collectTyAndValBinders rhs of
                              (tvs, [], body)
                                | surely_not_lam body -> (tvs, body)
                              _                       -> ([], rhs)

              surely_not_lam (Lam {})     = False
              surely_not_lam (Tick t e)
                | not (tickishFloatable t) = surely_not_lam e
                -- eta-reduction could float
              surely_not_lam _            = True
              -- Do not do the "abstract tyvar" thing if there's
              -- a lambda inside, because it defeats eta-reduction
              --    f = /\a. \x. g a x
              -- should eta-reduce.

        ; (body_env, tvs') <- simplBinders rhs_env tvs
                -- See Note [Floating and type abstraction] in SimplUtils

        -- Simplify the RHS
        ; let rhs_cont = mkRhsStop (substTy body_env (exprType body))
        ; (body_env1, body1) <- simplExprF body_env body rhs_cont
        -- ANF-ise a constructor or PAP rhs
        ; (body_env2, body2) <- prepareRhs top_lvl body_env1 bndr1 body1

        ; (env', rhs')
            <- if not (doFloatFromRhs top_lvl is_rec False body2 body_env2)
               then                             -- No floating, revert to body1
                    do { rhs' <- mkLam tvs' (wrapFloats body_env1 body1) rhs_cont
                       ; return (env, rhs') }

               else if null tvs then            -- Simple floating
                    do { tick LetFloatFromLet
                       ; return (addFloats env body_env2, body2) }

               else                             -- Do type-abstraction first
                    do { tick LetFloatFromLet
                       ; (poly_binds, body3) <- abstractFloats tvs' body_env2 body2
                       ; rhs' <- mkLam tvs' body3 rhs_cont
                       ; env' <- foldlM (addPolyBind top_lvl) env poly_binds
                       ; return (env', rhs') }

        ; completeBind env' top_lvl bndr bndr1 rhs' }
{-
A specialised variant of simplNonRec used when the RHS is already simplified,
notably in knownCon. It uses case-binding where necessary.
-}
-- | Bind an already-simplified RHS to a (not yet simplified) binder,
-- case-binding where necessary; used notably from knownCon.
simplNonRecX :: SimplEnv
             -> InId            -- Old binder
             -> OutExpr         -- Simplified RHS
             -> SimplM SimplEnv
-- Precondition: rhs satisfies the let/app invariant
simplNonRecX env bndr new_rhs
  | isDeadBinder bndr   -- Not uncommon; e.g. case (a,b) of c { (p,q) -> p }
  = return env          -- Here c is dead, and we avoid creating
                        --   the binding c = (a,b)

  | Coercion co <- new_rhs
  = return (extendCvSubst env bndr co)

  | otherwise
  = do  { (env', bndr') <- simplBinder env bndr
        ; completeNonRecX NotTopLevel env' (isStrictId bndr) bndr bndr' new_rhs }
          -- simplNonRecX is only used for NotTopLevel things
-- | Finish a non-recursive binding whose RHS is already simplified:
-- ANF-ise it, decide whether to float its lets, and complete the bind.
completeNonRecX :: TopLevelFlag -> SimplEnv
                -> Bool
                -> InId                 -- Old binder
                -> OutId                -- New binder
                -> OutExpr              -- Simplified RHS
                -> SimplM SimplEnv
-- Precondition: rhs satisfies the let/app invariant
-- See Note [CoreSyn let/app invariant] in CoreSyn
completeNonRecX top_lvl env is_strict old_bndr new_bndr new_rhs
  = do  { (env1, rhs1) <- prepareRhs top_lvl (zapFloats env) new_bndr new_rhs
        ; (env2, rhs2) <-
                if doFloatFromRhs NotTopLevel NonRecursive is_strict rhs1 env1
                then do { tick LetFloatFromLet
                        ; return (addFloats env env1, rhs1) }   -- Add the floats to the main env
                else return (env, wrapFloats env1 rhs1)         -- Wrap the floats around the RHS
        ; completeBind env2 NotTopLevel old_bndr new_bndr rhs2 }
{-
{- No, no, no! Do not try preInlineUnconditionally in completeNonRecX
Doing so risks exponential behaviour, because new_rhs has been simplified once already
In the cases described by the following comment, postInlineUnconditionally will
catch many of the relevant cases.
-- This happens; for example, the case_bndr during case of
-- known constructor: case (a,b) of x { (p,q) -> ... }
-- Here x isn't mentioned in the RHS, so we don't want to
-- create the (dead) let-binding let x = (a,b) in ...
--
-- Similarly, single occurrences can be inlined vigourously
-- e.g. case (f x, g y) of (a,b) -> ....
-- If a,b occur once we can avoid constructing the let binding for them.
Furthermore in the case-binding case preInlineUnconditionally risks extra thunks
-- Consider case I# (quotInt# x y) of
-- I# v -> let w = J# v in ...
-- If we gaily inline (quotInt# x y) for v, we end up building an
-- extra thunk:
-- let w = J# (quotInt# x y) in ...
-- because quotInt# can fail.
| preInlineUnconditionally env NotTopLevel bndr new_rhs
= thing_inside (extendIdSubst env bndr (DoneEx new_rhs))
-}
----------------------------------
prepareRhs takes a putative RHS, checks whether it's a PAP or
constructor application and, if so, converts it to ANF, so that the
resulting thing can be inlined more easily. Thus
x = (f a, g b)
becomes
t1 = f a
t2 = g b
x = (t1,t2)
We also want to deal well cases like this
v = (f e1 `cast` co) e2
Here we want to make e1,e2 trivial and get
x1 = e1; x2 = e2; v = (f x1 `cast` co) v2
That's what the 'go' loop in prepareRhs does
-}
-- | ANF-ise a putative RHS that is a constructor application or PAP, so
-- that the result can be inlined more easily; see the comment block
-- above this function.
prepareRhs :: TopLevelFlag -> SimplEnv -> OutId -> OutExpr -> SimplM (SimplEnv, OutExpr)
-- Adds new floats to the env iff that allows us to return a good RHS
prepareRhs top_lvl env id (Cast rhs co)    -- Note [Float coercions]
  | Pair ty1 _ty2 <- coercionKind co       -- Do *not* do this if rhs has an unlifted type
  , not (isUnliftedType ty1)               -- see Note [Float coercions (unlifted)]
  = do  { (env', rhs') <- makeTrivialWithInfo top_lvl env (getOccFS id) sanitised_info rhs
        ; return (env', Cast rhs' co) }
  where
    sanitised_info = vanillaIdInfo `setStrictnessInfo` strictnessInfo info
                                   `setDemandInfo` demandInfo info
    info = idInfo id

prepareRhs top_lvl env0 id rhs0
  = do  { (_is_exp, env1, rhs1) <- go 0 env0 rhs0
        ; return (env1, rhs1) }
  where
    go n_val_args env (Cast rhs co)
        = do { (is_exp, env', rhs') <- go n_val_args env rhs
             ; return (is_exp, env', Cast rhs' co) }
    go n_val_args env (App fun (Type ty))
        = do { (is_exp, env', rhs') <- go n_val_args env fun
             ; return (is_exp, env', App rhs' (Type ty)) }
    go n_val_args env (App fun arg)
        = do { (is_exp, env', fun') <- go (n_val_args+1) env fun
             ; case is_exp of
                 True  -> do { (env'', arg') <- makeTrivial top_lvl env' (getOccFS id) arg
                             ; return (True, env'', App fun' arg') }
                 False -> return (False, env, App fun arg) }
    go n_val_args env (Var fun)
        = return (is_exp, env, Var fun)
        where
          is_exp = isExpandableApp fun n_val_args   -- The fun a constructor or PAP
                        -- See Note [CONLIKE pragma] in BasicTypes
                        -- The definition of is_exp should match that in
                        -- OccurAnal.occAnalApp

    go n_val_args env (Tick t rhs)
        -- We want to be able to float bindings past this
        -- tick. Non-scoping ticks don't care.
        | tickishScoped t == NoScope
        = do { (is_exp, env', rhs') <- go n_val_args env rhs
             ; return (is_exp, env', Tick t rhs') }
        -- On the other hand, for scoping ticks we need to be able to
        -- copy them on the floats, which in turn is only allowed if
        -- we can obtain non-counting ticks.
        | not (tickishCounts t) || tickishCanSplit t
        = do { (is_exp, env', rhs') <- go n_val_args (zapFloats env) rhs
             ; let tickIt (id, expr) = (id, mkTick (mkNoCount t) expr)
                   floats' = seFloats $ env `addFloats` mapFloats env' tickIt
             ; return (is_exp, env' { seFloats = floats' }, Tick t rhs') }

    go _ env other
        = return (False, env, other)
{-
Note [Float coercions]
~~~~~~~~~~~~~~~~~~~~~~
When we find the binding
x = e `cast` co
we'd like to transform it to
x' = e
x = x `cast` co -- A trivial binding
There's a chance that e will be a constructor application or function, or something
like that, so moving the coercion to the usage site may well cancel the coercions
and lead to further optimisation. Example:
data family T a :: *
data instance T Int = T Int
foo :: Int -> Int -> Int
foo m n = ...
where
x = T m
go 0 = 0
go n = case x of { T m -> go (n-m) }
-- This case should optimise
Note [Preserve strictness when floating coercions]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
In the Note [Float coercions] transformation, keep the strictness info.
Eg
f = e `cast` co -- f has strictness SSL
When we transform to
f' = e -- f' also has strictness SSL
f = f' `cast` co -- f still has strictness SSL
Its not wrong to drop it on the floor, but better to keep it.
Note [Float coercions (unlifted)]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
BUT don't do [Float coercions] if 'e' has an unlifted type.
This *can* happen:
foo :: Int = (error (# Int,Int #) "urk")
`cast` CoUnsafe (# Int,Int #) Int
If do the makeTrivial thing to the error call, we'll get
foo = case error (# Int,Int #) "urk" of v -> v `cast` ...
But 'v' isn't in scope!
These strange casts can happen as a result of case-of-case
bar = case (case x of { T -> (# 2,3 #); F -> error "urk" }) of
(# p,q #) -> p+q
-}
-- | Make a value argument trivial (binding it if necessary); type
-- arguments and casts are left alone.
makeTrivialArg :: SimplEnv -> ArgSpec -> SimplM (SimplEnv, ArgSpec)
makeTrivialArg env (ValArg e) = do
    { (env', e') <- makeTrivial NotTopLevel env (fsLit "arg") e
    ; return (env', ValArg e') }
makeTrivialArg env arg = return (env, arg)  -- CastBy, TyArg
makeTrivial :: TopLevelFlag -> SimplEnv
            -> FastString  -- ^ a "friendly name" to build the new binder from
            -> OutExpr -> SimplM (SimplEnv, OutExpr)
-- Binds the expression to a variable, if it's not trivial, returning the variable
-- (plain wrapper for 'makeTrivialWithInfo' with vanilla IdInfo)
makeTrivial top_lvl env context expr =
    makeTrivialWithInfo top_lvl env context vanillaIdInfo expr
makeTrivialWithInfo :: TopLevelFlag -> SimplEnv
                    -> FastString
                    -- ^ a "friendly name" to build the new binder from
                    -> IdInfo -> OutExpr -> SimplM (SimplEnv, OutExpr)
-- Propagate strictness and demand info to the new binder
-- Note [Preserve strictness when floating coercions]
-- Returned SimplEnv has same substitution as incoming one
makeTrivialWithInfo top_lvl env context info expr
  | exprIsTrivial expr                          -- Already trivial
  || not (bindingOk top_lvl expr expr_ty)       -- Cannot trivialise
                                                --   See Note [Cannot trivialise]
  = return (env, expr)
  | otherwise           -- See Note [Take care] below
  = do  { uniq <- getUniqueM
        ; let name = mkSystemVarName uniq context
              var  = mkLocalIdOrCoVarWithInfo name expr_ty info
        ; env'  <- completeNonRecX top_lvl env False var var expr
        ; expr' <- simplVar env' var
        ; return (env', expr') }
        -- The simplVar is needed because we're constructing a new binding
        --     a = rhs
        -- And if rhs is of form (rhs1 |> co), then we might get
        --     a1 = rhs1
        --     a  = a1 |> co
        -- and now a's RHS is trivial and can be substituted out, and that
        -- is what completeNonRecX will do
        -- To put it another way, it's as if we'd simplified
        --     let var = e in var
  where
    expr_ty = exprType expr
bindingOk :: TopLevelFlag -> CoreExpr -> Type -> Bool
-- True iff we can have a binding of this expression at this level
-- (top level allows only lifted types)
-- Precondition: the type is the type of the expression
bindingOk top_lvl _ expr_ty
  | isTopLevel top_lvl = not (isUnliftedType expr_ty)
  | otherwise          = True
{-
Note [Cannot trivialise]
~~~~~~~~~~~~~~~~~~~~~~~~
Consider this
f :: Int -> Addr#
foo :: Bar
foo = Bar (f 3)
Then we can't ANF-ise foo, even though we'd like to, because
we can't make a top-level binding for the Addr# (f 3). And if
so we don't want to turn it into
foo = let x = f 3 in Bar x
because we'll just end up inlining x back, and that makes the
simplifier loop. Better not to ANF-ise it at all.
A case in point is literal strings (a MachStr is not regarded as
trivial):
foo = Ptr "blob"#
We don't want to ANF-ise this.
************************************************************************
* *
\subsection{Completing a lazy binding}
* *
************************************************************************
completeBind
* deals only with Ids, not TyVars
* takes an already-simplified binder and RHS
* is used for both recursive and non-recursive bindings
* is used for both top-level and non-top-level bindings
It does the following:
- tries discarding a dead binding
- tries PostInlineUnconditionally
- add unfolding [this is the only place we add an unfolding]
- add arity
It does *not* attempt to do let-to-case. Why? Because it is used for
- top-level bindings (when let-to-case is impossible)
- many situations where the "rhs" is known to be a WHNF
(so let-to-case is inappropriate).
Nor does it do the atomic-argument thing
-}
-- | Complete a binding whose binder and RHS are both simplified: try
-- post-inlining, otherwise eta-expand, attach arity/unfolding/demand
-- info and add the binding to the environment's floats.
completeBind :: SimplEnv
             -> TopLevelFlag            -- Flag stuck into unfolding
             -> InId                    -- Old binder
             -> OutId -> OutExpr        -- New binder and RHS
             -> SimplM SimplEnv
-- completeBind may choose to do its work
--      * by extending the substitution (e.g. let x = y in ...)
--      * or by adding to the floats in the envt
--
-- Precondition: rhs obeys the let/app invariant
completeBind env top_lvl old_bndr new_bndr new_rhs
  | isCoVar old_bndr
  = case new_rhs of
      Coercion co -> return (extendCvSubst env old_bndr co)
      _           -> return (addNonRec env new_bndr new_rhs)

  | otherwise
  = ASSERT( isId new_bndr )
    do { let old_info = idInfo old_bndr
             old_unf  = unfoldingInfo old_info
             occ_info = occInfo old_info

         -- Do eta-expansion on the RHS of the binding
         -- See Note [Eta-expanding at let bindings] in SimplUtils
       ; (new_arity, final_rhs) <- tryEtaExpandRhs env new_bndr new_rhs

         -- Simplify the unfolding
       ; new_unfolding <- simplLetUnfolding env top_lvl old_bndr final_rhs old_unf

       ; dflags <- getDynFlags
       ; if postInlineUnconditionally dflags env top_lvl new_bndr occ_info
                                      final_rhs new_unfolding

                        -- Inline and discard the binding
         then do { tick (PostInlineUnconditionally old_bndr)
                 ; return (extendIdSubst env old_bndr (DoneEx final_rhs)) }
                -- Use the substitution to make quite, quite sure that the
                -- substitution will happen, since we are going to discard the binding

         else
           do { let info1 = idInfo new_bndr `setArityInfo` new_arity

                    -- Unfolding info: Note [Setting the new unfolding]
                    info2 = info1 `setUnfoldingInfo` new_unfolding

                    -- Demand info: Note [Setting the demand info]
                    --
                    -- We also have to nuke demand info if for some reason
                    -- eta-expansion *reduces* the arity of the binding to less
                    -- than that of the strictness sig. This can happen: see Note [Arity decrease].
                    info3 | isEvaldUnfolding new_unfolding
                            || (case strictnessInfo info2 of
                                  StrictSig dmd_ty -> new_arity < dmdTypeDepth dmd_ty)
                          = zapDemandInfo info2 `orElse` info2
                          | otherwise
                          = info2

                    final_id = new_bndr `setIdInfo` info3

              ; -- pprTrace "Binding" (ppr final_id <+> ppr new_unfolding) $
                return (addNonRec env final_id final_rhs) } }
                -- The addNonRec adds it to the in-scope set too
------------------------------
addPolyBind :: TopLevelFlag -> SimplEnv -> OutBind -> SimplM SimplEnv
-- Add a new binding to the environment, complete with its unfolding
-- but *do not* do postInlineUnconditionally, because we have already
-- processed some of the scope of the binding
-- We still want the unfolding though.  Consider
--      let
--            x = /\a. let y = ... in Just y
--      in body
-- Then we float the y-binding out (via abstractFloats and addPolyBind)
-- but 'x' may well then be inlined in 'body' in which case we'd like the
-- opportunity to inline 'y' too.
--
-- INVARIANT: the arity is correct on the incoming binders

addPolyBind top_lvl env (NonRec poly_id rhs)
  = do  { unfolding <- simplLetUnfolding env top_lvl poly_id rhs noUnfolding
                        -- Assumes that poly_id did not have an INLINE prag
                        -- which is perhaps wrong.  ToDo: think about this
        ; let final_id = setIdInfo poly_id $
                         idInfo poly_id `setUnfoldingInfo` unfolding
        ; return (addNonRec env final_id rhs) }

addPolyBind _ env bind@(Rec _)
  = return (extendFloats env bind)
        -- Hack: letrecs are more awkward, so we extend "by steam"
        -- without adding unfoldings etc.  At worst this leads to
        -- more simplifier iterations
{- Note [Arity decrease]
~~~~~~~~~~~~~~~~~~~~~~~~
Generally speaking the arity of a binding should not decrease. But it *can*
legitimately happen because of RULES. Eg
f = g Int
where g has arity 2, will have arity 2. But if there's a rewrite rule
g Int --> h
where h has arity 1, then f's arity will decrease. Here's a real-life example,
which is in the output of Specialise:
Rec {
$dm {Arity 2} = \d.\x. op d
{-# RULES forall d. $dm Int d = $s$dm #-}
dInt = MkD .... opInt ...
opInt {Arity 1} = $dm dInt
$s$dm {Arity 0} = \x. op dInt }
Here opInt has arity 1; but when we apply the rule its arity drops to 0.
That's why Specialise goes to a little trouble to pin the right arity
on specialised functions too.
Note [Setting the demand info]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
If the unfolding is a value, the demand info may
go pear-shaped, so we nuke it. Example:
let x = (a,b) in
case x of (p,q) -> h p q x
Here x is certainly demanded. But after we've nuked
the case, we'll get just
let x = (a,b) in h a b x
and now x is not demanded (I'm assuming h is lazy)
This really happens. Similarly
let f = \x -> e in ...f..f...
After inlining f at some of its call sites the original binding may
(for example) be no longer strictly demanded.
The solution here is a bit ad hoc...
************************************************************************
* *
\subsection[Simplify-simplExpr]{The main function: simplExpr}
* *
************************************************************************
The reason for this OutExprStuff stuff is that we want to float *after*
simplifying a RHS, not before. If we do so naively we get quadratic
behaviour as things float out.
To see why it's important to do it after, consider this (real) example:
let t = f x
in fst t
==>
let t = let a = e1
b = e2
in (a,b)
in fst t
==>
let a = e1
b = e2
t = (a,b)
in
a -- Can't inline a this round, cos it appears twice
==>
e1
Each of the ==> steps is a round of simplification. We'd save a
whole round if we float first. This can cascade. Consider
let f = g d
in \x -> ...f...
==>
let f = let d1 = ..d.. in \y -> e
in \x -> ...f...
==>
let d1 = ..d..
in \x -> ...(\y ->e)...
Only in this second round can the \y be applied, and it
might do the same again.
-}
-- | Simplify an expression with a boring stop continuation whose result
-- type is the substituted type of the input expression.
simplExpr :: SimplEnv -> CoreExpr -> SimplM CoreExpr
simplExpr env expr
  = let out_ty :: OutType
        out_ty = substTy env (exprType expr)
    in simplExprC env expr (mkBoringStop out_ty)
-- | Simplify an expression under a given continuation.  Any floats
-- produced while simplifying are wrapped back around the result, so the
-- caller sees a self-contained expression.
simplExprC :: SimplEnv -> CoreExpr -> SimplCont -> SimplM CoreExpr
simplExprC env expr cont
  = simplExprF (zapFloats env) expr cont >>= \ (env', expr') ->
    return (wrapFloats env' expr')
--------------------------------------------------
-- | Entry point for simplifying an expression against a continuation.
-- All real work happens in 'simplExprF1'; this wrapper only hosts the
-- (currently disabled) debug trace.
simplExprF :: SimplEnv -> InExpr -> SimplCont
           -> SimplM (SimplEnv, OutExpr)
simplExprF env e cont
  = {- pprTrace "simplExprF" (vcat
      [ ppr e
      , text "cont =" <+> ppr cont
      , text "inscope =" <+> ppr (seInScope env)
      , text "tvsubst =" <+> ppr (seTvSubst env)
      , text "idsubst =" <+> ppr (seIdSubst env)
      , text "cvsubst =" <+> ppr (seCvSubst env)
      {- , ppr (seFloats env) -}
      ]) $ -}
    simplExprF1 env e cont
-- | Dispatch on the shape of the input expression.  Each equation either
-- rebuilds the result immediately or pushes more context onto the
-- continuation and recurses.
simplExprF1 :: SimplEnv -> InExpr -> SimplCont
            -> SimplM (SimplEnv, OutExpr)
simplExprF1 env (Var v)        cont = simplIdF env v cont
simplExprF1 env (Lit lit)      cont = rebuild env (Lit lit) cont
simplExprF1 env (Tick t expr)  cont = simplTick env t expr cont
simplExprF1 env (Cast body co) cont = simplCast env body co cont
simplExprF1 env (Coercion co)  cont = simplCoercionF env co cont
simplExprF1 env (Type ty)      cont = ASSERT( contIsRhsOrArg cont )
                                      rebuild env (Type (substTy env ty)) cont

-- Applications: push the argument onto the continuation and keep
-- simplifying the function.
simplExprF1 env (App fun arg) cont
  = simplExprF env fun $
    case arg of
      Type ty -> ApplyToTy { sc_arg_ty  = substTy env ty
                           , sc_hole_ty = substTy env (exprType fun)
                           , sc_cont    = cont }
      _       -> ApplyToVal { sc_arg = arg, sc_env = env
                            , sc_dup = NoDup, sc_cont = cont }

simplExprF1 env expr@(Lam {}) cont
  = simplLam env zapped_bndrs body cont
      -- The main issue here is under-saturated lambdas
      --   (\x1. \x2. e) arg1
      -- Here x1 might have "occurs-once" occ-info, because occ-info
      -- is computed assuming that a group of lambdas is applied
      -- all at once.  If there are too few args, we must zap the
      -- occ-info, UNLESS the remaining binders are one-shot
  where
    (bndrs, body) = collectBinders expr
    zapped_bndrs | need_to_zap = map zap bndrs
                 | otherwise   = bndrs

    need_to_zap = any zappable_bndr (drop n_args bndrs)
    n_args = countArgs cont
      -- NB: countArgs counts all the args (incl type args)
      -- and likewise drop counts all binders (incl type lambdas)

    zappable_bndr b = isId b && not (isOneShotBndr b)
    zap b | isTyVar b = b
          | otherwise = zapLamIdInfo b

-- Case: simplify the scrutinee with a Select continuation that carries
-- the alternatives and the current environment.
simplExprF1 env (Case scrut bndr _ alts) cont
  = simplExprF env scrut (Select { sc_dup = NoDup, sc_bndr = bndr
                                 , sc_alts = alts
                                 , sc_env = env, sc_cont = cont })

simplExprF1 env (Let (Rec pairs) body) cont
  = do { env' <- simplRecBndrs env (map fst pairs)
            -- NB: bndrs' don't have unfoldings or rules
            -- We add them as we go down

       ; env'' <- simplRecBind env' NotTopLevel pairs
       ; simplExprF env'' body cont }

simplExprF1 env (Let (NonRec bndr rhs) body) cont
  = simplNonRecE env bndr (rhs, env) ([], body) cont
---------------------------------
-- | Apply the environment's type substitution to a type.  Kept monadic
-- just so we can force the result with seqType, avoiding a space leak
-- from retaining the unsubstituted type.
simplType :: SimplEnv -> InType -> SimplM OutType
simplType env ty
  = let substituted = substTy env ty
    in seqType substituted `seq` return substituted
---------------------------------
-- | Simplify a coercion appearing in expression position, then rebuild
-- it against the continuation.
simplCoercionF :: SimplEnv -> InCoercion -> SimplCont
               -> SimplM (SimplEnv, OutExpr)
simplCoercionF env co cont
  = simplCoercion env co >>= \ co' -> rebuild env (Coercion co') cont
-- | Optimise a coercion under the environment's type/coercion
-- substitution, forcing the result so the unoptimised coercion is not
-- retained.
simplCoercion :: SimplEnv -> InCoercion -> SimplM OutCoercion
simplCoercion env co
  = seqCo opt_co `seq` return opt_co
  where
    opt_co = optCoercion (getTCvSubst env) co
-----------------------------------
-- | Push a TickIt context outwards past applications and cases, as
-- long as this is a non-scoping tick, to let case and application
-- optimisations apply.
-- | Simplify an expression wrapped in a tick, deciding whether the tick
-- can be pushed past the surrounding context (continuation) or whether
-- floats must be blocked at the tick.
simplTick :: SimplEnv -> Tickish Id -> InExpr -> SimplCont
          -> SimplM (SimplEnv, OutExpr)
simplTick env tickish expr cont
  -- A scoped tick turns into a continuation, so that we can spot
  -- (scc t (\x . e)) in simplLam and eliminate the scc.  If we didn't do
  -- it this way, then it would take two passes of the simplifier to
  -- reduce ((scc t (\x . e)) e').
  -- NB, don't do this with counting ticks, because if the expr is
  -- bottom, then rebuildCall will discard the continuation.

  -- XXX: we cannot do this, because the simplifier assumes that
  -- the context can be pushed into a case with a single branch. e.g.
  --    scc<f>  case expensive of p -> e
  -- becomes
  --    case expensive of p -> scc<f> e
  --
  -- So I'm disabling this for now.  It just means we will do more
  -- simplifier iterations than necessary in some cases.

  --  | tickishScoped tickish && not (tickishCounts tickish)
  --  = simplExprF env expr (TickIt tickish cont)

  -- For unscoped or soft-scoped ticks, we are allowed to float in new
  -- cost, so we simply push the continuation inside the tick.  This
  -- has the effect of moving the tick to the outside of a case or
  -- application context, allowing the normal case and application
  -- optimisations to fire.
  | tickish `tickishScopesLike` SoftScope
  = do { (env', expr') <- simplExprF env expr cont
       ; return (env', mkTick tickish expr')
       }

  -- Push tick inside if the context looks like this will allow us to
  -- do a case-of-case - see Note [case-of-scc-of-case]
  | Select {} <- cont, Just expr' <- push_tick_inside
  = simplExprF env expr' cont

  -- We don't want to move the tick, but we might still want to allow
  -- floats to pass through with appropriate wrapping (or not, see
  -- wrap_floats below)
  --- | not (tickishCounts tickish) || tickishCanSplit tickish
  -- = wrap_floats

  | otherwise
  = no_floating_past_tick

  where
    -- Try to push tick inside a case, see Note [case-of-scc-of-case].
    push_tick_inside =
      case expr0 of
        Case scrut bndr ty alts
          -> Just $ Case (tickScrut scrut) bndr ty (map tickAlt alts)
        _other -> Nothing
      where (ticks, expr0) = stripTicksTop movable (Tick tickish expr)
            movable t = not (tickishCounts t) ||
                        t `tickishScopesLike` NoScope ||
                        tickishCanSplit t
            tickScrut e = foldr mkTick e ticks
            -- Alternatives get annotated with all ticks that scope in some way,
            -- but we don't want to count entries.
            tickAlt (c,bs,e) = (c,bs, foldr mkTick e ts_scope)
            ts_scope = map mkNoCount $
                       filter (not . (`tickishScopesLike` NoScope)) ticks

    -- Block floats at the tick: simplify the inner part with zapped
    -- floats, wrap them back up inside, then rebuild with the tick
    -- against the outer part of the split continuation.
    no_floating_past_tick =
      do { let (inc,outc) = splitCont cont
         ; (env', expr') <- simplExprF (zapFloats env) expr inc
         ; let tickish' = simplTickish env tickish
         ; (env'', expr'') <- rebuild (zapFloats env')
                                      (wrapFloats env' expr')
                                      (TickIt tickish' outc)
         ; return (addFloats env env'', expr'')
         }

    -- Alternative version that wraps outgoing floats with the tick.  This
    -- results in ticks being duplicated, as we don't make any attempt to
    -- eliminate the tick if we re-inline the binding (because the tick
    -- semantics allows unrestricted inlining of HNFs), so I'm not doing
    -- this any more.  FloatOut will catch any real opportunities for
    -- floating.
    --
    -- wrap_floats =
    --   do { let (inc,outc) = splitCont cont
    --      ; (env', expr') <- simplExprF (zapFloats env) expr inc
    --      ; let tickish' = simplTickish env tickish
    --      ; let wrap_float (b,rhs) = (zapIdStrictness (setIdArity b 0),
    --                                  mkTick (mkNoCount tickish') rhs)
    --             -- when wrapping a float with mkTick, we better zap the Id's
    --             -- strictness info and arity, because it might be wrong now.
    --      ; let env'' = addFloats env (mapFloats env' wrap_float)
    --      ; rebuild env'' expr' (TickIt tickish' outc)
    --      }

    -- Substitute the Ids mentioned in a Breakpoint tick; all other
    -- ticks pass through unchanged.
    simplTickish env tickish
      | Breakpoint n ids <- tickish
      = Breakpoint n (map (getDoneId . substId env) ids)
      | otherwise = tickish

    -- Push type application and coercion inside a tick
    splitCont :: SimplCont -> (SimplCont, SimplCont)
    splitCont cont@(ApplyToTy { sc_cont = tail }) = (cont { sc_cont = inc }, outc)
      where (inc,outc) = splitCont tail
    splitCont (CastIt co c) = (CastIt co inc, outc)
      where (inc,outc) = splitCont c
    splitCont other = (mkBoringStop (contHoleType other), other)

    getDoneId (DoneId id) = id
    getDoneId (DoneEx e)  = getIdFromTrivialExpr e -- Note [substTickish] in CoreSubst
    getDoneId other = pprPanic "getDoneId" (ppr other)
-- Note [case-of-scc-of-case]
-- It's pretty important to be able to transform case-of-case when
-- there's an SCC in the way. For example, the following comes up
-- in nofib/real/compress/Encode.hs:
--
-- case scctick<code_string.r1>
-- case $wcode_string_r13s wild_XC w1_s137 w2_s138 l_aje
-- of _ { (# ww1_s13f, ww2_s13g, ww3_s13h #) ->
-- (ww1_s13f, ww2_s13g, ww3_s13h)
-- }
-- of _ { (ww_s12Y, ww1_s12Z, ww2_s130) ->
-- tick<code_string.f1>
-- (ww_s12Y,
-- ww1_s12Z,
-- PTTrees.PT
-- @ GHC.Types.Char @ GHC.Types.Int wild2_Xj ww2_s130 r_ajf)
-- }
--
-- We really want this case-of-case to fire, because then the 3-tuple
-- will go away (indeed, the CPR optimisation is relying on this
-- happening). But the scctick is in the way - we need to push it
-- inside to expose the case-of-case. So we perform this
-- transformation on the inner case:
--
-- scctick c (case e of { p1 -> e1; ...; pn -> en })
-- ==>
-- case (scctick c e) of { p1 -> scc c e1; ...; pn -> scc c en }
--
-- So we've moved a constant amount of work out of the scc to expose
-- the case. We only do this when the continuation is interesting:
-- for now, it has to be another Case (maybe generalise this later).
{-
************************************************************************
* *
\subsection{The main rebuilder}
* *
************************************************************************
-}
-- | The main rebuilder: plug an already-simplified expression back into
-- its continuation, one continuation frame at a time.
-- At this point the substitution in the SimplEnv should be irrelevant;
-- only the in-scope set and floats should matter.
rebuild :: SimplEnv -> OutExpr -> SimplCont -> SimplM (SimplEnv, OutExpr)
rebuild env expr cont
  = case cont of
      Stop {}        -> return (env, expr)
      TickIt t cont  -> rebuild env (mkTick t expr) cont
      CastIt co cont -> rebuild env (mkCast expr co) cont
         -- NB: mkCast implements the (Coercion co |> g) optimisation

      Select { sc_bndr = bndr, sc_alts = alts, sc_env = se, sc_cont = cont }
        -> rebuildCase (se `setFloats` env) expr bndr alts cont

      StrictArg info _ cont        -> rebuildCall env (info `addValArgTo` expr) cont
      StrictBind b bs body se cont -> do { env' <- simplNonRecX (se `setFloats` env) b expr
                                              -- expr satisfies let/app since it started life
                                              -- in a call to simplNonRecE
                                         ; simplLam env' bs body cont }

      ApplyToTy { sc_arg_ty = ty, sc_cont = cont}
        -> rebuild env (App expr (Type ty)) cont

      ApplyToVal { sc_arg = arg, sc_env = se, sc_dup = dup_flag, sc_cont = cont}
        -- See Note [Avoid redundant simplification]
        | isSimplified dup_flag -> rebuild env (App expr arg) cont
        | otherwise             -> do { arg' <- simplExpr (se `setInScope` env) arg
                                      ; rebuild env (App expr arg') cont }
{-
************************************************************************
* *
\subsection{Lambdas}
* *
************************************************************************
-}
-- | Simplify (body |> co0) under cont0: simplify the coercion, push it
-- into the continuation (implementing the Push/PushT rules), then
-- simplify the body against the adjusted continuation.
simplCast :: SimplEnv -> InExpr -> Coercion -> SimplCont
          -> SimplM (SimplEnv, OutExpr)
simplCast env body co0 cont0
  = do { co1   <- simplCoercion env co0
       ; cont1 <- addCoerce co1 cont0
       ; simplExprF env body cont1 }
  where
    addCoerce co cont = add_coerce co (coercionKind co) cont

    add_coerce _co (Pair s1 k1) cont  -- co :: ty~ty
      | s1 `eqType` k1 = return cont  -- is a no-op

    add_coerce co1 (Pair s1 _k2) (CastIt co2 cont)
      | (Pair _l1 t1) <- coercionKind co2
             --      e |> (g1 :: S1~L) |> (g2 :: L~T1)
             -- ==>
             --      e,                       if S1=T1
             --      e |> (g1 . g2 :: S1~T1)  otherwise
             --
             -- For example, in the initial form of a worker
             -- we may find  (coerce T (coerce S (\x.e))) y
             -- and we'd like it to simplify to e[y/x] in one round
             -- of simplification
      , s1 `eqType` t1  = return cont  -- The coerces cancel out
      | otherwise       = return (CastIt (mkTransCo co1 co2) cont)

    add_coerce co (Pair s1s2 _t1t2) cont@(ApplyToTy { sc_arg_ty = arg_ty, sc_cont = tail })
             -- (f |> g) ty  --->  (f ty) |> (g @ ty)
             -- This implements the PushT rule from the paper
      | isForAllTy s1s2
      = do { cont' <- addCoerce new_cast tail
           ; return (cont { sc_cont = cont' }) }
      where
        new_cast = mkInstCo co (mkNomReflCo arg_ty)

    add_coerce co (Pair s1s2 t1t2) (ApplyToVal { sc_arg = arg, sc_env = arg_se
                                               , sc_dup = dup, sc_cont = cont })
      | isFunTy s1s2  -- This implements the Push rule from the paper
      , isFunTy t1t2  -- Check t1t2 to ensure 'arg' is a value arg
             --      (e |> (g :: s1s2 ~ t1->t2)) f
             -- ===>
             --      (e (f |> (arg g :: t1~s1))
             --          |> (res g :: s2->t2)
             --
             -- t1t2 must be a function type, t1->t2, because it's applied
             -- to something but s1s2 might conceivably not be
             --
             -- When we build the ApplyTo we can't mix the out-types
             -- with the InExpr in the argument, so we simply substitute
             -- to make it all consistent.  It's a bit messy.
             -- But it isn't a common case.
             --
             -- Example of use: Trac #995
      = do { (dup', arg_se', arg') <- simplArg env dup arg_se arg
           ; cont' <- addCoerce co2 cont
           ; return (ApplyToVal { sc_arg  = mkCast arg' (mkSymCo co1)
                                , sc_env  = arg_se'
                                , sc_dup  = dup'
                                , sc_cont = cont' }) }
      where
        -- we split coercion t1->t2 ~ s1->s2 into t1 ~ s1 and
        -- t2 ~ s2 with left and right on the curried form:
        --    (->) t1 t2 ~ (->) s1 s2
        [co1, co2] = decomposeCo 2 co

    -- Fallback: leave the cast where it is.
    add_coerce co _ cont = return (CastIt co cont)
-- | Simplify the argument of an application, unless it has already
-- been simplified (in which case it is returned unchanged).  A freshly
-- simplified argument is marked Simplified and paired with an
-- environment whose substitution has been zapped.
simplArg :: SimplEnv -> DupFlag -> StaticEnv -> CoreExpr
         -> SimplM (DupFlag, StaticEnv, OutExpr)
simplArg env dup_flag arg_env arg =
  if isSimplified dup_flag
    then return (dup_flag, arg_env, arg)
    else do simplified <- simplExpr (arg_env `setInScope` env) arg
            return (Simplified, zapSubstEnv arg_env, simplified)
{-
************************************************************************
* *
\subsection{Lambdas}
* *
************************************************************************
Note [Zap unfolding when beta-reducing]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Lambda-bound variables can have stable unfoldings, such as
$j = \x. \b{Unf=Just x}. e
See Note [Case binders and join points] below; the unfolding
lets us optimise e better. However when we beta-reduce it we want to
revert to using the actual value, otherwise we can end up in the
stupid situation of
let x = blah in
let b{Unf=Just x} = y
in ...b...
Here it'd be far better to drop the unfolding and use the actual RHS.
-}
-- | Simplify a group of lambda binders over a body, beta-reducing
-- against ApplyTo continuations where possible.  NB: equation order
-- matters — beta reduction and tick-discarding must be tried before
-- the catch-all that rebuilds a residual lambda.
simplLam :: SimplEnv -> [InId] -> InExpr -> SimplCont
         -> SimplM (SimplEnv, OutExpr)
simplLam env [] body cont = simplExprF env body cont

        -- Beta reduction
simplLam env (bndr:bndrs) body (ApplyToTy { sc_arg_ty = arg_ty, sc_cont = cont })
  = do { tick (BetaReduction bndr)
       ; simplLam (extendTvSubst env bndr arg_ty) bndrs body cont }

simplLam env (bndr:bndrs) body (ApplyToVal { sc_arg = arg, sc_env = arg_se
                                           , sc_cont = cont })
  = do { tick (BetaReduction bndr)
       ; simplNonRecE env' (zap_unfolding bndr) (arg, arg_se) (bndrs, body) cont }
  where
    -- A coercion argument goes straight into the coercion substitution
    env' | Coercion co <- arg
         = extendCvSubst env bndr co
         | otherwise
         = env

    zap_unfolding bndr  -- See Note [Zap unfolding when beta-reducing]
      | isId bndr, isStableUnfolding (realIdUnfolding bndr)
      = setIdUnfolding bndr NoUnfolding
      | otherwise = bndr

      -- discard a non-counting tick on a lambda.  This may change the
      -- cost attribution slightly (moving the allocation of the
      -- lambda elsewhere), but we don't care: optimisation changes
      -- cost attribution all the time.
simplLam env bndrs body (TickIt tickish cont)
  | not (tickishCounts tickish)
  = simplLam env bndrs body cont

        -- Not enough args, so there are real lambdas left to put in the result
simplLam env bndrs body cont
  = do { (env', bndrs') <- simplLamBndrs env bndrs
       ; body' <- simplExpr env' body
       ; new_lam <- mkLam bndrs' body' cont
       ; rebuild env' new_lam cont }
-- | Simplify a list of lambda binders, threading the environment from
-- left to right.
simplLamBndrs :: SimplEnv -> [InBndr] -> SimplM (SimplEnv, [OutBndr])
simplLamBndrs = mapAccumLM simplLamBndr
-------------
-- | Used for lambda binders.  These sometimes have unfoldings added by
-- the worker/wrapper pass that must be preserved, because they can't
-- be reconstructed from context.  For example:
--      f x = case x of (a,b) -> fw a b x
--      fw a b x{=(a,b)} = ...
-- The "{=(a,b)}" is an unfolding we can't reconstruct otherwise.
simplLamBndr :: SimplEnv -> Var -> SimplM (SimplEnv, Var)
simplLamBndr env bndr
  | isId bndr && hasSomeUnfolding old_unf  -- Special case
  = do { (env1, bndr1) <- simplBinder env bndr
       ; unf' <- simplUnfolding env1 NotTopLevel bndr old_unf
       ; let bndr2 = bndr1 `setIdUnfolding` unf'
       ; return (modifyInScope env1 bndr2, bndr2) }

  | otherwise
  = simplBinder env bndr  -- Normal case
  where
    old_unf = idUnfolding bndr
------------------
-- | simplNonRecE is used for
--  * non-top-level non-recursive lets in expressions
--  * beta reduction
--
-- It deals with strict bindings, via the StrictBind continuation,
-- which may abort the whole process
--
-- Precondition: rhs satisfies the let/app invariant
--               Note [CoreSyn let/app invariant] in CoreSyn
--
-- The "body" of the binding comes as a pair of ([InId],InExpr)
-- representing a lambda; so we recurse back to simplLam
-- Why?  Because of the binder-occ-info-zapping done before
--       the call to simplLam in simplExprF (Lam ...)
simplNonRecE :: SimplEnv
             -> InBndr              -- The binder
             -> (InExpr, SimplEnv)  -- Rhs of binding (or arg of lambda)
             -> ([InBndr], InExpr)  -- Body of the let/lambda
                                    --      \xs.e
             -> SimplCont
             -> SimplM (SimplEnv, OutExpr)

        -- First deal with type applications and type lets
        --   (/\a. e) (Type ty)   and   (let a = Type ty in e)
simplNonRecE env bndr (Type ty_arg, rhs_se) (bndrs, body) cont
  = ASSERT( isTyVar bndr )
    do { ty_arg' <- simplType (rhs_se `setInScope` env) ty_arg
       ; simplLam (extendTvSubst env bndr ty_arg') bndrs body cont }

simplNonRecE env bndr (rhs, rhs_se) (bndrs, body) cont
  = do dflags <- getDynFlags
       case () of
         _ | preInlineUnconditionally dflags env NotTopLevel bndr rhs
           -> do { tick (PreInlineUnconditionally bndr)
                 ; -- pprTrace "preInlineUncond" (ppr bndr <+> ppr rhs) $
                   simplLam (extendIdSubst env bndr (mkContEx rhs_se rhs)) bndrs body cont }

           | isStrictId bndr  -- Includes coercions
           -> simplExprF (rhs_se `setFloats` env) rhs
                         (StrictBind bndr bndrs body env cont)

           | otherwise
           -> ASSERT( not (isTyVar bndr) )
              do { (env1, bndr1) <- simplNonRecBndr env bndr
                 ; (env2, bndr2) <- addBndrRules env1 bndr bndr1
                 ; env3 <- simplLazyBind env2 NotTopLevel NonRecursive bndr bndr2 rhs rhs_se
                 ; simplLam env3 bndrs body cont }
{-
************************************************************************
* *
Variables
* *
************************************************************************
-}
-- | Look up an InVar in the environment and return the out-expression
-- it maps to.  Type and coercion variables are handled purely by
-- substitution; Ids go through the id-substitution's three cases.
simplVar :: SimplEnv -> InVar -> SimplM OutExpr
simplVar env var
  | isTyVar var = return (Type (substTyVar env var))
  | isCoVar var = return (Coercion (substCoVar env var))
  | otherwise
  = case substId env var of
      DoneId var1          -> return (Var var1)
      DoneEx e             -> return e
      ContEx tvs cvs ids e -> simplExpr (setSubstEnv env tvs cvs ids) e
-- | Simplify an occurrence of an Id against a continuation: either
-- continue with the expression it is substituted to, or treat it as a
-- call site via completeCall.
simplIdF :: SimplEnv -> InId -> SimplCont -> SimplM (SimplEnv, OutExpr)
simplIdF env var cont
  = case substId env var of
      DoneEx e             -> simplExprF (zapSubstEnv env) e cont
      ContEx tvs cvs ids e -> simplExprF (setSubstEnv env tvs cvs ids) e cont
      DoneId var1          -> completeCall env var1 cont
        -- Note [zapSubstEnv]
        -- The template is already simplified, so don't re-substitute.
        -- This is VITAL.  Consider
        --      let x = e in
        --      let y = \z -> ...x... in
        --      \ x -> ...y...
        -- We'll clone the inner \x, adding x->x' in the id_subst
        -- Then when we inline y, we must *not* replace x by x' in
        -- the inlined copy!!
---------------------------------------------------------
-- Dealing with a call site
-- | Deal with a call site for an out-Id: first try inlining via
-- callSiteInline; if no inlining fires, gather the Id's rewrite rules
-- and hand over to rebuildCall.
completeCall :: SimplEnv -> OutId -> SimplCont -> SimplM (SimplEnv, OutExpr)
completeCall env var cont
  = do {   ------------- Try inlining ----------------
         dflags <- getDynFlags
       ; let (lone_variable, arg_infos, call_cont) = contArgs cont
             n_val_args       = length arg_infos
             interesting_cont = interestingCallContext call_cont
             unfolding        = activeUnfolding env var
             maybe_inline     = callSiteInline dflags var unfolding
                                               lone_variable arg_infos interesting_cont
       ; case maybe_inline of {
           Just expr  -- There is an inlining!
             -> do { checkedTick (UnfoldingDone var)
                   ; dump_inline dflags expr cont
                   ; simplExprF (zapSubstEnv env) expr cont }

         ; Nothing -> do  -- No inlining!
       { rule_base <- getSimplRules
       ; let info = mkArgInfo var (getRules rule_base var) n_val_args call_cont
       ; rebuildCall env info cont
       }}}
  where
    -- Emit trace output for a fired inlining, depending on dump flags.
    dump_inline dflags unfolding cont
      | not (dopt Opt_D_dump_inlinings dflags) = return ()
      | not (dopt Opt_D_verbose_core2core dflags)
      = when (isExternalName (idName var)) $
        liftIO $ printOutputForUser dflags alwaysQualify $
        sep [text "Inlining done:", nest 4 (ppr var)]
      | otherwise
      = liftIO $ printOutputForUser dflags alwaysQualify $
        sep [text "Inlining done: " <> ppr var,
             nest 4 (vcat [text "Inlined fn: " <+> nest 2 (ppr unfolding),
                           text "Cont: " <+> ppr cont])]
-- | Rebuild a call whose function (and some arguments) have been
-- simplified, consuming the continuation argument by argument and
-- finally trying rewrite rules.
rebuildCall :: SimplEnv
            -> ArgInfo
            -> SimplCont
            -> SimplM (SimplEnv, OutExpr)
rebuildCall env (ArgInfo { ai_fun = fun, ai_args = rev_args, ai_strs = [] }) cont
  -- When we run out of strictness args, it means
  -- that the call is definitely bottom; see SimplUtils.mkArgInfo
  -- Then we want to discard the entire strict continuation.  E.g.
  --    * case (error "hello") of { ... }
  --    * (error "Hello") arg
  --    * f (error "Hello") where f is strict
  --    etc
  -- Then, especially in the first of these cases, we'd like to discard
  -- the continuation, leaving just the bottoming expression.  But the
  -- type might not be right, so we may have to add a coerce.
  | not (contIsTrivial cont)                  -- Only do this if there is a non-trivial
  = return (env, castBottomExpr res cont_ty)  -- continuation to discard, else we do it
  where                                       -- again and again!
    res     = argInfoExpr fun rev_args
    cont_ty = contResultType cont

rebuildCall env info (CastIt co cont)
  = rebuildCall env (addCastTo info co) cont

rebuildCall env info (ApplyToTy { sc_arg_ty = arg_ty, sc_cont = cont })
  = rebuildCall env (info `addTyArgTo` arg_ty) cont

rebuildCall env info@(ArgInfo { ai_encl = encl_rules, ai_type = fun_ty
                              , ai_strs = str:strs, ai_discs = disc:discs })
            (ApplyToVal { sc_arg = arg, sc_env = arg_se
                        , sc_dup = dup_flag, sc_cont = cont })
  | isSimplified dup_flag  -- See Note [Avoid redundant simplification]
  = rebuildCall env (addValArgTo info' arg) cont

  | str  -- Strict argument
  = -- pprTrace "Strict Arg" (ppr arg $$ ppr (seIdSubst env) $$ ppr (seInScope env)) $
    simplExprF (arg_se `setFloats` env) arg
               (StrictArg info' cci cont)
                -- Note [Shadowing]

  | otherwise  -- Lazy argument
        -- DO NOT float anything outside, hence simplExprC
        -- There is no benefit (unlike in a let-binding), and we'd
        -- have to be very careful about bogus strictness through
        -- floating a demanded let.
  = do { arg' <- simplExprC (arg_se `setInScope` env) arg
                            (mkLazyArgStop (funArgTy fun_ty) cci)
       ; rebuildCall env (addValArgTo info' arg') cont }
  where
    info' = info { ai_strs = strs, ai_discs = discs }
    cci | encl_rules = RuleArgCtxt
        | disc > 0   = DiscArgCtxt  -- Be keener here
        | otherwise  = BoringCtxt   -- Nothing interesting

rebuildCall env (ArgInfo { ai_fun = fun, ai_args = rev_args, ai_rules = rules }) cont
  | null rules
  = rebuild env (argInfoExpr fun rev_args) cont  -- No rules, common case

  | otherwise
  = do { -- We've accumulated a simplified call in <fun,rev_args>
         -- so try rewrite rules; see Note [RULEs apply to simplified arguments]
         -- See also Note [Rules for recursive functions]
       ; let env' = zapSubstEnv env  -- See Note [zapSubstEnv];
                                     -- and NB that 'rev_args' are all fully simplified
       ; mb_rule <- tryRules env' rules fun (reverse rev_args) cont
       ; case mb_rule of {
           Just (rule_rhs, cont') -> simplExprF env' rule_rhs cont'

                 -- Rules don't match
         ; Nothing -> rebuild env (argInfoExpr fun rev_args) cont  -- No rules
       } }
{-
Note [RULES apply to simplified arguments]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
It's very desirable to try RULES once the arguments have been simplified, because
doing so ensures that rule cascades work in one pass. Consider
{-# RULES g (h x) = k x
f (k x) = x #-}
...f (g (h x))...
Then we want to rewrite (g (h x)) to (k x) and only then try f's rules. If
we match f's rules against the un-simplified RHS, it won't match. This
makes a particularly big difference when superclass selectors are involved:
op ($p1 ($p2 (df d)))
We want all this to unravel in one sweep.
Note [Avoid redundant simplification]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Because RULES apply to simplified arguments, there's a danger of repeatedly
simplifying already-simplified arguments. An important example is that of
(>>=) d e1 e2
Here e1, e2 are simplified before the rule is applied, but don't really
participate in the rule firing. So we mark them as Simplified to avoid
re-simplifying them.
Note [Shadowing]
~~~~~~~~~~~~~~~~
This part of the simplifier may break the no-shadowing invariant
Consider
f (...(\a -> e)...) (case y of (a,b) -> e')
where f is strict in its second arg
If we simplify the innermost one first we get (...(\a -> e)...)
Simplifying the second arg makes us float the case out, so we end up with
case y of (a,b) -> f (...(\a -> e)...) e'
So the output does not have the no-shadowing invariant. However, there is
no danger of getting name-capture, because when the first arg was simplified
we used an in-scope set that at least mentioned all the variables free in its
static environment, and that is enough.
We can't just do innermost first, or we'd end up with a dual problem:
case x of (a,b) -> f e (...(\a -> e')...)
I spent hours trying to recover the no-shadowing invariant, but I just could
not think of an elegant way to do it. The simplifier is already knee-deep in
continuations. We have to keep the right in-scope set around; AND we have
to get the effect that finding (error "foo") in a strict arg position will
discard the entire application and replace it with (error "foo"). Getting
all this at once is TOO HARD!
************************************************************************
* *
Rewrite rules
* *
************************************************************************
-}
-- | Try to fire one of @rules@ for @fn@ applied to the (already
-- simplified) @args@.  On success, return the rule RHS together with
-- the continuation left over after the rule consumed its arguments.
-- The SimplEnv already has zapSubstEnv applied to it.
tryRules :: SimplEnv -> [CoreRule]
         -> Id -> [ArgSpec] -> SimplCont
         -> SimplM (Maybe (CoreExpr, SimplCont))
tryRules env rules fn args call_cont
  | null rules
  = return Nothing
{- Disabled until we fix #8326
  | fn `hasKey` tagToEnumKey  -- See Note [Optimising tagToEnum#]
  , [_type_arg, val_arg] <- args
  , Select dup bndr ((_,[],rhs1) : rest_alts) se cont <- call_cont
  , isDeadBinder bndr
  = do { dflags <- getDynFlags
       ; let enum_to_tag :: CoreAlt -> CoreAlt
                -- Takes   K -> e  into   tagK# -> e
                -- where tagK# is the tag of constructor K
             enum_to_tag (DataAlt con, [], rhs)
               = ASSERT( isEnumerationTyCon (dataConTyCon con) )
                 (LitAlt tag, [], rhs)
              where
                tag = mkMachInt dflags (toInteger (dataConTag con - fIRST_TAG))
             enum_to_tag alt = pprPanic "tryRules: tagToEnum" (ppr alt)

             new_alts = (DEFAULT, [], rhs1) : map enum_to_tag rest_alts
             new_bndr = setIdType bndr intPrimTy
                 -- The binder is dead, but should have the right type
       ; return (Just (val_arg, Select dup new_bndr new_alts se cont)) }
-}
  | otherwise
  = do { dflags <- getDynFlags
       ; case lookupRule dflags (getUnfoldingInRuleMatch env) (activeRule env)
                         fn (argInfoAppArgs args) rules of {
           Nothing ->
             do { nodump dflags  -- This ensures that an empty file is written
                ; return Nothing } ;  -- No rule matches
           Just (rule, rule_rhs) ->
             do { checkedTick (RuleFired (ru_name rule))
                ; let cont' = pushSimplifiedArgs env
                                                 (drop (ruleArity rule) args)
                                                 call_cont
                      -- (ruleArity rule) says how many args the rule consumed
                ; dump dflags rule rule_rhs
                ; return (Just (rule_rhs, cont')) }}}
  where
    -- Emit trace output for a fired rule, depending on dump flags.
    dump dflags rule rule_rhs
      | dopt Opt_D_dump_rule_rewrites dflags
      = log_rule dflags Opt_D_dump_rule_rewrites "Rule fired" $ vcat
          [ text "Rule:" <+> ftext (ru_name rule)
          , text "Before:" <+> hang (ppr fn) 2 (sep (map ppr args))
          , text "After: " <+> pprCoreExpr rule_rhs
          , text "Cont: " <+> ppr call_cont ]

      | dopt Opt_D_dump_rule_firings dflags
      = log_rule dflags Opt_D_dump_rule_firings "Rule fired:" $
          ftext (ru_name rule)

      | otherwise
      = return ()

    nodump dflags
      | dopt Opt_D_dump_rule_rewrites dflags
      = liftIO $ dumpSDoc dflags alwaysQualify Opt_D_dump_rule_rewrites "" empty

      | dopt Opt_D_dump_rule_firings dflags
      = liftIO $ dumpSDoc dflags alwaysQualify Opt_D_dump_rule_firings "" empty

      | otherwise
      = return ()

    log_rule dflags flag hdr details
      = liftIO . dumpSDoc dflags alwaysQualify flag "" $
          sep [text hdr, nest 4 details]
{-
Note [Optimising tagToEnum#]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
If we have an enumeration data type:
data Foo = A | B | C
Then we want to transform
case tagToEnum# x of ==> case x of
A -> e1 DEFAULT -> e1
B -> e2 1# -> e2
C -> e3 2# -> e3
thereby getting rid of the tagToEnum# altogether. If there was a DEFAULT
alternative we retain it (remember it comes first). If not the case must
be exhaustive, and we reflect that in the transformed version by adding
a DEFAULT. Otherwise Lint complains that the new case is not exhaustive.
See #8317.
Note [Rules for recursive functions]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
You might think that we shouldn't apply rules for a loop breaker:
doing so might give rise to an infinite loop, because a RULE is
rather like an extra equation for the function:
RULE: f (g x) y = x+y
Eqn: f a y = a-y
But it's too drastic to disable rules for loop breakers.
Even the foldr/build rule would be disabled, because foldr
is recursive, and hence a loop breaker:
foldr k z (build g) = g k z
So it's up to the programmer: rules can cause divergence
************************************************************************
* *
Rebuilding a case expression
* *
************************************************************************
Note [Case elimination]
~~~~~~~~~~~~~~~~~~~~~~~
The case-elimination transformation discards redundant case expressions.
Start with a simple situation:
case x# of ===> let y# = x# in e
y# -> e
(when x#, y# are of primitive type, of course). We can't (in general)
do this for algebraic cases, because we might turn bottom into
non-bottom!
The code in SimplUtils.prepareAlts has the effect of generalising this
idea to look for a case where we're scrutinising a variable, and we
know that only the default case can match. For example:
case x of
0# -> ...
DEFAULT -> ...(case x of
0# -> ...
DEFAULT -> ...) ...
Here the inner case is first trimmed to have only one alternative, the
DEFAULT, after which it's an instance of the previous case. This
really only shows up in eliminating error-checking code.
Note that SimplUtils.mkCase combines identical RHSs. So
case e of ===> case e of DEFAULT -> r
True -> r
False -> r
Now again the case may be eliminated by the CaseElim transformation.
This includes things like (==# a# b#)::Bool so that we simplify
case ==# a# b# of { True -> x; False -> x }
to just
x
This particular example shows up in default methods for
comparison operations (e.g. in (>=) for Int.Int32)
Note [Case elimination: lifted case]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
If a case over a lifted type has a single alternative, and is being used
as a strict 'let' (all isDeadBinder bndrs), we may want to do this
transformation:
case e of r ===> let r = e in ...r...
_ -> ...r...
(a) 'e' is already evaluated (it may so if e is a variable)
Specifically we check (exprIsHNF e). In this case
we can just allocate the WHNF directly with a let.
or
(b) 'x' is not used at all and e is ok-for-speculation
The ok-for-spec bit checks that we don't lose any
exceptions or divergence.
NB: it'd be *sound* to switch from case to let if the
scrutinee was not yet WHNF but was guaranteed to
converge; but sticking with case means we won't build a
thunk
or
(c) 'x' is used strictly in the body, and 'e' is a variable
Then we can just substitute 'e' for 'x' in the body.
See Note [Eliminating redundant seqs]
For (b), the "not used at all" test is important. Consider
case (case a ># b of { True -> (p,q); False -> (q,p) }) of
r -> blah
The scrutinee is ok-for-speculation (it looks inside cases), but we do
not want to transform to
let r = case a ># b of { True -> (p,q); False -> (q,p) }
in blah
because that builds an unnecessary thunk.
Note [Eliminating redundant seqs]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
If we have this:
case x of r { _ -> ..r.. }
where 'r' is used strictly in (..r..), the case is effectively a 'seq'
on 'x', but since 'r' is used strictly anyway, we can safely transform to
(...x...)
Note that this can change the error behaviour. For example, we might
transform
case x of { _ -> error "bad" }
--> error "bad"
which might be puzzling if 'x' is currently lambda-bound, but later gets
let-bound to (error "good").
Nevertheless, the paper "A semantics for imprecise exceptions" allows
this transformation. If you want to fix the evaluation order, use
'pseq'. See Trac #8900 for an example where the loss of this
transformation bit us in practice.
See also Note [Empty case alternatives] in CoreSyn.
Just for reference, the original code (added Jan 13) looked like this:
|| case_bndr_evald_next rhs
case_bndr_evald_next :: CoreExpr -> Bool
-- See Note [Case binder next]
case_bndr_evald_next (Var v) = v == case_bndr
case_bndr_evald_next (Cast e _) = case_bndr_evald_next e
case_bndr_evald_next (App e _) = case_bndr_evald_next e
case_bndr_evald_next (Case e _ _ _) = case_bndr_evald_next e
case_bndr_evald_next _ = False
(This came up when fixing Trac #7542. See also Note [Eta reduction of
an eval'd function] in CoreUtils.)
Note [Case elimination: unlifted case]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider
case a +# b of r -> ...r...
Then we do case-elimination (to make a let) followed by inlining,
to get
.....(a +# b)....
If we have
case indexArray# a i of r -> ...r...
we might like to do the same, and inline the (indexArray# a i).
But indexArray# is not okForSpeculation, so we don't build a let
in rebuildCase (lest it get floated *out*), so the inlining doesn't
happen either.
This really isn't a big deal I think. The let can be
Further notes about case elimination
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider: test :: Integer -> IO ()
test = print
Turns out that this compiles to:
Print.test
= \ eta :: Integer
eta1 :: Void# ->
case PrelNum.< eta PrelNum.zeroInteger of wild { __DEFAULT ->
case hPutStr stdout
(PrelNum.jtos eta ($w[] @ Char))
eta1
of wild1 { (# new_s, a4 #) -> PrelIO.lvl23 new_s }}
Notice the strange '<' which has no effect at all. This is a funny one.
It started like this:
f x y = if x < 0 then jtos x
else if y==0 then "" else jtos x
At a particular call site we have (f v 1). So we inline to get
if v < 0 then jtos x
else if 1==0 then "" else jtos x
Now simplify the 1==0 conditional:
if v<0 then jtos v else jtos v
Now common-up the two branches of the case:
case (v<0) of DEFAULT -> jtos v
Why don't we drop the case? Because it's strict in v. It's technically
wrong to drop even unnecessary evaluations, and in practice they
may be a result of 'seq' so we *definitely* don't want to drop those.
I don't really know how to improve this situation.
-}
---------------------------------------------------------
-- Eliminate the case if possible
rebuildCase, reallyRebuildCase
   :: SimplEnv
   -> OutExpr          -- Scrutinee
   -> InId             -- Case binder
   -> [InAlt]          -- Alternatives (increasing order)
   -> SimplCont
   -> SimplM (SimplEnv, OutExpr)

--------------------------------------------------
--      1. Eliminate the case if there's a known constructor
--------------------------------------------------

-- First equation: the scrutinee is a known literal or a (possibly
-- hidden behind an unfolding) constructor application, so we can pick
-- the matching alternative directly and drop the case.
rebuildCase env scrut case_bndr alts cont
  | Lit lit <- scrut    -- No need for same treatment as constructors
                        -- because literals are inlined more vigorously
  , not (litIsLifted lit)
  = do  { tick (KnownBranch case_bndr)
        ; case findAlt (LitAlt lit) alts of
            Nothing           -> missingAlt env case_bndr alts cont
            Just (_, bs, rhs) -> simple_rhs bs rhs }

  | Just (con, ty_args, other_args) <- exprIsConApp_maybe (getUnfoldingInRuleMatch env) scrut
        -- Works when the scrutinee is a variable with a known unfolding
        -- as well as when it's an explicit constructor application
  = do  { tick (KnownBranch case_bndr)
        ; case findAlt (DataAlt con) alts of
            Nothing                 -> missingAlt env case_bndr alts cont
            Just (DEFAULT, bs, rhs) -> simple_rhs bs rhs
            Just (_, bs, rhs)       -> knownCon env scrut con ty_args other_args
                                                case_bndr bs rhs cont
        }
  where
    -- The chosen alternative binds nothing, so just bind the case
    -- binder to the scrutinee and simplify the RHS.
    simple_rhs bs rhs = ASSERT( null bs )
                        do { env' <- simplNonRecX env case_bndr scrut
                               -- scrut is a constructor application,
                               -- hence satisfies let/app invariant
                           ; simplExprF env' rhs cont }

--------------------------------------------------
--      2. Eliminate the case if scrutinee is evaluated
--------------------------------------------------

-- Second equation: a single-alternative case, which may be droppable
-- altogether, turnable into a let, or subject to user 'seq' RULES.
rebuildCase env scrut case_bndr alts@[(_, bndrs, rhs)] cont
  -- See if we can get rid of the case altogether
  -- See Note [Case elimination]
  -- mkCase made sure that if all the alternatives are equal,
  -- then there is now only one (DEFAULT) rhs

  -- 2a.  Dropping the case altogether, if
  --      a) it binds nothing (so it's really just a 'seq')
  --      b) evaluating the scrutinee has no side effects
  | is_plain_seq
  , exprOkForSideEffects scrut
          -- The entire case is dead, so we can drop it
          -- if the scrutinee converges without having imperative
          --    side effects or raising a Haskell exception
          -- See Note [PrimOp can_fail and has_side_effects] in PrimOp
  = simplExprF env rhs cont

  -- 2b.  Turn the case into a let, if
  --      a) it binds only the case-binder
  --      b) unlifted case: the scrutinee is ok-for-speculation
  --           lifted case: the scrutinee is in HNF (or will later be demanded)
  | all_dead_bndrs
  , if is_unlifted
    then exprOkForSpeculation scrut  -- See Note [Case elimination: unlifted case]
    else exprIsHNF scrut             -- See Note [Case elimination: lifted case]
      || scrut_is_demanded_var scrut
  = do { tick (CaseElim case_bndr)
       ; env' <- simplNonRecX env case_bndr scrut
       ; simplExprF env' rhs cont }

  -- 2c. Try the seq rules if
  --      a) it binds only the case binder
  --      b) a rule for seq applies
  -- See Note [User-defined RULES for seq] in MkId
  | is_plain_seq
  = do { let scrut_ty  = exprType scrut
             rhs_ty    = substTy env (exprType rhs)
             out_args  = [ TyArg { as_arg_ty  = scrut_ty
                                 , as_hole_ty = seq_id_ty }
                         , TyArg { as_arg_ty  = rhs_ty
                                 , as_hole_ty = piResultTy seq_id_ty scrut_ty }
                         , ValArg scrut]
             rule_cont = ApplyToVal { sc_dup = NoDup, sc_arg = rhs
                                    , sc_env = env, sc_cont = cont }
             env'      = zapSubstEnv env
             -- Lazily evaluated, so we don't do most of this

       ; rule_base <- getSimplRules
       ; mb_rule <- tryRules env' (getRules rule_base seqId) seqId out_args rule_cont
       ; case mb_rule of
           Just (rule_rhs, cont') -> simplExprF env' rule_rhs cont'
           Nothing                -> reallyRebuildCase env scrut case_bndr alts cont }
  where
    is_unlifted    = isUnliftedType (idType case_bndr)
    all_dead_bndrs = all isDeadBinder bndrs   -- bndrs are [InId]
    is_plain_seq   = all_dead_bndrs && isDeadBinder case_bndr
                     -- Evaluation *only* for effect
    seq_id_ty      = idType seqId

    scrut_is_demanded_var :: CoreExpr -> Bool
        -- See Note [Eliminating redundant seqs]
    scrut_is_demanded_var (Cast s _) = scrut_is_demanded_var s
    scrut_is_demanded_var (Var _)    = isStrictDmd (idDemandInfo case_bndr)
    scrut_is_demanded_var _          = False

-- Third equation: no special case applies; fall through to the
-- catch-all that actually reconstructs the case expression.
rebuildCase env scrut case_bndr alts cont
  = reallyRebuildCase env scrut case_bndr alts cont
--------------------------------------------------
-- 3. Catch-all case
--------------------------------------------------
-- Catch-all: genuinely rebuild the case expression, splitting the
-- continuation into a duplicable part (pushed into the alternatives)
-- and a non-duplicable part (wrapped around the result).
reallyRebuildCase env scrut case_bndr alts cont
  = do  { -- Prepare the continuation;
          -- The new subst_env is in place
          (env', dup_cont, nodup_cont) <- prepareCaseCont env alts cont

          -- Simplify the alternatives
        ; (scrut', case_bndr', alts') <- simplAlts env' scrut case_bndr alts dup_cont

        ; dflags <- getDynFlags
        ; let alts_ty' = contResultType dup_cont
        ; case_expr <- mkCase dflags scrut' case_bndr' alts_ty' alts'

          -- Notice that rebuild gets the in-scope set from env', not alt_env
          -- (which in any case is only built in simplAlts)
          -- The case binder does *not* scope over the whole returned case-expression
        ; rebuild env' case_expr nodup_cont }
{-
simplCaseBinder checks whether the scrutinee is a variable, v. If so,
try to eliminate uses of v in the RHSs in favour of case_bndr; that
way, there's a chance that v will now only be used once, and hence
inlined.
Historical note: we used to do the "case binder swap" in the Simplifier
so there were additional complications if the scrutinee was a variable.
Now the binder-swap stuff is done in the occurrence analyser; see
OccurAnal Note [Binder swap].
Note [knownCon occ info]
~~~~~~~~~~~~~~~~~~~~~~~~
If the case binder is not dead, then neither are the pattern bound
variables:
case <any> of x { (a,b) ->
case x of { (p,q) -> p } }
Here (a,b) both look dead, but come alive after the inner case is eliminated.
The point is that we bring into the envt a binding
let x = (a,b)
after the outer case, and that makes (a,b) alive. At least we do unless
the case binder is guaranteed dead.
Note [Case alternative occ info]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
When we are simply reconstructing a case (the common case), we always
zap the occurrence info on the binders in the alternatives. Even
if the case binder is dead, the scrutinee is usually a variable, and *that*
can bring the case-alternative binders back to life.
See Note [Add unfolding for scrutinee]
Note [Improving seq]
~~~~~~~~~~~~~~~~~~~
Consider
type family F :: * -> *
type instance F Int = Int
... case e of x { DEFAULT -> rhs } ...
where x::F Int. Then we'd like to rewrite (F Int) to Int, getting
case e `cast` co of x'::Int
I# x# -> let x = x' `cast` sym co
in rhs
so that 'rhs' can take advantage of the form of x'.
Notice that Note [Case of cast] (in OccurAnal) may then apply to the result.
Nota Bene: We only do the [Improving seq] transformation if the
case binder 'x' is actually used in the rhs; that is, if the case
is *not* a *pure* seq.
a) There is no point in adding the cast to a pure seq.
b) There is a good reason not to: doing so would interfere
with seq rules (Note [Built-in RULES for seq] in MkId).
In particular, this [Improving seq] thing *adds* a cast
while [Built-in RULES for seq] *removes* one, so they
just flip-flop.
You might worry about
case v of x { __DEFAULT ->
... case (v `cast` co) of y { I# -> ... }}
This is a pure seq (since x is unused), so [Improving seq] won't happen.
But it's ok: the simplifier will replace 'v' by 'x' in the rhs to get
case v of x { __DEFAULT ->
... case (x `cast` co) of y { I# -> ... }}
Now the outer case is not a pure seq, so [Improving seq] will happen,
and then the inner case will disappear.
The need for [Improving seq] showed up in Roman's experiments. Example:
foo :: F Int -> Int -> Int
foo t n = t `seq` bar n
where
bar 0 = 0
bar n = bar (n - case t of TI i -> i)
Here we'd like to avoid repeated evaluating t inside the loop, by
taking advantage of the `seq`.
At one point I did transformation in LiberateCase, but it's more
robust here. (Otherwise, there's a danger that we'll simply drop the
'seq' altogether, before LiberateCase gets to see it.)
-}
simplAlts :: SimplEnv
          -> OutExpr         -- Scrutinee (already simplified)
          -> InId            -- Case binder
          -> [InAlt]         -- Non-empty
          -> SimplCont
          -> SimplM (OutExpr, OutId, [OutAlt])  -- Includes the continuation
-- Like simplExpr, this just returns the simplified alternatives;
-- it does not return an environment
-- The returned alternatives can be empty, none are possible

simplAlts env scrut case_bndr alts cont'
  = do  { let env0 = zapFloats env
              -- Floats from the scrutinee's environment must not be
              -- pushed into the alternatives

        ; (env1, case_bndr1) <- simplBinder env0 case_bndr

        ; fam_envs <- getFamEnvs
        ; (alt_env', scrut', case_bndr') <- improveSeq fam_envs env1 scrut
                                                       case_bndr case_bndr1 alts
              -- See Note [Improving seq]

        ; (imposs_deflt_cons, in_alts) <- prepareAlts scrut' case_bndr' alts
          -- NB: it's possible that the returned in_alts is empty: this is handled
          -- by the caller (rebuildCase) in the missingAlt function

        ; alts' <- mapM (simplAlt alt_env' (Just scrut') imposs_deflt_cons case_bndr' cont') in_alts
        ; -- pprTrace "simplAlts" (ppr case_bndr $$ ppr alts_ty $$ ppr alts_ty' $$ ppr alts $$ ppr cont') $
          return (scrut', case_bndr', alts') }
------------------------------------
improveSeq :: (FamInstEnv, FamInstEnv) -> SimplEnv
           -> OutExpr -> InId -> OutId -> [InAlt]
           -> SimplM (SimplEnv, OutExpr, OutId)
-- Note [Improving seq]
-- If the case is a non-pure seq (single DEFAULT alternative, live case
-- binder) and the binder's type normalises through family instances,
-- cast the scrutinee to the normalised type so the alternative sees
-- the more informative type.  Otherwise leave everything unchanged.
improveSeq fam_envs env scrut case_bndr case_bndr1 [(DEFAULT,_,_)]
  | not (isDeadBinder case_bndr) -- Not a pure seq!  See Note [Improving seq]
  , Just (co, ty2) <- topNormaliseType_maybe fam_envs (idType case_bndr1)
  = do { case_bndr2 <- newId (fsLit "nt") ty2
          -- Fresh binder at the normalised type; the old case binder is
          -- substituted by (case_bndr2 |> sym co) in the RHS
       ; let rhs  = DoneEx (Var case_bndr2 `Cast` mkSymCo co)
             env2 = extendIdSubst env case_bndr rhs
       ; return (env2, scrut `Cast` co, case_bndr2) }

improveSeq _ env scrut _ case_bndr1 _
  = return (env, scrut, case_bndr1)
------------------------------------
simplAlt :: SimplEnv
         -> Maybe OutExpr  -- The scrutinee
         -> [AltCon]       -- These constructors can't be present when
                           -- matching the DEFAULT alternative
         -> OutId          -- The case binder
         -> SimplCont
         -> InAlt
         -> SimplM OutAlt
-- Simplify one case alternative: bring the pattern binders into scope,
-- record what the case binder (and a variable scrutinee) is known to be
-- inside this alternative, then simplify the RHS with the given
-- (duplicable) continuation.

simplAlt env _ imposs_deflt_cons case_bndr' cont' (DEFAULT, bndrs, rhs)
  = ASSERT( null bndrs )
    do  { let env' = addBinderUnfolding env case_bndr'
                                        (mkOtherCon imposs_deflt_cons)
                -- Record the constructors that the case-binder *can't* be.
        ; rhs' <- simplExprC env' rhs cont'
        ; return (DEFAULT, [], rhs') }

simplAlt env scrut' _ case_bndr' cont' (LitAlt lit, bndrs, rhs)
  = ASSERT( null bndrs )
    do  { env' <- addAltUnfoldings env scrut' case_bndr' (Lit lit)
        ; rhs' <- simplExprC env' rhs cont'
        ; return (LitAlt lit, [], rhs') }

simplAlt env scrut' _ case_bndr' cont' (DataAlt con, vs, rhs)
  = do  { -- Deal with the pattern-bound variables
          -- Mark the ones that are in ! positions in the
          -- data constructor as certainly-evaluated.
          -- NB: simplLamBinders preserves this eval info

        ; let vs_with_evals = add_evals (dataConRepStrictness con)
        ; (env', vs') <- simplLamBndrs env vs_with_evals

                -- Bind the case-binder to (con args)
        ; let inst_tys' = tyConAppArgs (idType case_bndr')
              con_app :: OutExpr
              con_app   = mkConApp2 con inst_tys' vs'

        ; env'' <- addAltUnfoldings env' scrut' case_bndr' con_app
        ; rhs' <- simplExprC env'' rhs cont'
        ; return (DataAlt con, vs', rhs') }
  where
    -- add_evals records the evaluated-ness of the bound variables of
    -- a case pattern.  This is *important*.  Consider
    --      data T = T !Int !Int
    --
    --      case x of { T a b -> T (a+1) b }
    --
    -- We really must record that b is already evaluated so that we don't
    -- go and re-evaluate it when constructing the result.
    -- See Note [Data-con worker strictness] in MkId.hs
    add_evals the_strs
        = go vs the_strs
        where
          go [] [] = []
          go (v:vs') strs | isTyVar v = v : go vs' strs
              -- Type variables carry no strictness mark; skip them
          go (v:vs') (str:strs)
            | isMarkedStrict str = eval v : go vs' strs
            | otherwise          = zap v  : go vs' strs
          go _ _ = pprPanic "cat_evals"
                     (ppr con $$
                      ppr vs  $$
                      ppr_with_length the_strs $$
                      ppr_with_length (dataConRepArgTys con) $$
                      ppr_with_length (dataConRepStrictness con))
            where
              ppr_with_length list
                = ppr list <+> parens (text "length =" <+> ppr (length list))
                -- NB: If this panic triggers, note that
                -- NoStrictnessMark doesn't print!

    zap v  = zapIdOccInfo v   -- See Note [Case alternative occ info]
    eval v = zap v `setIdUnfolding` evaldUnfolding
addAltUnfoldings :: SimplEnv -> Maybe OutExpr -> OutId -> OutExpr -> SimplM SimplEnv
-- Inside a case alternative, record that the case binder -- and a
-- variable scrutinee, if there is one -- is known to be con_app.
addAltUnfoldings env scrut case_bndr con_app
  = do { dflags <- getDynFlags
       ; let con_app_unf = mkSimpleUnfolding dflags con_app
             with_bndr   = addBinderUnfolding env case_bndr con_app_unf

             -- See Note [Add unfolding for scrutinee]
             -- A cast scrutinee gets the unfolding cast back the other way.
             final_env = case scrut of
                           Just (Var v)           -> addBinderUnfolding with_bndr v con_app_unf
                           Just (Cast (Var v) co) -> addBinderUnfolding with_bndr v $
                                                     mkSimpleUnfolding dflags (Cast con_app (mkSymCo co))
                           _                      -> with_bndr

       ; traceSmpl "addAltUnf" (vcat [ppr case_bndr <+> ppr scrut, ppr con_app])
       ; return final_env }
addBinderUnfolding :: SimplEnv -> Id -> Unfolding -> SimplEnv
-- Attach the given unfolding to the binder and put the updated binder
-- back into the in-scope set.  In debug builds, warn if the unfolding
-- template's type does not match the binder's type.
addBinderUnfolding env bndr unf
  | debugIsOn, Just tmpl <- maybeUnfoldingTemplate unf
  = WARN( not (eqType (idType bndr) (exprType tmpl)),
          ppr bndr $$ ppr (idType bndr) $$ ppr tmpl $$ ppr (exprType tmpl) )
    record_unf
  | otherwise
  = record_unf
  where
    record_unf = modifyInScope env (bndr `setIdUnfolding` unf)
zapBndrOccInfo :: Bool -> Id -> Id
-- Consider  case e of b { (a,b) -> ... }
-- Then if we bind b to (a,b) in "...", and b is not dead,
-- then we must zap the deadness info on a,b
zapBndrOccInfo keep_occ_info pat_id
  = if keep_occ_info then pat_id else zapIdOccInfo pat_id
{-
Note [Add unfolding for scrutinee]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
In general it's unlikely that a variable scrutinee will appear
in the case alternatives case x of { ...x unlikely to appear... }
because the binder-swap in OccAnal has got rid of all such occurrences
See Note [Binder swap] in OccAnal.
BUT it is still VERY IMPORTANT to add a suitable unfolding for a
variable scrutinee, in simplAlt. Here's why
case x of y
(a,b) -> case b of c
I# v -> ...(f y)...
There is no occurrence of 'b' in the (...(f y)...). But y gets
the unfolding (a,b), and *that* mentions b. If f has a RULE
RULE f (p, I# q) = ...
we want that rule to match, so we must extend the in-scope env with a
suitable unfolding for 'y'. It's *essential* for rule matching; but
it's also good for case-elimination -- suppose that 'f' was inlined
and did multi-level case analysis, then we'd solve it in one
simplifier sweep instead of two.
Exactly the same issue arises in SpecConstr;
see Note [Add scrutinee to ValueEnv too] in SpecConstr
HOWEVER, given
case x of y { Just a -> r1; Nothing -> r2 }
we do not want to add the unfolding x -> y to 'x', which might seem cool,
since 'y' itself has different unfoldings in r1 and r2. Reason: if we
did that, we'd have to zap y's deadness info and that is a very useful
piece of information.
So instead we add the unfolding x -> Just a, and x -> Nothing in the
respective RHSs.
************************************************************************
* *
\subsection{Known constructor}
* *
************************************************************************
We are a bit careful with occurrence info. Here's an example
(\x* -> case x of (a*, b) -> f a) (h v, e)
where the * means "occurs once". This effectively becomes
case (h v, e) of (a*, b) -> f a)
and then
let a* = h v; b = e in f a
and then
f (h v)
All this should happen in one sweep.
-}
knownCon :: SimplEnv
         -> OutExpr                            -- The scrutinee
         -> DataCon -> [OutType] -> [OutExpr]  -- The scrutinee (in pieces)
         -> InId -> [InBndr] -> InExpr         -- The alternative
         -> SimplCont
         -> SimplM (SimplEnv, OutExpr)
-- The scrutinee is a constructor application (dc applied to dc_ty_args
-- and dc_args) and exactly one alternative matches.  Bind the pattern
-- binders to the corresponding constructor arguments, bind the case
-- binder, and simplify the chosen RHS.

knownCon env scrut dc dc_ty_args dc_args bndr bs rhs cont
  = do  { env'  <- bind_args env bs dc_args
        ; env'' <- bind_case_bndr env'
        ; simplExprF env'' rhs cont }
  where
    zap_occ = zapBndrOccInfo (isDeadBinder bndr)  -- bndr is an InId

                  -- Ugh!
    -- Walk the pattern binders and constructor arguments in lockstep,
    -- extending the type/coercion substitution or binding value args.
    bind_args env' [] _  = return env'

    bind_args env' (b:bs') (Type ty : args)
      = ASSERT( isTyVar b )
        bind_args (extendTvSubst env' b ty) bs' args

    bind_args env' (b:bs') (Coercion co : args)
      = ASSERT( isCoVar b )
        bind_args (extendCvSubst env' b co) bs' args

    bind_args env' (b:bs') (arg : args)
      = ASSERT( isId b )
        do { let b' = zap_occ b
             -- Note that the binder might be "dead", because it doesn't
             -- occur in the RHS; and simplNonRecX may therefore discard
             -- it via postInlineUnconditionally.
             -- Nevertheless we must keep it if the case-binder is alive,
             -- because it may be used in the con_app.  See Note [knownCon occ info]
           ; env'' <- simplNonRecX env' b' arg  -- arg satisfies let/app invariant
           ; bind_args env'' bs' args }

    -- Binder/argument lists out of step: a compiler invariant is broken
    bind_args _ _ _ =
      pprPanic "bind_args" $ ppr dc $$ ppr bs $$ ppr dc_args $$
                             text "scrut:" <+> ppr scrut

       -- It's useful to bind bndr to scrut, rather than to a fresh
       -- binding      x = Con arg1 .. argn
       -- because very often the scrut is a variable, so we avoid
       -- creating, and then subsequently eliminating, a let-binding
       -- BUT, if scrut is a not a variable, we must be careful
       -- about duplicating the arg redexes; in that case, make
       -- a new con-app from the args
    bind_case_bndr env
      | isDeadBinder bndr   = return env
      | exprIsTrivial scrut = return (extendIdSubst env bndr (DoneEx scrut))
      | otherwise           = do { dc_args <- mapM (simplVar env) bs
                                     -- dc_ty_args are already OutTypes,
                                     -- but bs are InBndrs
                                 ; let con_app = Var (dataConWorkId dc)
                                                 `mkTyApps` dc_ty_args
                                                 `mkApps`   dc_args
                                 ; simplNonRecX env bndr con_app }
-------------------
missingAlt :: SimplEnv -> Id -> [InAlt] -> SimplCont -> SimplM (SimplEnv, OutExpr)
-- This isn't strictly an error, although it is unusual.
-- It's possible that the simplifier might "see" that
-- an inner case has no accessible alternatives before
-- it "sees" that the entire branch of an outer case is
-- inaccessible.  So we simply put an error case here instead.
missingAlt env case_bndr _ cont
  = WARN( True, text "missingAlt" <+> ppr case_bndr )
    -- Emit a debug warning, then replace the whole case with an
    -- expression of the right type that can never be evaluated.
    return (env, mkImpossibleExpr (contResultType cont))
{-
************************************************************************
* *
\subsection{Duplicating continuations}
* *
************************************************************************
-}
prepareCaseCont :: SimplEnv
                -> [InAlt] -> SimplCont
                -> SimplM (SimplEnv,
                           SimplCont,  -- Dupable part
                           SimplCont)  -- Non-dupable part

-- We are considering
--     K[case _ of { p1 -> r1; ...; pn -> rn }]
-- where K is some enclosing continuation for the case
-- Goal: split K into two pieces Kdup,Knodup so that
--   a) Kdup can be duplicated
--   b) Knodup[Kdup[e]] = K[e]
-- The idea is that we'll transform thus:
--   Knodup[ (case _ of { p1 -> Kdup[r1]; ...; pn -> Kdup[rn] }
--
-- We may also return some extra bindings in SimplEnv (that scope over
-- the entire continuation)
--
-- When case-of-case is off, just make the entire continuation non-dupable

prepareCaseCont env alts cont
  | not (sm_case_case (getMode env)) = return (env, mkBoringStop (contHoleType cont), cont)
  | not (many_alts alts)             = return (env, cont, mkBoringStop (contResultType cont))
  | otherwise                        = mkDupableCont env cont
  where
    many_alts :: [InAlt] -> Bool  -- True iff strictly > 1 non-bottom alternative
    many_alts []  = False         -- See Note [Bottom alternatives]
    many_alts [_] = False
    many_alts (alt:alts)
      | is_bot_alt alt = many_alts alts
      | otherwise      = not (all is_bot_alt alts)

    is_bot_alt (_,_,rhs) = exprIsBottom rhs
{-
Note [Bottom alternatives]
~~~~~~~~~~~~~~~~~~~~~~~~~~
When we have
case (case x of { A -> error .. ; B -> e; C -> error ..)
of alts
then we can just duplicate those alts because the A and C cases
will disappear immediately. This is more direct than creating
join points and inlining them away; and in some cases we would
not even create the join points (see Note [Single-alternative case])
and we would keep the case-of-case which is silly. See Trac #4930.
-}
mkDupableCont :: SimplEnv -> SimplCont
              -> SimplM (SimplEnv, SimplCont, SimplCont)
-- Split a continuation into a duplicable prefix and a non-duplicable
-- remainder (see prepareCaseCont for the contract).  Works by cases on
-- the continuation's shape; pattern order matters.

mkDupableCont env cont
  | contIsDupable cont
  = return (env, cont, mkBoringStop (contResultType cont))

mkDupableCont _ (Stop {}) = panic "mkDupableCont" -- Handled by previous eqn

mkDupableCont env (CastIt ty cont)
  = do  { (env', dup, nodup) <- mkDupableCont env cont
        ; return (env', CastIt ty dup, nodup) }

-- Duplicating ticks for now, not sure if this is good or not
mkDupableCont env cont@(TickIt {})
  = return (env, mkBoringStop (contHoleType cont), cont)

mkDupableCont env cont@(StrictBind {})
  = return (env, mkBoringStop (contHoleType cont), cont)
        -- See Note [Duplicating StrictBind]

mkDupableCont env (StrictArg info cci cont)
        -- See Note [Duplicating StrictArg]
  = do { (env', dup, nodup) <- mkDupableCont env cont
       ; (env'', args')     <- mapAccumLM makeTrivialArg env' (ai_args info)
       ; return (env'', StrictArg (info { ai_args = args' }) cci dup, nodup) }

mkDupableCont env cont@(ApplyToTy { sc_cont = tail })
  = do  { (env', dup_cont, nodup_cont) <- mkDupableCont env tail
        ; return (env', cont { sc_cont = dup_cont }, nodup_cont ) }

mkDupableCont env (ApplyToVal { sc_arg = arg, sc_dup = dup, sc_env = se, sc_cont = cont })
  =     -- e.g.         [...hole...] (...arg...)
        --      ==>
        --              let a = ...arg...
        --              in [...hole...] a
    do  { (env', dup_cont, nodup_cont) <- mkDupableCont env cont
        ; (_, se', arg') <- simplArg env' dup se arg
        ; (env'', arg'') <- makeTrivial NotTopLevel env' (fsLit "karg") arg'
        ; let app_cont = ApplyToVal { sc_arg = arg'', sc_env = se'
                                    , sc_dup = OkToDup, sc_cont = dup_cont }
        ; return (env'', app_cont, nodup_cont) }

mkDupableCont env cont@(Select { sc_bndr = case_bndr, sc_alts = [(_, bs, _rhs)] })
--  See Note [Single-alternative case]
--  | not (exprIsDupable rhs && contIsDupable case_cont)
--  | not (isDeadBinder case_bndr)
  | all isDeadBinder bs  -- InIds
    && not (isUnliftedType (idType case_bndr))
    -- Note [Single-alternative-unlifted]
  = return (env, mkBoringStop (contHoleType cont), cont)

mkDupableCont env (Select { sc_bndr = case_bndr, sc_alts = alts
                          , sc_env = se, sc_cont = cont })
  =     -- e.g.         (case [...hole...] of { pi -> ei })
        --      ===>
        --              let ji = \xij -> ei
        --              in case [...hole...] of { pi -> ji xij }
    do  { tick (CaseOfCase case_bndr)
        ; (env', dup_cont, nodup_cont) <- prepareCaseCont env alts cont
                -- NB: We call prepareCaseCont here.  If there is only one
                -- alternative, then dup_cont may be big, but that's ok
                -- because we push it into the single alternative, and then
                -- use mkDupableAlt to turn that simplified alternative into
                -- a join point if it's too big to duplicate.
                -- And this is important: see Note [Fusing case continuations]

        ; let alt_env = se `setInScope` env'

        ; (alt_env', case_bndr') <- simplBinder alt_env case_bndr
        ; alts' <- mapM (simplAlt alt_env' Nothing [] case_bndr' dup_cont) alts
                -- Safe to say that there are no handled-cons for the DEFAULT case
                -- NB: simplBinder does not zap deadness occ-info, so
                -- a dead case_bndr' will still advertise its deadness
                -- This is really important because in
                --      case e of b { (# p,q #) -> ... }
                -- b is always dead, and indeed we are not allowed to bind b to (# p,q #),
                -- which might happen if e was an explicit unboxed pair and b wasn't marked dead.
                -- In the new alts we build, we have the new case binder, so it must retain
                -- its deadness.
                -- NB: we don't use alt_env further; it has the substEnv for
                --     the alternatives, and we don't want that

        ; (env'', alts'') <- mkDupableAlts env' case_bndr' alts'
        ; return (env'',  -- Note [Duplicated env]
                  Select { sc_dup = OkToDup
                         , sc_bndr = case_bndr', sc_alts = alts''
                         , sc_env = zapSubstEnv env''
                         , sc_cont = mkBoringStop (contHoleType nodup_cont) },
                  nodup_cont) }
mkDupableAlts :: SimplEnv -> OutId -> [InAlt]
              -> SimplM (SimplEnv, [InAlt])
-- Absorbs the continuation into the new alternatives, threading the
-- environment left-to-right through each call to mkDupableAlt.
mkDupableAlts env case_bndr' the_alts
  = mapAccumLM (\cur_env alt -> mkDupableAlt cur_env case_bndr' alt)
               env the_alts
mkDupableAlt :: SimplEnv -> OutId -> (AltCon, [CoreBndr], CoreExpr)
             -> SimplM (SimplEnv, (AltCon, [CoreBndr], CoreExpr))
-- If the alternative's RHS is small, return it unchanged; otherwise
-- float the RHS out as a join point ($j) and replace the RHS with a
-- call to it, so the alternative becomes cheap to duplicate.
mkDupableAlt env case_bndr (con, bndrs', rhs') = do
  dflags <- getDynFlags
  if exprIsDupable dflags rhs'  -- Note [Small alternative rhs]
    then return (env, (con, bndrs', rhs'))
    else
      do { let rhs_ty'  = exprType rhs'
               scrut_ty = idType case_bndr
               case_bndr_w_unf
                 = case con of
                     DEFAULT    -> case_bndr
                     DataAlt dc -> setIdUnfolding case_bndr unf
                       where
                         -- See Note [Case binders and join points]
                         unf = mkInlineUnfolding Nothing rhs
                         rhs = mkConApp2 dc (tyConAppArgs scrut_ty) bndrs'

                     LitAlt {} -> WARN( True, text "mkDupableAlt"
                                              <+> ppr case_bndr <+> ppr con )
                                  case_bndr
                       -- The case binder is alive but trivial, so why has
                       -- it not been substituted away?

               -- Variables the join point must be abstracted over:
               -- the live pattern binders, plus the case binder if live
               used_bndrs' | isDeadBinder case_bndr = filter abstract_over bndrs'
                           | otherwise              = bndrs' ++ [case_bndr_w_unf]

               abstract_over bndr
                 | isTyVar bndr = True  -- Abstract over all type variables just in case
                 | otherwise    = not (isDeadBinder bndr)
                       -- The deadness info on the new Ids is preserved by simplBinders

         ; (final_bndrs', final_args)  -- Note [Join point abstraction]
             <- if (any isId used_bndrs')
                then return (used_bndrs', varsToCoreExprs used_bndrs')
                else do { rw_id <- newId (fsLit "w") voidPrimTy
                        ; return ([setOneShotLambda rw_id], [Var voidPrimId]) }

         ; join_bndr <- newId (fsLit "$j") (mkLamTypes final_bndrs' rhs_ty')
                -- Note [Funky mkLamTypes]

         ; let -- We make the lambdas into one-shot-lambdas.  The
               -- join point is sure to be applied at most once, and doing so
               -- prevents the body of the join point being floated out by
               -- the full laziness pass
               really_final_bndrs     = map one_shot final_bndrs'
               one_shot v | isId v    = setOneShotLambda v
                          | otherwise = v
               join_rhs   = mkLams really_final_bndrs rhs'
               join_arity = exprArity join_rhs
               join_call  = mkApps (Var join_bndr) final_args

         ; env' <- addPolyBind NotTopLevel env (NonRec (join_bndr `setIdArity` join_arity) join_rhs)
         ; return (env', (con, bndrs', join_call)) }
        -- See Note [Duplicated env]
{-
Note [Fusing case continuations]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
It's important to fuse two successive case continuations when the
first has one alternative. That's why we call prepareCaseCont here.
Consider this, which arises from thunk splitting (see Note [Thunk
splitting] in WorkWrap):
let
x* = case (case v of {pn -> rn}) of
I# a -> I# a
in body
The simplifier will find
(Var v) with continuation
Select (pn -> rn) (
Select [I# a -> I# a] (
StrictBind body Stop
So we'll call mkDupableCont on
Select [I# a -> I# a] (StrictBind body Stop)
There is just one alternative in the first Select, so we want to
simplify the rhs (I# a) with continuation (StrictBind body Stop)
Supposing that body is big, we end up with
let $j a = <let x = I# a in body>
in case v of { pn -> case rn of
I# a -> $j a }
This is just what we want because the rn produces a box that
the case rn cancels with.
See Trac #4957 for a fuller example.
Note [Case binders and join points]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider this
case (case .. ) of c {
I# c# -> ....c....
If we make a join point with c but not c# we get
$j = \c -> ....c....
But if later inlining scrutinises the c, thus
$j = \c -> ... case c of { I# y -> ... } ...
we won't see that 'c' has already been scrutinised. This actually
happens in the 'tabulate' function in wave4main, and makes a significant
difference to allocation.
An alternative plan is this:
$j = \c# -> let c = I# c# in ...c....
but that is bad if 'c' is *not* later scrutinised.
So instead we do both: we pass 'c' and 'c#' , and record in c's inlining
(a stable unfolding) that it's really I# c#, thus
$j = \c# -> \c[=I# c#] -> ...c....
Absence analysis may later discard 'c'.
NB: take great care when doing strictness analysis;
see Note [Lamba-bound unfoldings] in DmdAnal.
Also note that we can still end up passing stuff that isn't used. Before
strictness analysis we have
let $j x y c{=(x,y)} = (h c, ...)
in ...
After strictness analysis we see that h is strict, we end up with
let $j x y c{=(x,y)} = ($wh x y, ...)
and c is unused.
Note [Duplicated env]
~~~~~~~~~~~~~~~~~~~~~
Some of the alternatives are simplified, but have not been turned into a join point
So they *must* have a zapped subst-env.  So we can't use completeNonRecX to
bind the join point, because it might do PostInlineUnconditionally, and
we'd lose that when zapping the subst-env. We could have a per-alt subst-env,
but zapping it (as we do in mkDupableCont, the Select case) is safe, and
at worst delays the join-point inlining.
Note [Small alternative rhs]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
It is worth checking for a small RHS because otherwise we
get extra let bindings that may cause an extra iteration of the simplifier to
inline back in place. Quite often the rhs is just a variable or constructor.
The Ord instance of Maybe in PrelMaybe.hs, for example, took several extra
iterations because the version with the let bindings looked big, and so wasn't
inlined, but after the join points had been inlined it looked smaller, and so
was inlined.
NB: we have to check the size of rhs', not rhs.
Duplicating a small InAlt might invalidate occurrence information
However, if it *is* dupable, we return the *un* simplified alternative,
because otherwise we'd need to pair it up with an empty subst-env....
but we only have one env shared between all the alts.
(Remember we must zap the subst-env before re-simplifying something).
Rather than do this we simply agree to re-simplify the original (small) thing later.
Note [Funky mkLamTypes]
~~~~~~~~~~~~~~~~~~~~~~
Notice the funky mkLamTypes. If the constructor has existentials
it's possible that the join point will be abstracted over
type variables as well as term variables.
Example: Suppose we have
data T = forall t. C [t]
Then faced with
case (case e of ...) of
C t xs::[t] -> rhs
We get the join point
let j :: forall t. [t] -> ...
j = /\t \xs::[t] -> rhs
in
case (case e of ...) of
C t xs::[t] -> j t xs
Note [Join point abstraction]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Join points always have at least one value argument,
for several reasons
* If we try to lift a primitive-typed something out
for let-binding-purposes, we will *caseify* it (!),
with potentially-disastrous strictness results. So
instead we turn it into a function: \v -> e
where v::Void#. The value passed to this function is void,
which generates (almost) no code.
* CPR. We used to say "&& isUnliftedType rhs_ty'" here, but now
we make the join point into a function whenever used_bndrs'
is empty. This makes the join-point more CPR friendly.
Consider: let j = if .. then I# 3 else I# 4
in case .. of { A -> j; B -> j; C -> ... }
Now CPR doesn't w/w j because it's a thunk, so
that means that the enclosing function can't w/w either,
which is a lose. Here's the example that happened in practice:
kgmod :: Int -> Int -> Int
kgmod x y = if x > 0 && y < 0 || x < 0 && y > 0
then 78
else 5
* Let-no-escape. We want a join point to turn into a let-no-escape
so that it is implemented as a jump, and one of the conditions
for LNE is that it's not updatable. In CoreToStg, see
Note [What is a non-escaping let]
* Floating. Since a join point will be entered once, no sharing is
gained by floating out, but something might be lost by doing
so because it might be allocated.
I have seen a case alternative like this:
True -> \v -> ...
It's a bit silly to add the realWorld dummy arg in this case, making
$j = \s v -> ...
True -> $j s
(the \v alone is enough to make CPR happy) but I think it's rare
There's a slight infelicity here: we pass the overall
case_bndr to all the join points if it's used in *any* RHS,
because we don't know its usage in each RHS separately
Note [Duplicating StrictArg]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
The original plan had (where E is a big argument)
e.g. f E [..hole..]
==> let $j = \a -> f E a
in $j [..hole..]
But this is terrible! Here's an example:
&& E (case x of { T -> F; F -> T })
Now, && is strict so we end up simplifying the case with
an ArgOf continuation. If we let-bind it, we get
let $j = \v -> && E v
in simplExpr (case x of { T -> F; F -> T })
(ArgOf (\r -> $j r)
And after simplifying more we get
let $j = \v -> && E v
in case x of { T -> $j F; F -> $j T }
Which is a Very Bad Thing
What we do now is this
f E [..hole..]
==> let a = E
in f a [..hole..]
Now if the thing in the hole is a case expression (which is when
we'll call mkDupableCont), we'll push the function call into the
branches, which is what we want. Now RULES for f may fire, and
call-pattern specialisation. Here's an example from Trac #3116
go (n+1) (case l of
1 -> bs'
_ -> Chunk p fpc (o+1) (l-1) bs')
If we can push the call for 'go' inside the case, we get
call-pattern specialisation for 'go', which is *crucial* for
this program.
Here is the (&&) example:
&& E (case x of { T -> F; F -> T })
==> let a = E in
case x of { T -> && a F; F -> && a T }
Much better!
Notice that
* Arguments to f *after* the strict one are handled by
the ApplyToVal case of mkDupableCont. Eg
f [..hole..] E
* We can only do the let-binding of E because the function
part of a StrictArg continuation is an explicit syntax
tree. In earlier versions we represented it as a function
(CoreExpr -> CoreExpr) which we couldn't take apart.
Do *not* duplicate StrictBind and StrictArg continuations. We gain
nothing by propagating them into the expressions, and we do lose a
lot.
The desire not to duplicate is the entire reason that
mkDupableCont returns a pair of continuations.
Note [Duplicating StrictBind]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Unlike StrictArg, there doesn't seem anything to gain from
duplicating a StrictBind continuation, so we don't.
Note [Single-alternative cases]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
This case is just like the ArgOf case. Here's an example:
data T a = MkT !a
...(MkT (abs x))...
Then we get
case (case x of I# x' ->
case x' <# 0# of
True -> I# (negate# x')
False -> I# x') of y {
DEFAULT -> MkT y
Because the (case x) has only one alternative, we'll transform to
case x of I# x' ->
case (case x' <# 0# of
True -> I# (negate# x')
False -> I# x') of y {
DEFAULT -> MkT y
But now we do *NOT* want to make a join point etc, giving
case x of I# x' ->
let $j = \y -> MkT y
in case x' <# 0# of
True -> $j (I# (negate# x'))
False -> $j (I# x')
In this case the $j will inline again, but suppose there was a big
strict computation enclosing the original call to MkT. Then, it won't
"see" the MkT any more, because it's big and won't get duplicated.
And, what is worse, nothing was gained by the case-of-case transform.
So, in circumstances like these, we don't want to build join points
and push the outer case into the branches of the inner one. Instead,
don't duplicate the continuation.
When should we use this strategy? We should not use it on *every*
single-alternative case:
e.g. case (case ....) of (a,b) -> (# a,b #)
Here we must push the outer case into the inner one!
Other choices:
* Match [(DEFAULT,_,_)], but in the common case of Int,
the alternative-filling-in code turned the outer case into
case (...) of y { I# _ -> MkT y }
* Match on single alternative plus (not (isDeadBinder case_bndr))
Rationale: pushing the case inwards won't eliminate the construction.
But there's a risk of
case (...) of y { (a,b) -> let z=(a,b) in ... }
Now y looks dead, but it'll come alive again. Still, this
seems like the best option at the moment.
* Match on single alternative plus (all (isDeadBinder bndrs))
Rationale: this is essentially seq.
* Match when the rhs is *not* duplicable, and hence would lead to a
join point. This catches the disaster-case above. We can test
the *un-simplified* rhs, which is fine. It might get bigger or
smaller after simplification; if it gets smaller, this case might
fire next time round. NB also that we must test contIsDupable
case_cont *too*, because case_cont might be big!
HOWEVER: I found that this version doesn't work well, because
we can get let x = case (...) of { small } in ...case x...
When x is inlined into its full context, we find that it was a bad
idea to have pushed the outer case inside the (...) case.
There is a cost to not doing case-of-case; see Trac #10626.
Note [Single-alternative-unlifted]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Here's another single-alternative where we really want to do case-of-case:
data Mk1 = Mk1 Int# | Mk2 Int#
M1.f =
\r [x_s74 y_s6X]
case
case y_s6X of tpl_s7m {
M1.Mk1 ipv_s70 -> ipv_s70;
M1.Mk2 ipv_s72 -> ipv_s72;
}
of
wild_s7c
{ __DEFAULT ->
case
case x_s74 of tpl_s7n {
M1.Mk1 ipv_s77 -> ipv_s77;
M1.Mk2 ipv_s79 -> ipv_s79;
}
of
wild1_s7b
{ __DEFAULT -> ==# [wild1_s7b wild_s7c];
};
};
So the outer case is doing *nothing at all*, other than serving as a
join-point. In this case we really want to do case-of-case and decide
whether to use a real join point or just duplicate the continuation:
let $j s7c = case x of
Mk1 ipv77 -> (==) s7c ipv77
Mk2 ipv79 -> (==) s7c ipv79
in
case y of
Mk1 ipv70 -> $j ipv70
Mk2 ipv72 -> $j ipv72
Hence: check whether the case binder's type is unlifted, because then
the outer case is *not* a seq.
************************************************************************
* *
Unfoldings
* *
************************************************************************
-}
-- | Choose the unfolding for a let-bound identifier whose RHS has just
-- been simplified.  A stable unfolding (e.g. from an INLINE\/INLINABLE
-- pragma) is kept and re-simplified by 'simplUnfolding'; otherwise a
-- fresh unfolding is built from the new right-hand side.
simplLetUnfolding :: SimplEnv-> TopLevelFlag
                  -> InId
                  -> OutExpr
                  -> Unfolding -> SimplM Unfolding
simplLetUnfolding env top_lvl id new_rhs unf
  | isStableUnfolding unf
  = simplUnfolding env top_lvl id unf
  | otherwise
  = bottoming `seq` -- See Note [Force bottoming field]
    do { dflags <- getDynFlags
       ; return (mkUnfolding dflags InlineRhs (isTopLevel top_lvl) bottoming new_rhs) }
        -- We make an unfolding *even for loop-breakers*.
        -- Reason: (a) It might be useful to know that they are WHNF
        --         (b) In TidyPgm we currently assume that, if we want to
        --             expose the unfolding then indeed we *have* an unfolding
        --             to expose.  (We could instead use the RHS, but currently
        --             we don't.)  The simple thing is always to have one.
  where
    -- forced with seq above so the unfolding doesn't retain the old Id
    bottoming = isBottomingId id
-- | Simplify an existing unfolding.  Stable sources keep their template
-- (re-simplified in a rules-mode environment); unstable 'CoreUnfolding's
-- are discarded, to be rebuilt from the new RHS by 'simplLetUnfolding'.
simplUnfolding :: SimplEnv-> TopLevelFlag -> InId -> Unfolding -> SimplM Unfolding
-- Note [Setting the new unfolding]
simplUnfolding env top_lvl id unf
  = case unf of
      NoUnfolding -> return unf
      OtherCon {} -> return unf

      DFunUnfolding { df_bndrs = bndrs, df_con = con, df_args = args }
        -> do { (env', bndrs') <- simplBinders rule_env bndrs
              ; args' <- mapM (simplExpr env') args
              ; return (mkDFunUnfolding bndrs' con args') }

      CoreUnfolding { uf_tmpl = expr, uf_src = src, uf_guidance = guide }
        | isStableSource src
        -> do { expr' <- simplExpr rule_env expr
              ; case guide of
                  UnfWhen { ug_arity = arity, ug_unsat_ok = sat_ok } -- Happens for INLINE things
                    -> let guide' = UnfWhen { ug_arity = arity, ug_unsat_ok = sat_ok
                                            , ug_boring_ok = inlineBoringOk expr' }
                           -- Refresh the boring-ok flag, in case expr'
                           -- has got small. This happens, notably in the inlinings
                           -- for dfuns for single-method classes; see
                           -- Note [Single-method classes] in TcInstDcls.
                           -- A test case is Trac #4138
                       in return (mkCoreUnfolding src is_top_lvl expr' guide')
                       -- See Note [Top-level flag on inline rules] in CoreUnfold

                  _other -- Happens for INLINABLE things
                    -> bottoming `seq` -- See Note [Force bottoming field]
                       do { dflags <- getDynFlags
                          ; return (mkUnfolding dflags src is_top_lvl bottoming expr') } }
                -- If the guidance is UnfIfGoodArgs, this is an INLINABLE
                -- unfolding, and we need to make sure the guidance is kept up
                -- to date with respect to any changes in the unfolding.

        | otherwise -> return noUnfolding -- Discard unstable unfoldings
  where
    bottoming  = isBottomingId id
    is_top_lvl = isTopLevel top_lvl
    act        = idInlineActivation id
    rule_env   = updMode (updModeForStableUnfoldings act) env
                 -- See Note [Simplifying inside stable unfoldings] in SimplUtils
{-
Note [Force bottoming field]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
We need to force bottoming, or the new unfolding holds
on to the old unfolding (which is part of the id).
Note [Setting the new unfolding]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
* If there's an INLINE pragma, we simplify the RHS gently. Maybe we
should do nothing at all, but simplifying gently might get rid of
more crap.
* If not, we make an unfolding from the new RHS. But *only* for
non-loop-breakers. Making loop breakers not have an unfolding at all
means that we can avoid tests in exprIsConApp, for example. This is
important: if exprIsConApp says 'yes' for a recursive thing, then we
can get into an infinite loop
If there's a stable unfolding on a loop breaker (which happens for
INLINEABLE), we hang on to the inlining. It's pretty dodgy, but the
user did say 'INLINE'. May need to revisit this choice.
************************************************************************
* *
Rules
* *
************************************************************************
Note [Rules in a letrec]
~~~~~~~~~~~~~~~~~~~~~~~~
After creating fresh binders for the binders of a letrec, we
substitute the RULES and add them back onto the binders; this is done
*before* processing any of the RHSs. This is important. Manuel found
cases where he really, really wanted a RULE for a recursive function
to apply in that function's own right-hand side.
See Note [Loop breaking and RULES] in OccAnal.
-}
-- | Transfer any RULES from the old binder onto the freshly-made binder,
-- simplifying them along the way; see Note [Rules in a letrec].
-- When the old binder carries no rules, the environment and binder are
-- returned unchanged.
addBndrRules :: SimplEnv -> InBndr -> OutBndr -> SimplM (SimplEnv, OutBndr)
addBndrRules env in_id out_id =
  case ruleInfoRules (idSpecialisation in_id) of
    [] -> return (env, out_id)
    old_rules -> do
      new_rules <- simplRules env (Just (idName out_id)) old_rules
      let out_id' = out_id `setIdSpecialisation` mkRuleInfo new_rules
      return (modifyInScope env out_id', out_id')
-- | Simplify a set of RULES.  Built-in rules pass through untouched; for
-- source rules we freshen the rule binders, simplify the LHS arguments and
-- the RHS in a rules-mode environment, and optionally retarget the rule at
-- a new function name.
simplRules :: SimplEnv -> Maybe Name -> [CoreRule] -> SimplM [CoreRule]
simplRules env mb_new_nm rules = mapM do_one rules
  where
    do_one rl@(BuiltinRule {}) = return rl
    do_one rl@(Rule { ru_bndrs = bndrs, ru_args = args
                    , ru_fn = fn_name, ru_rhs = rhs }) = do
      (env1, bndrs1) <- simplBinders env bndrs
      let renv = updMode updModeForRules env1
      args1 <- mapM (simplExpr renv) args
      rhs1 <- simplExpr renv rhs
      return (rl { ru_bndrs = bndrs1
                 , ru_fn = mb_new_nm `orElse` fn_name
                 , ru_args = args1
                 , ru_rhs = rhs1 })
module Bot where
import Args
import Control.Applicative.Trans.Either
import Control.Concurrent.WriteSem
import Control.Concurrent
import Control.Concurrent.Async
import Control.Exception (catchJust)
import Control.Monad
import Control.Monad.IO.Class
import Control.Monad.Trans.Class
import Control.Monad.Trans.Reader
import Data.Binary
import Data.Char
import Data.Classifier.NaiveBayes (NaiveBayes)
import Data.Coerce
import Data.Default.Class
import Data.Function (fix)
import Data.Maybe
import Data.Monoid ((<>))
import Data.Text (Text)
import Data.Time.Clock
import Data.Time.Format
import Data.Yaml
import Reddit hiding (failWith, bans)
import Reddit.Types.Comment (PostComments(..), CommentReference(..))
import Reddit.Types.Listing
import Reddit.Types.Subreddit (SubredditName(..))
import Reddit.Types.User (Username(..))
import System.Exit
import System.IO
import System.IO.Error
import qualified Data.Bounded.Set as Bounded
import qualified Data.Classifier.NaiveBayes as NB
import qualified Data.Counter as Counter
import qualified Data.Map as Map
import qualified Data.Text as Text
import qualified Data.Text.IO as Text
import qualified Reddit.Types.Comment as Comment
import qualified Reddit.Types.Post as Post
-- | Fully-resolved per-subreddit bot configuration, produced from the
-- partially-specified 'Settings' by 'confirm'.
data ConcreteSettings =
  ConcreteSettings { username :: Username -- ^ account the bot logs in as
                   , password :: Password -- ^ account password
                   , subreddit :: SubredditName -- ^ subreddit the bot watches
                   , replyText :: ReplyText -- ^ body of the reply the bot posts
                   , refreshTime :: RefreshTime -- ^ polling interval in seconds (see 'seconds')
                   , bans :: [Username] -- ^ authors the bot never replies to
                   , classifier :: Maybe (NaiveBayes Bool Text) -- ^ optional self-post classifier; enables 'postsLoop'
                   , useClassifier :: Bool -- ^ actually reply on classifier matches (otherwise only log them)
                   , verboseOutput :: Bool } -- ^ verbose-logging flag (not consulted in this module — confirm)
  deriving (Show)
-- | Entry point: parse the YAML config named on the command line, resolve
-- per-subreddit settings, and run one bot per subreddit concurrently.
-- Exits with failure if the config cannot be decoded or if any subreddit's
-- settings are invalid (after printing every collected error).
main :: IO ()
main = do
  -- unbuffered handles so interleaved log lines appear promptly
  hSetBuffering stdout NoBuffering
  hSetBuffering stderr NoBuffering
  ConfigFile fp <- optionsFromArgs
  decodeFileEither fp >>= \case
    Left err -> do
      print err
      exitFailure
    Right (Config b m) -> do
      resolvedSettings <- mapM confirm $ resolve b m
      let (lefts, rights) = Map.mapEither id resolvedSettings
      if Map.null lefts
        then do
          -- one shared write semaphore keeps concurrent log output serialized
          sem <- newWriteSemIO
          void $ mapConcurrently (\(k, s) -> run sem (s k)) $ Map.toList rights
        else do
          Map.foldrWithKey handleError (return ()) lefts
          exitFailure
-- | Print the collected settings errors for one subreddit, one bullet per
-- line, after running the previously-accumulated report action (this is
-- folded over the error map in 'main').
handleError :: SubredditName -> [Text] -> IO () -> IO ()
handleError (R r) errs m = m >> do
  Text.putStrLn $ pluralize errs "Error" <> " with settings for subreddit /r/" <> r <> ":"
  forM_ errs $ \err ->
    -- putStrLn, not putStr: the messages carry no terminator, so without
    -- the newline successive errors would run together on a single line
    Text.putStrLn $ " - " <> err
-- | Naive pluralisation: leave the word alone for a one-element list,
-- otherwise append an \"s\" (this includes the empty list).
pluralize :: [a] -> Text -> Text
pluralize xs word =
  case xs of
    [_] -> word
    _ -> word <> Text.pack "s"
-- | Validate one subreddit's raw 'Settings', collecting every failure.
-- Runs in 'EitherA' so that all errors (missing username, unreadable reply
-- file, ...) are reported together rather than one at a time.  On success
-- it returns a constructor still awaiting the subreddit name.
confirm :: Settings -> IO (Either [Text] (SubredditName -> ConcreteSettings))
confirm (Settings u p r b t c x v) =
  runEitherA $
    subredditLastSettings
      <$> justOr ["Missing username"] u
      <*> justOr ["Missing password"] p
      <*> loadReply r
      <*> pure (fromMaybe 5 t) -- refresh time defaults to 5 (seconds)
      <*> loadBans b
      <*> sequenceA (fmap loadClassifier c) -- only load when configured
      <*> pure x
      <*> pure (fromMaybe False v) -- verbose output defaults to off
-- | Build 'ConcreteSettings' with the subreddit name supplied last, so the
-- applicative pipeline in 'confirm' can fill in everything else first.
subredditLastSettings :: Username -> Password -> ReplyText -> RefreshTime -> [Username] -> Maybe (NaiveBayes Bool Text) -> Bool -> Bool -> SubredditName -> ConcreteSettings
subredditLastSettings u p r t b n x v s =
  ConcreteSettings
    { username = u
    , password = p
    , subreddit = s
    , replyText = r
    , refreshTime = t
    , bans = b
    , classifier = n
    , useClassifier = x
    , verboseOutput = v }
-- | Resolve every ban specification to a concrete list of usernames.
-- Literal lists are used as-is; file references are decoded as YAML lists
-- of names.  All sources are checked (and their errors collected) before
-- the results are concatenated.
loadBans :: [Bans] -> EitherA [Text] IO [Username]
loadBans = fmap concat . traverse f -- traverse, rather than sequenceA . map
  where
    f (BansList us) = pure us
    f (BansFilePath fp) = EitherA $
      decodeFileEither fp >>= \case
        Left err -> return $ Left [Text.pack $ show err]
        Right xs -> return $ Right $ map Username xs
-- | Produce the reply text from the configured source: either the literal
-- string from the config or the contents of a file.  A missing reply
-- configuration is an error.
loadReply :: Maybe Reply -> EitherA [Text] IO ReplyText
loadReply Nothing = failWith ["Missing reply"]
loadReply (Just (ReplyLiteral lit)) = pure lit
loadReply (Just (ReplyFilePath fp)) = readReplyFile fp
-- | Read the reply text from a file, mapping the two anticipated I/O
-- failures to human-readable messages.  Any other exception propagates.
readReplyFile :: FilePath -> EitherA [Text] IO ReplyText
readReplyFile fp =
  EitherA $ catchJust describe (Right <$> Text.readFile fp) (return . Left . return)
  where
    describe e
      | isDoesNotExistError e = Just "Reply file does not exist"
      | isPermissionError e = Just "Incorrect permissions for reply file"
      | otherwise = Nothing
-- | Load a serialised naive-Bayes classifier from disk; any decoding
-- failure is reported as a single opaque error message.
loadClassifier :: FilePath -> EitherA [Text] IO (NaiveBayes Bool Text)
loadClassifier fp = EitherA $ interpret <$> decodeFileOrFail fp
  where
    interpret (Left _) = Left ["Classifier could not be read"]
    interpret (Right cls) = Right cls
-- | Run the bot for one subreddit: always poll comments, and additionally
-- poll new posts when a classifier is configured.  The two loops run
-- concurrently; 'withAsync' ensures the post loop is cancelled if the
-- comment loop dies, and exceptions propagate through 'waitBoth'/'wait'.
run :: WriteSem -> ConcreteSettings -> IO ()
run sem settings =
  withAsync (loopWith (forever $ commentsLoop sem) sem settings) $ \c ->
    case classifier settings of
      Just _ ->
        withAsync (loopWith (forever $ postsLoop sem) sem settings) $ \p ->
          void $ waitBoth c p
      Nothing -> wait c
-- | Run a Reddit action with this bot's credentials, retrying on failure.
-- Bad credentials abort with a message.  Other errors are printed and the
-- action restarts: from a resume point when the API provides one,
-- otherwise from scratch after backing off five polling intervals.
loopWith :: RedditT (ReaderT ConcreteSettings IO) () -> WriteSem -> ConcreteSettings -> IO ()
loopWith act sem settings = do
  res <- flip runReaderT settings $
    runResumeRedditWith def { customUserAgent = Just "intolerable-bot v0.1.0.0"
                            , loginMethod = Credentials (coerce (username settings)) (password settings)
                            , rateLimitingEnabled = False } act
  case res of
    Left (APIError CredentialsError, _) ->
      -- no point retrying: the login will never succeed
      withWriteSem sem $
        Text.putStrLn $ "Username / password details incorrect for /r/" <> coerce (subreddit settings)
    Left (err, Nothing) -> do
      liftIO $ print err
      (5 * refreshTime settings) `seconds` threadDelay
      loopWith act sem settings
    Left (err, Just resume) -> do
      liftIO $ print err
      loopWith resume sem settings
    Right () -> return ()
-- | Poll for new posts and run the classifier over self-post bodies.
-- A bounded set of the last 500 post IDs skips already-seen posts, and the
-- bot's own posts are ignored.  On a positive classification the bot
-- replies — unless it already has, or 'useClassifier' is off, in which
-- case the match is only logged.  Only started when a classifier is
-- configured (see 'run'), which is what makes the 'fromJust' safe.
postsLoop :: WriteSem -> RedditT (ReaderT ConcreteSettings IO) ()
postsLoop sem = do
  u <- lift $ asks username
  r <- lift $ asks subreddit
  t <- lift $ asks refreshTime
  rt <- lift $ asks replyText
  cls <- lift $ asks (fromJust . classifier) -- safe: see 'run'
  use <- lift $ asks useClassifier
  withInitial (Bounded.empty 500) $ \loop set -> do
    Listing _ _ ps <- getPosts' (Options Nothing (Just 100)) New (Just r)
    writeLogEntry sem r "got listing"
    let news = filter (\x -> not $ Bounded.member (Post.postID x) set) ps
    forM_ news $ \p ->
      unless (Post.author p == u) $
        case Post.content p of
          Post.SelfPost m _ -> do
            -- tokenise the body and feed it to the classifier
            let c = Counter.fromList $ process m
            case NB.test cls c of
              Just True ->
                if use
                  then do
                    PostComments _ cs <- getPostComments $ Post.postID p
                    actuals <- resolveComments (Post.postID p) cs
                    -- don't reply twice to the same post
                    unless (any ((== u) . Comment.author) actuals) $ do
                      botReply <- reply p rt
                      writeLogEntry sem r $ mconcat
                        [ "Auto-responded to "
                        , coerce $ Post.postID p
                        , " ("
                        , coerce botReply
                        , ")" ]
                  else
                    writeLogEntry sem r $ mconcat
                      [ "Possible AI match @ "
                      , coerce $ Post.postID p ]
              _ -> return ()
          _ -> return () -- link posts have no body to classify
    unless (null news) $ writeLogEntry sem r "got listing"
    t `seconds` threadDelay
    loop $ Bounded.insertAll (Post.postID <$> news) set
-- | Poll the subreddit's newest comments and hand any unseen ones to
-- 'commentResponder'.  A bounded set of the last 500 comment IDs prevents
-- handling the same comment twice across iterations.
commentsLoop :: WriteSem -> RedditT (ReaderT ConcreteSettings IO) ()
commentsLoop sem = do
  r <- lift $ asks subreddit
  t <- lift $ asks refreshTime
  withInitial (Bounded.empty 500) $ \loop set -> do
    Listing _ _ cs <- getNewComments' (Options Nothing (Just 100)) (Just r)
    let news = filter (\x -> not $ Bounded.member (Comment.commentID x) set) cs
    mapM_ (commentResponder sem) news
    unless (null news) $ writeLogEntry sem r "dealt with new comments"
    t `seconds` threadDelay
    loop $ Bounded.insertAll (Comment.commentID <$> news) set
-- | Decide whether and how to reply to a single comment.  The comment must
-- mention the bot ('shouldRespond'), its author must not be banned, and no
-- sibling reply from the bot may already exist.  Replies go to the parent
-- comment when there is one, otherwise (on self posts only) to the post
-- itself.
commentResponder :: WriteSem -> Comment -> RedditT (ReaderT ConcreteSettings IO) ()
commentResponder sem c = do
  u <- lift $ asks username
  r <- lift $ asks subreddit
  rt <- lift $ asks replyText
  bs <- lift $ asks bans
  when (shouldRespond u (Comment.body c)) $
    unless (Comment.author c `elem` bs) $ do
      writeLogEntry sem r "found a comment"
      (selfpost, sibs) <- getSiblingComments c
      -- only respond if none of the sibling replies is already ours
      unless (any ((== u) . Comment.author) sibs) $ do
        writeLogEntry sem r $ "found a comment we didn't already respond to: " <> coerce (Comment.commentID c)
        case Comment.inReplyTo c of
          Just parentComment -> reply parentComment rt >>= logReply r
          Nothing ->
            when selfpost $
              reply (Comment.parentLink c) rt >>= logReply r
  where
    -- log the ID of the reply we just posted
    logReply r botReply = writeLogEntry sem r $ mconcat
      [ "Responded to "
      , coerce (Comment.commentID c)
      , " by "
      , coerce (Comment.author c)
      , " ("
      , coerce botReply
      , ")" ]
-- | Fetch the comments that share a parent with the given comment, plus a
-- flag that is 'True' when the parent post is a self post or (for link
-- posts) when the comment was itself a reply.  Used by 'commentResponder'
-- to check whether the bot has already replied nearby.
getSiblingComments :: MonadIO m => Comment -> RedditT m (Bool, [Comment])
getSiblingComments c = do
  let parent = Comment.parentLink c
  PostComments p cs <-
    case Comment.inReplyTo c of
      Just parentComment ->
        getPostSubComments parent parentComment >>= \case
          PostComments p (com:_) -> do
            -- drill down to the replies of the parent comment itself
            Listing _ _ cs <- mconcat <$> map Comment.replies <$> resolveComments parent [com]
            return $ PostComments p cs
          x -> return x
      Nothing -> getPostComments parent
  case Post.content p of
    Post.SelfPost _ _ -> (,) True <$> resolveComments parent cs
    _ -> (,) (isJust (Comment.inReplyTo c)) <$> resolveComments parent cs
-- | Expand comment references into actual comments, recursively fetching
-- \"load more\" stubs via 'getMoreChildren'.
resolveComments :: MonadIO m => PostID -> [CommentReference] -> RedditT m [Comment]
resolveComments p = fmap concat . mapM expand
  where
    expand (Actual c) = return [c]
    expand (Reference _ children) = do
      more <- getMoreChildren p children
      resolveComments p more
-- | Case-insensitive check for a @u/username@ mention in a comment body.
shouldRespond :: Username -> Text -> Bool
shouldRespond (Username u) body =
  Text.isInfixOf (Text.toCaseFold ("u/" <> u)) (Text.toCaseFold body)
-- | 'fix' with its arguments flipped: start an explicitly-recursive loop
-- from an initial state, written as @withInitial s0 $ \\loop s -> ...@.
withInitial :: a -> ((a -> b) -> a -> b) -> b
withInitial s0 f = fix f s0
-- | Run an action that expects microseconds, passing it @n@ seconds
-- (used infix, e.g. @t \`seconds\` threadDelay@).
seconds :: MonadIO m => Int -> (Int -> IO ()) -> m ()
seconds n f = liftIO (f (n * 1000000))
-- | Emit one timestamped log line of the form
-- @\<ISO-8601 time\> /r/\<subreddit\>: \<message\>@.  The write semaphore
-- serialises output from the concurrently running bot loops.
writeLogEntry :: MonadIO m => WriteSem -> SubredditName -> Text -> m ()
writeLogEntry sem (R subName) msg = do
  now <- liftIO getCurrentTime
  withWriteSem sem $
    mapM_ Text.putStr
      [ makeTime now, " ", "/r/", subName, ": ", msg, "\n" ]
-- | Render a timestamp as ISO-8601 with seconds, e.g. @2016-01-01T12:00:00@.
makeTime :: UTCTime -> Text
makeTime =
  Text.pack . formatTime defaultTimeLocale (iso8601DateFormat (Just "%H:%M:%S"))
-- | Tokenise a post body for the classifier: strip dashes, split on
-- spaces and full stops, keep only alphabetic characters, lower-case the
-- result, and drop any tokens that end up empty.
process :: Text -> [Text]
process body =
  [ cleaned
  | chunk <- Text.splitOn (Text.pack " ") (Text.filter ('-' /=) body)
  , piece <- Text.splitOn (Text.pack ".") chunk
  , let cleaned = Text.map toLower (Text.filter isAlpha piece)
  , not (Text.null cleaned) ]
| intolerable/intolerable-bot | src/Bot.hs | bsd-3-clause | 10,673 | 0 | 33 | 2,908 | 3,751 | 1,873 | 1,878 | -1 | -1 |
{-# LANGUAGE RecordWildCards #-}
module Task where
import Data.Functor
import Data.List (foldl')
import Simulation.Aivika
import Text.Printf
-- | One processing subsystem: a service-time distribution (kept with its
-- human-readable name for display) plus a finite buffer capacity.
data System = System {
    processingDistribution :: (String, Parameter Double) -- ^ (label, sampler) for service times
  , bufferCapacity :: Int -- ^ buffer capacity; requests beyond it are presumably rejected (cf. 'failChances') — confirm
  }
-- | Simulation input: arrival distribution, the subsystems under study,
-- the total simulated time, and the precision used when printing results.
data Input = Input {
    generationDistribution :: (String, Parameter Double) -- ^ (label, sampler) for request generation
  , inputSystems :: [System] -- ^ the subsystems requests are fed to
  , simulationTime :: Double -- ^ length of the simulation
  , outputPrecision :: Int -- ^ decimal places in printed results (see 'Show' 'Output')
  }
-- | Rendered as @\<distribution name\>-\<buffer capacity\>@.
instance Show System where
  show sys = fst (processingDistribution sys) ++ "-" ++ show (bufferCapacity sys)
-- | Rendered as @\<generation distribution name\>-\<list of systems\>@.
instance Show Input where
  show inp = fst (generationDistribution inp) ++ "-" ++ show (inputSystems inp)
-- | Aggregated simulation results; each list holds one value per subsystem,
-- in the order the partial results were combined (see 'combineOutput').
data Output = Output {
    failChances :: [Double], -- ^ probability that the system rejects a request
    queueSizes :: [Double], -- ^ average buffer size
    systemLoads :: [Double], -- ^ system utilisation
    requestsCounts :: [Double], -- ^ average number of requests in the system
    awaitingTimes :: [Double], -- ^ average waiting time in the buffer
    totalTimes :: [Double], -- ^ total time a request spends in the system
    usedInput :: Input -- ^ the input data these results were produced from
  }
-- | An 'Output' with no per-system results yet, remembering its input.
emptyOutput :: Input -> Output
emptyOutput = Output [] [] [] [] [] []
-- | Results for a single subsystem, later folded into 'Output'.
data PartialOutput = PartialOutput {
    failChance :: Double, -- ^ probability that the system rejects a request
    queueSize :: Double, -- ^ average buffer size
    systemLoad :: Double, -- ^ system utilisation
    requestsCount :: Double, -- ^ average number of requests in the system
    awaitingTime :: Double, -- ^ average waiting time in the buffer
    totalTime :: Double -- ^ total time a request spends in the system
  }
-- | Append one subsystem's results to the end of every list in 'Output'.
combineOutput :: Output -> PartialOutput -> Output
combineOutput acc part = acc
  { failChances = failChances acc `snoc` failChance part
  , queueSizes = queueSizes acc `snoc` queueSize part
  , systemLoads = systemLoads acc `snoc` systemLoad part
  , requestsCounts = requestsCounts acc `snoc` requestsCount part
  , awaitingTimes = awaitingTimes acc `snoc` awaitingTime part
  , totalTimes = totalTimes acc `snoc` totalTime part
  }
  where
    snoc xs x = xs ++ [x]
-- | Fold a list of per-system results into the accumulated output.
-- Uses the strict 'foldl'' so that combining many partial results does
-- not build a chain of record-update thunks.
combineOutputs :: Output -> [PartialOutput] -> Output
combineOutputs = foldl' combineOutput
-- | Pretty-print every metric as one labelled line, using the precision
-- requested in the input.  The labels are Russian, matching the original
-- report format, and must stay byte-identical to preserve output.
instance Show Output where
  show Output{..} = unlines [
      unwords ["Вероятность отказа в каждом буфере:", unwords $ printPrec <$> failChances]
    , unwords ["Средний размер буферов:", unwords $ printPrec <$> queueSizes]
    , unwords ["Загрузка подсистем:", unwords $ printPrec <$> systemLoads]
    , unwords ["Среднее число заявок в системах:", unwords $ printPrec <$> requestsCounts]
    , unwords ["Среднее время ожидания в буфере:", unwords $ printPrec <$> awaitingTimes]
    , unwords ["Общее время пребывания заявки в системе:", unwords $ printPrec <$> totalTimes]
    ]
    where
      -- requested number of decimal places, spliced into a printf format
      precision = show (outputPrecision usedInput)
      printPrec :: Double -> String
      printPrec = printf ("%."++precision++"f")
import System.Environment (getArgs)
import Data.List.Split (splitOn)
import Data.Bits (testBit)
-- | Given @[value, posA, posB]@ (bit positions are 1-based), report
-- whether the two bits of the value are equal, as @\"true\"@/@\"false\"@.
-- Inputs of any other length are not handled (same as the original).
compareBits :: [Int] -> String
compareBits [n, a, b] =
  if testBit n (a - 1) == testBit n (b - 1)
    then "true"
    else "false"
-- | Read the input file named on the command line; each line holds
-- @value,posA,posB@ and produces one \"true\"/\"false\" output line.
main :: IO ()
main = do
  [inpFile] <- getArgs
  input <- readFile inpFile
  putStr . unlines . map (compareBits . map read . splitOn ",") $ lines input
{-# LANGUAGE CPP, DeriveDataTypeable, DeriveFunctor #-}
{-# LANGUAGE FlexibleInstances, TypeSynonymInstances #-}
{-# LANGUAGE PatternGuards, ScopedTypeVariables #-}
{-# LANGUAGE RecordWildCards, TemplateHaskell #-}
{- |
Module: Database.PostgreSQL.Simple.FromField
Copyright: (c) 2011 MailRank, Inc.
(c) 2011-2013 Leon P Smith
License: BSD3
Maintainer: Leon P Smith <[email protected]>
Stability: experimental
The 'FromField' typeclass, for converting a single value in a row
returned by a SQL query into a more useful Haskell representation.
Note that each instance of 'FromField' is documented by a list of
compatible postgresql types.
A Haskell numeric type is considered to be compatible with all
PostgreSQL numeric types that are less accurate than it. For instance,
the Haskell 'Double' type is compatible with the PostgreSQL's 32-bit
@int@ type because it can represent a @int@ exactly. On the other hand,
since a 'Double' might lose precision if representing PostgreSQL's 64-bit
@bigint@, the two are /not/ considered compatible.
Note that the 'Float' and 'Double' instances use attoparsec's 'double'
conversion routine, which sacrifices some accuracy for speed. If you
need accuracy, consider first converting data to a 'Scientific' or 'Rational'
type, and then converting to a floating-point type. If you are defining
your own 'Database.PostgreSQL.Simple.FromRow.FromRow' instances, this can be
achieved simply by
@'fromRational' '<$>' 'Database.PostgreSQL.Simple.FromRow.field'@, although
this idiom is additionally compatible with PostgreSQL's @numeric@ type.
If this is unacceptable, you may find
'Database.PostgreSQL.Simple.FromRow.fieldWith' useful.
Also note that while converting to a 'Double' through the 'Scientific' type
is likely somewhat faster than converting through the 'Rational' type,
the 'Scientific' type has no way to represent @NaN@ and @±Infinity@ values.
Thus, if you need precision conversion of regular floating point values
and the possibility of receiving these special values from the backend,
stick with 'Rational'.
Because 'FromField' is a typeclass, one may provide conversions to
additional Haskell types without modifying postgresql-simple. This is
particularly useful for supporting PostgreSQL types that postgresql-simple
does not support out-of-box. Here's an example of what such an instance
might look like for a UUID type that implements the @Read@ class:
@
import Data.UUID ( UUID )
import Database.PostgreSQL.Simple.FromField
( FromField (fromField) , typeOid, returnError, ResultError (..) )
import Database.PostgreSQL.Simple.TypeInfo.Static (typoid, uuid)
import qualified Data.ByteString.Char8 as B
instance FromField UUID where
fromField f mdata =
if typeOid f /= typoid uuid
then returnError Incompatible f \"\"
else case B.unpack \`fmap\` mdata of
Nothing -> returnError UnexpectedNull f \"\"
Just dat ->
case [ x | (x,t) <- reads dat, (\"\",\"\") <- lex t ] of
[x] -> return x
_ -> returnError ConversionFailed f dat
@
Note that because PostgreSQL's @uuid@ type is built into postgres and is
not provided by an extension, the 'typeOid' of @uuid@ does not change and
thus we can examine it directly. One could hard-code the type oid, or
obtain it by other means, but in this case we simply pull it out of the
static table provided by postgresql-simple.
On the other hand if the type is provided by an extension, such as
@PostGIS@ or @hstore@, then the 'typeOid' is not stable and can vary from
database to database. In this case it is recommended that FromField
instances use 'typename' instead.
-}
module Database.PostgreSQL.Simple.FromField
(
FromField(..)
, FieldParser
, Conversion()
, runConversion
, conversionMap
, conversionError
, ResultError(..)
, returnError
, Field
, typename
, TypeInfo(..)
, Attribute(..)
, typeInfo
, typeInfoByOid
, name
, tableOid
, tableColumn
, format
, typeOid
, PQ.Oid(..)
, PQ.Format(..)
, pgArrayFieldParser
, fromJSONField
) where
#include "MachDeps.h"
import Control.Applicative ( (<|>), (<$>), pure, (*>) )
import Control.Concurrent.MVar (MVar, newMVar)
import Control.Exception (Exception)
import qualified Data.Aeson as JSON
import Data.Attoparsec.ByteString.Char8 hiding (Result)
import Data.ByteString (ByteString)
import qualified Data.ByteString.Char8 as B
import Data.Int (Int16, Int32, Int64)
import Data.IORef (IORef, newIORef)
import Data.Ratio (Ratio)
import Data.Time ( UTCTime, ZonedTime, LocalTime, Day, TimeOfDay )
import Data.Typeable (Typeable, typeOf)
import Data.Vector (Vector)
import Data.Vector.Mutable (IOVector)
import qualified Data.Vector as V
import Database.PostgreSQL.Simple.Internal
import Database.PostgreSQL.Simple.Compat
import Database.PostgreSQL.Simple.Ok
import Database.PostgreSQL.Simple.Types
import Database.PostgreSQL.Simple.TypeInfo as TI
import qualified Database.PostgreSQL.Simple.TypeInfo.Static as TI
import Database.PostgreSQL.Simple.TypeInfo.Macro as TI
import Database.PostgreSQL.Simple.Time
import Database.PostgreSQL.Simple.Arrays as Arrays
import qualified Database.PostgreSQL.LibPQ as PQ
import qualified Data.ByteString as SB
import qualified Data.ByteString.Char8 as B8
import qualified Data.ByteString.Lazy as LB
import qualified Data.Text as ST
import qualified Data.Text.Encoding as ST
import qualified Data.Text.Lazy as LT
import Data.CaseInsensitive (CI)
import qualified Data.CaseInsensitive as CI
import Data.UUID (UUID)
import qualified Data.UUID as UUID
import Data.Scientific (Scientific)
import GHC.Real (infinity, notANumber)
-- | Exception thrown if conversion from a SQL value to a Haskell
-- value fails.  All three constructors share the same selector names,
-- so the selectors are total on this type.
data ResultError = Incompatible { errSQLType :: String
                                , errSQLTableOid :: Maybe PQ.Oid
                                , errSQLField :: String
                                , errHaskellType :: String
                                , errMessage :: String }
                 -- ^ The SQL and Haskell types are not compatible.
                 | UnexpectedNull { errSQLType :: String
                                  , errSQLTableOid :: Maybe PQ.Oid
                                  , errSQLField :: String
                                  , errHaskellType :: String
                                  , errMessage :: String }
                 -- ^ A SQL @NULL@ was encountered when the Haskell
                 -- type did not permit it.
                 | ConversionFailed { errSQLType :: String
                                    , errSQLTableOid :: Maybe PQ.Oid
                                    , errSQLField :: String
                                    , errHaskellType :: String
                                    , errMessage :: String }
                 -- ^ The SQL value could not be parsed, or could not
                 -- be represented as a valid Haskell value, or an
                 -- unexpected low-level error occurred (e.g. mismatch
                 -- between metadata and actual data in a row).
                 deriving (Eq, Show, Typeable)

instance Exception ResultError
-- | Internal shorthand: fail a conversion with the given exception.
left :: Exception a => a -> Conversion b
left = conversionError

-- | The shape of a single-field conversion function: receives the field's
-- metadata and its raw value ('Nothing' encodes SQL @NULL@).
type FieldParser a = Field -> Maybe ByteString -> Conversion a
-- | A type that may be converted from a SQL type.
class FromField a where
    fromField :: FieldParser a
    -- ^ Convert a SQL value to a Haskell value.
    --
    -- Returns a list of exceptions if the conversion fails.  In the case of
    -- library instances, this will usually be a single 'ResultError', but
    -- may be a 'UnicodeException'.
    --
    -- Note that retaining any reference to the 'Field' argument causes
    -- the entire @LibPQ.'PQ.Result'@ to be retained.  Thus, implementations
    -- of 'fromField' should return results that do not refer to this value
    -- after the result has been evaluated to WHNF.
    --
    -- Note that as of @postgresql-simple-0.4.0.0@, the 'ByteString' value
    -- has already been copied out of the @LibPQ.'PQ.Result'@ before it has
    -- been passed to 'fromField'.  This is because for short strings, it's
    -- cheaper to copy the string than to set up a finalizer.
-- | Returns the data type name.  This is the preferred way of identifying
-- types that do not have a stable type oid, such as types provided by
-- extensions to PostgreSQL.
--
-- More concretely,  it returns the @typname@ column associated with the
-- type oid in the @pg_type@ table.  First, postgresql-simple will check
-- the built-in,  static table.   If the type oid is not there,
-- postgresql-simple will check a per-connection cache,  and then
-- finally query the database's meta-schema.
typename :: Field -> Conversion ByteString
-- Just the @typname@ projection of the full 'typeInfo' lookup.
typename field = typname <$> typeInfo field
-- | Look up the 'TypeInfo' record for this field's type oid
-- (static table, per-connection cache, or database query).
typeInfo :: Field -> Conversion TypeInfo
typeInfo Field{..} = Conversion $ \conn ->
    fmap Ok (PQ.ftype result column >>= getTypeInfo conn)
-- | Look up the 'TypeInfo' record for an arbitrary type oid.
typeInfoByOid :: PQ.Oid -> Conversion TypeInfo
typeInfoByOid oid = Conversion $ \conn -> fmap Ok (getTypeInfo conn oid)
-- | Returns the name of the column.  This is often determined by a table
-- definition,  but it can be set using an @as@ clause.
-- Returns 'Nothing' when libpq reports no name for the column.
name :: Field -> Maybe ByteString
name Field{..} = unsafeDupablePerformIO (PQ.fname result column)
-- | Returns the object id of the @table@ associated with the
-- column,  if any.  Returns 'Nothing' when there is no such table;
-- for example a computed column does not have a table associated with it.
-- Analogous to libpq's @PQftable@.
tableOid :: Field -> Maybe PQ.Oid
tableOid Field{..} = toMaybeOid (unsafeDupablePerformIO (PQ.ftable result column))
  where
    -- libpq signals "no table" with the invalid-oid sentinel.
    toMaybeOid oid
      | oid == PQ.invalidOid = Nothing
      | otherwise            = Just oid
-- | If the column has a table associated with it,  this returns the number
-- off the associated table column.   Numbering starts from 0.  Analogous
-- to libpq's @PQftablecol@.
tableColumn :: Field -> Int
tableColumn Field{..} = fromCol (unsafeDupablePerformIO (PQ.ftablecol result column))
  where
    -- Unwrap libpq's column newtype to a plain Int.
    fromCol (PQ.Col x) = fromIntegral x
-- | This returns whether the data was returned in a binary or textual format.
-- Analogous to libpq's @PQfformat@.
format :: Field -> PQ.Format
format Field{..} = unsafeDupablePerformIO (PQ.fformat result column)
-- | void
-- Note: the payload is ignored, so a NULL @void@ is also accepted.
instance FromField () where
  fromField f _bs
     | typeOid f /= $(inlineTypoid TI.void) = returnError Incompatible f ""
     | otherwise = pure ()
-- | For dealing with null values.  Compatible with any postgresql type
-- compatible with type @a@.  Note that the type is not checked if
-- the value is null, although it is inadvisable to rely on this
-- behavior.
instance (FromField a) => FromField (Maybe a) where
    fromField _ Nothing = pure Nothing
    fromField f bs = Just <$> fromField f bs
-- | compatible with any data type, but the value must be null
instance FromField Null where
    fromField _ Nothing = pure Null
    fromField f (Just _) = returnError ConversionFailed f "data is not null"
-- | bool
instance FromField Bool where
    fromField f mbs
      | typeOid f /= $(inlineTypoid TI.bool) = returnError Incompatible f ""
      | otherwise =
          case mbs of
            -- PostgreSQL's textual format sends booleans as "t" / "f".
            Just "t" -> pure True
            Just "f" -> pure False
            Nothing  -> returnError UnexpectedNull f ""
            _        -> returnError ConversionFailed f ""
-- | \"char\"
instance FromField Char where
    fromField f mbs
      | typeOid f /= $(inlineTypoid TI.char) = returnError Incompatible f ""
      | otherwise =
          case mbs of
            Nothing -> returnError UnexpectedNull f ""
            Just bs
              -- \"char\" is a single-byte type; anything else is malformed.
              | B.length bs == 1 -> return $! B.head bs
              | otherwise        -> returnError ConversionFailed f "length not 1"
-- | int2
instance FromField Int16 where
    fromField = atto ok16 $ signed decimal
-- | int2, int4
instance FromField Int32 where
    fromField = atto ok32 $ signed decimal
#if WORD_SIZE_IN_BITS < 64
-- | int2, int4, and if compiled as 64-bit code, int8 as well.
-- This library was compiled as 32-bit code.
#else
-- | int2, int4, and if compiled as 64-bit code, int8 as well.
-- This library was compiled as 64-bit code.
#endif
instance FromField Int where
    fromField = atto okInt $ signed decimal
-- | int2, int4, int8
instance FromField Int64 where
    fromField = atto ok64 $ signed decimal
-- | int2, int4, int8
-- NOTE(review): this accepts only the int8-compatible oid set, so
-- @numeric@ columns are rejected for 'Integer' — confirm intended.
instance FromField Integer where
    fromField = atto ok64 $ signed decimal
-- | int2, float4 (Uses attoparsec's 'double' routine,  for
-- better accuracy convert to 'Scientific' or 'Rational' first)
instance FromField Float where
    fromField = atto ok (realToFrac <$> pg_double)
      where ok = $(mkCompats [TI.float4,TI.int2])
-- | int2, int4, float4, float8 (Uses attoparsec's 'double' routine,  for
-- better accuracy convert to 'Scientific' or 'Rational' first)
instance FromField Double where
    fromField = atto ok pg_double
      where ok = $(mkCompats [TI.float4,TI.float8,TI.int2,TI.int4])
-- | int2, int4, float4, float8, numeric
instance FromField (Ratio Integer) where
    fromField = atto ok pg_rational
      where ok = $(mkCompats [TI.float4,TI.float8,TI.int2,TI.int4,TI.numeric])
-- | int2, int4, float4, float8, numeric
instance FromField Scientific where
    fromField = atto ok rational
      where ok = $(mkCompats [TI.float4,TI.float8,TI.int2,TI.int4,TI.numeric])
-- | Strip the 'Binary' newtype wrapper.
unBinary :: Binary t -> t
unBinary (Binary x) = x
-- | Parse a floating-point value in PostgreSQL's textual format,
-- mapping the special values onto IEEE NaN and infinities before
-- falling back to attoparsec's 'double'.
pg_double :: Parser Double
pg_double =
        (string "NaN"       >> return ( 0 / 0))
    <|> (string "Infinity"  >> return ( 1 / 0))
    <|> (string "-Infinity" >> return (-1 / 0))
    <|> double
-- | Parse a number in PostgreSQL's textual format as a 'Rational',
-- mapping the special values onto their "GHC.Real" counterparts.
pg_rational :: Parser Rational
pg_rational =
        (string "NaN"       >> return notANumber )
    <|> (string "Infinity"  >> return infinity   )
    <|> (string "-Infinity" >> return (-infinity))
    <|> rational
-- | bytea, name, text, \"char\", bpchar, varchar, unknown
-- bytea values need unescaping (via the 'Binary' instance); all
-- textual types are passed through verbatim.
instance FromField SB.ByteString where
    fromField f dat = if typeOid f == $(inlineTypoid TI.bytea)
                      then unBinary <$> fromField f dat
                      else doFromField f okText' pure dat
-- | oid
instance FromField PQ.Oid where
    fromField f dat = PQ.Oid <$> atto (== $(inlineTypoid TI.oid)) decimal f dat
-- | bytea, name, text, \"char\", bpchar, varchar, unknown
-- Delegates to the strict instance and wraps into a single chunk.
instance FromField LB.ByteString where
    fromField f dat = LB.fromChunks . (:[]) <$> fromField f dat
-- | Decode the textual (escaped) @bytea@ representation via libpq's
-- @PQunescapeBytea@, failing with 'ConversionFailed' on bad input.
unescapeBytea :: Field -> SB.ByteString
              -> Conversion (Binary SB.ByteString)
unescapeBytea f escaped =
    maybe (returnError ConversionFailed f "unescapeBytea failed")
          (pure . Binary)
          (unsafeDupablePerformIO (PQ.unescapeBytea escaped))
-- | bytea
-- Text-format results arrive escaped and must go through libpq's
-- unescape; binary-format results are already the raw bytes.
instance FromField (Binary SB.ByteString) where
    fromField f dat = case format f of
      PQ.Text   -> doFromField f okBinary (unescapeBytea f) dat
      PQ.Binary -> doFromField f okBinary (pure . Binary) dat
-- | bytea
instance FromField (Binary LB.ByteString) where
    fromField f dat = Binary . LB.fromChunks . (:[]) . unBinary <$> fromField f dat
-- | name, text, \"char\", bpchar, varchar
instance FromField ST.Text where
    fromField f = doFromField f okText $ (either left pure . ST.decodeUtf8')
    -- FIXME:  check character encoding
-- | name, text, \"char\", bpchar, varchar
instance FromField LT.Text where
    fromField f dat = LT.fromStrict <$> fromField f dat
-- | Shared implementation for the @citext@ instances.  The type is
-- identified by name rather than oid because @citext@ comes from an
-- extension and has no stable oid (see 'typename').  The UTF-8
-- payload is decoded and handed to @wrap@ to build the result.
citextFromField :: Typeable a => (ST.Text -> a) -> FieldParser a
citextFromField wrap f mdat = do
    typ <- typename f
    if typ /= "citext"
      then returnError Incompatible f ""
      else case mdat of
             Nothing  -> returnError UnexpectedNull f ""
             Just dat -> either left (pure . wrap)
                                (ST.decodeUtf8' dat)
-- | citext
instance FromField (CI ST.Text) where
    fromField = citextFromField CI.mk
-- | citext
instance FromField (CI LT.Text) where
    fromField = citextFromField (CI.mk . LT.fromStrict)
-- | name, text, \"char\", bpchar, varchar
instance FromField [Char] where
    fromField f dat = ST.unpack <$> fromField f dat
-- All date/time instances below share 'ff': exactly one accepted
-- type oid plus a pure parser from "Database.PostgreSQL.Simple.Time".
-- | timestamptz
instance FromField UTCTime where
  fromField = ff $(inlineTypoid TI.timestamptz) "UTCTime" parseUTCTime
-- | timestamptz
instance FromField ZonedTime where
  fromField = ff $(inlineTypoid TI.timestamptz) "ZonedTime" parseZonedTime
-- | timestamp
instance FromField LocalTime where
  fromField = ff $(inlineTypoid TI.timestamp) "LocalTime" parseLocalTime
-- | date
instance FromField Day where
  fromField = ff $(inlineTypoid TI.date) "Day" parseDay
-- | time
instance FromField TimeOfDay where
  fromField = ff $(inlineTypoid TI.time) "TimeOfDay" parseTimeOfDay
-- | timestamptz
instance FromField UTCTimestamp where
  fromField = ff $(inlineTypoid TI.timestamptz) "UTCTimestamp" parseUTCTimestamp
-- | timestamptz
instance FromField ZonedTimestamp where
  fromField = ff $(inlineTypoid TI.timestamptz) "ZonedTimestamp" parseZonedTimestamp
-- | timestamp
instance FromField LocalTimestamp where
  fromField = ff $(inlineTypoid TI.timestamp) "LocalTimestamp" parseLocalTimestamp
-- | date
instance FromField Date where
  fromField = ff $(inlineTypoid TI.date) "Date" parseDate
-- | Shared driver for the date/time instances: accept exactly one
-- type oid, reject NULL, and run a pure parser over the raw bytes.
-- @hsType@ is used only to label error messages.
ff :: PQ.Oid -> String -> (B8.ByteString -> Either String a)
   -> Field -> Maybe B8.ByteString -> Conversion a
ff compatOid hsType parse f mstr =
    if typeOid f /= compatOid
    then err Incompatible ""
    else case mstr of
           Nothing -> err UnexpectedNull ""
           Just str -> case parse str of
                         Left msg -> err ConversionFailed msg
                         Right val -> return val
  where
    -- Fill in the common ResultError fields from the field metadata.
    err errC msg = do
      typnam <- typename f
      left $ errC (B8.unpack typnam)
                  (tableOid f)
                  (maybe "" B8.unpack (name f))
                  hsType
                  msg
{-# INLINE ff #-}
-- | Compatible with both types.  Conversions to type @b@ are
-- preferred,  the conversion to type @a@ will be tried after
-- the 'Right' conversion fails.
instance (FromField a, FromField b) => FromField (Either a b) where
    fromField f dat = (Right <$> fromField f dat)
                  <|> (Left <$> fromField f dat)
-- | any postgresql array whose elements are compatible with type @a@
instance (FromField a, Typeable a) => FromField (PGArray a) where
    fromField = pgArrayFieldParser fromField
-- | Build a parser for a postgresql array from a parser for its
-- elements.  The field must be an array type; NULL is rejected.
pgArrayFieldParser :: Typeable a => FieldParser a -> FieldParser (PGArray a)
pgArrayFieldParser fieldParser f mdat = do
    info <- typeInfo f
    case info of
      TI.Array{} ->
          case mdat of
            Nothing  -> returnError UnexpectedNull f ""
            Just dat ->
              -- Parse the textual array syntax, then run the element
              -- conversions it produced.
              case parseOnly (fromArray fieldParser info f) dat of
                Left err   -> returnError ConversionFailed f err
                Right conv -> PGArray <$> conv
      _ -> returnError Incompatible f ""
-- | Attoparsec parser for a textual postgresql array, yielding the
-- deferred element conversions.  (The parameter previously named
-- @typeInfo@ shadowed the top-level 'typeInfo' function; renamed.)
fromArray :: FieldParser a -> TypeInfo -> Field -> Parser (Conversion [a])
fromArray fieldParser tInfo f = sequence . (parseIt <$>) <$> array delim
  where
    delim = typdelim (typelem tInfo)
    -- A pseudo-field whose oid is the element type's oid.
    fElem = f{ typeOid = typoid (typelem tInfo) }
    parseIt item =
        fieldParser f' $ if item' == "NULL" then Nothing else Just item'
      where
        item' = fmt delim item
        -- Nested arrays are parsed against the array type itself;
        -- scalar items against the element type.
        f' | Arrays.Array _ <- item = f
           | otherwise              = fElem
-- | Any postgresql array whose elements are compatible with type @a@.
instance (FromField a, Typeable a) => FromField (Vector a) where
    fromField f v = V.fromList . fromPGArray <$> fromField f v
-- | Same as the 'Vector' instance, thawed into a mutable vector.
instance (FromField a, Typeable a) => FromField (IOVector a) where
    fromField f v = liftConversion . V.unsafeThaw =<< fromField f v
-- | uuid
instance FromField UUID where
    fromField f mbs
      | typeOid f /= $(inlineTypoid TI.uuid) = returnError Incompatible f ""
      | otherwise =
          case mbs of
            Nothing -> returnError UnexpectedNull f ""
            Just bs ->
              -- 'UUID.fromASCIIBytes' accepts only the canonical
              -- 8-4-4-4-12 hexadecimal form.
              maybe (returnError ConversionFailed f "Invalid UUID") pure
                    (UUID.fromASCIIBytes bs)
-- | json, jsonb
instance FromField JSON.Value where
    fromField f mbs =
        if typeOid f /= $(inlineTypoid TI.json) && typeOid f /= $(inlineTypoid TI.jsonb)
        then returnError Incompatible f ""
        else case mbs of
               Nothing -> returnError UnexpectedNull f ""
               Just bs ->
-- CPP below picks the cheapest available strict-decoding entry point.
#if MIN_VERSION_aeson(0,6,3)
                   case JSON.eitherDecodeStrict' bs of
#elif MIN_VERSION_bytestring(0,10,0)
                   case JSON.eitherDecode' $ LB.fromStrict bs of
#else
                   case JSON.eitherDecode' $ LB.fromChunks [bs] of
#endif
                     Left  err -> returnError ConversionFailed f err
                     Right val -> pure val
-- | Parse a field to a JSON 'JSON.Value' and convert that into a
-- Haskell value using 'JSON.fromJSON'.
--
-- This can be used as the default implementation for the 'fromField'
-- method for Haskell types that have a JSON representation in
-- PostgreSQL.
--
-- The 'Typeable' constraint is required to show more informative
-- error messages when parsing fails.
fromJSONField :: (JSON.FromJSON a, Typeable a) => FieldParser a
fromJSONField f mbBs = do
    -- First decode the raw bytes (type/NULL checks happen there),
    -- then map the generic Value onto the target type.
    value <- fromField f mbBs
    case JSON.fromJSON value of
      JSON.Error err -> returnError ConversionFailed f $
                          "JSON decoding error: " ++ err
      JSON.Success x -> pure x
-- | Compatible with the same set of types as @a@.  Note that
-- modifying the 'IORef' does not have any effects outside
-- the local process on the local machine.
-- (A fresh 'IORef' is allocated for every converted value.)
instance FromField a => FromField (IORef a) where
    fromField f v = liftConversion . newIORef =<< fromField f v
-- | Compatible with the same set of types as @a@.  Note that
-- modifying the 'MVar' does not have any effects outside
-- the local process on the local machine.
-- (A fresh, filled 'MVar' is allocated for every converted value.)
instance FromField a => FromField (MVar a) where
    fromField f v = liftConversion . newMVar =<< fromField f v
-- | Predicate on type oids used to gate conversions.
type Compat = PQ.Oid -> Bool
okText, okText', okBinary, ok16, ok32, ok64, okInt :: Compat
okText   = $( mkCompats [ TI.name, TI.text, TI.char,
                          TI.bpchar, TI.varchar ] )
okText'  = $( mkCompats [ TI.name, TI.text, TI.char,
                          TI.bpchar, TI.varchar, TI.unknown ] )
okBinary = (== $( inlineTypoid TI.bytea ))
ok16 = (== $( inlineTypoid TI.int2 ))
ok32 = $( mkCompats [TI.int2,TI.int4] )
ok64 = $( mkCompats [TI.int2,TI.int4,TI.int8] )
-- okInt matches whatever fits in the platform's native Int.
#if WORD_SIZE_IN_BITS < 64
okInt = ok32
#else
okInt = ok64
#endif
-- | Shared driver for simple instances: check the field's type oid
-- against a compatibility predicate, reject NULL, and hand the raw
-- bytes to the given conversion function.
doFromField :: forall a . (Typeable a)
          => Field -> Compat -> (ByteString -> Conversion a)
          -> Maybe ByteString -> Conversion a
doFromField f isCompat cvt mbs =
    case mbs of
      Nothing -> returnError UnexpectedNull f ""
      Just bs
        | isCompat (typeOid f) -> cvt bs
        | otherwise            -> returnError Incompatible f "types incompatible"
-- | Given one of the constructors from 'ResultError',  the field,
-- and an 'errMessage',  this fills in the other fields in the
-- exception value and returns it in a 'Left . SomeException'
-- constructor.
-- Lives in 'Conversion' because it may query the database for the
-- type name (see 'typename').
returnError :: forall a err . (Typeable a, Exception err)
            => (String -> Maybe PQ.Oid -> String -> String -> String -> err)
            -> Field -> String -> Conversion a
returnError mkErr f msg = do
  typnam <- typename f
  left $ mkErr (B.unpack typnam)
               (tableOid f)
               (maybe "" B.unpack (name f))
               (show (typeOf (undefined :: a)))
               msg
-- | Build a 'FieldParser' from a compatibility predicate and an
-- attoparsec parser run over the raw bytes with 'parseOnly'.
atto :: forall a. (Typeable a)
     => Compat -> Parser a -> Field -> Maybe ByteString
     -> Conversion a
atto types p0 f dat = doFromField f types (go p0) dat
  where
    go :: Parser a -> ByteString -> Conversion a
    go p s =
        case parseOnly p s of
          Left err -> returnError ConversionFailed f err
          Right  v -> pure v
| avieth/postgresql-simple | src/Database/PostgreSQL/Simple/FromField.hs | bsd-3-clause | 24,587 | 0 | 18 | 6,421 | 4,850 | 2,561 | 2,289 | 351 | 4 |
{-
Copyright (c) 2004, Philippa Jane Cowderoy
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
* Neither the name of the original author nor the names of any
contributors may be used to endorse or promote products derived from
this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
POSSIBILITY OF SUCH DAMAGE.
-}
module RecentChanges (recentChanges) where
import Text.XHtml
import PageTemplates
import PageIO
import Data.List
-- | CGI handler that renders the most recently updated wiki pages,
-- newest first.  The optional @count@ query parameter limits how many
-- entries are shown, defaulting to 'defaultCount'.
--
-- Cleanups over the previous version: @x <- return e@ replaced by
-- @let@, the partial lambda @\(pn, Just d) -> ...@ and the unused
-- binding in the date filter replaced by a filtering comprehension.
recentChanges env = do
    pns <- getPagenames
    pds <- mapM getPageLastUpdated pns
    -- Pair each page with its date, dropping pages without one.
    let pnds  = [ (pn, d) | (pn, Just d) <- zip pns pds ]
        opnds = sortBy ordering pnds
        count = case lookup "count" env of
                  Nothing -> defaultCount
                  Just x  -> case reads x of
                               [(i,_)] -> i
                               _       -> defaultCount
        out   = concat
                  (intersperse "\n\n"
                    [ linkTo pn ++ " - " ++ show d
                    | (pn, d) <- take count opnds ])
    page ("Showing the "
          ++
          show count
          ++
          " most [:RecentChanges||RecentChanges:]:\n\n"
          ++
          out)
         "RecentChanges"
         env
  where
    -- Newest first: the natural ordering on dates, reversed.
    ordering (_, d1) (_, d2) = compare d2 d1
    defaultCount = 50
    linkTo pn = "["++pn++"|"++pn++"]"
| nh2/flippi | src/RecentChanges.hs | bsd-3-clause | 4,025 | 0 | 22 | 2,068 | 390 | 200 | 190 | 43 | 6 |
module Main (main) where
import System.Environment (getArgs)
import Language.Java.Paragon.Error
import Language.Java.Paragon.Interaction.Flags
import Language.Java.Paragon.Parac
-- | Main method, invokes the compiler.
-- Parses the command line into option flags and source files, then
-- compiles each file with the same flag set.
main :: IO ()
main = do
  (flags, files) <- compilerOpts =<< getArgs
  mapM_ (compileFile flags) files
-- | Run the Paragon compiler on a single file and print any errors
-- it reports; a clean run produces no output.
compileFile :: [Flag] -> String -> IO ()
compileFile flags file = do
  errors <- parac flags file
  if null errors
    then return ()
    else putStrLn (showErrors errors)
-- | Render a list of compiler errors, each preceded by its context
-- trail and followed by a newline.
showErrors :: [Error] -> String
showErrors = concatMap render
  where
    render e = showContext (errContext e) ++ pretty e ++ "\n"
-- | Render an error-context trail, one context per line.
showContext :: [ErrorContext] -> String
showContext = concatMap (\c -> context c ++ "\n")
| bvdelft/paragon | src/Language/Java/Paragon.hs | bsd-3-clause | 812 | 0 | 11 | 202 | 279 | 146 | 133 | 24 | 2 |
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE FlexibleContexts #-}
{-|
Module : Numeric.AERN.RefinementOrder.ApproxOrder
Description : Comparisons with semidecidable order
Copyright : (c) Michal Konecny
License : BSD3
Maintainer : [email protected]
Stability : experimental
Portability : portable
Comparisons with semidecidable order.
This module is hidden and reexported via its parent RefinementOrder.
-}
module Numeric.AERN.RefinementOrder.PartialComparison
where
import Prelude hiding (EQ, LT, GT)
import Numeric.AERN.RefinementOrder.Extrema
import Numeric.AERN.RefinementOrder.Arbitrary
import Numeric.AERN.Basics.Arbitrary
import Numeric.AERN.Basics.Effort
import Numeric.AERN.Misc.Maybe
import Numeric.AERN.Basics.PartialOrdering
import Numeric.AERN.Basics.Laws.PartialRelation
import Numeric.AERN.Misc.Maybe
import Numeric.AERN.Misc.Bool
import Test.QuickCheck
import Test.Framework (testGroup, Test)
import Test.Framework.Providers.QuickCheck2 (testProperty)
infix 4 |==?, |<==>?, |</=>?, |<?, |<=?, |>=?, |>?, ⊏?, ⊑?, ⊒?, ⊐?
{-|
    A type with semi-decidable equality and partial order
-}
class
    (EffortIndicator (PartialCompareEffortIndicator t))
    =>
    PartialComparison t
    where
    type PartialCompareEffortIndicator t
    pCompareDefaultEffort :: t -> PartialCompareEffortIndicator t
    pCompareEff :: PartialCompareEffortIndicator t -> t -> t -> Maybe PartialOrdering
    pCompareInFullEff :: PartialCompareEffortIndicator t -> t -> t -> PartialOrderingPartialInfo
    pCompareInFullEff eff a b = partialOrdering2PartialInfo $ pCompareEff eff a b
    -- | Partial equality
    pEqualEff :: (PartialCompareEffortIndicator t) -> t -> t -> Maybe Bool
    -- | Partial `is comparable to`.
    pComparableEff :: (PartialCompareEffortIndicator t) -> t -> t -> Maybe Bool
    -- | Partial `is not comparable to`.
    pIncomparableEff :: (PartialCompareEffortIndicator t) -> t -> t -> Maybe Bool
    -- | Partial strict `below`.
    pLessEff :: (PartialCompareEffortIndicator t) -> t -> t -> Maybe Bool
    -- | Partial `below or equal to`.
    pLeqEff :: (PartialCompareEffortIndicator t) -> t -> t -> Maybe Bool
    -- | Partial `above or equal to`.
    pGeqEff :: (PartialCompareEffortIndicator t) -> t -> t -> Maybe Bool
    -- | Partial strict `above`.
    pGreaterEff :: (PartialCompareEffortIndicator t) -> t -> t -> Maybe Bool
    -- defaults for all convenience operations,
    -- each derived from the full partial-ordering info:
    pEqualEff effort a b =
        pOrdInfEQ $ pCompareInFullEff effort a b
    pLessEff effort a b =
        pOrdInfLT $ pCompareInFullEff effort a b
    pGreaterEff effort a b =
        pOrdInfGT $ pCompareInFullEff effort a b
    pLeqEff effort a b =
        pOrdInfLEQ $ pCompareInFullEff effort a b
    pGeqEff effort a b =
        pOrdInfGEQ $ pCompareInFullEff effort a b
    pComparableEff effort a b =
        fmap not $ pOrdInfNC $ pCompareInFullEff effort a b
    pIncomparableEff effort a b =
        pOrdInfNC $ pCompareInFullEff effort a b
-- Convenience wrappers: each operation below uses the default effort
-- indicator of its first argument; the operator forms are aliases.
-- | Partial comparison with default effort
pCompare :: (PartialComparison t) => t -> t -> Maybe PartialOrdering
pCompare a = pCompareEff (pCompareDefaultEffort a) a
-- | Partial comparison with default effort
pCompareInFull :: (PartialComparison t) => t -> t -> PartialOrderingPartialInfo
pCompareInFull a = pCompareInFullEff (pCompareDefaultEffort a) a
-- | Partial `is comparable to` with default effort
pComparable :: (PartialComparison t) => t -> t -> Maybe Bool
pComparable a = pComparableEff (pCompareDefaultEffort a) a
-- | Partial `is comparable to` with default effort
(|<==>?) :: (PartialComparison t) => t -> t -> Maybe Bool
(|<==>?) = pComparable
-- | Partial `is not comparable to` with default effort
pIncomparable :: (PartialComparison t) => t -> t -> Maybe Bool
pIncomparable a = pIncomparableEff (pCompareDefaultEffort a) a
-- | Partial `is not comparable to` with default effort
(|</=>?) :: (PartialComparison t) => t -> t -> Maybe Bool
(|</=>?) = pIncomparable
-- | Partial equality with default effort
pEqual :: (PartialComparison t) => t -> t -> Maybe Bool
pEqual a = pEqualEff (pCompareDefaultEffort a) a
-- | Partial equality with default effort
(|==?) :: (PartialComparison t) => t -> t -> Maybe Bool
(|==?) = pEqual
-- | Partial `strictly less than` with default effort
pLess :: (PartialComparison t) => t -> t -> Maybe Bool
pLess a = pLessEff (pCompareDefaultEffort a) a
-- | Partial `strictly below` with default effort
(|<?) :: (PartialComparison t) => t -> t -> Maybe Bool
(|<?) = pLess
{-| Convenience Unicode notation for '|<?' -}
(⊏?) :: (PartialComparison t) => t -> t -> Maybe Bool
(⊏?) = (|<?)
-- | Partial `less than or equal to` with default effort
pLeq :: (PartialComparison t) => t -> t -> Maybe Bool
pLeq a = pLeqEff (pCompareDefaultEffort a) a
-- | Partial `below or equal to` with default effort
(|<=?) :: (PartialComparison t) => t -> t -> Maybe Bool
(|<=?) = pLeq
-- | Partial `strictly greater than` with default effort
pGreater :: (PartialComparison t) => t -> t -> Maybe Bool
pGreater a = pGreaterEff (pCompareDefaultEffort a) a
-- | Partial `strictly above` with default effort
(|>?) :: (PartialComparison t) => t -> t -> Maybe Bool
(|>?) = pGreater
{-| Convenience Unicode notation for '|>?' -}
(⊐?) :: (PartialComparison t) => t -> t -> Maybe Bool
(⊐?) = (|>?)
{-| Convenience Unicode notation for '|<=?' -}
(⊑?) :: (PartialComparison t) => t -> t -> Maybe Bool
(⊑?) = (|<=?)
-- | Partial `greater than or equal to` with default effort
pGeq :: (PartialComparison t) => t -> t -> Maybe Bool
pGeq a = pGeqEff (pCompareDefaultEffort a) a
-- | Partial `above or equal to` with default effort
(|>=?) :: (PartialComparison t) => t -> t -> Maybe Bool
(|>=?) = pGeq
{-| Convenience Unicode notation for '|>=?' -}
(⊒?) :: (PartialComparison t) => t -> t -> Maybe Bool
(⊒?) = (|>=?)
-- | Reflexivity: comparing an element with itself must yield EQ,
-- or remain undecided ('Nothing').
propPartialComparisonReflexiveEQ ::
    (PartialComparison t) =>
    t ->
    (PartialCompareEffortIndicator t) ->
    (UniformlyOrderedSingleton t) ->
    Bool
propPartialComparisonReflexiveEQ _ effort (UniformlyOrderedSingleton x) =
    case pCompareEff effort x x of
        Just EQ -> True
        Nothing -> True
        _       -> False
-- | Anti-symmetry: whenever both directions are decided, swapping the
-- arguments must transpose the result.
propPartialComparisonAntiSymmetric ::
    (PartialComparison t) =>
    t ->
    UniformlyOrderedPair t ->
    (PartialCompareEffortIndicator t) ->
    Bool
propPartialComparisonAntiSymmetric _ (UniformlyOrderedPair (x, y)) effort =
    case (pCompareEff effort y x, pCompareEff effort x y) of
        (Just r1, Just r2) -> r1 == partialOrderingTranspose r2
        _                  -> True
-- | Transitivity of semi-decided equality.
propPartialComparisonTransitiveEQ ::
    (PartialComparison t) =>
    t ->
    UniformlyOrderedTriple t ->
    (PartialCompareEffortIndicator t) ->
    Bool
propPartialComparisonTransitiveEQ _ (UniformlyOrderedTriple (x, y, z)) effort =
    partialTransitive (pEqualEff effort) x y z
-- | Transitivity of the semi-decided strict order.
propPartialComparisonTransitiveLT ::
    (PartialComparison t) =>
    t ->
    UniformlyOrderedTriple t ->
    (PartialCompareEffortIndicator t) ->
    Bool
propPartialComparisonTransitiveLT _ (UniformlyOrderedTriple (x, y, z)) effort =
    partialTransitive (pLessEff effort) x y z
-- | Transitivity of the semi-decided non-strict order.
propPartialComparisonTransitiveLE ::
    (PartialComparison t) =>
    t ->
    UniformlyOrderedTriple t ->
    (PartialCompareEffortIndicator t) ->
    Bool
propPartialComparisonTransitiveLE _ (UniformlyOrderedTriple (x, y, z)) effort =
    partialTransitive (pLeqEff effort) x y z
-- | The bottom and top elements must be below/above every element
-- under the semi-decided non-strict order.
propExtremaInPartialComparison ::
    (PartialComparison t, HasExtrema t) =>
    t ->
    (UniformlyOrderedSingleton t) ->
    (PartialCompareEffortIndicator t) ->
    Bool
propExtremaInPartialComparison _ (UniformlyOrderedSingleton x) effort =
    partialOrderExtrema (pLeqEff effort) (bottom x) (top x) x
-- | Bundle all 'PartialComparison' laws into one test group for the
-- given sample value and generation area.
testsPartialComparison ::
    (PartialComparison t,
     HasExtrema t,
     ArbitraryOrderedTuple t, Show t)
    =>
    (String, t) ->
    (Area t) ->
    Test
testsPartialComparison (name, sample) area =
    testGroup (name ++ " (⊑?)")
    [
     testProperty "anti symmetric" (area, propPartialComparisonAntiSymmetric sample)
    ,
     testProperty "transitive EQ" (area, propPartialComparisonTransitiveEQ sample)
    ,
     testProperty "transitive LE" (area, propPartialComparisonTransitiveLE sample)
    ,
     testProperty "transitive LT" (area, propPartialComparisonTransitiveLT sample)
    ,
     testProperty "extrema" (area, propExtremaInPartialComparison sample)
    ]
| michalkonecny/aern | aern-order/src/Numeric/AERN/RefinementOrder/PartialComparison.hs | bsd-3-clause | 8,565 | 0 | 10 | 1,839 | 2,100 | 1,138 | 962 | 169 | 3 |
{-# LANGUAGE OverloadedStrings #-}
module Api
( app
) where
import Control.Applicative ((<$>))
import Control.Monad (when)
import Data.Maybe (isNothing)
import Data.Text ()
import qualified Database.Persist.Sqlite as P
import DB
import Helper
import qualified Network.HTTP.Types as HT
import Types
import Web.Scotty
-- | REST routes for the Spot resource, backed by the given
-- connection pool.
--
-- Fix: the DELETE handler previously set a 404 status with 'when'
-- but then proceeded to delete and respond @json True@ anyway,
-- producing a 404 response with a success body.  It now
-- short-circuits on a missing row.
app :: P.ConnectionPool -> ScottyM ()
app p = do
  let db = runDB p

  -- List all spots.
  get "/spots" $ withRescue $ do
    resources <- db $ map P.entityVal <$> P.selectList ([] :: [P.Filter Spot]) []
    json $ toSpotsResponse resources

  -- Fetch a single spot; 404 when absent.
  get "/spots/:id" $ do
    key <- toKey <$> param "id"
    resource <- db $ P.get (key :: SpotId)
    case resource of
      Just r -> json $ SpotResponse r
      Nothing -> status HT.status404

  -- Update a spot, then echo the stored row; 404 when absent.
  put "/spots/:id" $ withRescue $ do
    key <- toKey <$> param "id"
    value <- fromSpotResponse <$> jsonData
    db $ P.update key $ toUpdateQuery value
    resource <- db $ P.get (key :: SpotId)
    case resource of
      Just r -> json $ SpotResponse r
      Nothing -> status HT.status404

  -- Create a spot and return the stored row.
  post "/spots" $ withRescue $ do
    value <- fromSpotResponse <$> jsonData
    key <- db $ P.insert value
    resource <- db $ P.get key
    json resource

  -- Delete a spot; 404 (and no delete attempt) when absent.
  delete "/spots/:id" $ withRescue $ do
    key <- toKey <$> param "id"
    resource <- db $ P.get (key :: SpotId)
    case resource of
      Nothing -> status HT.status404
      Just _ -> do
        _ <- db $ P.delete (key :: SpotId)
        json True
| fujimura/spot | src/Api.hs | bsd-3-clause | 1,675 | 0 | 16 | 600 | 544 | 264 | 280 | 44 | 3 |
{-|
Module : Idris.Erasure
Description : Utilities to erase irrelevant stuff.
Copyright :
License : BSD3
Maintainer : The Idris Community.
-}
{-# LANGUAGE PatternGuards #-}
module Idris.Erasure (performUsageAnalysis, mkFieldName) where
import Idris.AbsSyntax
import Idris.ASTUtils
import Idris.Core.CaseTree
import Idris.Core.TT
import Idris.Core.Evaluate
import Idris.Primitives
import Idris.Error
import Debug.Trace
import System.IO.Unsafe
import Control.Category
import Prelude hiding (id, (.))
import Control.Arrow
import Control.Applicative
import Control.Monad.State
import Data.Maybe
import Data.List
import qualified Data.Set as S
import qualified Data.IntSet as IS
import qualified Data.Map as M
import qualified Data.IntMap as IM
import Data.Set (Set)
import Data.IntSet (IntSet)
import Data.Map (Map)
import Data.IntMap (IntMap)
import Data.Text (pack)
import qualified Data.Text as T
-- | UseMap maps names to the set of used (reachable) argument
-- positions.
type UseMap = Map Name (IntMap (Set Reason))
-- | An argument position of a function, or its result.
data Arg = Arg Int | Result deriving (Eq, Ord)
instance Show Arg where
    show (Arg i) = show i
    show Result  = "*"
-- | A dependency-graph node: a name together with one of its
-- argument positions (or its result).
type Node = (Name, Arg)
-- | The dependency graph: maps each condition to the dependencies
-- it draws in when satisfied.
type Deps = Map Cond DepSet
type Reason = (Name, Int)  -- function name, argument index
-- | Nodes along with sets of reasons for every one.
type DepSet = Map Node (Set Reason)
-- | "Condition" is the conjunction of elementary assumptions along
-- the path from the root. Elementary assumption (f, i) means that
-- "function f uses the argument i".
type Cond = Set Node
-- | Variables carry certain information with them.
data VarInfo = VI
    { viDeps   :: DepSet      -- ^ dependencies drawn in by the variable
    , viFunArg :: Maybe Int   -- ^ which function argument this variable came from (defined only for patvars)
    , viMethod :: Maybe Name  -- ^ name of the metamethod represented by the var, if any
    }
    deriving Show
-- | Environment mapping in-scope variables to their 'VarInfo'.
type Vars = Map Name VarInfo
-- | Perform usage analysis, write the relevant information in the
-- internal structures, returning the list of reachable names.
-- Run the erasure analysis starting from the given seed names (Main.main
-- plus export lists), store the computed usage in the IState, and return
-- all reachable names.
performUsageAnalysis :: [Name] -> Idris [Name]
performUsageAnalysis startNames = do
    ctx <- tt_ctxt <$> getIState
    case startNames of
      [] -> return [] -- no main -> not compiling -> reachability irrelevant
      main -> do
        ci <- idris_interfaces <$> getIState
        cg <- idris_callgraph <$> getIState
        opt <- idris_optimisation <$> getIState
        used <- idris_erasureUsed <$> getIState
        externs <- idris_externs <$> getIState
        -- Build the dependency graph.
        let depMap = buildDepMap ci used (S.toList externs) ctx main
        -- Search for reachable nodes in the graph.
        let (residDeps, (reachableNames, minUse)) = minimalUsage depMap
            usage = M.toList minUse
        -- Print some debug info.
        logErasure 5 $ "Original deps:\n" ++ unlines (map fmtItem . M.toList $ depMap)
        logErasure 3 $ "Reachable names:\n" ++ unlines (map (indent . show) . S.toList $ reachableNames)
        logErasure 4 $ "Minimal usage:\n" ++ fmtUseMap usage
        logErasure 5 $ "Residual deps:\n" ++ unlines (map fmtItem . M.toList $ residDeps)
        -- Check that everything reachable is accessible.
        checkEnabled <- (WarnReach `elem`) . opt_cmdline . idris_options <$> getIState
        when checkEnabled $
          mapM_ (checkAccessibility opt) usage
        -- Check that no postulates are reachable.
        reachablePostulates <- S.intersection reachableNames . idris_postulates <$> getIState
        when (not . S.null $ reachablePostulates)
          $ ifail ("reachable postulates:\n" ++ intercalate "\n" [" " ++ show n | n <- S.toList reachablePostulates])
        -- Store the usage info in the internal state.
        mapM_ storeUsage usage
        return $ S.toList reachableNames
  where
    indent = (" " ++)
    -- Pretty-print one dependency-map entry (condition -> dep set).
    fmtItem :: (Cond, DepSet) -> String
    fmtItem (cond, deps) = indent $ show (S.toList cond) ++ " -> " ++ show (M.toList deps)
    fmtUseMap :: [(Name, IntMap (Set Reason))] -> String
    fmtUseMap = unlines . map (\(n,is) -> indent $ show n ++ " -> " ++ fmtIxs is)
    fmtIxs :: IntMap (Set Reason) -> String
    fmtIxs = intercalate ", " . map fmtArg . IM.toList
      where
        fmtArg (i, rs)
          | S.null rs = show i
          | otherwise = show i ++ " from " ++ intercalate ", " (map show $ S.toList rs)
    -- Record the used argument positions of one name in the callgraph state.
    storeUsage :: (Name, IntMap (Set Reason)) -> Idris ()
    storeUsage (n, args) = fputState (cg_usedpos . ist_callgraph n) flat
      where
        flat = [(i, S.toList rs) | (i,rs) <- IM.toList args]
    -- Warn when an argument that was marked inaccessible turns out reachable.
    checkAccessibility :: Ctxt OptInfo -> (Name, IntMap (Set Reason)) -> Idris ()
    checkAccessibility opt (n, reachable)
        | Just (Optimise inaccessible dt) <- lookupCtxtExact n opt
        , eargs@(_:_) <- [fmt n (S.toList rs) | (i,n) <- inaccessible, rs <- maybeToList $ IM.lookup i reachable]
        = warn $ show n ++ ": inaccessible arguments reachable:\n " ++ intercalate "\n " eargs
        | otherwise = return ()
      where
        fmt n [] = show n ++ " (no more information available)"
        fmt n rs = show n ++ " from " ++ intercalate ", " [show rn ++ " arg# " ++ show ri | (rn,ri) <- rs]
        warn = logErasure 0
-- | Find the minimal consistent usage by forward chaining.
minimalUsage :: Deps -> (Deps, (Set Name, UseMap))
minimalUsage = second gather . forwardChain
  where
    -- Split the reached nodes into reachable names (Result nodes) and a
    -- per-name map of used argument positions (Arg nodes).
    gather :: DepSet -> (Set Name, UseMap)
    gather = foldr ins (S.empty, M.empty) . M.toList
      where
        ins :: (Node, Set Reason) -> (Set Name, UseMap) -> (Set Name, UseMap)
        ins ((n, Result), rs) (ns, umap) = (S.insert n ns, umap)
        ins ((n, Arg i ), rs) (ns, umap) = (ns, M.insertWith (IM.unionWith S.union) n (IM.singleton i rs) umap)
-- Forward chaining: repeatedly discharge entries whose condition has become
-- empty (i.e. unconditionally reached), accumulating the reached nodes and
-- removing them from the remaining conditions until a fixpoint is hit.
-- Returns the residual (undischarged) deps together with the reached set.
forwardChain :: Deps -> (Deps, DepSet)
forwardChain deps
    | Just trivials <- M.lookup S.empty deps
        = (M.unionWith S.union trivials)
            `second` forwardChain (remove trivials . M.delete S.empty $ deps)
    | otherwise = (deps, M.empty)
  where
    -- Remove the given nodes from the Deps entirely,
    -- possibly creating new empty Conds.
    remove :: DepSet -> Deps -> Deps
    remove ds = M.mapKeysWith (M.unionWith S.union) (S.\\ M.keysSet ds)
-- | Build the dependency graph, starting the depth-first search from
-- a list of Names.
buildDepMap :: Ctxt InterfaceInfo -> [(Name, Int)] -> [(Name, Int)] ->
               Context -> [Name] -> Deps
buildDepMap ci used externs ctx startNames
    = addPostulates used $ dfs S.empty M.empty startNames
  where
    -- Seed the graph with hand-written facts about names that elude the
    -- analysis (run-time system entry points, primitives, %used pragmas).
    addPostulates :: [(Name, Int)] -> Deps -> Deps
    addPostulates used deps = foldr (\(ds, rs) -> M.insertWith (M.unionWith S.union) ds rs) deps (postulates used)
      where
        -- mini-DSL for postulates
        (==>) ds rs = (S.fromList ds, M.fromList [(r, S.empty) | r <- rs])
        it n is = [(sUN n, Arg i) | i <- is]
        mn n is = [(MN 0 $ pack n, Arg i) | i <- is]
        -- believe_me is special because it does not use all its arguments
        specialPrims = S.fromList [sUN "prim__believe_me"]
        usedNames = allNames deps S.\\ specialPrims
        usedPrims = [(p_name p, p_arity p) | p <- primitives, p_name p `S.member` usedNames]
        postulates used =
          [ [] ==> concat
            -- Main.main ( + export lists) and run__IO, are always evaluated
            -- but they elude analysis since they come from the seed term.
            [(map (\n -> (n, Result)) startNames)
            ,[(sUN "run__IO", Result), (sUN "run__IO", Arg 1)]
            ,[(sUN "call__IO", Result), (sUN "call__IO", Arg 2)]
            -- Explicit usage declarations from a %used pragma
            , map (\(n, i) -> (n, Arg i)) used
            -- MkIO is read by run__IO,
            -- but this cannot be observed in the source code of programs.
            , it "MkIO" [2]
            , it "prim__IO" [1]
            -- Foreign calls are built with pairs, but mkForeign doesn't
            -- have an implementation so analysis won't see them
            , [(pairCon, Arg 2),
               (pairCon, Arg 3)] -- Used in foreign calls
            -- these have been discovered as builtins but are not listed
            -- among Idris.Primitives.primitives
            --, mn "__MkPair" [2,3]
            , it "prim_fork" [0]
            , it "unsafePerformPrimIO" [1]
            -- believe_me is a primitive but it only uses its third argument
            -- it is special-cased in usedNames above
            , it "prim__believe_me" [2]
            -- in general, all other primitives use all their arguments
            , [(n, Arg i) | (n,arity) <- usedPrims, i <- [0..arity-1]]
            -- %externs are assumed to use all their arguments
            , [(n, Arg i) | (n,arity) <- externs, i <- [0..arity-1]]
            -- mkForeign* functions are special-cased below
            ]
          ]
    -- perform depth-first search
    -- to discover all the names used in the program
    -- and call getDeps for every name
    dfs :: Set Name -> Deps -> [Name] -> Deps
    dfs visited deps [] = deps
    dfs visited deps (n : ns)
      | n `S.member` visited = dfs visited deps ns
      -- merge this name's deps into the accumulator and enqueue the
      -- not-yet-visited names it mentions
      | otherwise = dfs (S.insert n visited) (M.unionWith (M.unionWith S.union) deps' deps) (next ++ ns)
      where
        next = [n | n <- S.toList depn, n `S.notMember` visited]
        depn = S.delete n $ allNames deps'
        deps' = getDeps n
-- extract all names that a function depends on
-- from the Deps of the function
allNames :: Deps -> Set Name
allNames = S.unions . map names . M.toList
where
names (cs, ns) = S.map fst cs `S.union` S.map fst (M.keysSet ns)
    -- get Deps for a Name
    -- Implementation-ctor field names get no deps of their own: those are
    -- created at the ctor application sites instead.
    getDeps :: Name -> Deps
    getDeps (SN (WhereN i (SN (ImplementationCtorN interfaceN)) (MN i' field)))
        = M.empty -- these deps are created when applying implementation ctors
    getDeps n = case lookupDefExact n ctx of
        Just def -> getDepsDef n def
        Nothing -> error $ "erasure checker: unknown reference: " ++ show n
    -- Dependencies of one definition; only CaseOp bodies are analysed.
    getDepsDef :: Name -> Def -> Deps
    getDepsDef fn (Function ty t) = error "a function encountered" -- TODO
    getDepsDef fn (TyDecl ty t) = M.empty
    getDepsDef fn (Operator ty n' f) = M.empty -- TODO: what's this?
    getDepsDef fn (CaseOp ci ty tys def tot cdefs)
        = getDepsSC fn etaVars (etaMap `M.union` varMap) sc
      where
        -- we must eta-expand the definition with fresh variables
        -- to capture these dependencies as well
        etaIdx = [length vars .. length tys - 1]
        etaVars = [eta i | i <- etaIdx]
        etaMap = M.fromList [varPair (eta i) i | i <- etaIdx]
        eta i = MN i (pack "eta")
        -- the variables that arose as function arguments only depend on (n, i)
        varMap = M.fromList [varPair v i | (v,i) <- zip vars [0..]]
        varPair n argNo = (n, VI
            { viDeps = M.singleton (fn, Arg argNo) S.empty
            , viFunArg = Just argNo
            , viMethod = Nothing
            })
        (vars, sc) = cases_runtime cdefs
        -- we use cases_runtime in order to have case-blocks
        -- resolved to top-level functions before our analysis
etaExpand :: [Name] -> Term -> Term
etaExpand [] t = t
etaExpand (n : ns) t = etaExpand ns (App Complete t (P Ref n Erased))
    -- Dependencies of a simplified case tree.
    getDepsSC :: Name -> [Name] -> Vars -> SC -> Deps
    getDepsSC fn es vs ImpossibleCase = M.empty
    getDepsSC fn es vs (UnmatchedCase msg) = M.empty
    -- for the purposes of erasure, we can disregard the projection
    getDepsSC fn es vs (ProjCase (Proj t i) alts) = getDepsSC fn es vs (ProjCase t alts) -- step
    getDepsSC fn es vs (ProjCase (P _ n _) alts) = getDepsSC fn es vs (Case Shared n alts) -- base
    -- other ProjCase's are not supported
    getDepsSC fn es vs (ProjCase t alts) = error $ "ProjCase not supported:\n" ++ show (ProjCase t alts)
    getDepsSC fn es vs (STerm t) = getDepsTerm vs [] (S.singleton (fn, Result)) (etaExpand es t)
    getDepsSC fn es vs (Case sh n alts)
        -- we case-split on this variable, which marks it as used
        -- (unless there is exactly one case branch)
        -- hence we add a new dependency, whose only precondition is
        -- that the result of this function is used at all
        = addTagDep $ unionMap (getDepsAlt fn es vs casedVar) alts -- coming from the whole subtree
      where
        addTagDep = case alts of
            [_] -> id -- single branch, tag not used
            _ -> M.insertWith (M.unionWith S.union) (S.singleton (fn, Result)) (viDeps casedVar)
        casedVar = fromMaybe (error $ "nonpatvar in case: " ++ show n) (M.lookup n vs)
    -- Dependencies of one case alternative; the VarInfo is that of the
    -- variable being cased on.
    getDepsAlt :: Name -> [Name] -> Vars -> VarInfo -> CaseAlt -> Deps
    getDepsAlt fn es vs var (FnCase n ns sc) = M.empty -- can't use FnCase at runtime
    getDepsAlt fn es vs var (ConstCase c sc) = getDepsSC fn es vs sc
    getDepsAlt fn es vs var (DefaultCase sc) = getDepsSC fn es vs sc
    getDepsAlt fn es vs var (SucCase n sc)
        = getDepsSC fn es (M.insert n var vs) sc -- we're not inserting the S-dependency here because it's special-cased
    -- data constructors
    getDepsAlt fn es vs var (ConCase n cnt ns sc)
        = getDepsSC fn es (vs' `M.union` vs) sc -- left-biased union
      where
        -- Here we insert dependencies that arose from pattern matching on a constructor.
        -- n = ctor name, j = ctor arg#, i = fun arg# of the cased var, cs = ctors of the cased var
        vs' = M.fromList [(v, VI
            { viDeps = M.insertWith S.union (n, Arg j) (S.singleton (fn, varIdx)) (viDeps var)
            , viFunArg = viFunArg var
            , viMethod = meth j
            })
          | (v, j) <- zip ns [0..]]
        -- this is safe because it's certainly a patvar
        varIdx = fromJust (viFunArg var)
        -- generate metamethod names, "n" is the implementation ctor
        meth :: Int -> Maybe Name
        meth | SN (ImplementationCtorN interfaceName) <- n = \j -> Just (mkFieldName n j)
             | otherwise = \j -> Nothing
    -- Named variables -> DeBruijn variables -> Conds/guards -> Term -> Deps
    -- The workhorse: dependencies of a term under the given condition.
    getDepsTerm :: Vars -> [(Name, Cond -> Deps)] -> Cond -> Term -> Deps
    -- named variables introduce dependencies as described in `vs'
    getDepsTerm vs bs cd (P _ n _)
        -- de bruijns (lambda-bound, let-bound vars)
        | Just deps <- lookup n bs
        = deps cd
        -- ctor-bound/arg-bound variables
        | Just var <- M.lookup n vs
        = M.singleton cd (viDeps var)
        -- sanity check: machine-generated names shouldn't occur at top-level
        | MN _ _ <- n
        = error $ "erasure analysis: variable " ++ show n ++ " unbound in " ++ show (S.toList cd)
        -- assumed to be a global reference
        | otherwise = M.singleton cd (M.singleton (n, Result) S.empty)
    -- dependencies of de bruijn variables are described in `bs'
    getDepsTerm vs bs cd (V i) = snd (bs !! i) cd
    getDepsTerm vs bs cd (Bind n bdr body)
        -- here we just push IM.empty on the de bruijn stack
        -- the args will be marked as used at the usage site
        | Lam ty <- bdr = getDepsTerm vs ((n, const M.empty) : bs) cd body
        | Pi _ ty _ <- bdr = getDepsTerm vs ((n, const M.empty) : bs) cd body
        -- let-bound variables can get partially evaluated
        -- it is sufficient just to plug the Cond in when the bound names are used
        | Let ty t <- bdr = var t cd `union` getDepsTerm vs ((n, const M.empty) : bs) cd body
        | NLet ty t <- bdr = var t cd `union` getDepsTerm vs ((n, const M.empty) : bs) cd body
      where
        var t cd = getDepsTerm vs bs cd t
    -- applications may add items to Cond
    getDepsTerm vs bs cd app@(App _ _ _)
        | (fun, args) <- unApply app = case fun of
            -- implementation constructors -> create metamethod deps
            P (DCon _ _ _) ctorName@(SN (ImplementationCtorN interfaceName)) _
                -> conditionalDeps ctorName args -- regular data ctor stuff
                    `union` unionMap (methodDeps ctorName) (zip [0..] args) -- method-specific stuff
            -- ordinary constructors
            P (TCon _ _) n _ -> unconditionalDeps args -- does not depend on anything
            P (DCon _ _ _) n _ -> conditionalDeps n args -- depends on whether (n,#) is used
            -- mkForeign* calls must be special-cased because they are variadic
            -- All arguments must be marked as used, except for the first four,
            -- which define the call type and are not needed at runtime.
            P _ (UN n) _
                | n == T.pack "mkForeignPrim"
                -> unconditionalDeps $ drop 4 args
            -- a bound variable might draw in additional dependencies,
            -- think: f x = x 0 <-- here, `x' _is_ used
            P _ n _
                -- debruijn-bound name
                | Just deps <- lookup n bs
                -> deps cd `union` unconditionalDeps args
                -- local name that refers to a method
                | Just var <- M.lookup n vs
                , Just meth <- viMethod var
                -> viDeps var `ins` conditionalDeps meth args -- use the method instead
                -- local name
                | Just var <- M.lookup n vs
                -- unconditional use
                -> viDeps var `ins` unconditionalDeps args
                -- global name
                | otherwise
                -- depends on whether the referred thing uses its argument
                -> conditionalDeps n args
            -- TODO: could we somehow infer how bound variables use their arguments?
            V i -> snd (bs !! i) cd `union` unconditionalDeps args
            -- we interpret applied lambdas as lets in order to reuse code here
            Bind n (Lam ty) t -> getDepsTerm vs bs cd (lamToLet app)
            -- and we interpret applied lets as lambdas
            Bind n ( Let ty t') t -> getDepsTerm vs bs cd (App Complete (Bind n (Lam ty) t) t')
            Bind n (NLet ty t') t -> getDepsTerm vs bs cd (App Complete (Bind n (Lam ty) t) t')
            Proj t i
                -> error $ "cannot[0] analyse projection !" ++ show i ++ " of " ++ show t
            Erased -> M.empty
            _ -> error $ "cannot analyse application of " ++ show fun ++ " to " ++ show args
      where
        union = M.unionWith $ M.unionWith S.union
        ins = M.insertWith (M.unionWith S.union) cd
        -- deps of the args under the current condition, unchanged
        unconditionalDeps :: [Term] -> Deps
        unconditionalDeps = unionMap (getDepsTerm vs bs cd)
        -- deps of the args, each guarded by "n uses argument i"
        conditionalDeps :: Name -> [Term] -> Deps
        conditionalDeps n
            = ins (M.singleton (n, Result) S.empty) . unionMap (getDepsArgs n) . zip indices
          where
            indices = map Just [0 .. getArity n - 1] ++ repeat Nothing
            getDepsArgs n (Just i, t) = getDepsTerm vs bs (S.insert (n, Arg i) cd) t -- conditional
            getDepsArgs n (Nothing, t) = getDepsTerm vs bs cd t -- unconditional
        -- deps of one method implementation inside an implementation ctor
        methodDeps :: Name -> (Int, Term) -> Deps
        methodDeps ctorName (methNo, t)
            = getDepsTerm (vars `M.union` vs) (bruijns ++ bs) cond body
          where
            vars = M.fromList [(v, VI
                { viDeps = deps i
                , viFunArg = Just i
                , viMethod = Nothing
                }) | (v, i) <- zip args [0..]]
            deps i = M.singleton (metameth, Arg i) S.empty
            bruijns = reverse [(n, \cd -> M.singleton cd (deps i)) | (i, n) <- zip [0..] args]
            cond = S.singleton (metameth, Result)
            metameth = mkFieldName ctorName methNo
            (args, body) = unfoldLams t
    -- projections
    getDepsTerm vs bs cd (Proj t (-1)) = getDepsTerm vs bs cd t -- naturals, (S n) -> n
    getDepsTerm vs bs cd (Proj t i) = error $ "cannot[1] analyse projection !" ++ show i ++ " of " ++ show t
    -- the easy cases
    getDepsTerm vs bs cd (Constant _) = M.empty
    getDepsTerm vs bs cd (TType _) = M.empty
    getDepsTerm vs bs cd (UType _) = M.empty
    getDepsTerm vs bs cd Erased = M.empty
    getDepsTerm vs bs cd Impossible = M.empty
    getDepsTerm vs bs cd t = error $ "cannot get deps of: " ++ show t
-- Get the number of arguments that might be considered for erasure.
getArity :: Name -> Int
getArity (SN (WhereN i' ctorName (MN i field)))
| Just (TyDecl (DCon _ _ _) ty) <- lookupDefExact ctorName ctx
= let argTys = map snd $ getArgTys ty
in if i <= length argTys
then length $ getArgTys (argTys !! i)
else error $ "invalid field number " ++ show i ++ " for " ++ show ctorName
| otherwise = error $ "unknown implementation constructor: " ++ show ctorName
getArity n = case lookupDefExact n ctx of
Just (CaseOp ci ty tys def tot cdefs) -> length tys
Just (TyDecl (DCon tag arity _) _) -> arity
Just (TyDecl (Ref) ty) -> length $ getArgTys ty
Just (Operator ty arity op) -> arity
Just df -> error $ "Erasure/getArity: unrecognised entity '"
++ show n ++ "' with definition: " ++ show df
Nothing -> error $ "Erasure/getArity: definition not found for " ++ show n
    -- convert applications of lambdas to lets
    -- Note that this transformation preserves de bruijn numbering
    -- Deliberately partial: only ever called on terms already matched as
    -- an application of a Bind/Lam (see the App case of getDepsTerm).
    lamToLet :: Term -> Term
    lamToLet (App _ (Bind n (Lam ty) tm) val) = Bind n (Let ty val) tm
-- split "\x_i -> T(x_i)" into [x_i] and T
unfoldLams :: Term -> ([Name], Term)
unfoldLams (Bind n (Lam ty) t) = let (ns,t') = unfoldLams t in (n:ns, t')
unfoldLams t = ([], t)
union :: Deps -> Deps -> Deps
union = M.unionWith (M.unionWith S.union)
unions :: [Deps] -> Deps
unions = M.unionsWith (M.unionWith S.union)
unionMap :: (a -> Deps) -> [a] -> Deps
unionMap f = M.unionsWith (M.unionWith S.union) . map f
-- | Make a field name out of a data constructor name and field number.
mkFieldName :: Name -> Int -> Name
mkFieldName ctor fieldNo = SN (WhereN fieldNo ctor (sMN fieldNo "field"))
| enolan/Idris-dev | src/Idris/Erasure.hs | bsd-3-clause | 22,527 | 0 | 21 | 6,840 | 6,621 | 3,443 | 3,178 | 304 | 50 |
{-# LANGUAGE CPP #-}
-- |Routines for integrating Tor with the standard network library.
module Tor.NetworkStack.System(systemNetworkStack) where
import Data.Binary.Put
import Data.ByteString(ByteString)
import Data.ByteString.Lazy(toStrict)
import qualified Data.ByteString as BS
import Data.Word
import Network(listenOn, PortID(..))
import Network.BSD
import Network.Socket as Sys hiding (recv)
import Network.Socket.ByteString.Lazy(sendAll)
import qualified Network.Socket.ByteString as Sys
import Tor.DataFormat.TorAddress
import Tor.NetworkStack
-- |A Tor-compatible network stack that uses the 'network' library.
-- Each field is backed by one of the system* helpers below, except for
-- 'write' (sendAll), 'flush' (a no-op for plain sockets) and the two
-- close operations (both plain socket close).
systemNetworkStack :: TorNetworkStack Socket Socket
systemNetworkStack = TorNetworkStack {
    Tor.NetworkStack.connect = systemConnect
  , Tor.NetworkStack.getAddress = systemLookup
  , Tor.NetworkStack.listen = systemListen
  , Tor.NetworkStack.accept = systemAccept
  , Tor.NetworkStack.recv = systemRead
  , Tor.NetworkStack.write = sendAll
  , Tor.NetworkStack.flush = const (return ())
  , Tor.NetworkStack.close = Sys.close
  , Tor.NetworkStack.lclose = Sys.close
  }
-- Open a TCP connection to the given host and port, resolving the host
-- first; Nothing when resolution yields no candidate addresses.
systemConnect :: String -> Word16 -> IO (Maybe Socket)
systemConnect host port =
  do let hints = defaultHints { addrFamily = AF_INET, addrSocketType = Stream }
     candidates <- getAddrInfo (Just hints) (Just host) (Just (show port))
     case candidates of
       [] -> return Nothing
       (info : _) ->
         do sock <- socket AF_INET Stream defaultProtocol
            Sys.connect sock (addrAddress info)
            return (Just sock)
-- Resolve a hostname to the list of Tor addresses it maps to.
systemLookup :: String -> IO [TorAddress]
systemLookup hostname =
  -- FIXME: Tack the hostname on the end, as a default?
  do results <- getAddrInfo Nothing (Just hostname) Nothing
     return [convertAddress (addrAddress ai) | ai <- results]
-- Open a listening socket on the given port.
systemListen :: Word16 -> IO Socket
systemListen = listenOn . PortNumber . fromIntegral
-- Translate a socket address into a TorAddress; only IPv4 and IPv6
-- addresses are supported.
convertAddress :: SockAddr -> TorAddress
convertAddress (SockAddrInet _ addr4) =
  IP4 (ip4ToString (toStrict (runPut (putWord32be addr4))))
convertAddress (SockAddrInet6 _ _ (w1,w2,w3,w4) _) =
  IP6 (ip6ToString (toStrict (runPut (mapM_ putWord32be [w1,w2,w3,w4]))))
convertAddress other =
  error ("Incompatible address type: " ++ show other)
-- Accept one connection on a listening socket, returning the new socket
-- together with the peer's address.
systemAccept :: Socket -> IO (Socket, TorAddress)
systemAccept lsock =
  do (conn, peer) <- Sys.accept lsock
     return (conn, convertAddress peer)
-- Read exactly the requested number of bytes, looping over short reads;
-- returns an empty ByteString on EOF (when recv yields no data).
systemRead :: Socket -> Int -> IO ByteString
systemRead _ 0 = return BS.empty
systemRead sock want =
  do chunk <- Sys.recv sock (fromIntegral want)
     if BS.null chunk
       then return BS.empty
       else do rest <- systemRead sock (want - BS.length chunk)
               return (chunk `BS.append` rest)
| GaloisInc/haskell-tor | src/Tor/NetworkStack/System.hs | bsd-3-clause | 2,757 | 0 | 16 | 531 | 842 | 455 | 387 | 62 | 2 |
-----------------------------------------------------------------------------
-- |
-- Module : Data.SBV.Examples.Misc.Floating
-- Copyright : (c) Levent Erkok
-- License : BSD3
-- Maintainer : [email protected]
-- Stability : experimental
--
-- Several examples involving IEEE-754 floating point numbers, i.e., single
-- precision 'Float' ('SFloat') and double precision 'Double' ('SDouble') types.
--
-- Note that arithmetic with floating point is full of surprises; due to precision
-- issues, associativity of arithmetic operations typically does not hold. Also,
-- the presence of @NaN@ is always something to look out for.
-----------------------------------------------------------------------------
{-# LANGUAGE ScopedTypeVariables #-}
module Data.SBV.Examples.Misc.Floating where
import Data.SBV
-----------------------------------------------------------------------------
-- * FP addition is not associative
-----------------------------------------------------------------------------
-- | Prove that floating point addition is not associative. We have:
--
-- >>> prove assocPlus
-- Falsifiable. Counter-example:
-- s0 = -9.62965e-35 :: Float
-- s1 = Infinity :: Float
-- s2 = -Infinity :: Float
--
-- Indeed:
--
-- >>> let i = 1/0 :: Float
-- >>> (-9.62965e-35 + (i + (-i)))
-- NaN
-- >>> ((-9.62965e-35 + i) + (-i))
-- NaN
--
-- But keep in mind that @NaN@ does not equal itself in the floating point world! We have:
--
-- >>> let nan = 0/0 :: Float in nan == nan
-- False
-- Symbolic statement of associativity of (+) on floats.
assocPlus :: SFloat -> SFloat -> SFloat -> SBool
assocPlus a b c = (a + (b + c)) .== ((a + b) + c)
-- | Prove that addition is not associative, even if we ignore @NaN@/@Infinity@ values.
-- To do this, we use the predicate 'isPointFP', which is true of a floating point
-- number ('SFloat' or 'SDouble') if it is neither @NaN@ nor @Infinity@. (That is, it's a
-- representable point in the real-number line.)
--
-- We have:
--
-- >>> assocPlusRegular
-- Falsifiable. Counter-example:
-- x = -1.0491915e7 :: Float
-- y = 1967115.5 :: Float
-- z = 982003.94 :: Float
--
-- Indeed, we have:
--
-- >>> ((-1.0491915e7) + (1967115.5 + 982003.94)) :: Float
-- -7542795.5
-- >>> (((-1.0491915e7) + 1967115.5) + 982003.94) :: Float
-- -7542796.0
--
-- Note the significant difference between two additions!
assocPlusRegular :: IO ThmResult
assocPlusRegular = prove $ do
        [x, y, z] <- sFloats ["x", "y", "z"]
        let sumLR = x + (y + z)
            sumRL = (x + y) + z
        -- rule out NaN/Infinity in the intermediate sums
        constrain $ isPointFP sumLR
        constrain $ isPointFP sumRL
        return $ sumLR .== sumRL
-----------------------------------------------------------------------------
-- * FP addition by non-zero can result in no change
-----------------------------------------------------------------------------
-- | Demonstrate that @a+b = a@ does not necessarily mean @b@ is @0@ in the floating point world,
-- even when we disallow the obvious solution when @a@ and @b@ are @Infinity.@
-- We have:
--
-- >>> nonZeroAddition
-- Falsifiable. Counter-example:
-- a = -2.0 :: Float
-- b = -3.0e-45 :: Float
--
-- Indeed, we have:
--
-- >>> (-2.0) + (-3.0e-45) == (-2.0 :: Float)
-- True
--
-- But:
--
-- >>> -3.0e-45 == (0::Float)
-- False
--
nonZeroAddition :: IO ThmResult
nonZeroAddition = prove $ do
        [a, b] <- sFloats ["a", "b"]
        -- both operands must be representable points (no NaN/Infinity)
        mapM_ (constrain . isPointFP) [a, b]
        constrain $ a + b .== a
        return $ b .== 0
-----------------------------------------------------------------------------
-- * FP multiplicative inverses may not exist
-----------------------------------------------------------------------------
-- | This example illustrates that @a * (1/a)@ does not necessarily equal @1@. Again,
-- we protect against division by @0@ and @NaN@/@Infinity@.
--
-- We have:
--
-- >>> multInverse
-- Falsifiable. Counter-example:
-- a = -2.0445642768532407e154 :: Double
--
-- Indeed, we have:
--
-- >>> let a = -2.0445642768532407e154 :: Double
-- >>> a * (1/a)
-- 0.9999999999999999
multInverse :: IO ThmResult
multInverse = prove $ do
        a <- sDouble "a"
        let aInv = 1 / a
        -- both the value and its reciprocal must be representable points
        constrain $ isPointFP a
        constrain $ isPointFP aInv
        return $ a * aInv .== 1
-----------------------------------------------------------------------------
-- * Effect of rounding modes
-----------------------------------------------------------------------------
-- | One interesting aspect of floating-point is that the chosen rounding-mode
-- can effect the results of a computation if the exact result cannot be precisely
-- represented. SBV exports the functions 'fpAdd', 'fpSub', 'fpMul', 'fpDiv', 'fpFMA'
-- and 'fpSqrt' which allows users to specify the IEEE supported 'RoundingMode' for
-- the operation. (Also see the class 'RoundingFloat'.) This example illustrates how SBV
-- can be used to find rounding-modes where, for instance, addition can produce different
-- results. We have:
--
-- >>> roundingAdd
-- Satisfiable. Model:
-- rm = RoundTowardPositive :: RoundingMode
-- x = 246080.08 :: Float
-- y = 16255.999 :: Float
--
-- Unfortunately we can't directly validate this result at the Haskell level, as Haskell only supports
-- 'RoundNearestTiesToEven'. We have:
--
-- >>> (246080.08 + 16255.999) :: Float
-- 262336.06
--
-- While we cannot directly see the result when the mode is 'RoundTowardPositive' in Haskell, we can use
-- SBV to provide us with that result thusly:
--
-- >>> sat $ \z -> z .== fpAdd sRoundTowardPositive 246080.08 (16255.999::SFloat)
-- Satisfiable. Model:
-- s0 = 262336.1 :: Float
--
-- We can see that these two results are indeed different. To see why, one would have to convert the
-- individual numbers to Float's, which would induce rounding-errors, add them up, and round-back;
-- a tedious operation, but one that might prove illuminating for the interested reader. We'll merely
-- note that floating point representation and semantics is indeed a thorny
-- subject, and point to <https://ece.uwaterloo.ca/~dwharder/NumericalAnalysis/02Numerics/Double/paper.pdf> as
-- an excellent guide.
roundingAdd :: IO SatResult
roundingAdd = sat $ do
        rnd :: SRoundingMode <- free "rm"
        -- look for a mode other than the Haskell default
        constrain $ rnd ./= literal RoundNearestTiesToEven
        x <- sFloat "x"
        y <- sFloat "y"
        let viaFpAdd = fpAdd rnd x y
            viaPlus  = x + y
        constrain $ isPointFP viaFpAdd
        constrain $ isPointFP viaPlus
        return $ viaFpAdd ./= viaPlus
| Copilot-Language/sbv-for-copilot | Data/SBV/Examples/Misc/Floating.hs | bsd-3-clause | 6,829 | 0 | 13 | 1,570 | 603 | 364 | 239 | 33 | 1 |
{-# LANGUAGE FlexibleContexts #-}
-- | @futhark c@
module Futhark.CLI.C (main) where
import Futhark.Actions (compileCAction)
import Futhark.Compiler.CLI
import Futhark.Passes (sequentialCpuPipeline)
-- | Run @futhark c@
main :: String -> [String] -> IO ()
main =
  compilerMain () [] "Compile sequential C"
    "Generate sequential C code from optimised Futhark program."
    sequentialCpuPipeline
    onCompiled
  where
    -- Emit sequential C for the optimised program at the requested path.
    onCompiled fcfg () mode outpath prog =
      actionProcedure (compileCAction fcfg mode outpath) prog
| diku-dk/futhark | src/Futhark/CLI/C.hs | isc | 495 | 0 | 9 | 82 | 116 | 65 | 51 | 14 | 1 |
-- | ModKey type: Grouping the modifier keys with the key
{-# OPTIONS -fno-warn-orphans #-}
{-# LANGUAGE NoImplicitPrelude, DeriveGeneric #-}
module Graphics.UI.Bottle.ModKey
( ModKey(..), ctrlMods, altMods, shiftMods, superMods
, ctrl, alt, shift, super
, prettyKey
, pretty
) where
import Prelude.Compat
import Data.Aeson (ToJSON(..), FromJSON(..))
import Data.List (isPrefixOf)
import GHC.Generics (Generic)
import qualified Graphics.UI.GLFW as GLFW
import Graphics.UI.GLFW.Instances ()
-- Orphan instance (see -fno-warn-orphans above): modifier-key sets form a
-- monoid under field-wise OR, with "no modifiers" as the identity.
instance Monoid GLFW.ModifierKeys where
    mempty = GLFW.ModifierKeys False False False False
    mappend
        (GLFW.ModifierKeys a0 b0 c0 d0)
        (GLFW.ModifierKeys a1 b1 c1 d1) =
        GLFW.ModifierKeys (a0||a1) (b0||b1) (c0||c1) (d0||d1)
-- | Modifier set containing only Ctrl.
ctrlMods :: GLFW.ModifierKeys
ctrlMods = mempty { GLFW.modifierKeysControl = True }
-- | Modifier set containing only Alt.
altMods :: GLFW.ModifierKeys
altMods = mempty { GLFW.modifierKeysAlt = True }
-- | Modifier set containing only Shift.
shiftMods :: GLFW.ModifierKeys
shiftMods = mempty { GLFW.modifierKeysShift = True }
-- | Modifier set containing only Super.
superMods :: GLFW.ModifierKeys
superMods = mempty { GLFW.modifierKeysSuper = True }
-- | Attach the corresponding single modifier to a key.
ctrl :: GLFW.Key -> ModKey
ctrl = ModKey ctrlMods
alt :: GLFW.Key -> ModKey
alt = ModKey altMods
shift :: GLFW.Key -> ModKey
shift = ModKey shiftMods
super :: GLFW.Key -> ModKey
super = ModKey superMods
-- | A key together with the modifier keys held when it is pressed.
data ModKey = ModKey GLFW.ModifierKeys GLFW.Key
    deriving (Generic, Show, Eq, Ord)
-- JSON instances use the Generic representation.
instance ToJSON ModKey
instance FromJSON ModKey
-- Human-readable key name: strip GLFW's "Key'" constructor prefix.
prettyKey :: GLFW.Key -> String
prettyKey key
    | "Key'" `isPrefixOf` shown = drop 4 shown
    | otherwise = shown
    where
        shown = show key
-- Render held modifiers as "Ctrl+"/"Alt+"/"Shift+" prefixes.
-- (Super is intentionally not rendered, matching existing behaviour.)
prettyModKeys :: GLFW.ModifierKeys -> String
prettyModKeys ms = concat
    [ prefix
    | (held, prefix) <-
        [ (GLFW.modifierKeysControl ms, "Ctrl+")
        , (GLFW.modifierKeysAlt ms, "Alt+")
        , (GLFW.modifierKeysShift ms, "Shift+")
        ]
    , held
    ]
-- Full rendering of a ModKey: modifier prefixes followed by the key name.
pretty :: ModKey -> String
pretty (ModKey mods k) = prettyModKeys mods ++ prettyKey k
| da-x/lamdu | bottlelib/Graphics/UI/Bottle/ModKey.hs | gpl-3.0 | 1,909 | 0 | 11 | 392 | 588 | 324 | 264 | 50 | 1 |
{-# LANGUAGE OverloadedStrings, FlexibleContexts #-}
module WebParsing.PostParser
(getPost) where
import Network.HTTP
import Database.PostInsertion(insertPost, insertPostCategory)
import Database.Persist.Sqlite(runSqlite, runMigration)
import Config (databasePath)
import WebParsing.ParsingHelp
import qualified Data.Text as T
import Data.List
import Data.Char
import Text.HTML.TagSoup
import Text.HTML.TagSoup.Match
import Database.Tables
import qualified Text.Parsec as P
import WebParsing.ParsecCombinators(getCourseFromTag, getPostType, getDepartmentName,
parsingAlgoOne)
-- | Base URL of the Faculty of Arts & Science calendar; post pages are
-- fetched relative to this.
fasCalendarURL :: String
fasCalendarURL = "http://calendar.artsci.utoronto.ca/"
-- | Fetch one calendar page (path relative to 'fasCalendarURL'), split it
-- into per-post tag groups, and insert each post into the database.
getPost :: String -> IO ()
getPost str = do
    let path = fasCalendarURL ++ str
    rsp <- simpleHTTP (getRequest path)
    body <- getResponseBody rsp
    let tags = filter isNotComment $ parseTags body
        postsSoup = secondH2 tags
        posts = partitions isPostName postsSoup
    mapM_ addPostToDatabase posts
    print $ "parsing " ++ str
    where
        isNotComment (TagComment _) = False
        isNotComment _ = True
        -- NOTE(review): when at least two <h2> sections exist, this returns a
        -- prefix of the *whole* tag list (everything before the "courses"
        -- anchor), not the second <h2> section itself; the sections result is
        -- only used for its length. Looks intentional but worth confirming.
        secondH2 tags =
            let sect = sections (isTagOpenName "h2") tags
            in
                if (length sect) < 2
                then
                    []
                else
                    takeWhile isNotCoursesSection tags
        isNotCoursesSection tag = not (tagOpenAttrLit "a" ("name", "courses") tag)
        -- A post anchor is an <a name="..."> whose name is exactly 9 chars.
        isPostName tag = tagOpenAttrNameLit "a" "name" (\nameValue -> (length nameValue) == 9) tag
-- | Extract the post code, name and type from one post's tag group and
-- insert the post (and its categories) into the database.
--
-- Cleanup: the original also computed a 'prereqs' list (course links via
-- getCourseFromTag) that was never used anywhere; the dead binding and its
-- 'isCourseTag' helper have been removed. No behaviour changes — the
-- binding was lazy and never forced.
addPostToDatabase :: [Tag String] -> IO ()
addPostToDatabase tags = do
    -- The first <a name="..."> anchor carries the 9-character post code.
    -- NOTE(review): '!! 0' is partial — a post group without an <a> tag
    -- would crash here; preserved as-is.
    let postCode = T.pack (fromAttrib "name" ((take 1 $ filter (isTagOpenName "a") tags) !! 0))
        fullPostName = innerText (take 1 $ filter (isTagText) tags)
        postType = T.pack $ getPostType postCode
        departmentName = T.pack $ (getDepartmentName fullPostName postType)
    addPostCategoriesToDatabase (T.unpack postCode) (innerText tags)
    insertPost departmentName postType postCode
-- | Parse the post's text into category strings and store each plausible
-- category against the post code.
addPostCategoriesToDatabase :: String -> String -> IO ()
addPostCategoriesToDatabase postCode tagText =
    case P.parse parsingAlgoOne "(source)" tagText of
        Right categories ->
            mapM_ (addCategoryToDatabase postCode) (filter isCategory categories)
        Left _ -> print "Failed."
    where
        -- A category is at least 7 characters long and mentions none of
        -- the listed marker words.
        isCategory str =
            length str >= 7 &&
            not (any (`isInfixOf` str)
                     ["First", "Second", "Third", "suitable", "Core", "Electives"])
-- | Store a single (category, post code) pair.
addCategoryToDatabase :: String -> String -> IO ()
addCategoryToDatabase code cat = insertPostCategory (T.pack cat) (T.pack code)
| miameng/courseography | app/WebParsing/PostParser.hs | gpl-3.0 | 3,046 | 0 | 19 | 728 | 875 | 448 | 427 | 63 | 3 |
{-# OPTIONS -fglasgow-exts #-}
-----------------------------------------------------------------------------
{-| Program : prim4.hs
Copyright : (c) David Harley 2010
Project : qtHaskell
Version : 1.1.4
Modified : 2010-09-02 17:02:47
Warning : this file is machine generated - do not modify.
--}
-----------------------------------------------------------------------------
module Main where
import Qtc.Classes.Qccs
import Qtc.Classes.Qccs_h
import Qtc.Classes.Gui
import Qtc.ClassTypes.Gui
import Qtc.Gui.Base
import Qtc.Enums.Base
import Qtc.Enums.Classes.Core
import Qtc.Enums.Core.Qt
import Qtc.Gui.QApplication
import Qtc.Gui.QMessageBox
import Qtc.Gui.QLabel
import Qtc.Gui.QLabel_h
import Qtc.Gui.QKeyEvent
import Data.IORef
import Data.IntMap
type CountMap = IntMap (IORef Int)
-- | Build the initial counter map: one 'IORef' starting at 0 for every
-- key code from 'A' to 'Z'.
createCM :: IO CountMap
createCM = do
    let keyCodes = [qEnum_toInt eKey_A .. qEnum_toInt eKey_Z]
    cells <- mapM mkCell keyCodes
    return (fromList cells)
  where
    -- Pair a key code with a fresh zero-initialised counter
    mkCell code = do
        ref <- newIORef 0
        return (code, ref)
main :: IO Int
-- ^ Show a label, install a key-press handler on it, and run the Qt
-- event loop; the exit code comes from 'qApplicationExec'.
main = do
  qApplication ()
  label <- qLabel "press any key from 'A' to 'Z'"
  setAlignment label (fAlignCenter::Alignment)
  resize label (200::Int, 60::Int)
  msgBox <- qMessageBox label
  counters <- createCM
  -- route the label's key-press events through our handler
  setHandler label "keyPressEvent(QKeyEvent*)" $ tlkp counters msgBox
  qshow label ()
  qApplicationExec ()
tlkp :: CountMap -> QMessageBox () -> QLabel () -> QKeyEvent () -> IO ()
-- ^ Key-press handler: if the pressed key is in the counter map, bump
-- its counter and pop up a message box reporting the new total; in all
-- cases delegate to the default key-press handler afterwards.
tlkp cm mb this ke
  = do
    k <- key ke ()
    if member k cm
     then
      do
       cck <- readIORef (cm ! k)
       let cp1 = cck + 1
       t <- text ke ()
       setText mb $ "You have pressed the '" ++ t ++ "' key " ++ tpf cp1 ++ "!"
       -- writeIORef instead of modifyIORef (\_ -> cp1): same effect, but
       -- clearer and avoids queueing a lazy replacement thunk in the IORef
       writeIORef (cm ! k) cp1
       qshow mb ()
     else
      return ()
    keyPressEvent_h this ke
  where
    -- Human-friendly count phrase.  Counters start at 1 by the time we
    -- get here, but the catch-all guard keeps the function total
    -- (the original guards were non-exhaustive for c < 1).
    tpf c
     | c == 1    = "once"
     | c == 2    = "twice"
     | otherwise = show c ++ " times"
| uduki/hsQt | examples/prim4.hs | bsd-2-clause | 1,841 | 0 | 15 | 429 | 576 | 291 | 285 | 54 | 2 |
{-# LANGUAGE CPP #-}
-- | Handy functions for creating much Core syntax
module MkCore (
-- * Constructing normal syntax
mkCoreLet, mkCoreLets,
mkCoreApp, mkCoreApps, mkCoreConApps,
mkCoreLams, mkWildCase, mkIfThenElse,
mkWildValBinder, mkWildEvBinder,
sortQuantVars, castBottomExpr,
-- * Constructing boxed literals
mkWordExpr, mkWordExprWord,
mkIntExpr, mkIntExprInt,
mkIntegerExpr,
mkFloatExpr, mkDoubleExpr,
mkCharExpr, mkStringExpr, mkStringExprFS,
-- * Floats
FloatBind(..), wrapFloat,
-- * Constructing equality evidence boxes
mkEqBox,
-- * Constructing general big tuples
-- $big_tuples
mkChunkified,
-- * Constructing small tuples
mkCoreVarTup, mkCoreVarTupTy, mkCoreTup,
-- * Constructing big tuples
mkBigCoreVarTup, mkBigCoreVarTupTy,
mkBigCoreTup, mkBigCoreTupTy,
-- * Deconstructing small tuples
mkSmallTupleSelector, mkSmallTupleCase,
-- * Deconstructing big tuples
mkTupleSelector, mkTupleCase,
-- * Constructing list expressions
mkNilExpr, mkConsExpr, mkListExpr,
mkFoldrExpr, mkBuildExpr,
-- * Error Ids
mkRuntimeErrorApp, mkImpossibleExpr, errorIds,
rEC_CON_ERROR_ID, iRREFUT_PAT_ERROR_ID, rUNTIME_ERROR_ID,
nON_EXHAUSTIVE_GUARDS_ERROR_ID, nO_METHOD_BINDING_ERROR_ID,
pAT_ERROR_ID, eRROR_ID, rEC_SEL_ERROR_ID, aBSENT_ERROR_ID,
uNDEFINED_ID, tYPE_ERROR_ID, undefinedName
) where
#include "HsVersions.h"
import Id
import Var ( EvVar, setTyVarUnique )
import CoreSyn
import CoreUtils ( exprType, needsCaseBinding, bindNonRec )
import Literal
import HscTypes
import TysWiredIn
import PrelNames
import TcType ( mkSigmaTy )
import Type
import Coercion
import TysPrim
import DataCon ( DataCon, dataConWorkId )
import IdInfo ( vanillaIdInfo, setStrictnessInfo,
setArityInfo )
import Demand
import Name hiding ( varName )
import Outputable
import FastString
import UniqSupply
import BasicTypes
import Util
import Pair
import Constants
import DynFlags
import Data.Char ( ord )
import Data.List
import Data.Ord
#if __GLASGOW_HASKELL__ < 709
import Data.Word ( Word )
#endif
infixl 4 `mkCoreApp`, `mkCoreApps`
{-
************************************************************************
* *
\subsection{Basic CoreSyn construction}
* *
************************************************************************
-}
sortQuantVars :: [Var] -> [Var]
-- Sort the variables (KindVars, TypeVars, and Ids)
-- into order: Kind, then Type, then Id
sortQuantVars vs = sortBy (comparing sort_key) vs
  where
    -- Pair each variable with its category rank; ties within a rank are
    -- broken by the variables' own Ord instance, as before
    sort_key v = (rank v, v)

    rank :: Var -> Int
    rank v
      | isKindVar v = 1
      | isTyVar v   = 2
      | otherwise   = 3
-- | Bind a binding group over an expression, using a @let@ or @case@ as
-- appropriate (see "CoreSyn#let_app_invariant")
mkCoreLet :: CoreBind -> CoreExpr -> CoreExpr
mkCoreLet bind body = case bind of
  NonRec bndr rhs
    -- See Note [CoreSyn let/app invariant]: a non-recursive binding that
    -- needs case-binding must be expressed as a strict Case, not a Let
    | needsCaseBinding (idType bndr) rhs
    -> Case rhs bndr (exprType body) [(DEFAULT, [], body)]
  _ -> Let bind body
-- | Bind a list of binding groups over an expression. The leftmost binding
-- group becomes the outermost group in the resulting expression
mkCoreLets :: [CoreBind] -> CoreExpr -> CoreExpr
mkCoreLets []     body = body
mkCoreLets (b:bs) body = mkCoreLet b (mkCoreLets bs body)
-- | Construct an expression which represents the application of one expression
-- to the other
mkCoreApp :: CoreExpr -> CoreExpr -> CoreExpr
-- Respects the let/app invariant by building a case expression where necessary
-- See CoreSyn Note [CoreSyn let/app invariant]
-- Type and coercion arguments can never need case-binding, so they are
-- applied directly; only value arguments go through 'mk_val_app'
mkCoreApp fun (Type ty) = App fun (Type ty)
mkCoreApp fun (Coercion co) = App fun (Coercion co)
mkCoreApp fun arg = ASSERT2( isFunTy fun_ty, ppr fun $$ ppr arg )
                    mk_val_app fun arg arg_ty res_ty
  where
    fun_ty = exprType fun
    (arg_ty, res_ty) = splitFunTy fun_ty
-- | Construct an expression which represents the application of a number of
-- expressions to another. The leftmost expression in the list is applied first
-- Respects the let/app invariant by building a case expression where necessary
-- See CoreSyn Note [CoreSyn let/app invariant]
mkCoreApps :: CoreExpr -> [CoreExpr] -> CoreExpr
-- Slightly more efficient version of (foldl mkCoreApp): the function type
-- is computed once up front and threaded through the loop instead of
-- being recomputed by 'exprType' at every step
mkCoreApps orig_fun orig_args
  = go orig_fun (exprType orig_fun) orig_args
  where
    go fun _      []                   = fun
    go fun fun_ty (Type ty     : args) = go (App fun (Type ty))     (applyTy fun_ty ty) args
    go fun fun_ty (Coercion co : args) = go (App fun (Coercion co)) (applyCo fun_ty co) args
    go fun fun_ty (arg         : args) = ASSERT2( isFunTy fun_ty, ppr fun_ty $$ ppr orig_fun
                                                                  $$ ppr orig_args )
                                         go (mk_val_app fun arg arg_ty res_ty) res_ty args
      where
        (arg_ty, res_ty) = splitFunTy fun_ty
-- | Construct an expression which represents the application of a number of
-- expressions to that of a data constructor expression. The leftmost expression
-- in the list is applied first
mkCoreConApps :: DataCon -> [CoreExpr] -> CoreExpr
-- Uses the constructor's worker Id as the applied function
mkCoreConApps con = mkCoreApps (Var (dataConWorkId con))
mk_val_app :: CoreExpr -> CoreExpr -> Type -> Type -> CoreExpr
-- Build an application (e1 e2), or a strict binding
-- (case e2 of x -> e1 x), using the latter when necessary to
-- respect the let/app invariant
-- See Note [CoreSyn let/app invariant]
mk_val_app fun arg arg_ty res_ty =
    if needsCaseBinding arg_ty arg
      then Case arg arg_id res_ty [(DEFAULT, [], App fun (Var arg_id))]
      else App fun arg      -- The vastly common case
  where
    arg_id = mkWildValBinder arg_ty
        -- Lots of shadowing, but it doesn't matter, because 'fun'
        -- should not have a free wild-id.
        --
        -- This is Dangerous.  But this is the only place we play this
        -- game: mk_val_app returns an expression that does not have a
        -- free wild-id, so the only thing that can go wrong is if you
        -- take apart this case expression and pass a fragment of it
        -- as the fun part of another 'mk_val_app'.
-----------
mkWildEvBinder :: PredType -> EvVar
-- An evidence binder is just a wildcard value binder at the predicate type
mkWildEvBinder = mkWildValBinder
-- | Make a /wildcard binder/: a local Id with the fixed 'wildCardName'.
-- Use it only at a *binding* site, never at occurrence sites: it has a
-- single, fixed unique, so it is very easy to get into difficulties with
-- shadowing.  That's why it is used so little.
-- See Note [WildCard binders] in SimplEnv
mkWildValBinder :: Type -> Id
mkWildValBinder = mkLocalId wildCardName
mkWildCase :: CoreExpr -> Type -> Type -> [CoreAlt] -> CoreExpr
-- Make a case expression whose case binder is unused
-- The alts should not have any occurrences of WildId
mkWildCase scrut scrut_ty res_ty alts =
    Case scrut wild_bndr res_ty alts
  where
    wild_bndr = mkWildValBinder scrut_ty
mkIfThenElse :: CoreExpr -> CoreExpr -> CoreExpr -> CoreExpr
-- Build @case guard of { False -> else; True -> then }@
mkIfThenElse guard then_expr else_expr =
    mkWildCase guard boolTy res_ty alts
  where
    -- Not going to be refining, so okay to take the type of the "then" clause
    res_ty = exprType then_expr
    -- Alternatives must be in increasing order of constructor tag:
    -- False before True
    alts = [ (DataAlt falseDataCon, [], else_expr)
           , (DataAlt trueDataCon,  [], then_expr) ]
castBottomExpr :: CoreExpr -> Type -> CoreExpr
-- (castBottomExpr e ty), assuming that 'e' diverges,
-- return an expression of type 'ty'
-- See Note [Empty case alternatives] in CoreSyn
castBottomExpr e res_ty =
    if e_ty `eqType` res_ty
      then e                                          -- already the right type
      else Case e (mkWildValBinder e_ty) res_ty []    -- empty-alternative case
  where
    e_ty = exprType e
{-
The functions from this point don't really do anything cleverer than
their counterparts in CoreSyn, but they are here for consistency
-}
-- | Create a lambda where the given expression has a number of variables
-- bound over it. The leftmost binder is that bound by the outermost
-- lambda in the result
-- Currently just a synonym for 'mkLams'; kept for API consistency
mkCoreLams :: [CoreBndr] -> CoreExpr -> CoreExpr
mkCoreLams = mkLams
{-
************************************************************************
* *
\subsection{Making literals}
* *
************************************************************************
-}
-- Boxed-literal smart constructors: each builds the boxed data
-- constructor applied to the corresponding primitive literal.

-- | Create a 'CoreExpr' which will evaluate to the given @Int@
mkIntExpr :: DynFlags -> Integer -> CoreExpr       -- Result = I# i :: Int
mkIntExpr dflags i = mkConApp intDataCon [mkIntLit dflags i]

-- | Create a 'CoreExpr' which will evaluate to the given @Int@
mkIntExprInt :: DynFlags -> Int -> CoreExpr        -- Result = I# i :: Int
mkIntExprInt dflags i = mkConApp intDataCon [mkIntLitInt dflags i]

-- | Create a 'CoreExpr' which will evaluate to a @Word@ with the given value
mkWordExpr :: DynFlags -> Integer -> CoreExpr
mkWordExpr dflags w = mkConApp wordDataCon [mkWordLit dflags w]

-- | Create a 'CoreExpr' which will evaluate to the given @Word@
mkWordExprWord :: DynFlags -> Word -> CoreExpr
mkWordExprWord dflags w = mkConApp wordDataCon [mkWordLitWord dflags w]

-- | Create a 'CoreExpr' which will evaluate to the given @Integer@
-- The Integer tycon is fetched with 'lookupTyCon', hence the monadic result
mkIntegerExpr :: MonadThings m => Integer -> m CoreExpr  -- Result :: Integer
mkIntegerExpr i = do t <- lookupTyCon integerTyConName
                     return (Lit (mkLitInteger i (mkTyConTy t)))

-- | Create a 'CoreExpr' which will evaluate to the given @Float@
mkFloatExpr :: Float -> CoreExpr
mkFloatExpr f = mkConApp floatDataCon [mkFloatLitFloat f]

-- | Create a 'CoreExpr' which will evaluate to the given @Double@
mkDoubleExpr :: Double -> CoreExpr
mkDoubleExpr d = mkConApp doubleDataCon [mkDoubleLitDouble d]

-- | Create a 'CoreExpr' which will evaluate to the given @Char@
mkCharExpr :: Char -> CoreExpr      -- Result = C# c :: Int
mkCharExpr c = mkConApp charDataCon [mkCharLit c]
-- | Create a 'CoreExpr' which will evaluate to the given @String@
mkStringExpr :: MonadThings m => String -> m CoreExpr -- Result :: String

-- | Create a 'CoreExpr' which will evaluate to a string morally equivalent to the given @FastString@
mkStringExprFS :: MonadThings m => FastString -> m CoreExpr -- Result :: String

mkStringExpr str = mkStringExprFS (mkFastString str)

mkStringExprFS str
  | nullFS str
  = return (mkNilExpr charTy)          -- empty string: just []
  | all safeChar chars
  = unpack_with unpackCStringName      -- ASCII fast path
  | otherwise
  = unpack_with unpackCStringUtf8Name  -- general UTF8 path
  where
    chars = unpackFS str
    -- A char is "safe" when 1 <= ord c <= 0x7F; presumably NUL is
    -- excluded because the literal is stored as a C string -- confirm
    safeChar c = ord c >= 1 && ord c <= 0x7F
    -- Shared tail: apply the chosen unpack function to the raw bytes
    unpack_with unpack_name = do
        unpack_id <- lookupId unpack_name
        return (App (Var unpack_id) (Lit (MachStr (fastStringToByteString str))))
-- This takes a ~# b (or a ~# R b) and returns a ~ b (or Coercible a b)
mkEqBox :: Coercion -> CoreExpr
mkEqBox co = ASSERT2( typeKind ty2 `eqKind` k, ppr co $$ ppr ty1 $$ ppr ty2 $$ ppr (typeKind ty1) $$ ppr (typeKind ty2) )
             Var (dataConWorkId datacon) `mkTyApps` [k, ty1, ty2] `App` Coercion co
  where (Pair ty1 ty2, role) = coercionKindRole co
        k = typeKind ty1
        -- The boxing data constructor is chosen by the coercion's role
        datacon = case role of
            Nominal ->          eqBoxDataCon
            Representational -> coercibleDataCon
            Phantom ->          pprPanic "mkEqBox does not support boxing phantom coercions"
                                         (ppr co)
{-
************************************************************************
* *
\subsection{Tuple constructors}
* *
************************************************************************
-}
-- $big_tuples
-- #big_tuples#
--
-- GHCs built in tuples can only go up to 'mAX_TUPLE_SIZE' in arity, but
-- we might conceivably want to build such a massive tuple as part of the
-- output of a desugaring stage (notably that for list comprehensions).
--
-- We call tuples above this size \"big tuples\", and emulate them by
-- creating and pattern matching on >nested< tuples that are expressible
-- by GHC.
--
-- Nesting policy: it's better to have a 2-tuple of 10-tuples (3 objects)
-- than a 10-tuple of 2-tuples (11 objects), so we want the leaves of any
-- construction to be big.
--
-- If you just use the 'mkBigCoreTup', 'mkBigCoreVarTupTy', 'mkTupleSelector'
-- and 'mkTupleCase' functions to do all your work with tuples you should be
-- fine, and not have to worry about the arity limitation at all.
-- | Lifts a \"small\" constructor into a \"big\" constructor by recursive decomposition
mkChunkified :: ([a] -> a)      -- ^ \"Small\" constructor function, of maximum input arity 'mAX_TUPLE_SIZE'
             -> [a]             -- ^ Possible \"big\" list of things to construct from
             -> a               -- ^ Constructed thing made possible by recursive decomposition
mkChunkified small_tuple as = go (chunkify as)
  where
    -- A single chunk fits the small constructor directly; otherwise
    -- build each chunk and recursively combine the results
    go [chunk] = small_tuple chunk
    go chunks  = go (chunkify (map small_tuple chunks))
chunkify :: [a] -> [[a]]
-- ^ Split a list into lists that are small enough to have a corresponding
-- tuple arity. The sub-lists of the result all have length <= 'mAX_TUPLE_SIZE'
-- But there may be more than 'mAX_TUPLE_SIZE' sub-lists
chunkify xs
  | length xs <= mAX_TUPLE_SIZE = [xs]     -- common case: one chunk is enough
  | otherwise                   = go xs
  where
    go [] = []
    go ys = chunk : go rest
      where
        (chunk, rest) = splitAt mAX_TUPLE_SIZE ys
{-
Creating tuples and their types for Core expressions
@mkBigCoreVarTup@ builds a tuple; the inverse to @mkTupleSelector@.
* If it has only one element, it is the identity function.
* If there are more elements than a big tuple can have, it nests
the tuples.
-}
-- | Build a small tuple holding the specified variables
mkCoreVarTup :: [Id] -> CoreExpr
mkCoreVarTup ids = mkCoreTup (map Var ids)

-- | Build the type of a small tuple that holds the specified variables
mkCoreVarTupTy :: [Id] -> Type
mkCoreVarTupTy ids = mkBoxedTupleTy (map idType ids)

-- | Build a small tuple holding the specified expressions
-- A singleton "tuple" is the expression itself; the empty tuple is unit
mkCoreTup :: [CoreExpr] -> CoreExpr
mkCoreTup []  = Var unitDataConId
mkCoreTup [c] = c
mkCoreTup cs  = mkConApp (tupleDataCon Boxed (length cs))
                         (map (Type . exprType) cs ++ cs)

-- | Build a big tuple holding the specified variables
mkBigCoreVarTup :: [Id] -> CoreExpr
mkBigCoreVarTup ids = mkBigCoreTup (map Var ids)

-- | Build the type of a big tuple that holds the specified variables
mkBigCoreVarTupTy :: [Id] -> Type
mkBigCoreVarTupTy ids = mkBigCoreTupTy (map idType ids)

-- | Build a big tuple holding the specified expressions
mkBigCoreTup :: [CoreExpr] -> CoreExpr
mkBigCoreTup = mkChunkified mkCoreTup

-- | Build the type of a big tuple that holds the specified type of thing
mkBigCoreTupTy :: [Type] -> Type
mkBigCoreTupTy = mkChunkified mkBoxedTupleTy
{-
************************************************************************
* *
Floats
* *
************************************************************************
-}
-- A binding that has been floated out, ready to be re-attached to an
-- expression by 'wrapFloat'
data FloatBind
  = FloatLet  CoreBind
  | FloatCase CoreExpr Id AltCon [Var]
      -- case e of y { C ys -> ... }
      -- See Note [Floating cases] in SetLevels

instance Outputable FloatBind where
  ppr (FloatLet b) = ptext (sLit "LET") <+> ppr b
  ppr (FloatCase e b c bs) = hang (ptext (sLit "CASE") <+> ppr e <+> ptext (sLit "of") <+> ppr b)
                                2 (ppr c <+> ppr bs)

-- Re-attach a float: a let, or a single-alternative case
wrapFloat :: FloatBind -> CoreExpr -> CoreExpr
wrapFloat (FloatLet defns)       body = Let defns body
wrapFloat (FloatCase e b con bs) body = Case e b (exprType body) [(con, bs, body)]
{-
************************************************************************
* *
\subsection{Tuple destructors}
* *
************************************************************************
-}
-- | Builds a selector which scrutises the given
-- expression and extracts the one name from the list given.
-- If you want the no-shadowing rule to apply, the caller
-- is responsible for making sure that none of these names
-- are in scope.
--
-- If there is just one 'Id' in the tuple, then the selector is
-- just the identity.
--
-- If necessary, we pattern match on a \"big\" tuple.
mkTupleSelector :: [Id]         -- ^ The 'Id's to pattern match the tuple against
                -> Id           -- ^ The 'Id' to select
                -> Id           -- ^ A variable of the same type as the scrutinee
                -> CoreExpr     -- ^ Scrutinee
                -> CoreExpr     -- ^ Selector expression

-- mkTupleSelector [a,b,c,d] b v e
--          = case e of v {
--                (p,q) -> case p of p {
--                           (a,b) -> b }}
-- We use 'tpl' vars for the p,q, since shadowing does not matter.
--
-- In fact, it's more convenient to generate it innermost first, getting
--
--        case (case e of v
--                (p,q) -> p) of p
--          (a,b) -> b
mkTupleSelector vars the_var scrut_var scrut
  = mk_tup_sel (chunkify vars) the_var
  where
    -- One chunk: select directly.  Several chunks: select the chunk
    -- containing the target from the outer tuple, then recurse into it.
    mk_tup_sel [vars] the_var = mkSmallTupleSelector vars the_var scrut_var scrut
    mk_tup_sel vars_s the_var = mkSmallTupleSelector group the_var tpl_v $
                                mk_tup_sel (chunkify tpl_vs) tpl_v
        where
          tpl_tys = [mkBoxedTupleTy (map idType gp) | gp <- vars_s]
          tpl_vs  = mkTemplateLocals tpl_tys
          -- Irrefutable pattern: expects the_var to occur in exactly
          -- one chunk
          [(tpl_v, group)] = [(tpl,gp) | (tpl,gp) <- zipEqual "mkTupleSelector" tpl_vs vars_s,
                                         the_var `elem` gp ]
-- | Like 'mkTupleSelector' but for tuples that are guaranteed
-- never to be \"big\".
--
-- > mkSmallTupleSelector [x] x v e = [| e |]
-- > mkSmallTupleSelector [x,y,z] x v e = [| case e of v { (x,y,z) -> x } |]
mkSmallTupleSelector :: [Id]        -- The tuple args
          -> Id                     -- The selected one
          -> Id                     -- A variable of the same type as the scrutinee
          -> CoreExpr               -- Scrutinee
          -> CoreExpr
-- A singleton "tuple" is just the value itself (see 'mkCoreTup'),
-- so selection from it is the scrutinee unchanged
mkSmallTupleSelector [var] should_be_the_same_var _ scrut
  = ASSERT(var == should_be_the_same_var)
    scrut
-- Otherwise pattern match on the real boxed tuple constructor
mkSmallTupleSelector vars the_var scrut_var scrut
  = ASSERT( notNull vars )
    Case scrut scrut_var (idType the_var)
         [(DataAlt (tupleDataCon Boxed (length vars)), vars, Var the_var)]
-- | A generalization of 'mkTupleSelector', allowing the body
-- of the case to be an arbitrary expression.
--
-- To avoid shadowing, we use uniques to invent new variables.
--
-- If necessary we pattern match on a \"big\" tuple.
mkTupleCase :: UniqSupply       -- ^ For inventing names of intermediate variables
            -> [Id]             -- ^ The tuple identifiers to pattern match on
            -> CoreExpr         -- ^ Body of the case
            -> Id               -- ^ A variable of the same type as the scrutinee
            -> CoreExpr         -- ^ Scrutinee
            -> CoreExpr
-- ToDo: eliminate cases where none of the variables are needed.
--
--         mkTupleCase uniqs [a,b,c,d] body v e
--           = case e of v { (p,q) ->
--             case p of p { (a,b) ->
--             case q of q { (c,d) ->
--             body }}}
mkTupleCase uniqs vars body scrut_var scrut
  = mk_tuple_case uniqs (chunkify vars) body
  where
    -- This is the case where we don't need any nesting
    mk_tuple_case _ [vars] body
      = mkSmallTupleCase vars body scrut_var scrut
    -- This is the case where we must nest tuples at least once
    mk_tuple_case us vars_s body
      = let (us', vars', body') = foldr one_tuple_case (us, [], body) vars_s
            in mk_tuple_case us' (chunkify vars') body'
    -- Wrap one chunk in its own small case, inventing a fresh scrutinee
    -- variable ("ds") for it, and thread the unique supply through
    one_tuple_case chunk_vars (us, vs, body)
      = let (uniq, us') = takeUniqFromSupply us
            scrut_var = mkSysLocal (fsLit "ds") uniq
              (mkBoxedTupleTy (map idType chunk_vars))
            body' = mkSmallTupleCase chunk_vars body scrut_var (Var scrut_var)
        in (us', scrut_var:vs, body')
-- | As 'mkTupleCase', but for a tuple that is small enough to be guaranteed
-- not to need nesting.
mkSmallTupleCase
        :: [Id]         -- ^ The tuple args
        -> CoreExpr     -- ^ Body of the case
        -> Id           -- ^ A variable of the same type as the scrutinee
        -> CoreExpr     -- ^ Scrutinee
        -> CoreExpr

-- A singleton "tuple" is not a real tuple (see 'mkCoreTup'), so just
-- bind the variable directly to the scrutinee
mkSmallTupleCase [var] body _scrut_var scrut
  = bindNonRec var scrut body
mkSmallTupleCase vars body scrut_var scrut
-- One branch no refinement?
  = Case scrut scrut_var (exprType body)
         [(DataAlt (tupleDataCon Boxed (length vars)), vars, body)]
{-
************************************************************************
* *
\subsection{Common list manipulation expressions}
* *
************************************************************************
Call the constructor Ids when building explicit lists, so that they
interact well with rules.
-}
-- These call the constructor Ids directly (nilDataCon/consDataCon) so
-- the resulting lists interact well with rules.

-- | Makes a list @[]@ for lists of the specified type
mkNilExpr :: Type -> CoreExpr
mkNilExpr ty = mkConApp nilDataCon [Type ty]

-- | Makes a list @(:)@ for lists of the specified type
mkConsExpr :: Type -> CoreExpr -> CoreExpr -> CoreExpr
mkConsExpr ty hd tl = mkConApp consDataCon [Type ty, hd, tl]

-- | Make a list containing the given expressions, where the list has the given type
mkListExpr :: Type -> [CoreExpr] -> CoreExpr
mkListExpr ty xs = foldr (mkConsExpr ty) (mkNilExpr ty) xs
-- | Make a fully applied 'foldr' expression
mkFoldrExpr :: MonadThings m
            => Type             -- ^ Element type of the list
            -> Type             -- ^ Fold result type
            -> CoreExpr         -- ^ "Cons" function expression for the fold
            -> CoreExpr         -- ^ "Nil" expression for the fold
            -> CoreExpr         -- ^ List expression being folded across
            -> m CoreExpr
mkFoldrExpr elt_ty result_ty c n list = do
    foldr_id <- lookupId foldrName
    -- foldr @elt_ty @result_ty c n list
    return (mkApps (Var foldr_id)
                   [Type elt_ty, Type result_ty, c, n, list])
-- | Make a 'build' expression applied to a locally-bound worker function
mkBuildExpr :: (MonadThings m, MonadUnique m)
            => Type                                     -- ^ Type of list elements to be built
            -> ((Id, Type) -> (Id, Type) -> m CoreExpr) -- ^ Function that, given information about the 'Id's
                                                        -- of the binders for the build worker function, returns
                                                        -- the body of that worker
            -> m CoreExpr
mkBuildExpr elt_ty mk_build_inside = do
    -- Fresh type variable for the worker's result type, plus binders
    -- c :: elt_ty -> n -> n and n :: n
    [n_tyvar] <- newTyVars [alphaTyVar]
    let n_ty = mkTyVarTy n_tyvar
        c_ty = mkFunTys [elt_ty, n_ty] n_ty
    [c, n] <- sequence [mkSysLocalM (fsLit "c") c_ty, mkSysLocalM (fsLit "n") n_ty]

    build_inside <- mk_build_inside (c, c_ty) (n, n_ty)

    build_id <- lookupId buildName
    return $ Var build_id `App` Type elt_ty `App` mkLams [n_tyvar, c, n] build_inside
  where
    -- Clone the template tyvars with fresh uniques so the binders are fresh
    newTyVars tyvar_tmpls = do
      uniqs <- getUniquesM
      return (zipWith setTyVarUnique tyvar_tmpls uniqs)
{-
************************************************************************
* *
Error expressions
* *
************************************************************************
-}
mkRuntimeErrorApp
        :: Id           -- Should be of type (forall a. Addr# -> a)
                        --      where Addr# points to a UTF8 encoded string
        -> Type         -- The type to instantiate 'a'
        -> String       -- The string to print
        -> CoreExpr
-- Instantiate the error Id at the result type and apply it to the
-- message as a string literal
mkRuntimeErrorApp err_id res_ty err_msg
  = mkApps (Var err_id) [Type res_ty, Lit (mkMachString err_msg)]
mkImpossibleExpr :: Type -> CoreExpr
-- A runtime-error application used to fill in alternatives that can
-- never be taken
mkImpossibleExpr res_ty = mkRuntimeErrorApp rUNTIME_ERROR_ID res_ty msg
  where
    msg = "Impossible case alternative"
{-
************************************************************************
* *
Error Ids
* *
************************************************************************
GHC randomly injects these into the code.
@patError@ is just a version of @error@ for pattern-matching
failures. It knows various ``codes'' which expand to longer
strings---this saves space!
@absentErr@ is a thing we put in for ``absent'' arguments. They jolly
well shouldn't be yanked on, but if one is, then you will get a
friendly message from @absentErr@ (rather than a totally random
crash).
@parError@ is a special version of @error@ which the compiler does
not know to be a bottoming Id. It is used in the @_par_@ and @_seq_@
templates, but we don't ever expect to generate code for it.
-}
errorIds :: [Id]
-- All the wired-in error Ids, gathered so callers can register the
-- whole set at once
errorIds
  = [ eRROR_ID,   -- This one isn't used anywhere else in the compiler
                  -- But we still need it in wiredInIds so that when GHC
                  -- compiles a program that mentions 'error' we don't
                  -- import its type from the interface file; we just get
                  -- the Id defined here.  Which has an 'open-tyvar' type.

      uNDEFINED_ID,   -- Ditto for 'undefined'. The big deal is to give it
                      -- an 'open-tyvar' type.

      rUNTIME_ERROR_ID,
      iRREFUT_PAT_ERROR_ID,
      nON_EXHAUSTIVE_GUARDS_ERROR_ID,
      nO_METHOD_BINDING_ERROR_ID,
      pAT_ERROR_ID,
      rEC_CON_ERROR_ID,
      rEC_SEL_ERROR_ID,
      aBSENT_ERROR_ID,
      tYPE_ERROR_ID   -- Used with Opt_DeferTypeErrors, see #10284
      ]
recSelErrorName, runtimeErrorName, absentErrorName :: Name
irrefutPatErrorName, recConErrorName, patErrorName :: Name
nonExhaustiveGuardsErrorName, noMethodBindingErrorName :: Name
typeErrorName :: Name

-- Each error function's Name is wired in to the module denoted by
-- cONTROL_EXCEPTION_BASE (see 'err_nm' below)
recSelErrorName     = err_nm "recSelError"     recSelErrorIdKey     rEC_SEL_ERROR_ID
absentErrorName     = err_nm "absentError"     absentErrorIdKey     aBSENT_ERROR_ID
runtimeErrorName    = err_nm "runtimeError"    runtimeErrorIdKey    rUNTIME_ERROR_ID
irrefutPatErrorName = err_nm "irrefutPatError" irrefutPatErrorIdKey iRREFUT_PAT_ERROR_ID
recConErrorName     = err_nm "recConError"     recConErrorIdKey     rEC_CON_ERROR_ID
patErrorName        = err_nm "patError"        patErrorIdKey        pAT_ERROR_ID
typeErrorName       = err_nm "typeError"       typeErrorIdKey       tYPE_ERROR_ID

noMethodBindingErrorName     = err_nm "noMethodBindingError"
                                  noMethodBindingErrorIdKey nO_METHOD_BINDING_ERROR_ID
nonExhaustiveGuardsErrorName = err_nm "nonExhaustiveGuardsError"
                                  nonExhaustiveGuardsErrorIdKey nON_EXHAUSTIVE_GUARDS_ERROR_ID

-- Build a wired-in Name in cONTROL_EXCEPTION_BASE for an error Id
err_nm :: String -> Unique -> Id -> Name
err_nm str uniq id = mkWiredInIdName cONTROL_EXCEPTION_BASE (fsLit str) uniq id
rEC_SEL_ERROR_ID, rUNTIME_ERROR_ID, iRREFUT_PAT_ERROR_ID, rEC_CON_ERROR_ID :: Id
pAT_ERROR_ID, nO_METHOD_BINDING_ERROR_ID, nON_EXHAUSTIVE_GUARDS_ERROR_ID :: Id
tYPE_ERROR_ID :: Id
aBSENT_ERROR_ID :: Id

-- Every error Id is built the same way: a bottoming Id at the standard
-- runtime-error type (see 'mkRuntimeErrorId')
rEC_SEL_ERROR_ID                = mkRuntimeErrorId recSelErrorName
rUNTIME_ERROR_ID                = mkRuntimeErrorId runtimeErrorName
iRREFUT_PAT_ERROR_ID            = mkRuntimeErrorId irrefutPatErrorName
rEC_CON_ERROR_ID                = mkRuntimeErrorId recConErrorName
pAT_ERROR_ID                    = mkRuntimeErrorId patErrorName
nO_METHOD_BINDING_ERROR_ID      = mkRuntimeErrorId noMethodBindingErrorName
nON_EXHAUSTIVE_GUARDS_ERROR_ID  = mkRuntimeErrorId nonExhaustiveGuardsErrorName
aBSENT_ERROR_ID                 = mkRuntimeErrorId absentErrorName
tYPE_ERROR_ID                   = mkRuntimeErrorId typeErrorName
mkRuntimeErrorId :: Name -> Id
-- A bottoming Id (arity 1, see 'pc_bottoming_Id1') at 'runtimeErrorTy'
mkRuntimeErrorId name = pc_bottoming_Id1 name runtimeErrorTy

runtimeErrorTy :: Type
-- The runtime error Ids take a UTF8-encoded string as argument,
-- i.e. forall a. Addr# -> a
runtimeErrorTy = mkSigmaTy [openAlphaTyVar] [] (mkFunTy addrPrimTy openAlphaTy)
errorName :: Name
errorName = mkWiredInIdName gHC_ERR (fsLit "error") errorIdKey eRROR_ID

eRROR_ID :: Id
eRROR_ID = pc_bottoming_Id1 errorName errorTy

errorTy :: Type   -- See Note [Error and friends have an "open-tyvar" forall]
-- i.e. forall a. String -> a
errorTy = mkSigmaTy [openAlphaTyVar] [] (mkFunTys [mkListTy charTy] openAlphaTy)
undefinedName :: Name
undefinedName = mkWiredInIdName gHC_ERR (fsLit "undefined") undefinedKey uNDEFINED_ID

uNDEFINED_ID :: Id
-- Arity zero ('pc_bottoming_Id0'): 'undefined' takes no argument
uNDEFINED_ID = pc_bottoming_Id0 undefinedName undefinedTy

undefinedTy :: Type   -- See Note [Error and friends have an "open-tyvar" forall]
-- i.e. forall a. a
undefinedTy = mkSigmaTy [openAlphaTyVar] [] openAlphaTy
{-
Note [Error and friends have an "open-tyvar" forall]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
'error' and 'undefined' have types
error :: forall (a::OpenKind). String -> a
undefined :: forall (a::OpenKind). a
Notice the 'OpenKind' (manifested as openAlphaTyVar in the code). This ensures that
"error" can be instantiated at
* unboxed as well as boxed types
* polymorphic types
This is OK because it never returns, so the return type is irrelevant.
See Note [OpenTypeKind accepts foralls] in TcUnify.
************************************************************************
* *
\subsection{Utilities}
* *
************************************************************************
-}
pc_bottoming_Id1 :: Name -> Type -> Id
-- Function of arity 1, which diverges after being given one argument
pc_bottoming_Id1 name ty
 = mkVanillaGlobalWithInfo name ty bottoming_info
 where
    bottoming_info = vanillaIdInfo `setStrictnessInfo` strict_sig
                                   `setArityInfo`      1
                        -- Make arity and strictness agree

        -- Do *not* mark them as NoCafRefs, because they can indeed have
        -- CAF refs.  For example, pAT_ERROR_ID calls GHC.Err.untangle,
        -- which has some CAFs
        -- In due course we may arrange that these error-y things are
        -- regarded by the GC as permanently live, in which case we
        -- can give them NoCaf info.  As it is, any function that calls
        -- any pc_bottoming_Id will itself have CafRefs, which bloats
        -- SRTs.

    strict_sig = mkClosedStrictSig [evalDmd] botRes
    -- These "bottom" out, no matter what their arguments
pc_bottoming_Id0 :: Name -> Type -> Id
-- Same as 'pc_bottoming_Id1' but arity zero: no argument demands in the
-- strictness signature, and no arity info to set
pc_bottoming_Id0 name ty = mkVanillaGlobalWithInfo name ty info
  where
    info = vanillaIdInfo `setStrictnessInfo` mkClosedStrictSig [] botRes
| TomMD/ghc | compiler/coreSyn/MkCore.hs | bsd-3-clause | 31,850 | 0 | 16 | 8,721 | 4,846 | 2,639 | 2,207 | 371 | 4 |
{-# LANGUAGE CPP, GADTs #-}
-----------------------------------------------------------------------------
--
-- Pretty-printing of Cmm as C, suitable for feeding gcc
--
-- (c) The University of Glasgow 2004-2006
--
-- Print Cmm as real C, for -fvia-C
--
-- See wiki:Commentary/Compiler/Backends/PprC
--
-- This is simpler than the old PprAbsC, because Cmm is "macro-expanded"
-- relative to the old AbstractC, and many oddities/decorations have
-- disappeared from the data type.
--
-- This code generator is only supported in unregisterised mode.
--
-----------------------------------------------------------------------------
module PprC (
writeCs,
pprStringInCStyle
) where
#include "HsVersions.h"
-- Cmm stuff
import BlockId
import CLabel
import ForeignCall
import Cmm hiding (pprBBlock)
import PprCmm ()
import Hoopl
import CmmUtils
import CmmSwitch
-- Utils
import CPrim
import DynFlags
import FastString
import Outputable
import Platform
import UniqSet
import Unique
import Util
-- The rest
import Control.Monad.ST
import Data.Bits
import Data.Char
import Data.List
import Data.Map (Map)
import Data.Word
import System.IO
import qualified Data.Map as Map
import Control.Monad (liftM, ap)
#if __GLASGOW_HASKELL__ < 709
import Control.Applicative (Applicative(..))
#endif
import qualified Data.Array.Unsafe as U ( castSTUArray )
import Data.Array.ST
-- --------------------------------------------------------------------------
-- Top level
-- | Render a list of Cmm groups as one C compilation unit.  With
-- -split-objs each group is preceded by a split marker so the driver
-- can cut the assembly apart later.
pprCs :: DynFlags -> [RawCmmGroup] -> SDoc
pprCs dflags cmms = pprCode CStyle (vcat (map with_marker cmms))
  where
    with_marker c = split_marker $$ pprC c
    split_marker =
      if gopt Opt_SplitObjs dflags
        then ptext (sLit "__STG_SPLIT_MARKER")
        else empty
-- | Print the generated C for a list of Cmm groups to the given handle.
writeCs :: DynFlags -> Handle -> [RawCmmGroup] -> IO ()
writeCs dflags handle = printForC dflags handle . pprCs dflags
-- --------------------------------------------------------------------------
-- Now do some real work
--
-- for fun, we could call cmmToCmm over the tops...
--
-- | Pretty-print a whole Cmm group, separating top-level declarations
-- with blank lines.
pprC :: RawCmmGroup -> SDoc
pprC = vcat . intersperse blankLine . map pprTop
--
-- top level procs
--
-- | Print one top-level Cmm declaration: a procedure or a data section.
pprTop :: RawCmmDecl -> SDoc

-- A procedure: first its info table, if the entry block has one (emitted
-- as a word array just before the function so the table sits in front of
-- the code), then the C function itself, wrapped in FN_/IF_ according to
-- its visibility, with extern declarations and temporaries up front.
pprTop (CmmProc infos clbl _ graph) =

    (case mapLookup (g_entry graph) infos of
       Nothing -> empty
       Just (Statics info_clbl info_dat) -> pprDataExterns info_dat $$
                                            pprWordArray info_clbl info_dat) $$
    (vcat [
           blankLine,
           extern_decls,
           (if (externallyVisibleCLabel clbl)
                    then mkFN_ else mkIF_) (ppr clbl) <+> lbrace,
           nest 8 temp_decls,
           vcat (map pprBBlock blocks),
           rbrace ]
    )
  where
        -- the entry block must be emitted first
        blocks = toBlockListEntryFirst graph
        (temp_decls, extern_decls) = pprTempAndExternDecls blocks

-- Chunks of static data.

-- We only handle (a) arrays of word-sized things and (b) strings.

-- A single string becomes a C char array literal.
pprTop (CmmData _section (Statics lbl [CmmString str])) =
  hcat [
    pprLocalness lbl, ptext (sLit "char "), ppr lbl,
    ptext (sLit "[] = "), pprStringInCStyle str, semi
  ]

-- Uninitialised data becomes a sized but unvalued char array.
pprTop (CmmData _section (Statics lbl [CmmUninitialised size])) =
  hcat [
    pprLocalness lbl, ptext (sLit "char "), ppr lbl,
    brackets (int size), semi
  ]

-- Everything else: an array of StgWords.
pprTop (CmmData _section (Statics lbl lits)) =
  pprDataExterns lits $$
  pprWordArray lbl lits
-- --------------------------------------------------------------------------
-- BasicBlocks are self-contained entities: they always end in a jump.
--
-- Like nativeGen/AsmCodeGen, we could probably reorder blocks to turn
-- as many jumps as possible into fall throughs.
--
-- | Print one basic block: its label, then the (indented) middle
-- statements, and finally the closing control-transfer statement.
-- Every block ends in a jump, so no fall-through handling is needed here.
pprBBlock :: CmmBlock -> SDoc
pprBBlock block = label_line $$ body
  where
    (_, middle, final) = blockSplit block
    label_line = nest 4 (pprBlockId (entryLabel block) <> colon)
    body = nest 8 (vcat (map pprStmt (blockToList middle)) $$ pprStmt final)
-- --------------------------------------------------------------------------
-- Info tables. Just arrays of words.
-- See codeGen/ClosureInfo, and nativeGen/PprMach
-- | Emit a chunk of static data as a C array of StgWords
-- (used for info tables and general data sections).
pprWordArray :: CLabel -> [CmmStatic] -> SDoc
pprWordArray lbl ds
  = sdocWithDynFlags $ \dflags ->
    let header = hcat [ pprLocalness lbl, ptext (sLit "StgWord")
                      , space, ppr lbl, ptext (sLit "[] = {") ]
        elems  = nest 8 (commafy (pprStatics dflags ds))
    in header $$ elems $$ ptext (sLit "};")
--
-- has to be static, if it isn't globally visible
--
-- | Emit @static @ for labels that are not externally visible, so they
-- get internal linkage in the generated C.
pprLocalness :: CLabel -> SDoc
pprLocalness lbl =
  if externallyVisibleCLabel lbl
    then empty
    else ptext (sLit "static ")
-- --------------------------------------------------------------------------
-- Statements.
--
-- | Print a single Cmm statement as C.  Foreign calls are the tricky
-- part: see wiki:Commentary/Compiler/Backends/PprC#Prototypes for why
-- some calls need a prototype and others a cast.
pprStmt :: CmmNode e x -> SDoc

pprStmt stmt =
    sdocWithDynFlags $ \dflags ->
    case stmt of
    CmmEntry{}   -> empty
    CmmComment _ -> empty -- (hang (ptext (sLit "/*")) 3 (ftext s)) $$ ptext (sLit "*/")
                          -- XXX if the string contains "*/", we need to fix it
                          -- XXX we probably want to emit these comments when
                          -- some debugging option is on.  They can get quite
                          -- large.

    CmmTick _ -> empty
    CmmUnwind{} -> empty

    CmmAssign dest src -> pprAssign dflags dest src

    CmmStore  dest src
        -- a 64-bit store on a 32-bit target must go through the
        -- ASSIGN_DBL / ASSIGN_Word64 macros (no native 64-bit lvalue)
        | typeWidth rep == W64 && wordWidth dflags /= W64
        -> (if isFloatType rep then ptext (sLit "ASSIGN_DBL")
                               else ptext (sLit ("ASSIGN_Word64"))) <>
           parens (mkP_ <> pprExpr1 dest <> comma <> pprExpr src) <> semi

        | otherwise
        -> hsep [ pprExpr (CmmLoad dest rep), equals, pprExpr src <> semi ]

        where
          rep = cmmExprType dflags src

    CmmUnsafeForeignCall target@(ForeignTarget fn conv) results args ->
        fnCall
        where
        (res_hints, arg_hints) = foreignTargetHints target
        hresults = zip results res_hints
        hargs    = zip args arg_hints

        ForeignConvention cconv _ _ ret = conv

        -- cast the function to a pointer of the right C function type
        cast_fn = parens (cCast (pprCFunType (char '*') cconv hresults hargs) fn)

        -- See wiki:Commentary/Compiler/Backends/PprC#Prototypes
        fnCall =
            case fn of
              CmmLit (CmmLabel lbl)
                | StdCallConv <- cconv ->
                    pprCall (ppr lbl) cconv hresults hargs
                        -- stdcall functions must be declared with
                        -- a function type, otherwise the C compiler
                        -- doesn't add the @n suffix to the label.  We
                        -- can't add the @n suffix ourselves, because
                        -- it isn't valid C.
                | CmmNeverReturns <- ret ->
                    pprCall cast_fn cconv hresults hargs <> semi
                | not (isMathFun lbl) ->
                    pprForeignCall (ppr lbl) cconv hresults hargs
              _ ->
                    pprCall cast_fn cconv hresults hargs <> semi
                        -- for a dynamic call, no declaration is necessary.

    -- these two primops have no observable C counterpart
    CmmUnsafeForeignCall (PrimTarget MO_Touch) _results _args -> empty
    CmmUnsafeForeignCall (PrimTarget (MO_Prefetch_Data _)) _results _args -> empty

    CmmUnsafeForeignCall target@(PrimTarget op) results args ->
        fn_call
      where
        cconv = CCallConv
        fn = pprCallishMachOp_for_C op

        (res_hints, arg_hints) = foreignTargetHints target
        hresults = zip results res_hints
        hargs    = zip args arg_hints

        fn_call
          -- The mem primops carry an extra alignment arg.
          -- We could maybe emit an alignment directive using this info.
          -- We also need to cast mem primops to prevent conflicts with GCC
          -- builtins (see bug #5967).
          | Just _align <- machOpMemcpyishAlign op
          = (ptext (sLit ";EF_(") <> fn <> char ')' <> semi) $$
            pprForeignCall fn cconv hresults hargs
          | otherwise
          = pprCall fn cconv hresults hargs

    CmmBranch   ident            -> pprBranch ident
    CmmCondBranch expr yes no _  -> pprCondBranch expr yes no
    CmmCall { cml_target = expr }    -> mkJMP_ (pprExpr expr) <> semi
    CmmSwitch arg ids            -> sdocWithDynFlags $ \dflags ->
                                    pprSwitch dflags arg ids

    _other -> pprPanic "PprC.pprStmt" (ppr stmt)
-- | A value paired with its foreign-call hint (signedness / addr-ness).
type Hinted a = (a, ForeignHint)

-- | Print a foreign call via a locally declared function pointer:
-- we declare @ghcFunPtr@ with the precise C function type, assign the
-- target to it, then call through it.  This gives the C compiler a full
-- prototype without needing a declaration for the target label itself.
pprForeignCall :: SDoc -> CCallConv -> [Hinted CmmFormal] -> [Hinted CmmActual]
               -> SDoc
pprForeignCall fn cconv results args = fn_call
  where
    fn_call = braces (
                 pprCFunType (char '*' <> text "ghcFunPtr") cconv results args <> semi
              $$ text "ghcFunPtr" <+> equals <+> cast_fn <> semi
              $$ pprCall (text "ghcFunPtr") cconv results args <> semi
             )
    -- cast the raw function label to the matching function-pointer type
    cast_fn = parens (parens (pprCFunType (char '*') cconv results args) <> fn)
-- | Build a C function type: result type, calling-convention attribute,
-- the given function (pointer) designator, and the argument type list.
-- At most one result is supported (C has no multiple returns).
pprCFunType :: SDoc -> CCallConv -> [Hinted CmmFormal] -> [Hinted CmmActual] -> SDoc
pprCFunType ppr_fn cconv ress args
  = sdocWithDynFlags $ \dflags ->
    let res_type [] = ptext (sLit "void")
        res_type [(one, hint)] = machRepHintCType (localRegType one) hint
        res_type _ = panic "pprCFunType: only void or 1 return value supported"

        arg_type (expr, hint) = machRepHintCType (cmmExprType dflags expr) hint
    in res_type ress <+>
       parens (ccallConvAttribute cconv <> ppr_fn) <>
       parens (commafy (map arg_type args))
-- ---------------------------------------------------------------------
-- unconditional branches
-- | An unconditional transfer to a local label: @goto _NN;@.
pprBranch :: BlockId -> SDoc
pprBranch ident = hcat [ptext (sLit "goto"), space, pprBlockId ident, semi]
-- ---------------------------------------------------------------------
-- conditional branches to local labels
-- | A two-way conditional branch between local labels:
-- @if (e) goto yes; else goto no;@.
pprCondBranch :: CmmExpr -> BlockId -> BlockId -> SDoc
pprCondBranch expr yes no
  = hsep [ ptext (sLit "if"), parens (pprExpr expr)
         , ptext (sLit "goto"), pprBlockId yes <> semi
         , ptext (sLit "else goto"), pprBlockId no <> semi ]
-- ---------------------------------------------------------------------
-- a local table branch
--
-- we find the fall-through cases
--
-- | Print a table branch as a C switch.  Case values that share a target
-- are grouped: all but the first are printed as explicit fall-throughs
-- onto a final goto, so the C compiler sees one label per target.
pprSwitch :: DynFlags -> CmmExpr -> SwitchTargets -> SDoc
pprSwitch dflags e ids
  = (hang (ptext (sLit "switch") <+> parens ( pprExpr e ) <+> lbrace)
        4 (vcat ( map caseify pairs ) $$ def)) $$ rbrace
  where
    (pairs, mbdef) = switchTargetsFallThrough ids

    -- fall through case
    caseify (ix:ixs, ident) = vcat (map do_fallthrough ixs) $$ final_branch ix
        where
        do_fallthrough ix =
                 hsep [ ptext (sLit "case") , pprHexVal ix (wordWidth dflags) <> colon ,
                        ptext (sLit "/* fall through */") ]

        final_branch ix =
                hsep [ ptext (sLit "case") , pprHexVal ix (wordWidth dflags) <> colon ,
                       ptext (sLit "goto") , (pprBlockId ident) <> semi ]

    caseify (_     , _    ) = panic "pprSwitch: switch with no cases!"

    -- the default target, if the switch has one
    def | Just l <- mbdef = ptext (sLit "default: goto") <+> pprBlockId l <> semi
        | otherwise       = empty
-- ---------------------------------------------------------------------
-- Expressions.
--
-- C Types: the invariant is that the C expression generated by
--
-- pprExpr e
--
-- has a type in C which is also given by
--
-- machRepCType (cmmExprType e)
--
-- (similar invariants apply to the rest of the pretty printer).
-- | Print a Cmm expression as a C expression.  Invariant: the C type of
-- the result is machRepCType (cmmExprType e); see the comment above.
pprExpr :: CmmExpr -> SDoc
pprExpr e = case e of
    CmmLit lit -> pprLit lit

    CmmLoad e ty -> sdocWithDynFlags $ \dflags -> pprLoad dflags e ty
    CmmReg reg      -> pprCastReg reg
    CmmRegOff reg 0 -> pprCastReg reg

    -- register plus byte offset: pointer arithmetic on the (cast) register;
    -- negative offsets only negated when the negation cannot overflow
    CmmRegOff reg i
        | i < 0 && negate_ok -> pprRegOff (char '-') (-i)
        | otherwise          -> pprRegOff (char '+') i
      where
        pprRegOff op i' = pprCastReg reg <> op <> int i'
        negate_ok = negate (fromIntegral i :: Integer) <
                    fromIntegral (maxBound::Int)
        -- overflow is undefined; see #7620

    CmmMachOp mop args -> pprMachOpApp mop args

    CmmStackSlot _ _   -> panic "pprExpr: CmmStackSlot not supported!"
-- | Print a memory load.  64-bit loads on a 32-bit target go through the
-- PK_DBL / PK_Word64 macros; word-sized loads from pointer registers can
-- use direct dereference/indexing; everything else goes through 'cLoad'
-- (which knows about alignment-sensitive platforms).
pprLoad :: DynFlags -> CmmExpr -> CmmType -> SDoc
pprLoad dflags e ty
  | width == W64, wordWidth dflags /= W64
  = (if isFloatType ty then ptext (sLit "PK_DBL")
                       else ptext (sLit "PK_Word64"))
    <> parens (mkP_ <> pprExpr1 e)

  | otherwise
  = case e of
        CmmReg r | isPtrReg r && width == wordWidth dflags && not (isFloatType ty)
                 -> char '*' <> pprAsPtrReg r

        CmmRegOff r 0 | isPtrReg r && width == wordWidth dflags && not (isFloatType ty)
                      -> char '*' <> pprAsPtrReg r

        -- word-aligned offset from a pointer register: array indexing
        CmmRegOff r off | isPtrReg r && width == wordWidth dflags
                        , off `rem` wORD_SIZE dflags == 0 && not (isFloatType ty)
        -- ToDo: check that the offset is a word multiple?
        --       (For tagging to work, I had to avoid unaligned loads. --ARY)
                        -> pprAsPtrReg r <> brackets (ppr (off `shiftR` wordShift dflags))

        _other -> cLoad e ty
  where
    width = typeWidth ty
-- | Like 'pprExpr', but parenthesises anything that is not atomic
-- (a literal or a bare register), so the result can safely be used as
-- an operand of a larger C expression.
pprExpr1 :: CmmExpr -> SDoc
pprExpr1 e = case e of
  CmmLit lit -> pprLit1 lit
  CmmReg _   -> pprExpr e
  _          -> parens (pprExpr e)
-- --------------------------------------------------------------------------
-- MachOp applications
-- | Print a MachOp application.  MulMayOflo is special-cased to the
-- mulIntMayOflo macro; comparisons get a result cast (see
-- 'machOpNeedsCast').  NB: when the first equation's guard fails,
-- matching falls through to the second equation.
pprMachOpApp :: MachOp -> [CmmExpr] -> SDoc

pprMachOpApp op args
  | isMulMayOfloOp op
  = ptext (sLit "mulIntMayOflo") <> parens (commafy (map pprExpr args))
  where isMulMayOfloOp (MO_U_MulMayOflo _) = True
        isMulMayOfloOp (MO_S_MulMayOflo _) = True
        isMulMayOfloOp _ = False

pprMachOpApp mop args
  | Just ty <- machOpNeedsCast mop
  = ty <> parens (pprMachOpApp' mop args)
  | otherwise
  = pprMachOpApp' mop args
-- Comparisons in C have type 'int', but we want type W_ (this is what
-- resultRepOfMachOp says). The other C operations inherit their type
-- from their operands, so no casting is required.
-- | The cast (if any) that must wrap the result of this MachOp:
-- comparisons yield C 'int' but we want W_, so they get a (W_) cast.
machOpNeedsCast :: MachOp -> Maybe SDoc
machOpNeedsCast mop =
  if isComparisonMachOp mop then Just mkW_ else Nothing
-- | Print a MachOp application: infix for dyadic ops, prefix for unary.
-- Operands of signed ops and certain float ops are cast to the matching
-- signed / float C type first.
pprMachOpApp' :: MachOp -> [CmmExpr] -> SDoc
pprMachOpApp' mop args
 = case args of
    -- dyadic
    [x,y] -> pprArg x <+> pprMachOp_for_C mop <+> pprArg y

    -- unary
    [x]   -> pprMachOp_for_C mop <> parens (pprArg x)

    _     -> panic "PprC.pprMachOp : machop with wrong number of args"

  where
        -- Cast needed for signed integer ops
    pprArg e | signedOp    mop = sdocWithDynFlags $ \dflags ->
                                 cCast (machRep_S_CType (typeWidth (cmmExprType dflags e))) e
             | needsFCasts mop = sdocWithDynFlags $ \dflags ->
                                 cCast (machRep_F_CType (typeWidth (cmmExprType dflags e))) e
             | otherwise       = pprExpr1 e
    -- which float ops need their operands cast to a float C type
    needsFCasts (MO_F_Eq _)   = False
    needsFCasts (MO_F_Ne _)   = False
    needsFCasts (MO_F_Neg _)  = True
    needsFCasts (MO_F_Quot _) = True
    needsFCasts mop  = floatComparison mop
-- --------------------------------------------------------------------------
-- Literals
-- | Print a Cmm literal as a C expression.  Label-valued literals are
-- cast to (W_); floats are cast to the matching float C type, with the
-- <math.h> constants used for the non-finite values.
pprLit :: CmmLit -> SDoc
pprLit lit = case lit of
    CmmInt i rep      -> pprHexVal i rep

    CmmFloat f w       -> parens (machRep_F_CType w) <> str
        where d = fromRational f :: Double
              str | isInfinite d && d < 0 = ptext (sLit "-INFINITY")
                  | isInfinite d          = ptext (sLit "INFINITY")
                  | isNaN d               = ptext (sLit "NAN")
                  | otherwise             = text (show d)
                -- these constants come from <math.h>
                -- see #1861

    CmmVec {} -> panic "PprC printing vector literal"

    CmmBlock bid       -> mkW_ <> pprCLabelAddr (infoTblLbl bid)
    CmmHighStackMark   -> panic "PprC printing high stack mark"
    CmmLabel clbl      -> mkW_ <> pprCLabelAddr clbl
    CmmLabelOff clbl i -> mkW_ <> pprCLabelAddr clbl <> char '+' <> int i
    CmmLabelDiffOff clbl1 _ i
        -- WARNING:
        --  * the lit must occur in the info table clbl2
        --  * clbl1 must be an SRT, a slow entry point or a large bitmap
        -> mkW_ <> pprCLabelAddr clbl1 <> char '+' <> int i

    where
        -- take the address of the label
        pprCLabelAddr lbl = char '&' <> ppr lbl
-- | Like 'pprLit', but parenthesises literals that would not parse as a
-- single C operand (label offsets, label differences, and floats, whose
-- printed form starts with a cast).
pprLit1 :: CmmLit -> SDoc
pprLit1 lit = case lit of
  CmmLabelOff _ _       -> parens (pprLit lit)
  CmmLabelDiffOff _ _ _ -> parens (pprLit lit)
  CmmFloat _ _          -> parens (pprLit lit)
  _                     -> pprLit lit
-- ---------------------------------------------------------------------------
-- Static data
-- | Print a list of static data items as word-sized C initialisers.
-- W32 floats are padded to a word (#1852); W64 ints on a 32-bit target
-- are split into two words in the target's endian order; everything
-- else must already be word-sized.
pprStatics :: DynFlags -> [CmmStatic] -> [SDoc]
pprStatics _ [] = []
pprStatics dflags (CmmStaticLit (CmmFloat f W32) : rest)
  -- floats are padded to a word, see #1852
  | wORD_SIZE dflags == 8, CmmStaticLit (CmmInt 0 W32) : rest' <- rest
  = pprLit1 (floatToWord dflags f) : pprStatics dflags rest'
  | wORD_SIZE dflags == 4
  = pprLit1 (floatToWord dflags f) : pprStatics dflags rest
  | otherwise
  = pprPanic "pprStatics: float" (vcat (map ppr' rest))
    where ppr' (CmmStaticLit l) = sdocWithDynFlags $ \dflags ->
                                  ppr (cmmLitType dflags l)
          ppr' _other           = ptext (sLit "bad static!")
pprStatics dflags (CmmStaticLit (CmmFloat f W64) : rest)
  = map pprLit1 (doubleToWords dflags f) ++ pprStatics dflags rest

-- split a 64-bit int into two 32-bit halves, high word first on
-- big-endian targets
pprStatics dflags (CmmStaticLit (CmmInt i W64) : rest)
  | wordWidth dflags == W32
  = if wORDS_BIGENDIAN dflags
    then pprStatics dflags (CmmStaticLit (CmmInt q W32) :
                CmmStaticLit (CmmInt r W32) : rest)
    else pprStatics dflags (CmmStaticLit (CmmInt r W32) :
                CmmStaticLit (CmmInt q W32) : rest)
  where r = i .&. 0xffffffff
        q = i `shiftR` 32

pprStatics dflags (CmmStaticLit (CmmInt _ w) : _)
  | w /= wordWidth dflags
  = panic "pprStatics: cannot emit a non-word-sized static literal"

pprStatics dflags (CmmStaticLit lit : rest)
  = pprLit1 lit : pprStatics dflags rest

pprStatics _ (other : _)
  = pprPanic "pprWord" (pprStatic other)
-- | Print a single static datum (one element of a data section).
pprStatic :: CmmStatic -> SDoc
pprStatic static = nest 4 body
  where
    body = case static of
      CmmStaticLit lit   -> pprLit lit
      CmmUninitialised i -> mkC_ <> brackets (int i)
      -- these should be inlined, like the old .hc
      CmmString str      -> mkW_ <> parens (pprStringInCStyle str)
-- ---------------------------------------------------------------------------
-- Block Ids
-- | The local C label for a basic block: an underscore followed by the
-- block's unique.
pprBlockId :: BlockId -> SDoc
pprBlockId bid = hcat [char '_', ppr (getUnique bid)]
-- --------------------------------------------------------------------------
-- Print a MachOp in a way suitable for emitting via C.
--
-- | Print a MachOp as the corresponding C operator or cast.
--
-- Arithmetic, comparison and bitwise ops map directly onto C operators
-- (the operand casts that make the signed/unsigned and float variants
-- behave correctly are inserted by the caller, 'pprMachOpApp'').
-- Conversions become C casts (or nothing, for same-width no-ops).
-- MulMayOflo and the vector ops have no C rendering and must have been
-- handled (or rejected) before we get here; for those we trace the
-- offending op and panic.
pprMachOp_for_C :: MachOp -> SDoc

pprMachOp_for_C mop = case mop of

        -- Integer operations
        MO_Add          _ -> char '+'
        MO_Sub          _ -> char '-'
        MO_Eq           _ -> ptext (sLit "==")
        MO_Ne           _ -> ptext (sLit "!=")
        MO_Mul          _ -> char '*'

        MO_S_Quot       _ -> char '/'
        MO_S_Rem        _ -> char '%'
        MO_S_Neg        _ -> char '-'

        MO_U_Quot       _ -> char '/'
        MO_U_Rem        _ -> char '%'

        -- & Floating-point operations
        MO_F_Add        _ -> char '+'
        MO_F_Sub        _ -> char '-'
        MO_F_Neg        _ -> char '-'
        MO_F_Mul        _ -> char '*'
        MO_F_Quot       _ -> char '/'

        -- Signed comparisons
        MO_S_Ge         _ -> ptext (sLit ">=")
        MO_S_Le         _ -> ptext (sLit "<=")
        MO_S_Gt         _ -> char '>'
        MO_S_Lt         _ -> char '<'

        -- & Unsigned comparisons
        MO_U_Ge         _ -> ptext (sLit ">=")
        MO_U_Le         _ -> ptext (sLit "<=")
        MO_U_Gt         _ -> char '>'
        MO_U_Lt         _ -> char '<'

        -- & Floating-point comparisons
        MO_F_Eq         _ -> ptext (sLit "==")
        MO_F_Ne         _ -> ptext (sLit "!=")
        MO_F_Ge         _ -> ptext (sLit ">=")
        MO_F_Le         _ -> ptext (sLit "<=")
        MO_F_Gt         _ -> char '>'
        MO_F_Lt         _ -> char '<'

        -- Bitwise operations.  Not all of these may be supported at all
        -- sizes, and only integral MachReps are valid.
        MO_And          _ -> char '&'
        MO_Or           _ -> char '|'
        MO_Xor          _ -> char '^'
        MO_Not          _ -> char '~'
        MO_Shl          _ -> ptext (sLit "<<")
        MO_U_Shr        _ -> ptext (sLit ">>") -- unsigned shift right
        MO_S_Shr        _ -> ptext (sLit ">>") -- signed shift right

        -- Conversions.  Some of these will be NOPs, but never those that
        -- convert between ints and floats.
        -- Floating-point conversions use the signed variant.
        -- We won't know to generate (void*) casts here, but maybe from
        -- context elsewhere

        -- noop casts
        MO_UU_Conv from to | from == to -> empty
        MO_UU_Conv _from to -> parens (machRep_U_CType to)

        MO_SS_Conv from to | from == to -> empty
        MO_SS_Conv _from to -> parens (machRep_S_CType to)

        MO_FF_Conv from to | from == to -> empty
        MO_FF_Conv _from to -> parens (machRep_F_CType to)

        MO_SF_Conv _from to -> parens (machRep_F_CType to)
        MO_FS_Conv _from to -> parens (machRep_S_CType to)

        -- no C rendering exists for these; they should have been
        -- lowered or rejected earlier in the pipeline
        MO_S_MulMayOflo _ -> panicOp "MO_S_MulMayOflo"
        MO_U_MulMayOflo _ -> panicOp "MO_U_MulMayOflo"

        MO_V_Insert  {}   -> panicOp "MO_V_Insert"
        MO_V_Extract {}   -> panicOp "MO_V_Extract"

        MO_V_Add {}       -> panicOp "MO_V_Add"
        MO_V_Sub {}       -> panicOp "MO_V_Sub"
        MO_V_Mul {}       -> panicOp "MO_V_Mul"

        MO_VS_Quot {}     -> panicOp "MO_VS_Quot"
        MO_VS_Rem  {}     -> panicOp "MO_VS_Rem"
        MO_VS_Neg  {}     -> panicOp "MO_VS_Neg"

        MO_VU_Quot {}     -> panicOp "MO_VU_Quot"
        MO_VU_Rem  {}     -> panicOp "MO_VU_Rem"

        MO_VF_Insert  {}  -> panicOp "MO_VF_Insert"
        MO_VF_Extract {}  -> panicOp "MO_VF_Extract"

        MO_VF_Add  {}     -> panicOp "MO_VF_Add"
        MO_VF_Sub  {}     -> panicOp "MO_VF_Sub"
        MO_VF_Neg  {}     -> panicOp "MO_VF_Neg"
        MO_VF_Mul  {}     -> panicOp "MO_VF_Mul"
        MO_VF_Quot {}     -> panicOp "MO_VF_Quot"

  where
    -- Trace the offending op and die; factors out the 21 identical
    -- pprTrace/panic alternatives the previous version spelled out.
    -- The traced and panicked strings are byte-identical to before.
    panicOp name = pprTrace "offending mop:"
                            (ptext $ sLit name)
                            (panic $ "PprC.pprMachOp_for_C: " ++ name
                                  ++ " should have been handled earlier!")
-- | Do the argument(s) of this MachOp have signed integer type?
-- (Such operands must be cast to a signed C type before printing.)
signedOp :: MachOp -> Bool
signedOp mop = case mop of
  MO_S_Quot _    -> True
  MO_S_Rem  _    -> True
  MO_S_Neg  _    -> True
  MO_S_Ge   _    -> True
  MO_S_Le   _    -> True
  MO_S_Gt   _    -> True
  MO_S_Lt   _    -> True
  MO_S_Shr  _    -> True
  MO_SS_Conv _ _ -> True
  MO_SF_Conv _ _ -> True
  _              -> False
-- | Is this MachOp a comparison between floating-point arguments?
floatComparison :: MachOp -> Bool
floatComparison mop = case mop of
  MO_F_Eq _ -> True
  MO_F_Ne _ -> True
  MO_F_Ge _ -> True
  MO_F_Le _ -> True
  MO_F_Gt _ -> True
  MO_F_Lt _ -> True
  _         -> False
-- ---------------------------------------------------------------------
-- tend to be implemented by foreign calls
-- | The C-level function name a callish MachOp compiles to.  Most map to
-- libm / libc routines or to helpers in the RTS; the ops with multiple
-- results (QuotRem, Add2, ...) have no C counterpart and panic.
pprCallishMachOp_for_C :: CallishMachOp -> SDoc

pprCallishMachOp_for_C mop
    = case mop of
        MO_F64_Pwr      -> ptext (sLit "pow")
        MO_F64_Sin      -> ptext (sLit "sin")
        MO_F64_Cos      -> ptext (sLit "cos")
        MO_F64_Tan      -> ptext (sLit "tan")
        MO_F64_Sinh     -> ptext (sLit "sinh")
        MO_F64_Cosh     -> ptext (sLit "cosh")
        MO_F64_Tanh     -> ptext (sLit "tanh")
        MO_F64_Asin     -> ptext (sLit "asin")
        MO_F64_Acos     -> ptext (sLit "acos")
        MO_F64_Atan     -> ptext (sLit "atan")
        MO_F64_Log      -> ptext (sLit "log")
        MO_F64_Exp      -> ptext (sLit "exp")
        MO_F64_Sqrt     -> ptext (sLit "sqrt")
        MO_F32_Pwr      -> ptext (sLit "powf")
        MO_F32_Sin      -> ptext (sLit "sinf")
        MO_F32_Cos      -> ptext (sLit "cosf")
        MO_F32_Tan      -> ptext (sLit "tanf")
        MO_F32_Sinh     -> ptext (sLit "sinhf")
        MO_F32_Cosh     -> ptext (sLit "coshf")
        MO_F32_Tanh     -> ptext (sLit "tanhf")
        MO_F32_Asin     -> ptext (sLit "asinf")
        MO_F32_Acos     -> ptext (sLit "acosf")
        MO_F32_Atan     -> ptext (sLit "atanf")
        MO_F32_Log      -> ptext (sLit "logf")
        MO_F32_Exp      -> ptext (sLit "expf")
        MO_F32_Sqrt     -> ptext (sLit "sqrtf")
        MO_WriteBarrier -> ptext (sLit "write_barrier")
        MO_Memcpy _     -> ptext (sLit "memcpy")
        MO_Memset _     -> ptext (sLit "memset")
        MO_Memmove _    -> ptext (sLit "memmove")
        -- RTS helper names are derived from the op's width
        (MO_BSwap w)    -> ptext (sLit $ bSwapLabel w)
        (MO_PopCnt w)   -> ptext (sLit $ popCntLabel w)
        (MO_Clz w)      -> ptext (sLit $ clzLabel w)
        (MO_Ctz w)      -> ptext (sLit $ ctzLabel w)
        (MO_AtomicRMW w amop) -> ptext (sLit $ atomicRMWLabel w amop)
        (MO_Cmpxchg w)  -> ptext (sLit $ cmpxchgLabel w)
        (MO_AtomicRead w)  -> ptext (sLit $ atomicReadLabel w)
        (MO_AtomicWrite w) -> ptext (sLit $ atomicWriteLabel w)
        (MO_UF_Conv w)  -> ptext (sLit $ word2FloatLabel w)

        MO_S_QuotRem  {} -> unsupported
        MO_U_QuotRem  {} -> unsupported
        MO_U_QuotRem2 {} -> unsupported
        MO_Add2       {} -> unsupported
        MO_SubWordC   {} -> unsupported
        MO_AddIntC    {} -> unsupported
        MO_SubIntC    {} -> unsupported
        MO_U_Mul2     {} -> unsupported
        MO_Touch         -> unsupported
        (MO_Prefetch_Data _ ) -> unsupported
        --- we could support prefetch via "__builtin_prefetch"
        --- Not adding it for now
    where unsupported = panic ("pprCallishMachOp_for_C: " ++ show mop
                            ++ " not supported!")
-- ---------------------------------------------------------------------
-- Useful #defines
--
-- | Wrappers around the RTS's control-flow macros (from includes/Stg.h):
-- JMP_ for tail calls, FN_ / IF_ for externally / locally visible
-- function definitions.
mkJMP_, mkFN_, mkIF_ :: SDoc -> SDoc

mkJMP_ i = ptext (sLit "JMP_") <> parens i
mkFN_  i = ptext (sLit "FN_")  <> parens i -- externally visible function
mkIF_  i = ptext (sLit "IF_")  <> parens i -- locally visible

-- from includes/Stg.h
--
-- | Casts to the basic Stg C types.
mkC_,mkW_,mkP_ :: SDoc

mkC_  = ptext (sLit "(C_)")        -- StgChar
mkW_  = ptext (sLit "(W_)")        -- StgWord
mkP_  = ptext (sLit "(P_)")        -- StgWord*
-- ---------------------------------------------------------------------
--
-- Assignments
--
-- Generating assignments is what we're all about, here
--
-- | Print an assignment to a register.  Pointer-register-to-pointer-
-- register moves (and word-aligned offsets thereof) are printed via the
-- .p view; otherwise the rhs is cast as needed, never the lvalue
-- (casting the lvalue triggers warnings in GCC 3.4+).
pprAssign :: DynFlags -> CmmReg -> CmmExpr -> SDoc

-- dest is a reg, rhs is a reg
pprAssign _ r1 (CmmReg r2)
   | isPtrReg r1 && isPtrReg r2
   = hcat [ pprAsPtrReg r1, equals, pprAsPtrReg r2, semi ]

-- dest is a reg, rhs is a CmmRegOff
pprAssign dflags r1 (CmmRegOff r2 off)
   | isPtrReg r1 && isPtrReg r2 && (off `rem` wORD_SIZE dflags == 0)
   = hcat [ pprAsPtrReg r1, equals, pprAsPtrReg r2, op, int off', semi ]
  where
        -- byte offset converted to a word index for pointer arithmetic
        off1 = off `shiftR` wordShift dflags

        (op,off') | off >= 0  = (char '+', off1)
                  | otherwise = (char '-', -off1)

-- dest is a reg, rhs is anything.
-- We can't cast the lvalue, so we have to cast the rhs if necessary.  Casting
-- the lvalue elicits a warning from new GCC versions (3.4+).
pprAssign _ r1 r2
  | isFixedPtrReg r1             = mkAssign (mkP_ <> pprExpr1 r2)
  | Just ty <- strangeRegType r1 = mkAssign (parens ty <> pprExpr1 r2)
  | otherwise                    = mkAssign (pprExpr r2)
    where mkAssign x = if r1 == CmmGlobal BaseReg
                       then ptext (sLit "ASSIGN_BaseReg") <> parens x <> semi
                       else pprReg r1 <> ptext (sLit " = ") <> x <> semi
-- ---------------------------------------------------------------------
-- Registers
-- | Print a register, inserting a (W_) cast for registers whose C type
-- differs from the type the surrounding expression expects.
pprCastReg :: CmmReg -> SDoc
pprCastReg reg =
  if isStrangeTypeReg reg
    then mkW_ <> pprReg reg
    else pprReg reg
-- True if (pprReg reg) will give an expression with type StgPtr. We
-- need to take care with pointer arithmetic on registers with type
-- StgPtr.
-- | True if (pprReg reg) yields an expression of C type StgPtr.  Pointer
-- arithmetic on such registers needs care.
isFixedPtrReg :: CmmReg -> Bool
isFixedPtrReg reg = case reg of
  CmmLocal _  -> False
  CmmGlobal r -> isFixedPtrGlobalReg r
-- True if (pprAsPtrReg reg) will give an expression with type StgPtr
-- JD: THIS IS HORRIBLE AND SHOULD BE RENAMED, AT THE VERY LEAST.
-- THE GARBAGE WITH THE VNonGcPtr HELPS MATCH THE OLD CODE GENERATOR'S OUTPUT;
-- I'M NOT SURE IF IT SHOULD REALLY STAY THAT WAY.
-- | True if (pprAsPtrReg reg) yields an expression of C type StgPtr.
isPtrReg :: CmmReg -> Bool
isPtrReg reg = case reg of
  CmmLocal _                         -> False
  CmmGlobal (VanillaReg _ VGcPtr)    -> True  -- if we print via pprAsPtrReg
  CmmGlobal (VanillaReg _ VNonGcPtr) -> False -- if we print via pprAsPtrReg
  CmmGlobal g                        -> isFixedPtrGlobalReg g
-- True if this global reg has type StgPtr
-- | True if this global reg has C type StgPtr
isFixedPtrGlobalReg :: GlobalReg -> Bool
isFixedPtrGlobalReg r = case r of
  Sp    -> True
  Hp    -> True
  HpLim -> True
  SpLim -> True
  _     -> False
-- True if in C this register doesn't have the type given by
-- (machRepCType (cmmRegType reg)), so it has to be cast.
-- | True if in C this register doesn't have the type given by
-- (machRepCType (cmmRegType reg)), so it has to be cast.
isStrangeTypeReg :: CmmReg -> Bool
isStrangeTypeReg reg = case reg of
  CmmLocal _  -> False
  CmmGlobal g -> isStrangeTypeGlobal g
-- | True for the global regs that have a struct-pointer C type (see
-- 'strangeRegType') or are otherwise typed StgPtr.
isStrangeTypeGlobal :: GlobalReg -> Bool
isStrangeTypeGlobal r = case r of
  CCCS           -> True
  CurrentTSO     -> True
  CurrentNursery -> True
  BaseReg        -> True
  _              -> isFixedPtrGlobalReg r
-- | The odd C struct-pointer type of a register, if it has one.
strangeRegType :: CmmReg -> Maybe SDoc
strangeRegType reg = case reg of
  CmmGlobal CCCS           -> Just (ptext (sLit "struct CostCentreStack_ *"))
  CmmGlobal CurrentTSO     -> Just (ptext (sLit "struct StgTSO_ *"))
  CmmGlobal CurrentNursery -> Just (ptext (sLit "struct bdescr_ *"))
  CmmGlobal BaseReg        -> Just (ptext (sLit "struct StgRegTable_ *"))
  _                        -> Nothing
-- pprReg just prints the register name.
--
-- | Print just the register's name (no casts).
pprReg :: CmmReg -> SDoc
pprReg (CmmLocal  local)  = pprLocalReg local
pprReg (CmmGlobal global) = pprGlobalReg global
-- | Print a register through its pointer (.p) view; for vanilla
-- registers we warn (debug builds, via the WARN CPP macro) if the
-- register was not marked as a GC pointer.
pprAsPtrReg :: CmmReg -> SDoc
pprAsPtrReg (CmmGlobal (VanillaReg n gcp))
  = WARN( gcp /= VGcPtr, ppr n ) char 'R' <> int n <> ptext (sLit ".p")
pprAsPtrReg other_reg = pprReg other_reg
-- | The C name of a global (STG machine) register.
pprGlobalReg :: GlobalReg -> SDoc
pprGlobalReg gr = case gr of
    VanillaReg n _ -> char 'R' <> int n <> ptext (sLit ".w")
        -- pprGlobalReg prints a VanillaReg as a .w regardless
        -- Example:     R1.w = R1.w & (-0x8UL);
        --              JMP_(*R1.p);
    FloatReg   n   -> char 'F' <> int n
    DoubleReg  n   -> char 'D' <> int n
    LongReg    n   -> char 'L' <> int n
    Sp             -> ptext (sLit "Sp")
    SpLim          -> ptext (sLit "SpLim")
    Hp             -> ptext (sLit "Hp")
    HpLim          -> ptext (sLit "HpLim")
    CCCS           -> ptext (sLit "CCCS")
    CurrentTSO     -> ptext (sLit "CurrentTSO")
    CurrentNursery -> ptext (sLit "CurrentNursery")
    HpAlloc        -> ptext (sLit "HpAlloc")
    BaseReg        -> ptext (sLit "BaseReg")
    EagerBlackholeInfo -> ptext (sLit "stg_EAGER_BLACKHOLE_info")
    GCEnter1       -> ptext (sLit "stg_gc_enter_1")
    GCFun          -> ptext (sLit "stg_gc_fun")
    other          -> panic $ "pprGlobalReg: Unsupported register: " ++ show other
-- | A local register is a C local named by an underscore plus its unique.
pprLocalReg :: LocalReg -> SDoc
pprLocalReg (LocalReg uniq _) = hcat [char '_', ppr uniq]
-- -----------------------------------------------------------------------------
-- Foreign Calls
-- | Print a C function call, assigning the (at most one) result.
-- Pointer arguments are cast to (void *) and signed arguments to the
-- matching signed C type; address/signed results are cast back on
-- assignment.
pprCall :: SDoc -> CCallConv -> [Hinted CmmFormal] -> [Hinted CmmActual] -> SDoc
pprCall ppr_fn cconv results args
  | not (is_cishCC cconv)
  = panic $ "pprCall: unknown calling convention"

  | otherwise
  =
    ppr_assign results (ppr_fn <> parens (commafy (map pprArg args))) <> semi
  where
     ppr_assign []           rhs = rhs
     ppr_assign [(one,hint)] rhs
         = pprLocalReg one <> ptext (sLit " = ")
                 <> pprUnHint hint (localRegType one) <> rhs
     ppr_assign _other _rhs = panic "pprCall: multiple results"

     pprArg (expr, AddrHint)
        = cCast (ptext (sLit "void *")) expr
        -- see comment by machRepHintCType below
     pprArg (expr, SignedHint)
        = sdocWithDynFlags $ \dflags ->
          cCast (machRep_S_CType $ typeWidth $ cmmExprType dflags expr) expr
     pprArg (expr, _other)
        = pprExpr expr

     -- cast the result back from the hinted type to its real C type
     pprUnHint AddrHint   rep = parens (machRepCType rep)
     pprUnHint SignedHint rep = parens (machRepCType rep)
     pprUnHint _          _   = empty
-- Currently we only have these two calling conventions, but this might
-- change in the future...
-- | Calling conventions we can express as plain C calls.
is_cishCC :: CCallConv -> Bool
is_cishCC conv = case conv of
  CCallConv          -> True
  CApiConv           -> True
  StdCallConv        -> True
  PrimCallConv       -> False
  JavaScriptCallConv -> False
-- ---------------------------------------------------------------------
-- Find and print local and external declarations for a list of
-- Cmm statements.
--
-- | Traverse a procedure's blocks and return (a) declarations for every
-- temporary it uses and (b) extern declarations for every label it
-- mentions.
pprTempAndExternDecls :: [CmmBlock] -> (SDoc{-temps-}, SDoc{-externs-})
pprTempAndExternDecls stmts = (temp_docs, extern_docs)
  where
    (temps, lbls) = runTE (mapM_ te_BB stmts)
    temp_docs   = vcat (map pprTempDecl (uniqSetToList temps))
    extern_docs = vcat (map (pprExternDecl False{-ToDo-}) (Map.keys lbls))
-- | Extern declarations for every label mentioned by a data section.
pprDataExterns :: [CmmStatic] -> SDoc
pprDataExterns statics = vcat (map (pprExternDecl False{-ToDo-}) (Map.keys lbls))
  where
    (_, lbls) = runTE (mapM_ te_Static statics)
-- | Declare one temporary as a C local of the matching C type.
pprTempDecl :: LocalReg -> SDoc
pprTempDecl l@(LocalReg _ rep)
  = machRepCType rep <> space <> pprLocalReg l <> semi
-- | Emit an extern declaration for a label, via the EF_/EI_/IF_/II_
-- macro family; stdcall labels (Windows) instead get a real C prototype
-- so the compiler appends the @n suffix (#2276).
pprExternDecl :: Bool -> CLabel -> SDoc
pprExternDecl _in_srt lbl
  -- do not print anything for "known external" things
  | not (needsCDecl lbl) = empty
  | Just sz <- foreignLabelStdcallInfo lbl = stdcall_decl sz
  | otherwise =
        hcat [ visibility, label_type lbl,
               lparen, ppr lbl, text ");" ]
 where
  -- F_ for function labels, I_ for data labels
  label_type lbl | isCFunctionLabel lbl = ptext (sLit "F_")
                 | otherwise            = ptext (sLit "I_")

  visibility
     | externallyVisibleCLabel lbl = char 'E'
     | otherwise                   = char 'I'

  -- If the label we want to refer to is a stdcall function (on Windows) then
  -- we must generate an appropriate prototype for it, so that the C compiler will
  -- add the @n suffix to the label (#2276)
  stdcall_decl sz = sdocWithDynFlags $ \dflags ->
        ptext (sLit "extern __attribute__((stdcall)) void ") <> ppr lbl
        <> parens (commafy (replicate (sz `quot` wORD_SIZE dflags) (machRep_U_CType (wordWidth dflags))))
        <> semi
-- | Traversal state: the temporaries and labels found so far.
type TEState = (UniqSet LocalReg, Map CLabel ())

-- | A simple state monad over 'TEState', used to collect temporaries and
-- labels from Cmm.
newtype TE a = TE { unTE :: TEState -> (a, TEState) }

instance Functor TE where
      fmap = liftM

instance Applicative TE where
      pure a = TE $ \s -> (a, s)
      (<*>) = ap

instance Monad TE where
   TE m >>= k  = TE $ \s -> case m s of (a, s') -> unTE (k a) s'
   return = pure
-- | Record a reference to a label.
te_lbl :: CLabel -> TE ()
te_lbl lbl = TE $ \(temps,lbls) -> ((), (temps, Map.insert lbl () lbls))

-- | Record a use of a local register (temporary).
te_temp :: LocalReg -> TE ()
te_temp r = TE $ \(temps,lbls) -> ((), (addOneToUniqSet temps r, lbls))

-- | Run a traversal from empty collections and return what it found.
runTE :: TE () -> TEState
runTE (TE m) = snd (m (emptyUniqSet, Map.empty))

-- | Collect labels mentioned by a static datum.
te_Static :: CmmStatic -> TE ()
te_Static (CmmStaticLit lit) = te_Lit lit
te_Static _ = return ()

-- | Collect temps and labels from every statement of a block.
te_BB :: CmmBlock -> TE ()
te_BB block = mapM_ te_Stmt (blockToList mid) >> te_Stmt last
  where (_, mid, last) = blockSplit block

-- | Collect the label (if any) inside a literal.
te_Lit :: CmmLit -> TE ()
te_Lit (CmmLabel l) = te_lbl l
te_Lit (CmmLabelOff l _) = te_lbl l
te_Lit (CmmLabelDiffOff l1 _ _) = te_lbl l1
te_Lit _ = return ()
-- | Collect temporaries and labels mentioned by one Cmm statement.
te_Stmt :: CmmNode e x -> TE ()
te_Stmt stmt = case stmt of
  CmmAssign r e -> te_Reg r >> te_Expr e
  CmmStore l r  -> te_Expr l >> te_Expr r
  CmmUnsafeForeignCall target rs es -> do
    te_Target target
    mapM_ te_temp rs
    mapM_ te_Expr es
  CmmCondBranch e _ _ _      -> te_Expr e
  CmmSwitch e _              -> te_Expr e
  CmmCall { cml_target = e } -> te_Expr e
  _                          -> return ()
-- | Collect from a foreign-call target (prim targets mention nothing).
te_Target :: ForeignTarget -> TE ()
te_Target target = case target of
  ForeignTarget e _ -> te_Expr e
  PrimTarget{}      -> return ()
-- | Collect temporaries and labels mentioned by an expression.
te_Expr :: CmmExpr -> TE ()
te_Expr expr = case expr of
  CmmLit lit       -> te_Lit lit
  CmmLoad e _      -> te_Expr e
  CmmReg r         -> te_Reg r
  CmmMachOp _ es   -> mapM_ te_Expr es
  CmmRegOff r _    -> te_Reg r
  CmmStackSlot _ _ -> panic "te_Expr: CmmStackSlot not supported!"
-- | Record a register if it is a local temporary; globals need no decl.
te_Reg :: CmmReg -> TE ()
te_Reg reg = case reg of
  CmmLocal l -> te_temp l
  _          -> return ()
-- ---------------------------------------------------------------------
-- C types for MachReps
-- | Cast an expression to a C type: @(ty)expr@, parenthesising the
-- expression when necessary.
cCast :: SDoc -> CmmExpr -> SDoc
cCast ty expr = hcat [parens ty, pprExpr1 expr]
-- | A possibly-unaligned load.  On platforms where unaligned access is
-- unsafe we go through a packed one-field struct so the compiler emits
-- byte-wise access; elsewhere a plain cast-and-dereference suffices.
cLoad :: CmmExpr -> CmmType -> SDoc
cLoad expr rep
    = sdocWithPlatform $ \platform ->
      if bewareLoadStoreAlignment (platformArch platform)
      then let decl = machRepCType rep <+> ptext (sLit "x") <> semi
               struct = ptext (sLit "struct") <+> braces (decl)
               packed_attr = ptext (sLit "__attribute__((packed))")
               cast = parens (struct <+> packed_attr <> char '*')
           in parens (cast <+> pprExpr1 expr) <> ptext (sLit "->x")
      else char '*' <> parens (cCast (machRepPtrCType rep) expr)
    where -- On these platforms, unaligned loads are known to cause problems
          bewareLoadStoreAlignment ArchAlpha    = True
          bewareLoadStoreAlignment ArchMipseb   = True
          bewareLoadStoreAlignment ArchMipsel   = True
          bewareLoadStoreAlignment (ArchARM {}) = True
          bewareLoadStoreAlignment ArchARM64    = True
          -- Pessimistically assume that they will also cause problems
          -- on unknown arches
          bewareLoadStoreAlignment ArchUnknown  = True
          bewareLoadStoreAlignment _            = False
-- | True of GcPtrReg/NonGcReg of native word size: a non-float type
-- whose width equals the platform word width.
isCmmWordType :: DynFlags -> CmmType -> Bool
isCmmWordType dflags ty
  | isFloatType ty = False
  | otherwise      = typeWidth ty == wordWidth dflags
-- This is for finding the types of foreign call arguments. For a pointer
-- argument, we always cast the argument to (void *), to avoid warnings from
-- the C compiler.
-- | C type for a foreign-call argument, taking its hint into account:
-- pointer arguments are always cast to @void *@ to avoid C compiler
-- warnings, signed arguments get the signed C type of the same width,
-- and everything else uses the plain representation type.
machRepHintCType :: CmmType -> ForeignHint -> SDoc
machRepHintCType rep hint = case hint of
    AddrHint   -> ptext (sLit "void *")
    SignedHint -> machRep_S_CType (typeWidth rep)
    _other     -> machRepCType rep
-- | The C type of a pointer to a value of type @r@: @P_@ when @r@ is
-- a native-word-sized non-float type, otherwise @T*@ where @T@ is the
-- corresponding C type.
machRepPtrCType :: CmmType -> SDoc
machRepPtrCType r
 = sdocWithDynFlags $ \dflags ->
   if isCmmWordType dflags r then ptext (sLit "P_")
                             else machRepCType r <> char '*'
-- | The C type used to represent a Cmm value of the given type:
-- a float type at its width, otherwise the unsigned integer type.
machRepCType :: CmmType -> SDoc
machRepCType ty =
    let w = typeWidth ty
    in if isFloatType ty
          then machRep_F_CType w
          else machRep_U_CType w
-- | C type for a floating-point value of the given width.
machRep_F_CType :: Width -> SDoc
machRep_F_CType W32 = ptext (sLit "StgFloat") -- ToDo: correct?
machRep_F_CType W64 = ptext (sLit "StgDouble")
machRep_F_CType _   = panic "machRep_F_CType"

-- | C type for an unsigned integer of the given width.  The native
-- word width is checked first (it overlaps one of the fixed widths)
-- and maps to @W_@.
machRep_U_CType :: Width -> SDoc
machRep_U_CType w
 = sdocWithDynFlags $ \dflags ->
   case w of
     _ | w == wordWidth dflags -> ptext (sLit "W_")
     W8  -> ptext (sLit "StgWord8")
     W16 -> ptext (sLit "StgWord16")
     W32 -> ptext (sLit "StgWord32")
     W64 -> ptext (sLit "StgWord64")
     _   -> panic "machRep_U_CType"

-- | C type for a signed integer of the given width.  The native word
-- width is checked first and maps to @I_@.
machRep_S_CType :: Width -> SDoc
machRep_S_CType w
 = sdocWithDynFlags $ \dflags ->
   case w of
     _ | w == wordWidth dflags -> ptext (sLit "I_")
     W8  -> ptext (sLit "StgInt8")
     W16 -> ptext (sLit "StgInt16")
     W32 -> ptext (sLit "StgInt32")
     W64 -> ptext (sLit "StgInt64")
     _   -> panic "machRep_S_CType"
-- ---------------------------------------------------------------------
-- print strings as valid C strings
-- | Render a byte string as a double-quoted C string literal, mapping
-- each byte through 'charToC' (defined elsewhere; presumably produces
-- a C-safe escape for the byte).
pprStringInCStyle :: [Word8] -> SDoc
pprStringInCStyle s = doubleQuotes (text (concatMap charToC s))
-- ---------------------------------------------------------------------------
-- Initialising static objects with floating-point numbers. We can't
-- just emit the floating point number, because C will cast it to an int
-- by rounding it. We want the actual bit-representation of the float.
-- This is a hack to turn the floating point numbers into ints that we
-- can safely initialise to static locations.
-- | Do doubles occupy two machine words on this target?  Panics if a
-- 64-bit double is neither one nor two words wide.
big_doubles :: DynFlags -> Bool
big_doubles dflags
  | widthInBytes W64 == 2 * wORD_SIZE dflags = True
  | widthInBytes W64 == wORD_SIZE dflags = False
  | otherwise = panic "big_doubles"

-- | Reinterpret the bytes of a float array as an int array (a bit
-- cast, not a numeric conversion).
castFloatToIntArray :: STUArray s Int Float -> ST s (STUArray s Int Int)
castFloatToIntArray = U.castSTUArray

-- | Reinterpret the bytes of a double array as an int array (a bit
-- cast, not a numeric conversion).
castDoubleToIntArray :: STUArray s Int Double -> ST s (STUArray s Int Int)
castDoubleToIntArray = U.castSTUArray
-- floats are always 1 word
-- | Obtain the bit pattern of a float literal as an integer literal of
-- word width: write the float into a one-element unboxed array and
-- read the same storage back as an Int.
floatToWord :: DynFlags -> Rational -> CmmLit
floatToWord dflags r
  = runST (do
        arr <- newArray_ ((0::Int),0)
        writeArray arr 0 (fromRational r)
        arr' <- castFloatToIntArray arr
        i <- readArray arr' 0
        return (CmmInt (toInteger i) (wordWidth dflags))
    )

-- | Obtain the bit pattern of a double literal as one or two
-- word-width integer literals, depending on 'big_doubles'.
doubleToWords :: DynFlags -> Rational -> [CmmLit]
doubleToWords dflags r
  | big_doubles dflags                  -- doubles are 2 words
  = runST (do
        arr <- newArray_ ((0::Int),1)
        writeArray arr 0 (fromRational r)
        arr' <- castDoubleToIntArray arr
        i1 <- readArray arr' 0
        i2 <- readArray arr' 1
        return [ CmmInt (toInteger i1) (wordWidth dflags)
               , CmmInt (toInteger i2) (wordWidth dflags)
               ]
    )
  | otherwise                           -- doubles are 1 word
  = runST (do
        arr <- newArray_ ((0::Int),0)
        writeArray arr 0 (fromRational r)
        arr' <- castDoubleToIntArray arr
        i <- readArray arr' 0
        return [ CmmInt (toInteger i) (wordWidth dflags) ]
    )
-- ---------------------------------------------------------------------------
-- Utils
-- | Shift amount corresponding to the native word width (via
-- 'widthInLog').
wordShift :: DynFlags -> Int
wordShift = widthInLog . wordWidth

-- | Lay out documents on one line, separated by commas.
commafy :: [SDoc] -> SDoc
commafy = hsep . punctuate comma
-- Print in C hex format: 0x13fa
-- | Print an integer literal in C hex format (e.g. @0x13fa@), with
-- the value truncated to the given width and an unsigned type suffix
-- appended.  Negative values are printed as a parenthesised negation.
pprHexVal :: Integer -> Width -> SDoc
pprHexVal w rep
  | w < 0 = parens (char '-' <>
                    ptext (sLit "0x") <> intToDoc (-w) <> repsuffix rep)
  | otherwise = ptext (sLit "0x") <> intToDoc w <> repsuffix rep
  where
        -- type suffix for literals:
        -- Integer literals are unsigned in Cmm/C. We explicitly cast to
        -- signed values for doing signed operations, but at all other
        -- times values are unsigned. This also helps eliminate occasional
        -- warnings about integer overflow from gcc.
        repsuffix W64 = sdocWithDynFlags $ \dflags ->
               if cINT_SIZE dflags == 8 then char 'U'
          else if cLONG_SIZE dflags == 8 then ptext (sLit "UL")
          else if cLONG_LONG_SIZE dflags == 8 then ptext (sLit "ULL")
          else panic "pprHexVal: Can't find a 64-bit type"
        repsuffix _ = char 'U'

        -- Print the truncated value; zero needs a special case because
        -- 'go' emits nothing for it.
        intToDoc :: Integer -> SDoc
        intToDoc i = case truncInt i of
                         0 -> char '0'
                         v -> go v

        -- We need to truncate value as Cmm backend does not drop
        -- redundant bits to ease handling of negative values.
        -- Thus the following Cmm code on 64-bit arch, like amd64:
        --     CInt v;
        --     v = {something};
        --     if (v == %lobits32(-1)) { ...
        -- leads to the following C code:
        --     StgWord64 v = (StgWord32)({something});
        --     if (v == 0xFFFFffffFFFFffffU) { ...
        -- Such code is incorrect as it promotes both operands to StgWord64
        -- and the whole condition is always false.
        truncInt :: Integer -> Integer
        truncInt i =
            case rep of
                W8  -> i `rem` (2^(8 :: Int))
                W16 -> i `rem` (2^(16 :: Int))
                W32 -> i `rem` (2^(32 :: Int))
                W64 -> i `rem` (2^(64 :: Int))
                _   -> panic ("pprHexVal/truncInt: C backend can't encode "
                              ++ show rep ++ " literals")

        -- Emit hex digits most-significant first by recursing on the
        -- quotient before appending the current digit.
        go 0 = empty
        go w' = go q <> dig
         where
           (q,r) = w' `quotRem` 16
           dig | r < 10 = char (chr (fromInteger r + ord '0'))
               | otherwise = char (chr (fromInteger r - 10 + ord 'a'))
| AlexanderPankiv/ghc | compiler/cmm/PprC.hs | bsd-3-clause | 48,336 | 0 | 20 | 14,633 | 12,643 | 6,266 | 6,377 | 826 | 63 |
-- | Generating C symbol names emitted by the compiler.
module CPrim
( atomicReadLabel
, atomicWriteLabel
, atomicRMWLabel
, cmpxchgLabel
, popCntLabel
, bSwapLabel
, word2FloatLabel
) where
import CmmType
import CmmMachOp
import Outputable
-- | Label of the C fallback routine implementing population count at
-- the given width.
popCntLabel :: Width -> String
popCntLabel w = "hs_popcnt" ++ pprWidth w
  where
    pprWidth W8  = "8"
    pprWidth W16 = "16"
    pprWidth W32 = "32"
    pprWidth W64 = "64"
    pprWidth w   = pprPanic "popCntLabel: Unsupported word width " (ppr w)

-- | Label of the C fallback routine implementing byte swapping at the
-- given width (no 8-bit variant).
bSwapLabel :: Width -> String
bSwapLabel w = "hs_bswap" ++ pprWidth w
  where
    pprWidth W16 = "16"
    pprWidth W32 = "32"
    pprWidth W64 = "64"
    pprWidth w   = pprPanic "bSwapLabel: Unsupported word width " (ppr w)

-- | Label of the C routine converting a word to a float of the given
-- width (32- and 64-bit only).
word2FloatLabel :: Width -> String
word2FloatLabel w = "hs_word2float" ++ pprWidth w
  where
    pprWidth W32 = "32"
    pprWidth W64 = "64"
    pprWidth w   = pprPanic "word2FloatLabel: Unsupported word width " (ppr w)

-- | Label of the C routine implementing the given atomic
-- read-modify-write operation at the given width, e.g.
-- @hs_atomic_add32@.
atomicRMWLabel :: Width -> AtomicMachOp -> String
atomicRMWLabel w amop = "hs_atomic_" ++ pprFunName amop ++ pprWidth w
  where
    pprWidth W8  = "8"
    pprWidth W16 = "16"
    pprWidth W32 = "32"
    pprWidth W64 = "64"
    pprWidth w   = pprPanic "atomicRMWLabel: Unsupported word width " (ppr w)

    pprFunName AMO_Add  = "add"
    pprFunName AMO_Sub  = "sub"
    pprFunName AMO_And  = "and"
    pprFunName AMO_Nand = "nand"
    pprFunName AMO_Or   = "or"
    pprFunName AMO_Xor  = "xor"

-- | Label of the C routine implementing compare-and-swap at the given
-- width.
cmpxchgLabel :: Width -> String
cmpxchgLabel w = "hs_cmpxchg" ++ pprWidth w
  where
    pprWidth W8  = "8"
    pprWidth W16 = "16"
    pprWidth W32 = "32"
    pprWidth W64 = "64"
    pprWidth w   = pprPanic "cmpxchgLabel: Unsupported word width " (ppr w)

-- | Label of the C routine implementing an atomic read at the given
-- width.
atomicReadLabel :: Width -> String
atomicReadLabel w = "hs_atomicread" ++ pprWidth w
  where
    pprWidth W8  = "8"
    pprWidth W16 = "16"
    pprWidth W32 = "32"
    pprWidth W64 = "64"
    pprWidth w   = pprPanic "atomicReadLabel: Unsupported word width " (ppr w)

-- | Label of the C routine implementing an atomic write at the given
-- width.
atomicWriteLabel :: Width -> String
atomicWriteLabel w = "hs_atomicwrite" ++ pprWidth w
  where
    pprWidth W8  = "8"
    pprWidth W16 = "16"
    pprWidth W32 = "32"
    pprWidth W64 = "64"
    pprWidth w   = pprPanic "atomicWriteLabel: Unsupported word width " (ppr w)
| frantisekfarka/ghc-dsi | compiler/nativeGen/CPrim.hs | bsd-3-clause | 2,263 | 0 | 9 | 564 | 613 | 309 | 304 | 63 | 10 |
--
-- Copyright (c) 2012 Citrix Systems, Inc.
--
-- This program is free software; you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation; either version 2 of the License, or
-- (at your option) any later version.
--
-- This program is distributed in the hope that it will be useful,
-- but WITHOUT ANY WARRANTY; without even the implied warranty of
-- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
--
-- You should have received a copy of the GNU General Public License
-- along with this program; if not, write to the Free Software
-- Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
--
module UpdateMgr.Background where
import Control.Concurrent.Lifted
import Control.Monad
import Control.Monad.Error
import Control.Exception.Lifted
import Tools.Log
import UpdateMgr.Rpc
import UpdateMgr.Error
import UpdateMgr.UpdateMonad
import UpdateMgr.UpdateReqHandler
import UpdateMgr.DbReqHandler
import UpdateMgr.App
-- | A handle on an optional background thread: 'Nothing' while idle,
-- 'Just' the worker's 'ThreadId' while a job is running.
type Job = MVar (Maybe ThreadId)

-- | Create a fresh, idle job slot.
initJob :: IO Job
initJob = newMVar Nothing

-- | Kill the job's thread if one is running, and mark the slot idle.
cancelJob :: Job -> IO ()
cancelJob job = modifyMVar_ job cancel
  where
    cancel Nothing = return Nothing
    cancel (Just thread) = killThread thread >> return Nothing

-- | Fork an 'App' action onto its own thread.
backgroundApp :: App () -> App ThreadId
backgroundApp = fork

-- | Run an 'Update' computation synchronously inside 'App'.
foregroundUpdate :: Update a -> App a
foregroundUpdate = runner

-- | Interpret an 'Update' computation with the standard update and
-- database request handlers.
runner :: Update a -> App a
runner = runUpdate handleUpdateReq handleDbReq
-- | Start an 'Update' action on a background thread, recording its
-- 'ThreadId' in the job slot.  Throws 'BackgroundOpAlreadyRunning' if
-- a job is already in flight; the slot is cleared again when the
-- action finishes, whether normally or by exception.
backgroundUpdate :: Job -> Update () -> App ()
backgroundUpdate job action = modifyMVar_ job start
  where
    start (Just _) = throwError (localE BackgroundOpAlreadyRunning)
    start Nothing  = liftM Just (backgroundApp work)
    work = do
        info "starting bg operation."
        finally (runner action) (swapMVar job Nothing)
-- | Run an 'Update' in the foreground, given an explicit application state.
foregroundUpdate' :: AppState -> Update a -> Rpc a
foregroundUpdate' state action = runApp state (foregroundUpdate action)

-- | Launch an 'Update' in the background, given an explicit application state.
backgroundUpdate' :: AppState -> Job -> Update () -> Rpc ()
backgroundUpdate' state job action = runApp state (backgroundUpdate job action)
| crogers1/manager | updatemgr/UpdateMgr/Background.hs | gpl-2.0 | 2,128 | 0 | 15 | 369 | 447 | 232 | 215 | 35 | 2 |
{-
Copyright (C) 2012-2014 John MacFarlane <[email protected]>
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
-}
import Text.Pandoc
import Criterion.Main
import Criterion.Types (Config(..))
import Data.Maybe (mapMaybe)
import Debug.Trace (trace)
import Text.Pandoc.Error
-- | Build a benchmark for a string reader: render the document with
-- the pure string writer of the same name, then time reading the
-- rendered text back in.  Yields 'Nothing' (after emitting a trace
-- message) when no matching pure string writer exists.
readerBench :: Pandoc
            -> (String, ReaderOptions -> String -> IO (Either PandocError Pandoc))
            -> Maybe Benchmark
readerBench doc (name, reader) =
  case lookup name writers of
    Just (PureStringWriter writer) ->
      let rendered = writer def{ writerWrapText = WrapAuto } doc
      in Just $ bench (name ++ " reader") $ nfIO $
           fmap handleError (reader def{ readerSmart = True } rendered)
    _ -> trace ("\nCould not find writer for " ++ name ++ "\n") Nothing
-- | Build a benchmark that times rendering the document with the
-- given pure string writer.
writerBench :: Pandoc
            -> (String, WriterOptions -> Pandoc -> String)
            -> Benchmark
writerBench doc (name, writer) =
    bench benchName (nf render doc)
  where
    benchName = name ++ " writer"
    render    = writer def{ writerWrapText = WrapAuto }
-- | Read the markdown test suite once, then benchmark every pure
-- string writer and every string reader against the resulting
-- document.  The haddock reader is excluded from the reader
-- benchmarks.
main :: IO ()
main = do
  inp <- readFile "tests/testsuite.txt"
  let opts     = def{ readerSmart = True }
      doc      = handleError $ readMarkdown opts inp
      -- string readers, minus haddock
      readers' = [(n, r) | (n, StringReader r) <- readers]
      readerBs = mapMaybe (readerBench doc)
                          (filter (\(n, _) -> n /= "haddock") readers')
      -- all pure string writers
      writers' = [(n, w) | (n, PureStringWriter w) <- writers]
      writerBs = map (writerBench doc) writers'
  defaultMainWith defaultConfig{ timeLimit = 6.0 }
                  (writerBs ++ readerBs)
| infotroph/pandoc | benchmark/benchmark-pandoc.hs | gpl-2.0 | 2,154 | 1 | 16 | 484 | 525 | 272 | 253 | 34 | 2 |
<?xml version="1.0" encoding="UTF-8"?><!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<helpset version="2.0" xml:lang="es-ES">
<title>Getting started Guide</title>
<maps>
<homeID>top</homeID>
<mapref location="map.jhm"/>
</maps>
<view>
<name>TOC</name>
<label>Contents</label>
<type>org.zaproxy.zap.extension.help.ZapTocView</type>
<data>toc.xml</data>
</view>
<view>
<name>Index</name>
<label>Índice</label>
<type>javax.help.IndexView</type>
<data>index.xml</data>
</view>
<view>
<name>Search</name>
<label>Buscar</label>
<type>javax.help.SearchView</type>
<data engine="com.sun.java.help.search.DefaultSearchEngine">
JavaHelpSearch
</data>
</view>
<view>
<name>Favorites</name>
<label>Favorites</label>
<type>javax.help.FavoritesView</type>
</view>
</helpset> | thc202/zap-extensions | addOns/gettingStarted/src/main/javahelp/org/zaproxy/zap/extension/gettingStarted/resources/help_es_ES/helpset_es_ES.hs | apache-2.0 | 968 | 89 | 29 | 157 | 391 | 210 | 181 | -1 | -1 |
<?xml version="1.0" encoding="UTF-8"?><!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<helpset version="2.0" xml:lang="fil-PH">
<title>Mabilis na Pagsisimula | Ekstensyon ng ZAP</title>
<maps>
<homeID>top</homeID>
<mapref location="map.jhm"/>
</maps>
<view>
<name>TOC</name>
<label>Mga Nilalaman</label>
<type>org.zaproxy.zap.extension.help.ZapTocView</type>
<data>toc.xml</data>
</view>
<view>
<name>Index</name>
<label>Indeks</label>
<type>javax.help.IndexView</type>
<data>index.xml</data>
</view>
<view>
<name>Search</name>
<label>Paghahanap</label>
<type>javax.help.SearchView</type>
<data engine="com.sun.java.help.search.DefaultSearchEngine">
JavaHelpSearch
</data>
</view>
<view>
<name>Favorites</name>
<label>Mga Paborito</label>
<type>javax.help.FavoritesView</type>
</view>
</helpset> | thc202/zap-extensions | addOns/quickstart/src/main/javahelp/org/zaproxy/zap/extension/quickstart/resources/help_fil_PH/helpset_fil_PH.hs | apache-2.0 | 1,001 | 87 | 29 | 163 | 406 | 216 | 190 | -1 | -1 |
{-# LANGUAGE Unsafe #-}
{-# LANGUAGE NoImplicitPrelude
, BangPatterns
, MagicHash
, UnboxedTuples
#-}
{-# OPTIONS_HADDOCK hide #-}
{-# LANGUAGE StandaloneDeriving #-}
-----------------------------------------------------------------------------
-- |
-- Module : GHC.ForeignPtr
-- Copyright : (c) The University of Glasgow, 1992-2003
-- License : see libraries/base/LICENSE
--
-- Maintainer : [email protected]
-- Stability : internal
-- Portability : non-portable (GHC extensions)
--
-- GHC's implementation of the 'ForeignPtr' data type.
--
-----------------------------------------------------------------------------
module GHC.ForeignPtr
(
ForeignPtr(..),
ForeignPtrContents(..),
FinalizerPtr,
FinalizerEnvPtr,
newForeignPtr_,
mallocForeignPtr,
mallocPlainForeignPtr,
mallocForeignPtrBytes,
mallocPlainForeignPtrBytes,
mallocForeignPtrAlignedBytes,
mallocPlainForeignPtrAlignedBytes,
addForeignPtrFinalizer,
addForeignPtrFinalizerEnv,
touchForeignPtr,
unsafeForeignPtrToPtr,
castForeignPtr,
newConcForeignPtr,
addForeignPtrConcFinalizer,
finalizeForeignPtr
) where
import Foreign.Storable
import Data.Foldable ( sequence_ )
import GHC.Show
import GHC.Base
import GHC.IORef
import GHC.STRef ( STRef(..) )
import GHC.Ptr ( Ptr(..), FunPtr(..) )
-- |The type 'ForeignPtr' represents references to objects that are
-- maintained in a foreign language, i.e., that are not part of the
-- data structures usually managed by the Haskell storage manager.
-- The essential difference between 'ForeignPtr's and vanilla memory
-- references of type @Ptr a@ is that the former may be associated
-- with /finalizers/. A finalizer is a routine that is invoked when
-- the Haskell storage manager detects that - within the Haskell heap
-- and stack - there are no more references left that are pointing to
-- the 'ForeignPtr'. Typically, the finalizer will, then, invoke
-- routines in the foreign language that free the resources bound by
-- the foreign object.
--
-- The 'ForeignPtr' is parameterised in the same way as 'Ptr'. The
-- type argument of 'ForeignPtr' should normally be an instance of
-- class 'Storable'.
--
data ForeignPtr a = ForeignPtr Addr# ForeignPtrContents
-- we cache the Addr# in the ForeignPtr object, but attach
-- the finalizer to the IORef (or the MutableByteArray# in
-- the case of a MallocPtr). The aim of the representation
-- is to make withForeignPtr efficient; in fact, withForeignPtr
-- should be just as efficient as unpacking a Ptr, and multiple
-- withForeignPtrs can share an unpacked ForeignPtr. Note
-- that touchForeignPtr only has to touch the ForeignPtrContents
-- object, because that ensures that whatever the finalizer is
-- attached to is kept alive.
-- | The finalizers attached to a 'ForeignPtr'.  A given 'ForeignPtr'
-- holds either C finalizers (chained onto a weak pointer) or Haskell
-- 'IO' finalizers, never both: the insertion functions below call
-- 'noMixingError' when the two kinds are mixed.
data Finalizers
  = NoFinalizers
  | CFinalizers (Weak# ())
  | HaskellFinalizers [IO ()]

-- | What a 'ForeignPtr' points into:
--
--   * 'PlainForeignPtr': memory obtained from a plain 'Ptr'; may carry
--     finalizers.
--   * 'MallocPtr': a pinned 'MutableByteArray#' on the GC'd heap; may
--     carry finalizers.
--   * 'PlainPtr': a pinned 'MutableByteArray#' with no finalizer slot;
--     attempts to attach a finalizer raise an error.
data ForeignPtrContents
  = PlainForeignPtr !(IORef Finalizers)
  | MallocPtr (MutableByteArray# RealWorld) !(IORef Finalizers)
  | PlainPtr (MutableByteArray# RealWorld)

-- Equality, ordering and printing all go via the raw address.
instance Eq (ForeignPtr a) where
    p == q = unsafeForeignPtrToPtr p == unsafeForeignPtrToPtr q

instance Ord (ForeignPtr a) where
    compare p q = compare (unsafeForeignPtrToPtr p) (unsafeForeignPtrToPtr q)

instance Show (ForeignPtr a) where
    showsPrec p f = showsPrec p (unsafeForeignPtrToPtr f)
-- |A finalizer is represented as a pointer to a foreign function that, at
-- finalisation time, gets as an argument a plain pointer variant of the
-- foreign pointer that the finalizer is associated with.
--
-- Note that the foreign function /must/ use the @ccall@ calling convention.
--
type FinalizerPtr a = FunPtr (Ptr a -> IO ())
type FinalizerEnvPtr env a = FunPtr (Ptr env -> Ptr a -> IO ())
newConcForeignPtr :: Ptr a -> IO () -> IO (ForeignPtr a)
--
-- ^Turns a plain memory reference into a foreign object by
-- associating a finalizer - given by the monadic operation - with the
-- reference. The storage manager will start the finalizer, in a
-- separate thread, some time after the last reference to the
-- @ForeignPtr@ is dropped. There is no guarantee of promptness, and
-- in fact there is no guarantee that the finalizer will eventually
-- run at all.
--
-- Note that references from a finalizer do not necessarily prevent
-- another object from being finalized. If A's finalizer refers to B
-- (perhaps using 'touchForeignPtr', then the only guarantee is that
-- B's finalizer will never be started before A's. If both A and B
-- are unreachable, then both finalizers will start together. See
-- 'touchForeignPtr' for more on finalizer ordering.
--
newConcForeignPtr p finalizer
= do fObj <- newForeignPtr_ p
addForeignPtrConcFinalizer fObj finalizer
return fObj
mallocForeignPtr :: Storable a => IO (ForeignPtr a)
-- ^ Allocate some memory and return a 'ForeignPtr' to it. The memory
-- will be released automatically when the 'ForeignPtr' is discarded.
--
-- 'mallocForeignPtr' is equivalent to
--
-- > do { p <- malloc; newForeignPtr finalizerFree p }
--
-- although it may be implemented differently internally: you may not
-- assume that the memory returned by 'mallocForeignPtr' has been
-- allocated with 'Foreign.Marshal.Alloc.malloc'.
--
-- GHC notes: 'mallocForeignPtr' has a heavily optimised
-- implementation in GHC. It uses pinned memory in the garbage
-- collected heap, so the 'ForeignPtr' does not require a finalizer to
-- free the memory. Use of 'mallocForeignPtr' and associated
-- functions is strongly recommended in preference to 'newForeignPtr'
-- with a finalizer.
--
mallocForeignPtr = doMalloc undefined
where doMalloc :: Storable b => b -> IO (ForeignPtr b)
doMalloc a
| I# size < 0 = error "mallocForeignPtr: size must be >= 0"
| otherwise = do
r <- newIORef NoFinalizers
IO $ \s ->
case newAlignedPinnedByteArray# size align s of { (# s', mbarr# #) ->
(# s', ForeignPtr (byteArrayContents# (unsafeCoerce# mbarr#))
(MallocPtr mbarr# r) #)
}
where !(I# size) = sizeOf a
!(I# align) = alignment a
-- | This function is similar to 'mallocForeignPtr', except that the
-- size of the memory required is given explicitly as a number of bytes.
mallocForeignPtrBytes :: Int -> IO (ForeignPtr a)
mallocForeignPtrBytes size | size < 0 =
error "mallocForeignPtrBytes: size must be >= 0"
mallocForeignPtrBytes (I# size) = do
r <- newIORef NoFinalizers
IO $ \s ->
case newPinnedByteArray# size s of { (# s', mbarr# #) ->
(# s', ForeignPtr (byteArrayContents# (unsafeCoerce# mbarr#))
(MallocPtr mbarr# r) #)
}
-- | This function is similar to 'mallocForeignPtrBytes', except that the
-- size and alignment of the memory required is given explicitly as numbers of
-- bytes.
mallocForeignPtrAlignedBytes :: Int -> Int -> IO (ForeignPtr a)
mallocForeignPtrAlignedBytes size _align | size < 0 =
error "mallocForeignPtrAlignedBytes: size must be >= 0"
mallocForeignPtrAlignedBytes (I# size) (I# align) = do
r <- newIORef NoFinalizers
IO $ \s ->
case newAlignedPinnedByteArray# size align s of { (# s', mbarr# #) ->
(# s', ForeignPtr (byteArrayContents# (unsafeCoerce# mbarr#))
(MallocPtr mbarr# r) #)
}
-- | Allocate some memory and return a 'ForeignPtr' to it. The memory
-- will be released automatically when the 'ForeignPtr' is discarded.
--
-- GHC notes: 'mallocPlainForeignPtr' has a heavily optimised
-- implementation in GHC. It uses pinned memory in the garbage
-- collected heap, as for mallocForeignPtr. Unlike mallocForeignPtr, a
-- ForeignPtr created with mallocPlainForeignPtr carries no finalizers.
-- It is not possible to add a finalizer to a ForeignPtr created with
-- mallocPlainForeignPtr. This is useful for ForeignPtrs that will live
-- only inside Haskell (such as those created for packed strings).
-- Attempts to add a finalizer to a ForeignPtr created this way, or to
-- finalize such a pointer, will throw an exception.
--
mallocPlainForeignPtr :: Storable a => IO (ForeignPtr a)
mallocPlainForeignPtr = doMalloc undefined
where doMalloc :: Storable b => b -> IO (ForeignPtr b)
doMalloc a
| I# size < 0 = error "mallocForeignPtr: size must be >= 0"
| otherwise = IO $ \s ->
case newAlignedPinnedByteArray# size align s of { (# s', mbarr# #) ->
(# s', ForeignPtr (byteArrayContents# (unsafeCoerce# mbarr#))
(PlainPtr mbarr#) #)
}
where !(I# size) = sizeOf a
!(I# align) = alignment a
-- | This function is similar to 'mallocForeignPtrBytes', except that
-- the internally an optimised ForeignPtr representation with no
-- finalizer is used. Attempts to add a finalizer will cause an
-- exception to be thrown.
mallocPlainForeignPtrBytes :: Int -> IO (ForeignPtr a)
mallocPlainForeignPtrBytes size | size < 0 =
error "mallocPlainForeignPtrBytes: size must be >= 0"
mallocPlainForeignPtrBytes (I# size) = IO $ \s ->
case newPinnedByteArray# size s of { (# s', mbarr# #) ->
(# s', ForeignPtr (byteArrayContents# (unsafeCoerce# mbarr#))
(PlainPtr mbarr#) #)
}
-- | This function is similar to 'mallocForeignPtrAlignedBytes', except that
-- the internally an optimised ForeignPtr representation with no
-- finalizer is used. Attempts to add a finalizer will cause an
-- exception to be thrown.
mallocPlainForeignPtrAlignedBytes :: Int -> Int -> IO (ForeignPtr a)
mallocPlainForeignPtrAlignedBytes size _align | size < 0 =
error "mallocPlainForeignPtrAlignedBytes: size must be >= 0"
mallocPlainForeignPtrAlignedBytes (I# size) (I# align) = IO $ \s ->
case newAlignedPinnedByteArray# size align s of { (# s', mbarr# #) ->
(# s', ForeignPtr (byteArrayContents# (unsafeCoerce# mbarr#))
(PlainPtr mbarr#) #)
}
addForeignPtrFinalizer :: FinalizerPtr a -> ForeignPtr a -> IO ()
-- ^This function adds a finalizer to the given foreign object. The
-- finalizer will run /before/ all other finalizers for the same
-- object which have already been registered.
addForeignPtrFinalizer (FunPtr fp) (ForeignPtr p c) = case c of
PlainForeignPtr r -> insertCFinalizer r fp 0# nullAddr# p ()
MallocPtr _ r -> insertCFinalizer r fp 0# nullAddr# p c
_ -> error "GHC.ForeignPtr: attempt to add a finalizer to a plain pointer"
-- Note [MallocPtr finalizers] (#10904)
--
-- When we have C finalizers for a MallocPtr, the memory is
-- heap-resident and would normally be recovered by the GC before the
-- finalizers run. To prevent the memory from being reused too early,
-- we attach the MallocPtr constructor to the "value" field of the
-- weak pointer when we call mkWeak# in ensureCFinalizerWeak below.
-- The GC will keep this field alive until the finalizers have run.
addForeignPtrFinalizerEnv ::
FinalizerEnvPtr env a -> Ptr env -> ForeignPtr a -> IO ()
-- ^ Like 'addForeignPtrFinalizerEnv' but allows the finalizer to be
-- passed an additional environment parameter to be passed to the
-- finalizer. The environment passed to the finalizer is fixed by the
-- second argument to 'addForeignPtrFinalizerEnv'
addForeignPtrFinalizerEnv (FunPtr fp) (Ptr ep) (ForeignPtr p c) = case c of
PlainForeignPtr r -> insertCFinalizer r fp 1# ep p ()
MallocPtr _ r -> insertCFinalizer r fp 1# ep p c
_ -> error "GHC.ForeignPtr: attempt to add a finalizer to a plain pointer"
addForeignPtrConcFinalizer :: ForeignPtr a -> IO () -> IO ()
-- ^This function adds a finalizer to the given @ForeignPtr@. The
-- finalizer will run /before/ all other finalizers for the same
-- object which have already been registered.
--
-- This is a variant of @addForeignPtrFinalizer@, where the finalizer
-- is an arbitrary @IO@ action. When it is invoked, the finalizer
-- will run in a new thread.
--
-- NB. Be very careful with these finalizers. One common trap is that
-- if a finalizer references another finalized value, it does not
-- prevent that value from being finalized. In particular, 'Handle's
-- are finalized objects, so a finalizer should not refer to a 'Handle'
-- (including @stdout@, @stdin@ or @stderr@).
--
addForeignPtrConcFinalizer (ForeignPtr _ c) finalizer =
addForeignPtrConcFinalizer_ c finalizer
-- | Worker for 'addForeignPtrConcFinalizer': cons @finalizer@ onto the
-- Haskell finalizer list of the given contents.  When this is the very
-- first finalizer, also create the weak pointer whose death will run
-- the accumulated list.
addForeignPtrConcFinalizer_ :: ForeignPtrContents -> IO () -> IO ()
addForeignPtrConcFinalizer_ (PlainForeignPtr r) finalizer = do
  noFinalizers <- insertHaskellFinalizer r finalizer
  if noFinalizers
     then IO $ \s ->
              -- key the weak pointer on the IORef's MutVar#: when the
              -- ForeignPtr (and hence its IORef) becomes unreachable,
              -- the registered list is run
              case r of { IORef (STRef r#) ->
              case mkWeak# r# () (unIO $ foreignPtrFinalizer r) s of {
                (# s1, _ #) -> (# s1, () #) }}
     else return ()
addForeignPtrConcFinalizer_ f@(MallocPtr fo r) finalizer = do
  noFinalizers <- insertHaskellFinalizer r finalizer
  if noFinalizers
     then IO $ \s ->
              -- here the weak pointer is keyed on the byte array itself
              case mkWeak# fo () finalizer' s of
                 (# s1, _ #) -> (# s1, () #)
     else return ()
  where
    finalizer' :: State# RealWorld -> (# State# RealWorld, () #)
    -- the trailing 'touch f' keeps the contents (and thus the byte
    -- array) alive until all Haskell finalizers have run
    finalizer' = unIO (foreignPtrFinalizer r >> touch f)
addForeignPtrConcFinalizer_ _ _ =
  error "GHC.ForeignPtr: attempt to add a finalizer to plain pointer"
-- | Prepend a Haskell finalizer to the list held in @r@.  Returns
-- 'True' iff the list was previously empty, i.e. the caller must now
-- create the weak pointer that will eventually run the list.  Mixing
-- with C finalizers is an error.
insertHaskellFinalizer :: IORef Finalizers -> IO () -> IO Bool
insertHaskellFinalizer r f = do
  !wasEmpty <- atomicModifyIORef r $ \finalizers -> case finalizers of
    NoFinalizers -> (HaskellFinalizers [f], True)
    HaskellFinalizers fs -> (HaskellFinalizers (f:fs), False)
    _ -> noMixingError
  return wasEmpty

-- | A box around Weak#, private to this module.
data MyWeak = MyWeak (Weak# ())
-- | Attach a C finalizer @fp@ (with environment @ep@ when @flag@ is
-- @1#@) for the pointer @p@ to the weak pointer associated with @r@,
-- creating that weak pointer first if necessary.  @val@ is stored as
-- the weak pointer's value so it stays alive until finalization; see
-- Note [MallocPtr finalizers].
insertCFinalizer ::
  IORef Finalizers -> Addr# -> Int# -> Addr# -> Addr# -> value -> IO ()
insertCFinalizer r fp flag ep p val = do
  MyWeak w <- ensureCFinalizerWeak r val
  IO $ \s -> case addCFinalizerToWeak# fp p flag ep w s of
    (# s1, 1# #) -> (# s1, () #)

    -- Failed to add the finalizer because some other thread
    -- has finalized w by calling foreignPtrFinalizer. We retry now.
    -- This won't be an infinite loop because that thread must have
    -- replaced the content of r before calling finalizeWeak#.
    (# s1, _ #) -> unIO (insertCFinalizer r fp flag ep p val) s1
-- | Return the weak pointer that carries the C finalizers for @ref@,
-- creating it (with no Haskell-side finalizer) if none exists yet.
-- @value@ becomes the weak pointer's value so that it is kept alive
-- until the finalizers have run; see Note [MallocPtr finalizers]
-- (#10904).
ensureCFinalizerWeak :: IORef Finalizers -> value -> IO MyWeak
ensureCFinalizerWeak ref@(IORef (STRef r#)) value = do
  fin <- readIORef ref
  case fin of
    CFinalizers weak -> return (MyWeak weak)
    HaskellFinalizers{} -> noMixingError
    NoFinalizers -> IO $ \s ->
      case mkWeakNoFinalizer# r# (unsafeCoerce# value) s of { (# s1, w #) ->
        -- See Note [MallocPtr finalizers] (#10904)
        case atomicModifyMutVar# r# (update w) s1 of
          { (# s2, (weak, needKill ) #) ->
          -- Another thread may have installed CFinalizers between the
          -- readIORef above and this atomic update; in that case our
          -- freshly made weak pointer is redundant and is killed.
          if needKill
            then case finalizeWeak# w s2 of { (# s3, _, _ #) ->
              (# s3, weak #) }
            else (# s2, weak #) }}
  where
      update _ fin@(CFinalizers w) = (fin, (MyWeak w, True))
      update w NoFinalizers = (CFinalizers w, (MyWeak w, False))
      update _ _ = noMixingError

-- | Error raised when Haskell and C finalizers are mixed on a single
-- 'ForeignPtr'.
noMixingError :: a
noMixingError = error $
   "GHC.ForeignPtr: attempt to mix Haskell and C finalizers " ++
   "in the same ForeignPtr"
-- | Run and clear every finalizer registered in @r@.  The swap to
-- 'NoFinalizers' is atomic, so the finalizers run at most once even
-- when this races with another invocation (see #7170).
foreignPtrFinalizer :: IORef Finalizers -> IO ()
foreignPtrFinalizer r = do
  fs <- atomicModifyIORef r $ \fs -> (NoFinalizers, fs) -- atomic, see #7170
  case fs of
    NoFinalizers -> return ()
    CFinalizers w -> IO $ \s -> case finalizeWeak# w s of
      (# s1, 1#, f #) -> f s1            -- run the C finalizer chain
      (# s1, _, _ #) -> (# s1, () #)     -- already dead: nothing to do
    HaskellFinalizers actions -> sequence_ actions
newForeignPtr_ :: Ptr a -> IO (ForeignPtr a)
-- ^Turns a plain memory reference into a foreign pointer that may be
-- associated with finalizers by using 'addForeignPtrFinalizer'.
newForeignPtr_ (Ptr obj) = do
  r <- newIORef NoFinalizers   -- empty slot; finalizers may be added later
  return (ForeignPtr obj (PlainForeignPtr r))
touchForeignPtr :: ForeignPtr a -> IO ()
-- ^This function ensures that the foreign object in
-- question is alive at the given place in the sequence of IO
-- actions. In particular 'Foreign.ForeignPtr.withForeignPtr'
-- does a 'touchForeignPtr' after it
-- executes the user action.
--
-- Note that this function should not be used to express dependencies
-- between finalizers on 'ForeignPtr's. For example, if the finalizer
-- for a 'ForeignPtr' @F1@ calls 'touchForeignPtr' on a second
-- 'ForeignPtr' @F2@, then the only guarantee is that the finalizer
-- for @F2@ is never started before the finalizer for @F1@. They
-- might be started together if for example both @F1@ and @F2@ are
-- otherwise unreachable, and in that case the scheduler might end up
-- running the finalizer for @F2@ first.
--
-- In general, it is not recommended to use finalizers on separate
-- objects with ordering constraints between them. To express the
-- ordering robustly requires explicit synchronisation using @MVar@s
-- between the finalizers, but even then the runtime sometimes runs
-- multiple finalizers sequentially in a single thread (for
-- performance reasons), so synchronisation between finalizers could
-- result in artificial deadlock. Another alternative is to use
-- explicit reference counting.
--
touchForeignPtr (ForeignPtr _ r) = touch r
-- | Keep the given contents reachable up to this point in the action
-- sequence, via the 'touch#' primop.
touch :: ForeignPtrContents -> IO ()
touch r = IO $ \s -> case touch# r s of s' -> (# s', () #)
unsafeForeignPtrToPtr :: ForeignPtr a -> Ptr a
-- ^This function extracts the pointer component of a foreign
-- pointer. This is a potentially dangerous operations, as if the
-- argument to 'unsafeForeignPtrToPtr' is the last usage
-- occurrence of the given foreign pointer, then its finalizer(s) will
-- be run, which potentially invalidates the plain pointer just
-- obtained. Hence, 'touchForeignPtr' must be used
-- wherever it has to be guaranteed that the pointer lives on - i.e.,
-- has another usage occurrence.
--
-- To avoid subtle coding errors, hand written marshalling code
-- should preferably use 'Foreign.ForeignPtr.withForeignPtr' rather
-- than combinations of 'unsafeForeignPtrToPtr' and
-- 'touchForeignPtr'. However, the latter routines
-- are occasionally preferred in tool generated marshalling code.
unsafeForeignPtrToPtr (ForeignPtr fo _) = Ptr fo
castForeignPtr :: ForeignPtr a -> ForeignPtr b
-- ^This function casts a 'ForeignPtr'
-- parameterised by one type into another type.
castForeignPtr f = unsafeCoerce# f
-- | Causes the finalizers associated with a foreign pointer to be run
-- immediately.
finalizeForeignPtr :: ForeignPtr a -> IO ()
-- a PlainPtr has no finalizer slot, so there is nothing to run
finalizeForeignPtr (ForeignPtr _ (PlainPtr _)) = return () -- no effect
finalizeForeignPtr (ForeignPtr _ foreignPtr) = foreignPtrFinalizer refFinalizers
  where
    refFinalizers = case foreignPtr of
      (PlainForeignPtr ref) -> ref
      (MallocPtr _ ref) -> ref
      PlainPtr _ ->
        -- unreachable: PlainPtr is already handled by the first clause
        error "finalizeForeignPtr PlainPtr"
| ml9951/ghc | libraries/base/GHC/ForeignPtr.hs | bsd-3-clause | 19,296 | 0 | 22 | 4,236 | 3,175 | 1,678 | 1,497 | 216 | 6 |
{-# LANGUAGE Unsafe #-}
{-# LANGUAGE NoImplicitPrelude
, BangPatterns
, MagicHash
, UnboxedTuples
#-}
{-# OPTIONS_HADDOCK hide #-}
{-# LANGUAGE StandaloneDeriving #-}
-----------------------------------------------------------------------------
-- |
-- Module : GHC.ForeignPtr
-- Copyright : (c) The University of Glasgow, 1992-2003
-- License : see libraries/base/LICENSE
--
-- Maintainer : [email protected]
-- Stability : internal
-- Portability : non-portable (GHC extensions)
--
-- GHC's implementation of the 'ForeignPtr' data type.
--
-----------------------------------------------------------------------------
module GHC.ForeignPtr
(
ForeignPtr(..),
ForeignPtrContents(..),
FinalizerPtr,
FinalizerEnvPtr,
newForeignPtr_,
mallocForeignPtr,
mallocPlainForeignPtr,
mallocForeignPtrBytes,
mallocPlainForeignPtrBytes,
mallocForeignPtrAlignedBytes,
mallocPlainForeignPtrAlignedBytes,
addForeignPtrFinalizer,
addForeignPtrFinalizerEnv,
touchForeignPtr,
unsafeForeignPtrToPtr,
castForeignPtr,
plusForeignPtr,
newConcForeignPtr,
addForeignPtrConcFinalizer,
finalizeForeignPtr
) where
import Foreign.Storable
import Data.Foldable ( sequence_ )
import GHC.Show
import GHC.Base
import GHC.IORef
import GHC.STRef ( STRef(..) )
import GHC.Ptr ( Ptr(..), FunPtr(..) )
-- |The type 'ForeignPtr' represents references to objects that are
-- maintained in a foreign language, i.e., that are not part of the
-- data structures usually managed by the Haskell storage manager.
-- The essential difference between 'ForeignPtr's and vanilla memory
-- references of type @Ptr a@ is that the former may be associated
-- with /finalizers/. A finalizer is a routine that is invoked when
-- the Haskell storage manager detects that - within the Haskell heap
-- and stack - there are no more references left that are pointing to
-- the 'ForeignPtr'. Typically, the finalizer will, then, invoke
-- routines in the foreign language that free the resources bound by
-- the foreign object.
--
-- The 'ForeignPtr' is parameterised in the same way as 'Ptr'. The
-- type argument of 'ForeignPtr' should normally be an instance of
-- class 'Storable'.
--
-- Fields: the raw address handed to user code, and the contents value
-- that owns the memory and/or finalizer state (see 'ForeignPtrContents').
data ForeignPtr a = ForeignPtr Addr# ForeignPtrContents
-- The Addr# in the ForeignPtr object is intentionally stored
-- separately from the finalizer. The primary aim of the
-- representation is to make withForeignPtr efficient; in fact,
-- withForeignPtr should be just as efficient as unpacking a
-- Ptr, and multiple withForeignPtrs can share an unpacked
-- ForeignPtr. As a secondary benefit, this representation
-- allows pointers to subregions within the same overall block
-- to share the same finalizer (see 'plusForeignPtr'). Note
-- that touchForeignPtr only has to touch the ForeignPtrContents
-- object, because that ensures that whatever the finalizer is
-- attached to is kept alive.
-- | The finalizer state stored in a 'PlainForeignPtr' or 'MallocPtr'.
-- C finalizers and Haskell finalizers may not be mixed on the same
-- ForeignPtr (see 'noMixingError').
data Finalizers
  = NoFinalizers                -- ^ nothing registered yet
  | CFinalizers (Weak# ())      -- ^ C finalizers attached to a weak pointer
  | HaskellFinalizers [IO ()]   -- ^ Haskell finalizers, newest first
-- | The "contents" component of a 'ForeignPtr': what owns the memory
-- and whether finalizers are supported.
data ForeignPtrContents
  = PlainForeignPtr !(IORef Finalizers)
      -- ^ wraps an arbitrary foreign address; may carry finalizers
  | MallocPtr (MutableByteArray# RealWorld) !(IORef Finalizers)
      -- ^ pinned GC-heap memory; may carry finalizers
  | PlainPtr (MutableByteArray# RealWorld)
      -- ^ pinned GC-heap memory with no finalizer support
-- | @since 2.01
-- Equality compares only the underlying addresses; the finalizer
-- state is not consulted.
instance Eq (ForeignPtr a) where
  p == q = unsafeForeignPtrToPtr p == unsafeForeignPtrToPtr q
-- | @since 2.01
-- Ordering is by address, mirroring the 'Ord' instance of 'Ptr'.
instance Ord (ForeignPtr a) where
  compare p q = compare (unsafeForeignPtrToPtr p) (unsafeForeignPtrToPtr q)
-- | @since 2.01
-- Shows the underlying address, like the 'Show' instance of 'Ptr'.
instance Show (ForeignPtr a) where
  showsPrec p f = showsPrec p (unsafeForeignPtrToPtr f)
-- |A finalizer is represented as a pointer to a foreign function that, at
-- finalisation time, gets as an argument a plain pointer variant of the
-- foreign pointer that the finalizer is associated with.
--
-- Note that the foreign function /must/ use the @ccall@ calling convention.
--
type FinalizerPtr a = FunPtr (Ptr a -> IO ())
-- |Like 'FinalizerPtr', but the finalizer additionally receives a fixed
-- environment pointer as its first argument.
type FinalizerEnvPtr env a = FunPtr (Ptr env -> Ptr a -> IO ())
newConcForeignPtr :: Ptr a -> IO () -> IO (ForeignPtr a)
--
-- ^Turns a plain memory reference into a foreign object by
-- associating a finalizer - given by the monadic operation - with the
-- reference.  The storage manager starts the finalizer in a separate
-- thread at some point after the last reference to the @ForeignPtr@
-- is dropped; there is no promptness guarantee, and no guarantee that
-- the finalizer will ever run at all.
--
-- References held by a finalizer do not necessarily keep another
-- object from being finalized.  If A's finalizer refers to B (perhaps
-- using 'touchForeignPtr'), the only guarantee is that B's finalizer
-- never starts before A's; if both A and B are unreachable, both
-- finalizers may start together.  See 'touchForeignPtr' for more on
-- finalizer ordering.
--
newConcForeignPtr p finalizer = do
  fp <- newForeignPtr_ p
  addForeignPtrConcFinalizer fp finalizer
  return fp
mallocForeignPtr :: Storable a => IO (ForeignPtr a)
-- ^ Allocate some memory and return a 'ForeignPtr' to it. The memory
-- will be released automatically when the 'ForeignPtr' is discarded.
--
-- 'mallocForeignPtr' is equivalent to
--
-- > do { p <- malloc; newForeignPtr finalizerFree p }
--
-- although it may be implemented differently internally: you may not
-- assume that the memory returned by 'mallocForeignPtr' has been
-- allocated with 'Foreign.Marshal.Alloc.malloc'.
--
-- GHC notes: 'mallocForeignPtr' has a heavily optimised
-- implementation in GHC. It uses pinned memory in the garbage
-- collected heap, so the 'ForeignPtr' does not require a finalizer to
-- free the memory. Use of 'mallocForeignPtr' and associated
-- functions is strongly recommended in preference to 'newForeignPtr'
-- with a finalizer.
--
-- Implementation note: the 'undefined' argument is never evaluated; it
-- exists only to fix the 'Storable' dictionary consulted by the
-- sizeOf/alignment calls in the where clause below.
mallocForeignPtr = doMalloc undefined
  where doMalloc :: Storable b => b -> IO (ForeignPtr b)
        doMalloc a
          | I# size < 0 = errorWithoutStackTrace "mallocForeignPtr: size must be >= 0"
          | otherwise = do
              r <- newIORef NoFinalizers
              IO $ \s ->
                -- Allocate pinned, aligned memory on the GC heap and
                -- expose its contents address in the ForeignPtr.
                case newAlignedPinnedByteArray# size align s of { (# s', mbarr# #) ->
                  (# s', ForeignPtr (byteArrayContents# (unsafeCoerce# mbarr#))
                                    (MallocPtr mbarr# r) #)
                }
          where !(I# size) = sizeOf a
                !(I# align) = alignment a
-- | This function is similar to 'mallocForeignPtr', except that the
-- size of the memory required is given explicitly as a number of bytes.
mallocForeignPtrBytes :: Int -> IO (ForeignPtr a)
mallocForeignPtrBytes size | size < 0 =
  errorWithoutStackTrace "mallocForeignPtrBytes: size must be >= 0"
mallocForeignPtrBytes (I# size) = do
  r <- newIORef NoFinalizers
  IO $ \s ->
    -- Pinned (but unaligned) allocation on the GC heap; the MallocPtr
    -- keeps the byte array alive as long as the ForeignPtr is.
    case newPinnedByteArray# size s of { (# s', mbarr# #) ->
      (# s', ForeignPtr (byteArrayContents# (unsafeCoerce# mbarr#))
                        (MallocPtr mbarr# r) #)
    }
-- | This function is similar to 'mallocForeignPtrBytes', except that the
-- size and alignment of the memory required is given explicitly as numbers of
-- bytes.
mallocForeignPtrAlignedBytes :: Int -> Int -> IO (ForeignPtr a)
mallocForeignPtrAlignedBytes size _align | size < 0 =
  errorWithoutStackTrace "mallocForeignPtrAlignedBytes: size must be >= 0"
mallocForeignPtrAlignedBytes (I# size) (I# align) = do
  r <- newIORef NoFinalizers
  IO $ \s ->
    case newAlignedPinnedByteArray# size align s of { (# s', mbarr# #) ->
      (# s', ForeignPtr (byteArrayContents# (unsafeCoerce# mbarr#))
                        (MallocPtr mbarr# r) #)
    }
-- | Allocate some memory and return a 'ForeignPtr' to it. The memory
-- will be released automatically when the 'ForeignPtr' is discarded.
--
-- GHC notes: 'mallocPlainForeignPtr' has a heavily optimised
-- implementation in GHC. It uses pinned memory in the garbage
-- collected heap, as for mallocForeignPtr. Unlike mallocForeignPtr, a
-- ForeignPtr created with mallocPlainForeignPtr carries no finalizers.
-- It is not possible to add a finalizer to a ForeignPtr created with
-- mallocPlainForeignPtr. This is useful for ForeignPtrs that will live
-- only inside Haskell (such as those created for packed strings).
-- Attempts to add a finalizer to a ForeignPtr created this way, or to
-- finalize such a pointer, will throw an exception.
--
mallocPlainForeignPtr :: Storable a => IO (ForeignPtr a)
-- The 'undefined' argument is never evaluated; it only fixes the
-- 'Storable' dictionary consulted by sizeOf/alignment below.
mallocPlainForeignPtr = doMalloc undefined
  where doMalloc :: Storable b => b -> IO (ForeignPtr b)
        doMalloc a
          -- Fix: the message previously blamed 'mallocForeignPtr',
          -- which made the error misleading to track down.
          | I# size < 0 = errorWithoutStackTrace "mallocPlainForeignPtr: size must be >= 0"
          | otherwise = IO $ \s ->
            case newAlignedPinnedByteArray# size align s of { (# s', mbarr# #) ->
              (# s', ForeignPtr (byteArrayContents# (unsafeCoerce# mbarr#))
                                (PlainPtr mbarr#) #)
            }
          where !(I# size) = sizeOf a
                !(I# align) = alignment a
-- | This function is similar to 'mallocForeignPtrBytes', except that
-- internally an optimised ForeignPtr representation with no
-- finalizer is used. Attempts to add a finalizer will cause an
-- exception to be thrown.
mallocPlainForeignPtrBytes :: Int -> IO (ForeignPtr a)
mallocPlainForeignPtrBytes size | size < 0 =
  errorWithoutStackTrace "mallocPlainForeignPtrBytes: size must be >= 0"
mallocPlainForeignPtrBytes (I# size) = IO $ \s ->
    -- No IORef of finalizers is allocated: PlainPtr cannot carry any.
    case newPinnedByteArray# size s of { (# s', mbarr# #) ->
      (# s', ForeignPtr (byteArrayContents# (unsafeCoerce# mbarr#))
                        (PlainPtr mbarr#) #)
    }
-- | This function is similar to 'mallocForeignPtrAlignedBytes', except that
-- internally an optimised ForeignPtr representation with no
-- finalizer is used. Attempts to add a finalizer will cause an
-- exception to be thrown.
mallocPlainForeignPtrAlignedBytes :: Int -> Int -> IO (ForeignPtr a)
mallocPlainForeignPtrAlignedBytes size _align | size < 0 =
  errorWithoutStackTrace "mallocPlainForeignPtrAlignedBytes: size must be >= 0"
mallocPlainForeignPtrAlignedBytes (I# size) (I# align) = IO $ \s ->
    case newAlignedPinnedByteArray# size align s of { (# s', mbarr# #) ->
      (# s', ForeignPtr (byteArrayContents# (unsafeCoerce# mbarr#))
                        (PlainPtr mbarr#) #)
    }
addForeignPtrFinalizer :: FinalizerPtr a -> ForeignPtr a -> IO ()
-- ^This function adds a finalizer to the given foreign object. The
-- finalizer will run /before/ all other finalizers for the same
-- object which have already been registered.
addForeignPtrFinalizer (FunPtr fp) (ForeignPtr p c) = case c of
  -- Flag 0# / nullAddr#: the finalizer takes no environment argument
  -- (contrast with 'addForeignPtrFinalizerEnv', which passes 1#).
  PlainForeignPtr r -> insertCFinalizer r fp 0# nullAddr# p ()
  -- For heap-resident memory we pass the contents 'c' as the value to
  -- keep alive; see Note [MallocPtr finalizers] (#10904) below.
  MallocPtr _ r -> insertCFinalizer r fp 0# nullAddr# p c
  _ -> errorWithoutStackTrace "GHC.ForeignPtr: attempt to add a finalizer to a plain pointer"
-- Note [MallocPtr finalizers] (#10904)
--
-- When we have C finalizers for a MallocPtr, the memory is
-- heap-resident and would normally be recovered by the GC before the
-- finalizers run. To prevent the memory from being reused too early,
-- we attach the MallocPtr constructor to the "value" field of the
-- weak pointer when we call mkWeak# in ensureCFinalizerWeak below.
-- The GC will keep this field alive until the finalizers have run.
addForeignPtrFinalizerEnv ::
  FinalizerEnvPtr env a -> Ptr env -> ForeignPtr a -> IO ()
-- ^ Like 'addForeignPtrFinalizer' but allows the finalizer to be
-- passed an additional environment parameter.  The environment passed
-- to the finalizer is fixed by the second argument to
-- 'addForeignPtrFinalizerEnv'.
addForeignPtrFinalizerEnv (FunPtr fp) (Ptr ep) (ForeignPtr p c) = case c of
  -- Flag 1# / ep: the finalizer receives 'ep' as its environment.
  PlainForeignPtr r -> insertCFinalizer r fp 1# ep p ()
  MallocPtr _ r -> insertCFinalizer r fp 1# ep p c
  _ -> errorWithoutStackTrace "GHC.ForeignPtr: attempt to add a finalizer to a plain pointer"
addForeignPtrConcFinalizer :: ForeignPtr a -> IO () -> IO ()
-- ^This function adds a finalizer to the given @ForeignPtr@. The
-- finalizer will run /before/ all other finalizers for the same
-- object which have already been registered.
--
-- This is a variant of @addForeignPtrFinalizer@, where the finalizer
-- is an arbitrary @IO@ action. When it is invoked, the finalizer
-- will run in a new thread.
--
-- NB. Be very careful with these finalizers. One common trap is that
-- if a finalizer references another finalized value, it does not
-- prevent that value from being finalized. In particular, 'Handle's
-- are finalized objects, so a finalizer should not refer to a 'Handle'
-- (including @stdout@, @stdin@ or @stderr@).
--
-- Only the contents component matters here; the address is ignored.
addForeignPtrConcFinalizer (ForeignPtr _ c) finalizer =
  addForeignPtrConcFinalizer_ c finalizer
-- | Worker for 'addForeignPtrConcFinalizer': registers a Haskell
-- finalizer on the contents value.  The first registration also
-- creates the Weak# whose death triggers 'foreignPtrFinalizer'.
addForeignPtrConcFinalizer_ :: ForeignPtrContents -> IO () -> IO ()
addForeignPtrConcFinalizer_ (PlainForeignPtr r) finalizer = do
  noFinalizers <- insertHaskellFinalizer r finalizer
  if noFinalizers
     -- First finalizer: key the weak pointer on the IORef's MutVar#.
     then IO $ \s ->
      case r of { IORef (STRef r#) ->
      case mkWeak# r# () (unIO $ foreignPtrFinalizer r) s of {
        (# s1, _ #) -> (# s1, () #) }}
     else return ()
addForeignPtrConcFinalizer_ f@(MallocPtr fo r) finalizer = do
  noFinalizers <- insertHaskellFinalizer r finalizer
  if noFinalizers
     -- First finalizer: key the weak pointer on the byte array itself.
     then IO $ \s ->
      case mkWeak# fo () finalizer' s of
        (# s1, _ #) -> (# s1, () #)
     else return ()
  where
    finalizer' :: State# RealWorld -> (# State# RealWorld, () #)
    -- 'touch f' keeps the MallocPtr (and thus the memory) alive until
    -- all the registered finalizers have run.
    finalizer' = unIO (foreignPtrFinalizer r >> touch f)
addForeignPtrConcFinalizer_ _ _ =
  errorWithoutStackTrace "GHC.ForeignPtr: attempt to add a finalizer to plain pointer"
-- | Atomically prepend a Haskell finalizer to the list held in the
-- given 'IORef'.  Returns 'True' iff no finalizer had been registered
-- before this call (i.e. the caller must now create the weak pointer).
-- Finding C finalizers in the ref is an error ('noMixingError').
insertHaskellFinalizer :: IORef Finalizers -> IO () -> IO Bool
insertHaskellFinalizer ref fin = do
    !firstOne <- atomicModifyIORef ref prepend
    return firstOne
  where
    prepend NoFinalizers           = (HaskellFinalizers [fin], True)
    prepend (HaskellFinalizers fs) = (HaskellFinalizers (fin:fs), False)
    prepend _                      = noMixingError
-- | A box around Weak#, private to this module.
data MyWeak = MyWeak (Weak# ())
-- | Register a C finalizer: @fp@ is the function pointer, @flag@ is
-- 1# iff the finalizer expects an environment, @ep@ that environment
-- pointer, @p@ the argument pointer, and @val@ a value to keep alive
-- via the weak pointer (see Note [MallocPtr finalizers]).
insertCFinalizer ::
  IORef Finalizers -> Addr# -> Int# -> Addr# -> Addr# -> value -> IO ()
insertCFinalizer r fp flag ep p val = do
  MyWeak w <- ensureCFinalizerWeak r val
  IO $ \s -> case addCFinalizerToWeak# fp p flag ep w s of
    (# s1, 1# #) -> (# s1, () #)
    -- Failed to add the finalizer because some other thread
    -- has finalized w by calling foreignPtrFinalizer. We retry now.
    -- This won't be an infinite loop because that thread must have
    -- replaced the content of r before calling finalizeWeak#.
    (# s1, _ #) -> unIO (insertCFinalizer r fp flag ep p val) s1
-- | Return the Weak# that carries the C finalizers for this ref,
-- creating it (and switching the ref to 'CFinalizers') if necessary.
ensureCFinalizerWeak :: IORef Finalizers -> value -> IO MyWeak
ensureCFinalizerWeak ref@(IORef (STRef r#)) value = do
  fin <- readIORef ref
  case fin of
    CFinalizers weak -> return (MyWeak weak)
    HaskellFinalizers{} -> noMixingError
    NoFinalizers -> IO $ \s ->
      case mkWeakNoFinalizer# r# (unsafeCoerce# value) s of { (# s1, w #) ->
        -- See Note [MallocPtr finalizers] (#10904)
        case atomicModifyMutVar# r# (update w) s1 of
          { (# s2, (weak, needKill ) #) ->
          -- needKill: another thread raced us and installed its own
          -- CFinalizers weak pointer first, so ours must be discarded.
          if needKill
            then case finalizeWeak# w s2 of { (# s3, _, _ #) ->
              (# s3, weak #) }
            else (# s2, weak #) }}
  where
    update _ fin@(CFinalizers w) = (fin, (MyWeak w, True))
    update w NoFinalizers = (CFinalizers w, (MyWeak w, False))
    update _ _ = noMixingError
-- | Error raised when Haskell and C finalizers are registered on the
-- same ForeignPtr; the two representations are mutually exclusive.
noMixingError :: a
noMixingError = errorWithoutStackTrace $
   "GHC.ForeignPtr: attempt to mix Haskell and C finalizers " ++
   "in the same ForeignPtr"
-- | Run (and clear) all finalizers registered in the given ref.
-- The atomic swap to 'NoFinalizers' guarantees each finalizer runs at
-- most once, even if invoked from several threads.
foreignPtrFinalizer :: IORef Finalizers -> IO ()
foreignPtrFinalizer r = do
  fs <- atomicModifyIORef r $ \fs -> (NoFinalizers, fs) -- atomic, see #7170
  case fs of
    NoFinalizers -> return ()
    CFinalizers w -> IO $ \s -> case finalizeWeak# w s of
      (# s1, 1#, f #) -> f s1
      (# s1, _, _ #) -> (# s1, () #)
    HaskellFinalizers actions -> sequence_ actions
newForeignPtr_ :: Ptr a -> IO (ForeignPtr a)
-- ^Wraps a raw memory reference in a foreign pointer carrying an
-- initially empty set of finalizers; finalizers may later be attached
-- with 'addForeignPtrFinalizer'.
newForeignPtr_ (Ptr addr) =
  newIORef NoFinalizers >>= \finRef ->
    return (ForeignPtr addr (PlainForeignPtr finRef))
touchForeignPtr :: ForeignPtr a -> IO ()
-- ^This function ensures that the foreign object in
-- question is alive at the given place in the sequence of IO
-- actions. In particular 'Foreign.ForeignPtr.withForeignPtr'
-- does a 'touchForeignPtr' after it
-- executes the user action.
--
-- Note that this function should not be used to express dependencies
-- between finalizers on 'ForeignPtr's. For example, if the finalizer
-- for a 'ForeignPtr' @F1@ calls 'touchForeignPtr' on a second
-- 'ForeignPtr' @F2@, then the only guarantee is that the finalizer
-- for @F2@ is never started before the finalizer for @F1@. They
-- might be started together if for example both @F1@ and @F2@ are
-- otherwise unreachable, and in that case the scheduler might end up
-- running the finalizer for @F2@ first.
--
-- In general, it is not recommended to use finalizers on separate
-- objects with ordering constraints between them. To express the
-- ordering robustly requires explicit synchronisation using @MVar@s
-- between the finalizers, but even then the runtime sometimes runs
-- multiple finalizers sequentially in a single thread (for
-- performance reasons), so synchronisation between finalizers could
-- result in artificial deadlock. Another alternative is to use
-- explicit reference counting.
--
-- Touching the contents suffices: it is what keeps the memory and the
-- finalizer state alive (see the comment on 'ForeignPtr').
touchForeignPtr (ForeignPtr _ r) = touch r
-- | Keep the contents value alive up to this point in the state
-- thread, via the 'touch#' primop.
touch :: ForeignPtrContents -> IO ()
touch r = IO $ \s -> case touch# r s of s' -> (# s', () #)
unsafeForeignPtrToPtr :: ForeignPtr a -> Ptr a
-- ^This function extracts the pointer component of a foreign
-- pointer. This is a potentially dangerous operations, as if the
-- argument to 'unsafeForeignPtrToPtr' is the last usage
-- occurrence of the given foreign pointer, then its finalizer(s) will
-- be run, which potentially invalidates the plain pointer just
-- obtained. Hence, 'touchForeignPtr' must be used
-- wherever it has to be guaranteed that the pointer lives on - i.e.,
-- has another usage occurrence.
--
-- To avoid subtle coding errors, hand written marshalling code
-- should preferably use 'Foreign.ForeignPtr.withForeignPtr' rather
-- than combinations of 'unsafeForeignPtrToPtr' and
-- 'touchForeignPtr'. However, the latter routines
-- are occasionally preferred in tool generated marshalling code.
unsafeForeignPtrToPtr (ForeignPtr fo _) = Ptr fo
castForeignPtr :: ForeignPtr a -> ForeignPtr b
-- ^This function casts a 'ForeignPtr'
-- parameterised by one type into another type.
--
-- 'coerce' is safe here because only the phantom type argument
-- changes; the representation is identical.
castForeignPtr = coerce
plusForeignPtr :: ForeignPtr a -> Int -> ForeignPtr b
-- ^Advances the given address by the given offset in bytes.
--
-- The new 'ForeignPtr' shares the finalizer of the original,
-- equivalent from a finalization standpoint to just creating another
-- reference to the original. That is, the finalizer will not be
-- called before the new 'ForeignPtr' is unreachable, nor will it be
-- called an additional time due to this call, and the finalizer will
-- be called with the same address that it would have had this call
-- not happened, *not* the new address.
--
-- @since 4.10.0.0
-- Sharing the contents value is what makes the sub-pointer keep the
-- whole block (and its finalizers) alive.
plusForeignPtr (ForeignPtr addr c) (I# d) = ForeignPtr (plusAddr# addr d) c
-- | Causes the finalizers associated with a foreign pointer to be run
-- immediately.
finalizeForeignPtr :: ForeignPtr a -> IO ()
-- A PlainPtr cannot carry finalizers, so there is nothing to run.
finalizeForeignPtr (ForeignPtr _ (PlainPtr _)) = return () -- no effect
finalizeForeignPtr (ForeignPtr _ foreignPtr) = foreignPtrFinalizer refFinalizers
        where
                refFinalizers = case foreignPtr of
                        (PlainForeignPtr ref) -> ref
                        (MallocPtr _ ref) -> ref
                        -- Unreachable: handled by the first equation.
                        PlainPtr _ ->
                                errorWithoutStackTrace "finalizeForeignPtr PlainPtr"
| ezyang/ghc | libraries/base/GHC/ForeignPtr.hs | bsd-3-clause | 20,344 | 0 | 22 | 4,399 | 3,244 | 1,721 | 1,523 | 219 | 6 |
{-# OPTIONS_GHC -Wall #-}
-- NOTE(review): this appears to be a compiler-warning test fixture
-- (the unused top-level bindings look intentional under -Wall), so do
-- not "clean up" 'nomain' or 'nonUsed' — confirm against the test
-- harness before changing anything here.
nomain :: IO ()
nomain = putStrLn used
-- Referenced by 'nomain', so this binding counts as used.
used :: String
used = "T13839"
-- Deliberately unused, unit-valued binding.
nonUsed :: ()
nonUsed = ()
| sdiehl/ghc | testsuite/tests/rename/should_fail/T13839b.hs | bsd-3-clause | 126 | 1 | 7 | 27 | 59 | 26 | 33 | 7 | 1 |
module Demo where
--
import Lesson01
import Lesson02
import Lesson03
import Lesson04
import Lesson05
import Lesson07
import Lesson08
import Lesson09
import Lesson10
import Lesson11
import Lesson12
import Lesson13
import Lesson14
import Lesson15
import Lesson17
import Lesson18
--
import qualified SDL
import Control.Exception (catch)
import System.Environment
import System.Exit (die)
import Text.Read (readMaybe)
--
main :: IO ()
-- Run the selected lesson; an SDL exception is reported via 'die'
-- with an "SDL_Error: " prefix instead of crashing with a raw trace.
main = runLesson `catch` sdlError
  where
    sdlError :: SDL.SDLException -> IO ()
    sdlError e = die ("SDL_Error: " ++ show e)
runLesson :: IO ()
-- ^ Dispatch to the lesson named by the first command-line argument.
-- A missing or non-numeric argument selects lesson 0, which (like any
-- unknown number) just reports that the lesson is undefined.
runLesson = do
  args <- getArgs
  -- 'read' would crash ("Prelude.read: no parse") on a non-numeric
  -- argument, so parse defensively with 'readMaybe' instead.
  let i = case args of
            (a:_) | Just n <- readMaybe a -> n
            _ -> 0 :: Int
  case i of
    1 -> lesson01
    2 -> lesson02
    3 -> lesson03
    4 -> lesson04
    5 -> lesson05
    7 -> lesson07
    8 -> lesson08
    9 -> lesson09
    10 -> lesson10
    11 -> lesson11
    12 -> lesson12
    13 -> lesson13
    14 -> lesson14
    15 -> lesson15
    17 -> lesson17
    18 -> lesson18
    -- 'putStrLn' rather than 'print' so the message is not quoted.
    _ -> putStrLn ("Lesson " ++ show i ++ " is undefined")
| rueshyna/sdl2-examples | src/Demo.hs | mit | 1,045 | 0 | 15 | 301 | 334 | 181 | 153 | 48 | 17 |
{-# LANGUAGE DuplicateRecordFields #-}
module IR.Pure where
import Data.Word
import qualified IR.Common as C
import qualified IR.Name as Name
-- | Type-argument bindings used when referring to generic capnp types.
type Brand = C.ListBrand Name.CapnpQ
-- | One generated module's worth of the pure (high-level) IR.
data File = File
    { fileId :: !Word64
    -- ^ The capnp id of the schema file.
    , fileName :: FilePath
    -- ^ Path of the source schema file.
    , decls :: [Decl]
    -- ^ Top-level declarations generated for this module.
    , reExportEnums :: [Name.LocalQ]
    -- ^ A list of enums that we should re-export from this module.
    , usesRpc :: !Bool
    -- ^ Whether or not the module uses rpc features. If not, we skip
    -- the rpc related imports. This is mainly important to avoid a
    -- cyclic dependency with rpc.capnp.
    }
-- | A top-level declaration: a data type, a constant, or an interface.
data Decl
    = DataDecl Data
    | ConstDecl Constant
    | IFaceDecl Interface
-- | A generated data type (struct, group, union or enum-like shape).
data Data = Data
    { typeName :: Name.LocalQ
    -- ^ Name of the generated Haskell type.
    , typeParams :: [Name.UnQ]
    -- ^ Type parameters of the generated type.
    , firstClass :: !Bool
    -- ^ Whether this is a "first class" type, i.e. it is a type in the
    -- capnproto sense, rather than an auxiliary type defined for a group
    -- or an anonymous union.
    --
    -- Note that this *can* be set for unions, if they subsume the whole
    -- struct, since in that case we collapse the two types in the
    -- high-level API.
    , cerialName :: Name.LocalQ
    -- ^ The name of the type our 'Cerial' should be. This will only be
    -- different from typeName if we're an anonymous union in a struct
    -- that also has other fields; in this case our Cerial should be
    -- the same as our parent struct.
    , def :: DataDef
    }
-- | The shape of a data type: a sum of variants or a product of fields.
data DataDef
    = Sum [Variant]
    | Product [Field]
-- | A schema-level constant and its value.
data Constant = Constant
    { name :: Name.LocalQ
    , value :: C.Value Brand Name.CapnpQ
    }
-- | An rpc interface declaration.
data Interface = IFace
    { name :: Name.CapnpQ
    , typeParams :: [C.TypeParamRef Name.CapnpQ]
    , interfaceId :: !Word64
    -- ^ The capnp id of the interface.
    , methods :: [Method]
    , supers :: [(Interface, Brand)]
    -- ^ Immediate superclasses
    , ancestors :: [(Interface, Brand)]
    -- ^ All ancestors, including 'supers'.
    }
-- TODO(cleanup): this same type exists in IR.Flat; it doesn't make sense for
-- IR.Common, but we should factor this out.
-- | A method on an interface, with its parameter and result structs.
data Method = Method
    { name :: Name.UnQ
    , paramType :: C.CompositeType Brand Name.CapnpQ
    , resultType :: C.CompositeType Brand Name.CapnpQ
    }
-- | A named field of a product type.
data Field = Field
    { name :: Name.UnQ
    -- ^ The name of the field.
    , type_ :: C.Type Brand Name.CapnpQ
    -- ^ The type of the field.
    }
-- | One alternative of a sum type, with an optional payload type.
data Variant = Variant
    { name :: Name.LocalQ
    , arg :: Maybe (C.Type Brand Name.CapnpQ)
    }
| zenhack/haskell-capnp | cmd/capnpc-haskell/IR/Pure.hs | mit | 2,499 | 0 | 12 | 695 | 427 | 263 | 164 | 53 | 0 |
import Data.List
import Data.Char
-- | @include xs ys@ is 'True' when @ys@ occurs anywhere inside @xs@.
-- This is exactly 'isInfixOf' with its arguments flipped; the former
-- hand-rolled @or . map (isPrefixOf ys) . tails@ pipeline is replaced
-- by the standard-library function.
include :: String -> String -> Bool
include xs ys = ys `isInfixOf` xs
-- | Join the given strings, inserting @sep@ between consecutive
-- elements.  Equivalent to 'intercalate' with flipped arguments.
-- The previous implementation ('init' applied after 'concat') crashed
-- on an empty list; 'intercalate' is total and returns @\"\"@ there.
joinWith :: [String] -> String -> String
joinWith xs sep = intercalate sep xs
-- | Split a string at every occurrence of the separator character.
-- The empty string yields no chunks; adjacent separators yield empty
-- chunks between them, and a trailing separator yields a final empty
-- chunk.
split :: String -> Char -> [String]
split "" _ = []
split str sep =
    case break (== sep) str of
        (chunk, [])       -> [chunk]
        (chunk, _ : rest) -> chunk : split rest sep
-- Demo driver: prints one line per string operation, mirroring the
-- names of Go's "strings" package helpers.  The output text is part of
-- the sample's expected behaviour, so the literals must not change.
main = do
    putStrLn $ "Contains: " ++ show ("test" `include` "es")
    putStrLn $ "Count: " ++ show (length . filter (=='t') $ "test")
    putStrLn $ "HasPrefix: " ++ show (isPrefixOf "te" "test")
    putStrLn $ "HasSuffix: " ++ show (isSuffixOf "st" "test")
    putStrLn $ "Index: " ++ show (elemIndex 'e' "test")
    putStrLn $ "Join: " ++ show (["a", "b"] `joinWith` "-")
    putStrLn $ "Repeat: " ++ show (replicate 5 'a')
    putStrLn $ "Replace: " ++ show (map (\x -> if x == 'o' then '0' else x) "foo")
    putStrLn $ "Split: " ++ show (split "a-b-c-d-e" '-')
    putStrLn $ "ToLower: " ++ map toLower "TEST"
    putStrLn $ "ToUpper: " ++ map toUpper "test"
    putStrLn ""
    putStrLn $ "Len: " ++ show (length "hello")
    -- Safe use of (!!): "hello" always has an index-1 element.
    putStrLn $ "Char:" ++ show ("hello" !! 1)
| rkalis/monokalis-syntax | sample-files/Haskell.hs | mit | 1,200 | 1 | 14 | 316 | 537 | 263 | 274 | 25 | 2 |
{-# LANGUAGE OverloadedStrings #-}
import Network.SOAP
import Network.SOAP.Transport.HTTP
import Text.XML.Stream.Parse
import qualified Data.Text as T
import qualified Text.XML as XML
import qualified Text.XML.Writer as W
main :: IO ()
-- Calls a public SOAP temperature-conversion endpoint and prints the
-- raw result text.  Requires network access; SDL-free, but the
-- endpoint may no longer exist — NOTE(review): verify before relying
-- on this demo.
main = do
    transport <- initTransport "http://www.webservicex.net/ConvertTemperature.asmx" traceRequest (iconv "utf-8")
    out <- convertTemperatureCToF transport 25
    print out
    return ()
-- | Invoke the ConvertTemp SOAP action over the given transport,
-- converting the given Celsius value (see 'body') and parsing the
-- response with 'parser'.
convertTemperatureCToF :: Transport -> Int -> IO T.Text
convertTemperatureCToF t c = invokeWS t "http://www.webserviceX.NET/ConvertTemp" () (body c) parser
-- | Build the SOAP request body for converting @c@ degrees Celsius to
-- Fahrenheit.
body :: Int -> W.XML
body c = W.elementA "ConvertTemp" [("xmlns","http://www.webserviceX.NET/")] $ do
    e "Temperature" (T.pack $ show c)
    e "FromUnit" "degreeCelsius"
    e "ToUnit" "degreeFahrenheit"
  where
    -- Helper pinning the element name/content types so the overloaded
    -- string literals resolve unambiguously.
    e :: XML.Name -> T.Text -> W.XML
    e n t = W.element n t
-- | Streaming parser for the ConvertTemp SOAP response: requires a
-- ConvertTempResponse element wrapping a ConvertTempResult, and
-- returns the result's text content.  'force' turns a missing element
-- into a parse error with the given message.
parser :: ResponseParser T.Text
parser = StreamParser $
    force "missing response" $
    tagName "ConvertTempResponse" ignoreAttrs $ \_ ->
    force "missing result" $
    tagNoAttr "ConvertTempResult" content
| twopoint718/hsoap-testing | Temp.hs | mit | 1,134 | 2 | 11 | 240 | 335 | 165 | 170 | 28 | 1 |
-- |
-- Module : Data.Edison.Assoc.AssocList
-- Copyright : Copyright (c) 2006, 2008 Robert Dockins
-- License : MIT; see COPYRIGHT file for terms and conditions
--
-- Maintainer : robdockins AT fastmail DOT fm
-- Stability : stable
-- Portability : GHC, Hugs (MPTC and FD)
--
-- The standard library "Data.Map" repackaged as an Edison
-- associative collection.
module Data.Edison.Assoc.StandardMap (
-- * Type of standard finite maps
FM,
-- * AssocX operations
empty,singleton,fromSeq,insert,insertSeq,union,unionSeq,delete,deleteAll,
deleteSeq,null,size,member,count,lookup,lookupM,lookupAll,
lookupAndDelete,lookupAndDeleteM,lookupAndDeleteAll,
lookupWithDefault,adjust,adjustAll,adjustOrInsert,adjustAllOrInsert,
adjustOrDelete,adjustOrDeleteAll,strict,strictWith,
map,fold,fold',fold1,fold1',filter,partition,elements,structuralInvariant,
-- * FiniteMapX operations
fromSeqWith,fromSeqWithKey,insertWith,insertWithKey,insertSeqWith,
insertSeqWithKey,unionl,unionr,unionWith,unionSeqWith,intersectionWith,
difference,properSubset,subset,properSubmapBy,submapBy,sameMapBy,
properSubmap,submap,sameMap,
-- * OrdAssocX operations
minView, minElem, deleteMin, unsafeInsertMin, maxView, maxElem, deleteMax,
unsafeInsertMax, foldr, foldr', foldl, foldl', foldr1, foldr1',
foldl1, foldl1', unsafeFromOrdSeq,
unsafeAppend, filterLT, filterLE, filterGT, filterGE,
partitionLT_GE, partitionLE_GT, partitionLT_GT,
-- * Assoc operations
toSeq,keys,mapWithKey,foldWithKey,foldWithKey',filterWithKey,partitionWithKey,
-- * OrdAssoc operations
minViewWithKey, minElemWithKey, maxViewWithKey, maxElemWithKey,
foldrWithKey, foldrWithKey', foldlWithKey, foldlWithKey', toOrdSeq,
-- * FiniteMap operations
unionWithKey,unionSeqWithKey,intersectionWithKey,
-- * Documentation
moduleName
) where
import Prelude hiding (null,map,lookup,foldr,foldl,foldr1,foldl1,filter)
import qualified Prelude
import qualified Data.Edison.Assoc as A
import qualified Data.Edison.Seq as S
import qualified Data.Edison.Seq.ListSeq as L
import Data.Edison.Assoc.Defaults
import Data.Int
import Test.QuickCheck (Arbitrary(..), CoArbitrary(..))
import qualified Data.Map as DM
type FM = DM.Map
moduleName :: String
moduleName = "Data.Edison.Assoc.StandardMap"
empty :: FM k a
singleton :: Ord k => k -> a -> FM k a
fromSeq :: (Ord k,S.Sequence seq) => seq (k,a) -> FM k a
insert :: Ord k => k -> a -> FM k a -> FM k a
insertSeq :: (Ord k,S.Sequence seq) => seq (k,a) -> FM k a -> FM k a
union :: Ord k => FM k a -> FM k a -> FM k a
unionSeq :: (Ord k,S.Sequence seq) => seq (FM k a) -> FM k a
delete :: Ord k => k -> FM k a -> FM k a
deleteAll :: Ord k => k -> FM k a -> FM k a
deleteSeq :: (Ord k,S.Sequence seq) => seq k -> FM k a -> FM k a
null :: FM k a -> Bool
size :: FM k a -> Int
member :: Ord k => k -> FM k a -> Bool
count :: Ord k => k -> FM k a -> Int
lookup :: Ord k => k -> FM k a -> a
lookupAll :: (Ord k,S.Sequence seq) => k -> FM k a -> seq a
lookupM :: (Ord k,Monad m) => k -> FM k a -> m a
lookupWithDefault :: Ord k => a -> k -> FM k a -> a
lookupAndDelete :: Ord k => k -> FM k a -> (a, FM k a)
lookupAndDeleteM :: (Ord k,Monad m) => k -> FM k a -> m (a, FM k a)
lookupAndDeleteAll :: (Ord k,S.Sequence seq) => k -> FM k a -> (seq a,FM k a)
adjust :: Ord k => (a->a) -> k -> FM k a -> FM k a
adjustAll :: Ord k => (a->a) -> k -> FM k a -> FM k a
adjustOrInsert :: Ord k => (a -> a) -> a -> k -> FM k a -> FM k a
adjustAllOrInsert :: Ord k => (a -> a) -> a -> k -> FM k a -> FM k a
adjustOrDelete :: Ord k => (a -> Maybe a) -> k -> FM k a -> FM k a
adjustOrDeleteAll :: Ord k => (a -> Maybe a) -> k -> FM k a -> FM k a
strict :: Ord k => FM k a -> FM k a
strictWith :: Ord k => (a -> b) -> FM k a -> FM k a
map :: Ord k => (a -> b) -> FM k a -> FM k b
fold :: Ord k => (a -> b -> b) -> b -> FM k a -> b
fold1 :: Ord k => (a -> a -> a) -> FM k a -> a
fold' :: Ord k => (a -> b -> b) -> b -> FM k a -> b
fold1' :: Ord k => (a -> a -> a) -> FM k a -> a
filter :: Ord k => (a -> Bool) -> FM k a -> FM k a
partition :: Ord k => (a -> Bool) -> FM k a -> (FM k a,FM k a)
elements :: (Ord k,S.Sequence seq) => FM k a -> seq a
minView :: (Ord k,Monad m) => FM k a -> m (a, FM k a)
minElem :: Ord k => FM k a -> a
deleteMin :: Ord k => FM k a -> FM k a
unsafeInsertMin :: Ord k => k -> a -> FM k a -> FM k a
maxView :: (Ord k,Monad m) => FM k a -> m (a, FM k a)
maxElem :: Ord k => FM k a -> a
deleteMax :: Ord k => FM k a -> FM k a
unsafeInsertMax :: Ord k => k -> a -> FM k a -> FM k a
foldr :: Ord k => (a -> b -> b) -> b -> FM k a -> b
foldl :: Ord k => (b -> a -> b) -> b -> FM k a -> b
foldr1 :: Ord k => (a -> a -> a) -> FM k a -> a
foldl1 :: Ord k => (a -> a -> a) -> FM k a -> a
foldr' :: Ord k => (a -> b -> b) -> b -> FM k a -> b
foldl' :: Ord k => (b -> a -> b) -> b -> FM k a -> b
foldr1' :: Ord k => (a -> a -> a) -> FM k a -> a
foldl1' :: Ord k => (a -> a -> a) -> FM k a -> a
unsafeFromOrdSeq :: (Ord k,S.Sequence seq) => seq (k,a) -> FM k a
unsafeAppend :: Ord k => FM k a -> FM k a -> FM k a
filterLT :: Ord k => k -> FM k a -> FM k a
filterGT :: Ord k => k -> FM k a -> FM k a
filterLE :: Ord k => k -> FM k a -> FM k a
filterGE :: Ord k => k -> FM k a -> FM k a
partitionLT_GE :: Ord k => k -> FM k a -> (FM k a,FM k a)
partitionLE_GT :: Ord k => k -> FM k a -> (FM k a,FM k a)
partitionLT_GT :: Ord k => k -> FM k a -> (FM k a,FM k a)
fromSeqWith :: (Ord k,S.Sequence seq) => (a -> a -> a)
-> seq (k,a) -> FM k a
fromSeqWithKey :: (Ord k,S.Sequence seq) => (k -> a -> a -> a)
-> seq (k,a) -> FM k a
insertWith :: Ord k => (a -> a -> a) -> k -> a
-> FM k a -> FM k a
insertWithKey :: Ord k => (k -> a -> a -> a) -> k -> a
-> FM k a -> FM k a
insertSeqWith :: (Ord k,S.Sequence seq) => (a -> a -> a) -> seq (k,a)
-> FM k a -> FM k a
insertSeqWithKey :: (Ord k,S.Sequence seq) => (k -> a -> a -> a) -> seq (k,a)
-> FM k a -> FM k a
unionl :: Ord k => FM k a -> FM k a -> FM k a
unionr :: Ord k => FM k a -> FM k a -> FM k a
unionWith :: Ord k => (a -> a -> a) -> FM k a -> FM k a -> FM k a
unionSeqWith :: (Ord k,S.Sequence seq) =>
(a -> a -> a) -> seq (FM k a) -> FM k a
intersectionWith :: Ord k => (a -> b -> c) -> FM k a -> FM k b -> FM k c
difference :: Ord k => FM k a -> FM k b -> FM k a
properSubset :: Ord k => FM k a -> FM k b -> Bool
subset :: Ord k => FM k a -> FM k b -> Bool
properSubmapBy :: Ord k => (a -> a -> Bool) -> FM k a -> FM k a -> Bool
submapBy :: Ord k => (a -> a -> Bool) -> FM k a -> FM k a -> Bool
sameMapBy :: Ord k => (a -> a -> Bool) -> FM k a -> FM k a -> Bool
properSubmap :: (Ord k,Eq a) => FM k a -> FM k a -> Bool
submap :: (Ord k,Eq a) => FM k a -> FM k a -> Bool
sameMap :: (Ord k,Eq a) => FM k a -> FM k a -> Bool
toSeq :: (Ord k,S.Sequence seq) => FM k a -> seq (k,a)
keys :: (Ord k,S.Sequence seq) => FM k a -> seq k
mapWithKey :: Ord k => (k -> a -> b) -> FM k a -> FM k b
foldWithKey :: Ord k => (k -> a -> b -> b) -> b -> FM k a -> b
foldWithKey' :: Ord k => (k -> a -> b -> b) -> b -> FM k a -> b
filterWithKey :: Ord k => (k -> a -> Bool) -> FM k a -> FM k a
partitionWithKey :: Ord k => (k -> a -> Bool) -> FM k a -> (FM k a,FM k a)
minViewWithKey :: (Ord k,Monad m) => FM k a -> m ((k, a), FM k a)
minElemWithKey :: Ord k => FM k a -> (k,a)
maxViewWithKey :: (Ord k,Monad m) => FM k a -> m ((k, a), FM k a)
maxElemWithKey :: Ord k => FM k a -> (k,a)
foldrWithKey :: (k -> a -> b -> b) -> b -> FM k a -> b
foldlWithKey :: (b -> k -> a -> b) -> b -> FM k a -> b
foldrWithKey' :: (k -> a -> b -> b) -> b -> FM k a -> b
foldlWithKey' :: (b -> k -> a -> b) -> b -> FM k a -> b
toOrdSeq :: (Ord k,S.Sequence seq) => FM k a -> seq (k,a)
unionWithKey :: Ord k => (k -> a -> a -> a) -> FM k a -> FM k a -> FM k a
unionSeqWithKey :: (Ord k,S.Sequence seq) => (k -> a -> a -> a)
-> seq (FM k a) -> FM k a
intersectionWithKey :: Ord k => (k -> a -> b -> c) -> FM k a -> FM k b -> FM k c
structuralInvariant :: Ord k => FM k a -> Bool
structuralInvariant = DM.valid
empty = DM.empty
singleton = DM.singleton
fromSeq = fromSeqUsingInsertSeq
insert = DM.insert
insertSeq = insertSeqUsingFoldr
union = DM.union
unionSeq = DM.unions . S.toList
delete = DM.delete
deleteAll = DM.delete -- by finite map property
deleteSeq = deleteSeqUsingFoldr
null = DM.null
size = DM.size
member = DM.member
count = countUsingMember
-- | Partial lookup: returns the value for the key, or raises a
-- module-tagged error when the key is absent.
lookup k m =
  case DM.lookup k m of
    Just v  -> v
    Nothing -> error (moduleName ++ ".lookup: failed")
-- | Monadic lookup: a missing key is reported through 'fail'.
lookupM k m =
  case DM.lookup k m of
    Just v  -> return v
    Nothing -> fail (moduleName ++ ".lookupM: failed")
lookupAll = lookupAllUsingLookupM
lookupWithDefault = DM.findWithDefault
lookupAndDelete = lookupAndDeleteDefault
lookupAndDeleteM = lookupAndDeleteMDefault
lookupAndDeleteAll = lookupAndDeleteAllDefault
adjust = DM.adjust
adjustAll = DM.adjust
adjustOrInsert = adjustOrInsertUsingMember
adjustAllOrInsert = adjustOrInsertUsingMember
adjustOrDelete = DM.update
adjustOrDeleteAll = DM.update
strict xs = DM.foldr (flip const) () xs `seq` xs
strictWith f xs = DM.foldr (\x z -> f x `seq` z) () xs `seq` xs
map = fmap
fold = DM.foldr
fold' f x xs = L.foldl' (flip f) x (DM.elems xs)
fold1 f xs = L.foldr1 f (DM.elems xs)
fold1' f xs = L.foldl1' (flip f) (DM.elems xs)
filter = DM.filter
partition = DM.partition
elements = elementsUsingFold
-- | Detach the value at the smallest key; 'fail' on an empty map.
minView m
  | DM.null m = fail (moduleName ++ ".minView: failed")
  | otherwise = return (x, m')
  -- lazy binding: deleteFindMin is only forced on the non-empty branch
  where ((_, x), m') = DM.deleteFindMin m
minElem = snd . DM.findMin
deleteMin = DM.deleteMin
unsafeInsertMin = DM.insert
-- | Detach the value at the largest key; 'fail' on an empty map.
maxView m
  | DM.null m = fail (moduleName ++ ".maxView: failed")
  | otherwise = return (x, m')
  -- lazy binding: deleteFindMax is only forced on the non-empty branch
  where ((_, x), m') = DM.deleteFindMax m
maxElem = snd . DM.findMax
deleteMax = DM.deleteMax
unsafeInsertMax = DM.insert
foldr f x m = L.foldr f x (DM.elems m)
foldl f x m = L.foldl f x (DM.elems m)
foldr1 f m = L.foldr1 f (DM.elems m)
foldl1 f m = L.foldl1 f (DM.elems m)
foldr' f x m = L.foldr' f x (DM.elems m)
foldl' f x m = L.foldl' f x (DM.elems m)
foldr1' f m = L.foldr1' f (DM.elems m)
foldl1' f m = L.foldl1' f (DM.elems m)
unsafeFromOrdSeq = DM.fromAscList . S.toList
unsafeAppend = DM.union
filterLT k = fst . DM.split k
filterGT k = snd . DM.split k
filterLE k m = let (lt, mx, _ ) = DM.splitLookup k m in maybe lt (\x -> insert k x lt) mx
filterGE k m = let (_ , mx, gt) = DM.splitLookup k m in maybe gt (\x -> insert k x gt) mx
partitionLT_GE k m = let (lt, mx, gt) = DM.splitLookup k m in (lt, maybe gt (\x -> insert k x gt) mx)
partitionLE_GT k m = let (lt, mx, gt) = DM.splitLookup k m in (maybe lt (\x -> insert k x lt) mx, gt)
partitionLT_GT = DM.split
fromSeqWith f s = DM.fromListWith f (S.toList s)
fromSeqWithKey f s = DM.fromListWithKey f (S.toList s)
insertWith = DM.insertWith
insertWithKey = insertWithKeyUsingInsertWith
insertSeqWith = insertSeqWithUsingInsertWith
insertSeqWithKey = insertSeqWithKeyUsingInsertWithKey
unionl = DM.union
unionr = flip DM.union
unionWith = DM.unionWith
unionSeqWith = unionSeqWithUsingReduce
intersectionWith = DM.intersectionWith
difference = DM.difference
properSubset = DM.isProperSubmapOfBy (\_ _ -> True)
subset = DM.isSubmapOfBy (\_ _ -> True)
properSubmapBy = DM.isProperSubmapOfBy
submapBy = DM.isSubmapOfBy
sameMapBy = sameMapByUsingOrdLists
properSubmap = A.properSubmap
submap = A.submap
sameMap = A.sameMap
toSeq = toSeqUsingFoldWithKey
keys = keysUsingFoldWithKey
mapWithKey = DM.mapWithKey
foldWithKey = DM.foldrWithKey
foldWithKey' f x m = L.foldl' (\b (k,a) -> f k a b) x (DM.toList m)
filterWithKey = DM.filterWithKey
partitionWithKey = DM.partitionWithKey
minViewWithKey m = if DM.null m
then fail (moduleName ++ ".minViewWithKey: failed")
else return (DM.deleteFindMin m)
minElemWithKey = DM.findMin
maxViewWithKey m = if DM.null m
then fail (moduleName ++ ".maxViewWithKey: failed")
else return (DM.deleteFindMax m)
maxElemWithKey = DM.findMax
foldrWithKey = DM.foldrWithKey
foldrWithKey' f x m = L.foldr' (\(k,a) b -> f k a b) x (DM.toAscList m)
foldlWithKey f x m = L.foldl (\b (k,a) -> f b k a) x (DM.toAscList m)
foldlWithKey' f x m = L.foldl' (\b (k,a) -> f b k a) x (DM.toAscList m)
toOrdSeq = S.fromList . DM.toAscList
unionWithKey = DM.unionWithKey
unionSeqWithKey = unionSeqWithKeyUsingReduce
intersectionWithKey = DM.intersectionWithKey
instance Ord k => A.AssocX (FM k) k where
{empty = empty; singleton = singleton; fromSeq = fromSeq; insert = insert;
insertSeq = insertSeq; union = union; unionSeq = unionSeq;
delete = delete; deleteAll = deleteAll; deleteSeq = deleteSeq;
null = null; size = size; member = member; count = count;
lookup = lookup; lookupM = lookupM; lookupAll = lookupAll;
lookupAndDelete = lookupAndDelete; lookupAndDeleteM = lookupAndDeleteM;
lookupAndDeleteAll = lookupAndDeleteAll;
lookupWithDefault = lookupWithDefault; adjust = adjust;
adjustAll = adjustAll; adjustOrInsert = adjustOrInsert;
adjustAllOrInsert = adjustAllOrInsert;
adjustOrDelete = adjustOrDelete; adjustOrDeleteAll = adjustOrDeleteAll;
fold = fold; fold' = fold'; fold1 = fold1; fold1' = fold1';
filter = filter; partition = partition; elements = elements;
strict = strict; strictWith = strictWith;
structuralInvariant = structuralInvariant; instanceName _ = moduleName}
instance Ord k => A.OrdAssocX (FM k) k where
{minView = minView; minElem = minElem; deleteMin = deleteMin;
unsafeInsertMin = unsafeInsertMin; maxView = maxView; maxElem = maxElem;
deleteMax = deleteMax; unsafeInsertMax = unsafeInsertMax;
foldr = foldr; foldr' = foldr'; foldl = foldl; foldl' = foldl';
foldr1 = foldr1; foldr1' = foldr1'; foldl1 = foldl1; foldl1' = foldl1';
unsafeFromOrdSeq = unsafeFromOrdSeq; unsafeAppend = unsafeAppend;
filterLT = filterLT; filterGT = filterGT; filterLE = filterLE;
filterGE = filterGE; partitionLT_GE = partitionLT_GE;
partitionLE_GT = partitionLE_GT; partitionLT_GT = partitionLT_GT}
instance Ord k => A.FiniteMapX (FM k) k where
{fromSeqWith = fromSeqWith; fromSeqWithKey = fromSeqWithKey;
insertWith = insertWith; insertWithKey = insertWithKey;
insertSeqWith = insertSeqWith; insertSeqWithKey = insertSeqWithKey;
unionl = unionl; unionr = unionr; unionWith = unionWith;
unionSeqWith = unionSeqWith; intersectionWith = intersectionWith;
difference = difference; properSubset = properSubset; subset = subset;
properSubmapBy = properSubmapBy; submapBy = submapBy;
sameMapBy = sameMapBy}
instance Ord k => A.OrdFiniteMapX (FM k) k
instance Ord k => A.Assoc (FM k) k where
{toSeq = toSeq; keys = keys; mapWithKey = mapWithKey;
foldWithKey = foldWithKey; foldWithKey' = foldWithKey';
filterWithKey = filterWithKey;
partitionWithKey = partitionWithKey}
instance Ord k => A.OrdAssoc (FM k) k where
{minViewWithKey = minViewWithKey; minElemWithKey = minElemWithKey;
maxViewWithKey = maxViewWithKey; maxElemWithKey = maxElemWithKey;
foldrWithKey = foldrWithKey; foldrWithKey' = foldrWithKey';
foldlWithKey = foldlWithKey; foldlWithKey' = foldlWithKey';
toOrdSeq = toOrdSeq}
instance Ord k => A.FiniteMap (FM k) k where
{unionWithKey = unionWithKey; unionSeqWithKey = unionSeqWithKey;
intersectionWithKey = intersectionWithKey}
instance Ord k => A.OrdFiniteMap (FM k) k
| robdockins/edison | edison-core/src/Data/Edison/Assoc/StandardMap.hs | mit | 17,103 | 0 | 11 | 5,046 | 6,928 | 3,666 | 3,262 | -1 | -1 |
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
module Network.Wai.WebSockets where
import Network.Wai
import Control.Exception (Exception, throwIO, assert)
import Control.Applicative ((<$>))
import Control.Monad (when, unless)
import Data.Typeable (Typeable)
import Blaze.ByteString.Builder
import Data.Monoid ((<>), mempty)
import qualified Crypto.Hash.SHA1 as SHA1
import Data.Word (Word8, Word32, Word64)
import Data.ByteString (ByteString)
import Data.Bits ((.|.), testBit, clearBit, shiftL, (.&.), Bits, xor, shiftR)
import qualified Data.Map as Map
import Data.Maybe (isJust)
import qualified Data.ByteString as S
import qualified Data.ByteString.Base64 as B64
import Data.IORef
type IsText = Bool
data Connection = Connection
{ connSend :: IsText -> ByteString -> IO ()
, connRecv :: IO ByteString
}
type WSApp a
= IO ByteString
-> (ByteString -> IO ())
-> (Connection -> IO a)
-> IO a
-- | Inspect a WAI 'Request'; if it is a well-formed RFC 6455 upgrade
-- request (version 13 with a Sec-WebSocket-Key header), return the
-- WebSocket application wrapper, otherwise 'Nothing'.
websocketsApp :: Request -> Maybe (WSApp a)
websocketsApp req
    -- FIXME handle keep-alive, Upgrade | lookup "connection" reqhs /= Just "Upgrade" = backup sendResponse
    | lookup "upgrade" reqhs /= Just "websocket" = Nothing
    | lookup "sec-websocket-version" reqhs /= Just "13" = Nothing
    | Just key <- lookup "sec-websocket-key" reqhs = Just $ \recvRaw sendRaw app -> do
        -- Accept key: SHA1 of client key + fixed GUID, base64-encoded
        -- (RFC 6455 section 4.2.2).
        let handshake = fromByteString "HTTP/1.1 101 Switching Protocols\r\nUpgrade: websocket\r\nConnection: Upgrade\r\nSec-WebSocket-Accept: "
                     <> fromByteString (B64.encode key')
                     <> fromByteString "\r\n\r\n"
            key' = SHA1.hash $ key <> "258EAFA5-E914-47DA-95CA-C5AB0DC85B11"
        toByteStringIO sendRaw handshake
        src <- mkSource recvRaw
        -- recv assembles a complete message: it drains one frame's
        -- payload chunks, and recurses over continuation frames until a
        -- frame with the FIN bit arrives. 'front' is a difference list.
        let recv front0 = waitForFrame src $ \isFinished _opcode _ _ getBS' -> do
                let loop front = do
                        bs <- getBS'
                        if S.null bs
                            then return front
                            else loop $ front . (bs:)
                front <- loop front0
                if isFinished
                    then return $ S.concat $ front []
                    else recv front
        app Connection
            { connSend = \isText payload -> do
                -- Server-to-client frames are unmasked (Nothing) per RFC 6455.
                let header = Frame True (if isText then OpText else OpBinary) Nothing $ fromIntegral $ S.length payload
                toByteStringIO sendRaw $ wsDataToBuilder header <> fromByteString payload
            , connRecv = recv id
            }
    | otherwise = Nothing
  where
    reqhs = requestHeaders req
type FrameFinished = Bool
type MaskingKey = Word32
type PayloadSize = Word64
data WSData payload
= Frame FrameFinished Opcode (Maybe MaskingKey) PayloadSize
| Payload payload
deriving Show
-- | WebSocket frame opcodes (RFC 6455, section 5.2).
data Opcode = OpCont | OpText | OpBinary | OpClose | OpPing | OpPong
    deriving (Show, Eq, Ord, Enum, Bounded)

-- | Wire encoding of an opcode (low nibble of the first frame byte).
opcodeToWord8 :: Opcode -> Word8
opcodeToWord8 op = case op of
    OpCont   -> 0x0
    OpText   -> 0x1
    OpBinary -> 0x2
    OpClose  -> 0x8
    OpPing   -> 0x9
    OpPong   -> 0xA

-- | Decode a wire opcode; 'Nothing' for reserved or unknown values.
opcodeFromWord8 :: Word8 -> Maybe Opcode
opcodeFromWord8 w = Map.lookup w table
  where
    -- inverse of opcodeToWord8, built once over all constructors
    table = Map.fromList [ (opcodeToWord8 o, o) | o <- [minBound .. maxBound] ]
-- | Serialize a 'WSData' value to its RFC 6455 wire representation.
--
-- For a 'Frame', the layout is: byte 1 carries the FIN flag (bit 7)
-- plus the 4-bit opcode; byte 2 carries the MASK flag (bit 7) plus a
-- 7-bit length marker; then an optional extended length (16 or 64 bit,
-- network byte order) and the optional 32-bit masking key.
wsDataToBuilder :: WSData Builder -> Builder
wsDataToBuilder (Payload builder) = builder
wsDataToBuilder (Frame finished opcode mmask payload) =
    fromWord8 byte1
    <> fromWord8 byte2
    <> lenrest
    <> maybe mempty fromWord32be mmask
  where
    byte1 = (if finished then 128 else 0) .|. opcodeToWord8 opcode
    byte2 = (if isJust mmask then 128 else 0) .|. len1
    -- Lengths up to 125 fit in the marker itself; marker 126 announces
    -- a 16-bit length (max 65535); marker 127 a 64-bit length.
    -- BUG FIX: the bound was 'payload <= 65536' — 65536 does not fit in
    -- 16 bits, so fromIntegral truncated it to 0 and corrupted the frame.
    (len1, lenrest)
        | payload <= 125 = (fromIntegral payload, mempty)
        | payload <= 65535 = (126, fromWord16be $ fromIntegral payload)
        | otherwise = (127, fromWord64be $ fromIntegral payload)
-- | Errors raised while decoding the WebSocket wire protocol.
data WSException = ConnectionClosed
                 | RSVBitsSet Word8
                 | InvalidOpcode Word8
    deriving (Show, Typeable)
instance Exception WSException

-- | A byte source: the underlying receive action plus a one-slot
-- pushback buffer for unconsumed bytes.
data Source = Source (IO ByteString) (IORef ByteString)

mkSource :: IO ByteString -> IO Source
mkSource recv = do
    buf <- newIORef S.empty
    return (Source recv buf)

-- | Guaranteed to never return an empty ByteString.
getBS :: Source -> IO ByteString
getBS (Source next ref) = do
    buffered <- readIORef ref
    if not (S.null buffered)
        then do
            -- hand out the pushback buffer and clear it
            writeIORef ref S.empty
            return buffered
        else do
            fresh <- next
            -- an empty read from the underlying action means EOF
            when (S.null fresh) (throwIO ConnectionClosed)
            return fresh

-- | Push unconsumed bytes back into the source.
leftover :: Source -> ByteString -> IO ()
leftover (Source _ ref) = writeIORef ref

getWord8 :: Source -> IO Word8
getWord8 src = do
    chunk <- getBS src
    leftover src (S.tail chunk)
    return (S.head chunk)
-- | Read @n@ bytes from the source and assemble them big-endian into a
-- word of the caller's choosing.
getBytes :: (Num word, Bits word) => Source -> Int -> IO word
getBytes src = go 0
  where
    go acc 0 = return acc
    go acc n = do
        -- FIXME not very efficient, better to use ByteString directly
        b <- getWord8 src
        go (shiftL acc 8 .|. fromIntegral b) (n - 1)
-- | Read one frame header from the source, hand the caller a chunked
-- payload reader, and drain any payload the caller left unread so the
-- source is positioned at the next frame afterwards.
waitForFrame :: Source -> (FrameFinished -> Opcode -> Maybe MaskingKey -> PayloadSize -> IO ByteString -> IO a) -> IO a
waitForFrame src yield = do
    byte1 <- getWord8 src
    byte2 <- getWord8 src
    -- RSV1-3 (bits 6..4) must be zero unless an extension negotiated them
    when (testBit byte1 6 || testBit byte1 5 || testBit byte1 4)
        $ throwIO $ RSVBitsSet byte1
    let opcode' = byte1 .&. 0x0F
    opcode <-
        case opcodeFromWord8 opcode' of
            Nothing -> throwIO $ InvalidOpcode opcode'
            Just o -> return o
    let isFinished = testBit byte1 7
        isMasked = testBit byte2 7
        len' = byte2 `clearBit` 7
    -- 7-bit marker: <=125 literal, 126 means 16-bit extended length,
    -- 127 means 64-bit extended length
    payloadSize <-
        case () of
            ()
                | len' <= 125 -> return $ fromIntegral len'
                | len' == 126 -> getBytes src 2
                | otherwise -> assert (len' == 127) (getBytes src 8)
    mmask <- if isMasked then Just <$> getBytes src 4 else return Nothing
    -- unmask' takes the byte offset within the payload so the 4-byte
    -- key stays aligned across chunk boundaries
    let unmask' =
            case mmask of
                Nothing -> \_ bs -> bs
                Just mask -> unmask mask
    consumedRef <- newIORef 0
    let getPayload = handlePayload unmask' payloadSize consumedRef
    res <- yield isFinished opcode mmask payloadSize getPayload
    -- discard whatever payload the callback did not consume
    let drain = do
            bs <- getPayload
            unless (S.null bs) drain
    drain
    return res
  where
    -- Serve the payload chunk by chunk; returns S.empty once
    -- payloadSize bytes have been handed out, pushing any excess bytes
    -- (start of the next frame) back into the source.
    handlePayload unmask' totalSize consumedRef = do
        consumed <- readIORef consumedRef
        if consumed >= totalSize
            then return S.empty
            else do
                bs <- getBS src
                let len = fromIntegral $ S.length bs
                    consumed' = consumed + len
                if consumed' <= totalSize
                    then do
                        writeIORef consumedRef consumed'
                        return $ unmask' consumed bs
                    else do
                        let (x, y) = S.splitAt (fromIntegral $ totalSize - consumed) bs
                        leftover src y
                        return $ unmask' consumed x
-- | XOR a masked payload chunk against the repeating 4-byte masking
-- key, starting at byte @offset'@ of the overall payload.
-- (The first parameter's type, Word32, is what MaskingKey abbreviates.)
unmask :: Word32 -> Word64 -> ByteString -> ByteString
unmask key offset' masked =
    S.pack (zipWith xor (S.unpack masked) keyStream)
  where
    start = fromIntegral (offset' `mod` 4)
    -- infinite key byte stream, phase-shifted by the payload offset;
    -- zipWith truncates it to the chunk length
    keyStream = map (maskByte . (`mod` 4)) [start ..]
    maskByte :: Int -> Word8
    maskByte 0 = fromIntegral $ key `shiftR` 24
    maskByte 1 = fromIntegral $ key `shiftR` 16
    maskByte 2 = fromIntegral $ key `shiftR` 8
    maskByte 3 = fromIntegral key
    maskByte i = error $ "Network.Wai.WebSockets.unmask.maskByte: invalid input " ++ show i
| snoyberg/wai-websockets-native | Network/Wai/WebSockets.hs | mit | 7,697 | 0 | 24 | 2,265 | 2,348 | 1,181 | 1,167 | 183 | 6 |
module Sound.Source where
import Data.Monoid
-- A source takes a time 't' and returns the amplitude of the source
-- at that time. 't' is a time in seconds, representing the current
-- time where 0.0 is the start of the audio data
-- | A sound source: sampling it at a time (seconds from the start of
-- the audio) yields an amplitude.
newtype Source = Source { sample :: Double -> Double }
-- Sources form a monoid under mixing: amplitudes add pointwise and the
-- identity is silence.
-- NOTE(review): on GHC >= 8.4 (base 4.11) Monoid requires a Semigroup
-- superclass instance, which is missing here — confirm target compiler.
instance Monoid (Source) where
  mempty = Source (const 0.0)
  mappend (Source f) (Source g) = Source (\t -> f t + g t)
  mconcat srcs = Source (\t -> foldr (\(Source f) x -> f t + x) 0.0 srcs)
-- | A synth maps a frequency to a playable source.
type Synth = (Double -> Source)
sineSynth :: Double -> Source
sineSynth = Source . sineWave
sawSynth :: Double -> Source
sawSynth = Source . sawWave
triangleSynth :: Double -> Source
triangleSynth = Source . triangleWave
squareSynth :: Double -> Source
squareSynth = Source . squareWave
-- | Sine oscillator: frequency (Hz) and time (s) to an amplitude in [-1, 1].
sineWave :: Double -> Double -> Double
sineWave freq t = sin (freq * t * 2 * pi)

-- | Sawtooth oscillator rising through [-1, 1] once per period.
sawWave :: Double -> Double -> Double
sawWave freq t =
  let phase = freq * t
  in 2 * (phase - fromInteger (floor (0.5 + phase)))

-- | Triangle oscillator, obtained by folding the sawtooth.
triangleWave :: Double -> Double -> Double
triangleWave freq t = 2 * abs (sawWave freq t) - 1

-- | Square oscillator: the sign of the sine at the same phase.
squareWave :: Double -> Double -> Double
squareWave freq t = if sineWave freq t < 0 then -1 else 1
| unknownloner/HaskellSynth | src/Sound/Source.hs | mit | 1,204 | 0 | 14 | 262 | 442 | 233 | 209 | 28 | 1 |
{-# OPTIONS_GHC -Wall #-}
module HW04 where
import Data.List
-- | A polynomial stored as its coefficient list from the constant term
-- upward: @P [a0, a1, a2]@ is a0 + a1*x + a2*x^2.
newtype Poly a = P [a]

-- Exercise 1 -----------------------------------------

-- | The indeterminate itself: 0 + 1*x.
x :: Num a => Poly a
x = P [0, 1]

-- Exercise 2 ----------------------------------------

-- | Drop trailing zero coefficients so equal polynomials compare equal.
trimTail :: (Eq a, Num a) => [a] -> [a]
trimTail = foldr keep []
  where
    -- a zero with nothing after it is a trailing zero: drop it
    keep 0 [] = []
    keep c acc = c : acc

-- | Drop leading zero coefficients.
trimHead :: (Eq a, Num a) => [a] -> [a]
trimHead (0 : rest) = trimHead rest
trimHead cs = cs

-- Polynomials are equal when their coefficients agree after ignoring
-- trailing zeros.
instance (Num a, Eq a) => Eq (Poly a) where
  P p == P q = trimTail p == trimTail q

-- Exercise 3 -----------------------------------------

-- | Render a coefficient, omitting a bare 1 (so "x", not "1x").
getce :: (Num a, Eq a, Show a) => a -> String
getce c
  | c == 1 = ""
  | otherwise = show c

-- | Pair each coefficient with its exponent (its list position).
items :: [a] -> [(a, Integer)]
items cs = zip cs [0 ..]

-- | Render one term from a (coefficient, exponent) pair.
getTerm :: (Num a, Eq a, Show a) => (a, Integer) -> String
getTerm (c, e)
  | c == 0 = "0"
  | e == 0 = show c
  | e == 1 = getce c ++ "x"
  | otherwise = getce c ++ "x^" ++ show e

-- Highest-degree term first, zero terms dropped, "0" for the zero
-- polynomial.
instance (Num a, Eq a, Show a) => Show (Poly a) where
  show (P cs) =
    case trimTail cs of
      [] -> "0"
      cs' -> intercalate " + "
               (reverse (filter (/= "0") (map getTerm (items cs'))))

-- Exercise 4 -----------------------------------------

-- | Append the coefficient lists of two polynomials.
concatPoly :: Poly a -> Poly a -> Poly a
concatPoly (P xs) (P ys) = P (xs ++ ys)

-- | Coefficient-wise addition; the longer polynomial's tail survives.
plus :: Num a => Poly a -> Poly a -> Poly a
plus (P xs) (P ys) = P (go xs ys)
  where
    go [] bs = bs
    go as [] = as
    go (a : as) (b : bs) = (a + b) : go as bs
-- Exercise 5 -----------------------------------------
-- | Partial products for multiplication: each coefficient of the first
-- polynomial scales the second, shifted one degree per step by
-- prepending a zero coefficient.
getComb :: Num a => Poly a -> Poly a -> [Poly a]
getComb (P []) (P _) = []
getComb (P _) (P []) = []
getComb (P (n:ns)) (P l) = (P $ map (*n) l):(getComb (P ns) (P $ 0:l))
-- | Polynomial multiplication: sum of the shifted partial products.
times :: Num a => Poly a -> Poly a -> Poly a
times p p' = sum $ getComb p p'
-- Exercise 6 -----------------------------------------
instance Num a => Num (Poly a) where
    (+) = plus
    (*) = times
    negate (P l) = P $ map negate l
    -- a constant embeds as a degree-0 polynomial
    fromInteger n = P [fromInteger n]
    -- No meaningful definitions exist
    abs = undefined
    signum = undefined
-- Exercise 7 -----------------------------------------
-- | Evaluate the polynomial at a point: sum of ce * n^i over all terms.
applyP :: Num a => Poly a -> a -> a
applyP (P l) n = evalP $ items l
    where evalP ((ce, i):ps') = n ^ i * ce + evalP ps'
          evalP [] = 0
-- Exercise 8 -----------------------------------------
-- | Things with a derivative; nderiv iterates it n times by default.
class Num a => Differentiable a where
    deriv :: a -> a
    nderiv :: Int -> a -> a
    nderiv 0 f = f
    nderiv n f = nderiv (n-1) (deriv f)
-- Exercise 9 -----------------------------------------
instance Num a => Differentiable (Poly a) where
    -- power rule: coefficient n at degree i becomes n*i at degree i-1;
    -- dropping the head removes the derivative of the constant term
    deriv (P l) = P $ drop 1 $ calcTerms $ items l
        where calcTerms [] = []
              calcTerms ((n, i):ps) = (n * (fromInteger i)):(calcTerms ps)
| hanjoes/cis194 | hw4/HW04.hs | mit | 2,807 | 0 | 14 | 704 | 1,314 | 681 | 633 | 59 | 2 |
{-# LANGUAGE PatternSynonyms, ForeignFunctionInterface, JavaScriptFFI #-}
module GHCJS.DOM.JSFFI.Generated.IDBRequest
(js_getResult, getResult, js_getError, getError, js_getSource,
getSource, js_getTransaction, getTransaction, js_getReadyState,
getReadyState, success, error, IDBRequest, castToIDBRequest,
gTypeIDBRequest, IsIDBRequest, toIDBRequest)
where
import Prelude ((.), (==), (>>=), return, IO, Int, Float, Double, Bool(..), Maybe, maybe, fromIntegral, round, fmap, Show, Read, Eq, Ord)
import Data.Typeable (Typeable)
import GHCJS.Types (JSVal(..), JSString)
import GHCJS.Foreign (jsNull)
import GHCJS.Foreign.Callback (syncCallback, asyncCallback, syncCallback1, asyncCallback1, syncCallback2, asyncCallback2, OnBlocked(..))
import GHCJS.Marshal (ToJSVal(..), FromJSVal(..))
import GHCJS.Marshal.Pure (PToJSVal(..), PFromJSVal(..))
import Control.Monad.IO.Class (MonadIO(..))
import Data.Int (Int64)
import Data.Word (Word, Word64)
import GHCJS.DOM.Types
import Control.Applicative ((<$>))
import GHCJS.DOM.EventTargetClosures (EventName, unsafeEventName)
import GHCJS.DOM.JSFFI.Generated.Enums
foreign import javascript unsafe "$1[\"result\"]" js_getResult ::
IDBRequest -> IO (Nullable IDBAny)
-- | <https://developer.mozilla.org/en-US/docs/Web/API/IDBRequest.result Mozilla IDBRequest.result documentation>
getResult ::
(MonadIO m, IsIDBRequest self) => self -> m (Maybe IDBAny)
getResult self
= liftIO (nullableToMaybe <$> (js_getResult (toIDBRequest self)))
foreign import javascript unsafe "$1[\"error\"]" js_getError ::
IDBRequest -> IO (Nullable DOMError)
-- | <https://developer.mozilla.org/en-US/docs/Web/API/IDBRequest.error Mozilla IDBRequest.error documentation>
getError ::
(MonadIO m, IsIDBRequest self) => self -> m (Maybe DOMError)
getError self
= liftIO (nullableToMaybe <$> (js_getError (toIDBRequest self)))
foreign import javascript unsafe "$1[\"source\"]" js_getSource ::
IDBRequest -> IO (Nullable IDBAny)
-- | <https://developer.mozilla.org/en-US/docs/Web/API/IDBRequest.source Mozilla IDBRequest.source documentation>
getSource ::
(MonadIO m, IsIDBRequest self) => self -> m (Maybe IDBAny)
getSource self
= liftIO (nullableToMaybe <$> (js_getSource (toIDBRequest self)))
foreign import javascript unsafe "$1[\"transaction\"]"
js_getTransaction :: IDBRequest -> IO (Nullable IDBTransaction)
-- | <https://developer.mozilla.org/en-US/docs/Web/API/IDBRequest.transaction Mozilla IDBRequest.transaction documentation>
getTransaction ::
(MonadIO m, IsIDBRequest self) => self -> m (Maybe IDBTransaction)
getTransaction self
= liftIO
(nullableToMaybe <$> (js_getTransaction (toIDBRequest self)))
foreign import javascript unsafe "$1[\"readyState\"]"
js_getReadyState :: IDBRequest -> IO JSString
-- | <https://developer.mozilla.org/en-US/docs/Web/API/IDBRequest.readyState Mozilla IDBRequest.readyState documentation>
getReadyState ::
(MonadIO m, IsIDBRequest self, FromJSString result) =>
self -> m result
getReadyState self
= liftIO (fromJSString <$> (js_getReadyState (toIDBRequest self)))
-- | <https://developer.mozilla.org/en-US/docs/Web/API/IDBRequest.onsuccess Mozilla IDBRequest.onsuccess documentation>
success ::
(IsIDBRequest self, IsEventTarget self) => EventName self Event
success = unsafeEventName (toJSString "success")
-- | <https://developer.mozilla.org/en-US/docs/Web/API/IDBRequest.onerror Mozilla IDBRequest.onerror documentation>
error ::
(IsIDBRequest self, IsEventTarget self) => EventName self Event
error = unsafeEventName (toJSString "error") | manyoo/ghcjs-dom | ghcjs-dom-jsffi/src/GHCJS/DOM/JSFFI/Generated/IDBRequest.hs | mit | 3,705 | 30 | 11 | 536 | 858 | 491 | 367 | 58 | 1 |
{-# LANGUAGE CPP #-}
{-# LANGUAGE RecordWildCards #-}
module Main (main) where
#if !MIN_VERSION_base(4, 8, 0)
import Data.Monoid (mempty)
#endif
import Data.Word (Word8)
import Text.Printf (printf)
import System.Random (Random(random), RandomGen, getStdGen)
import Options.Applicative
#if MIN_VERSION_optparse_applicative(0, 13, 0)
import Data.Monoid ((<>))
#endif
import System.Clock (Clock(Monotonic), TimeSpec(sec, nsec), getTime, diffTimeSpec)
import Control.DeepSeq (force)
import qualified Data.Vector as V
import qualified Data.Vector.Storable as SV
import qualified Data.ReedSolomon as RS
data Options = Options { optionsN :: Int
, optionsK :: Int
, optionsSize :: Int
, optionsIterations :: Int
}
deriving (Show, Eq)
parser :: Parser Options
parser = Options
<$> option auto
( short 'n'
<> metavar "N"
<> value 9
<> showDefault
<> help "Number of data shards"
)
<*> option auto
( short 'k'
<> metavar "K"
<> value 3
<> showDefault
<> help "Number of parity shards to calculate"
)
<*> option auto
( short 's'
<> metavar "BYTES"
<> value (1024 * 1024)
<> showDefault
<> help "Total data size to encode"
)
<*> option auto
( short 'i'
<> metavar "COUNT"
<> value 500
<> showDefault
<> help "Number of encoding iterations"
)
-- | Run the encoding benchmark loop: encode the same shards @n@ times.
go :: RS.Encoder -> V.Vector (SV.Vector Word8) -> Int -> IO ()
go enc shards = loop
  where
    loop n | n == 0 = return ()
           | otherwise = do
        -- force + seq make sure every iteration actually performs the
        -- encoding instead of accumulating unevaluated thunks
        parities <- force `fmap` RS.encode RS.defaultBackend enc shards
        parities `seq` loop (n - 1)
-- | Build a vector of @cnt@ pseudo-random elements from the given generator.
makeVector :: (SV.Storable a, Random a, RandomGen g) => g -> Int -> SV.Vector a
makeVector gen0 cnt = SV.unfoldrN cnt (Just . random) gen0
-- | Measure the wall-clock (monotonic) duration of an IO action.
time :: IO () -> IO TimeSpec
time act = do
    start <- getTime Monotonic
    act
    diffTimeSpec start `fmap` getTime Monotonic
main :: IO ()
main = do
Options{..} <- execParser $ info (helper <*> parser) mempty
printf "Settings: N=%d K=%d size=%d iterations=%d\n"
optionsN optionsK optionsSize optionsIterations
enc <- RS.new optionsN optionsK
vecs <- RS.split enc =<< flip makeVector optionsSize `fmap` getStdGen
diff <- time (go enc vecs optionsIterations)
printf "Total time: %ds %dns\n" (sec diff) (nsec diff)
| NicolasT/reedsolomon | bench/profiling.hs | mit | 2,602 | 0 | 15 | 836 | 755 | 393 | 362 | 66 | 1 |
module ParserSpec (spec) where
import Test.Hspec
import Test.Hspec.Expectations.Contrib
import Language.CFrp.Parser
import Language.CFrp.Syntax
spec :: Spec
spec = do
describe "parseExprString" $ do
it "parses arithmetic" $ do
"(1 + x * 3) - _a0 / 5"
`shouldBeParsedE`
SubE
(AddE
(IntE 1 ())
(MulE (VarE "x" ()) (IntE 3 ()) ())
())
(DivE (VarE "_a0" ()) (IntE 5 ()) ())
()
it "parses lambda" $ do
"\\x -> \\_y10 -> 1 + _y10"
`shouldBeParsedE`
AbsE "x"
(AbsE "_y10"
(AddE (IntE 1 ()) (VarE "_y10" ()) ())
())
()
it "parses application" $ do
"4 * (\\x y -> x + y + 1) 2 3 / 5"
`shouldBeParsedE`
DivE
(MulE
(IntE 4 ())
(AppE
(AbsE "x"
(AbsE "y"
(AddE
(AddE (VarE "x" ()) (VarE "y" ()) ())
(IntE 1 ())
())
())
())
[IntE 2 (), IntE 3 ()]
())
())
(IntE 5 ())
()
it "parses tuple" $ do
"(1, (x), (\\y -> y, (), 2))"
`shouldBeParsedE`
TupE [
IntE 1 ()
, VarE "x" ()
, TupE [AbsE "y" (VarE "y" ()) (), UnitE (), IntE 2 ()] ()
]
()
it "parses if" $ do
"if \\x -> x then \\y -> y + 2 else \\z -> z + 3"
`shouldBeParsedE`
IfE (AbsE "x" (VarE "x" ()) ())
(AbsE "y" (AddE (VarE "y" ()) (IntE 2 ()) ()) ())
(AbsE "z" (AddE (VarE "z" ()) (IntE 3 ()) ()) ())
()
it "parses let" $ do
"let f x y = x + y in f 1 2"
`shouldBeParsedE`
LetE "f"
(AbsE "x" (AbsE "y" (AddE (VarE "x" ()) (VarE "y" ()) ()) ()) ())
(AppE (VarE "f" ()) [IntE 1 (), IntE 2 ()] ())
()
it "parses sequence" $ do
"let f n = print_int n; print_int n in f 10; f 20"
`shouldBeParsedE`
LetE "f"
(AbsE "n"
(SeqE
(AppE (VarE "print_int" ()) [VarE "n" ()] ())
(AppE (VarE "print_int" ()) [VarE "n" ()] ())
())
())
(SeqE
(AppE (VarE "f" ()) [IntE 10 ()] ())
(AppE (VarE "f" ()) [IntE 20 ()] ())
())
()
describe "parseDeclString" $ do
it "parses input declaration" $ do
"%input lastPress :: Signal Int = last_press_input_node;"
`shouldBeParsedD`
InputD "lastPress" (SigT IntT) "last_press_input_node" ()
it "parses embed declaration" $ do
let code = "int func(int x)\n{\n if (x) { x += 1; };\nreturn x;\n}"
("%{\n" ++ code ++ "\n%}")
`shouldBeParsedD`
EmbedD code ()
describe "parseProgramString" $ do
it "parses program" $ do
let code = "int func(int x)\n{\n if (x) { x += 1; };\nreturn x;\n}"
let prog = unlines
[ "%input lastPress :: Signal Int = last_press_input_node;"
, "%{\n" ++ code ++ "\n%}"
, "f lastPress"
]
putStrLn prog
prog
`shouldBeParsedP`
([ InputD "lastPress" (SigT IntT) "last_press_input_node" ()
, EmbedD code ()
]
, AppE (VarE "f" ()) [VarE "lastPress" ()] ()
)
-- | Expect an expression source string to parse to the given AST.
shouldBeParsedE :: String -> ParsedExpr -> Expectation
shouldBeParsedE = parsed parseExprString
-- | Expect a declaration source string to parse to the given AST.
shouldBeParsedD :: String -> ParsedDecl -> Expectation
shouldBeParsedD = parsed parseDeclString
-- | Expect a whole program to parse to the given declarations and body.
shouldBeParsedP :: String -> ([ParsedDecl], ParsedExpr) -> Expectation
shouldBeParsedP = parsed parseProgramString
-- | Run a parser with a dummy source name and check both that it
-- succeeds and that the result equals the expectation.
parsed :: (Show e, Show a, Eq a) => (String -> String -> Either e a) -> String -> a -> Expectation
parsed parser str expected = do
  let got = parser "<test>" str
  got `shouldSatisfy` isRight
  -- partial match is safe: shouldSatisfy has already thrown on Left
  let Right e = got
  e `shouldBe` expected
| psg-titech/cfrp | spec/ParserSpec.hs | mit | 3,924 | 0 | 29 | 1,506 | 1,380 | 686 | 694 | 118 | 1 |
-- TODO this parser is wonky
module Parse where
-- | A nondeterministic parser: consumes a 'String' and returns every
-- possible (result, remaining input) pair; an empty list means failure.
data Parser a = P {runParser :: String -> [(a, String)]}
-- Apply a function to the first component of a pair (local equivalent of
-- Data.Bifunctor.first).
first f (a, b) = (f a , b)
-- | Map over every parse result, leaving the leftover input untouched.
instance Functor Parser where
  fmap f p = P $ \input -> [ (f a, rest) | (a, rest) <- runParser p input ]
-- | 'pure' embeds a value without consuming input; '<*>' runs the
-- function parser first, then the argument parser on the leftover input,
-- pairing every combination (same enumeration order as the Monad-based
-- original).
--
-- 'pure' was previously @pure = return@, which depends on the 'Monad'
-- instance; defining both members directly keeps the instance
-- self-contained and avoids the deprecated return-before-pure pattern.
instance Applicative Parser where
  pure x = P $ \s -> [(x, s)]
  pf <*> px = P $ \s ->
    [ (g a, s'') | (g, s') <- runParser pf s, (a, s'') <- runParser px s' ]
-- | Sequencing: feed each result's leftover input to the continuation and
-- concatenate all alternatives.
instance Monad Parser where
  return x = P $ \s -> [(x, s)]
  x >>= f = P $ \s ->
    let ms = runParser x s
        -- run the continuation on each (result, rest) pair
        step (a, str) = runParser (f a) str
    in concatMap step ms
-- | Parser that accepts exactly the given character.
char :: Char -> Parser Char
char expected = P $ \input ->
  case input of
    (c : rest) | c == expected -> [(c, rest)]
    _ -> []
-- | Parser that accepts exactly the given literal string, one character
-- at a time.
string :: String -> Parser String
string = mapM char
-- | Try every parser on the same input and collect all successes
-- (unbiased, nondeterministic choice).
parseAny :: [Parser a] -> Parser a
parseAny ps = P $ \s -> concat [ runParser p s | p <- ps ]
-- | Binary choice: results of @a@ followed by results of @b@.
a <|> b = parseAny [a, b]
-- | Zero-or-more / one-or-more repetitions.
-- NOTE(review): 'many' lists the longer (non-empty) match first; the
-- empty match is always the final alternative.
many, many1 :: Parser a -> Parser [a]
many p = parseAny [(:) <$> p <*> many p, return []]
many1 p = (:) <$> p <*> many p
-- Like 'many', but discard the parsed list.
many_ x = many x *> return ()
-- | Accept any single character from the given set.
charSet = parseAny . map char
-- Whitespace including newlines.
ws :: Parser Char
ws = charSet $ " \n\t"
-- Whitespace excluding newlines.
ws' = charSet $ " \t"
whitespace = many_ ws
whitespace' = many_ ws'
newline = charSet "\n"
lowerAlpha = charSet $ ['a'..'z']
upperAlpha = charSet $ ['A'..'Z']
digit = charSet ['0'..'9']
-- Extra symbols permitted inside identifiers.
idSym = charSet "-'"
idChar = lowerAlpha <|> upperAlpha <|> digit <|> idSym
-- | Identifier whose first character comes from @first@.
identifier' first = (:) <$> first <*> many idChar
identifier = identifier' lowerAlpha
predIdentifier = identifier' upperAlpha
-- | Parse @x@, then skip trailing whitespace (including newlines).
token x = (x <* many ws)
indent = many1 ws
-- | @sepBy sep p@: one or more occurrences of @p@ separated by @sep@.
-- NOTE(review): despite the name, this never matches zero occurrences.
sepBy :: Parser a -> Parser b -> Parser [b]
sepBy a b = ((:) <$> b <*> (a *> sepBy a b)) <|> (return <$> b)
type Symbol = String
-- | Atomic values: integer literals, variable references, and
-- dotted member access (@obj.field@).
data Value
  = Num Int
  | Ref Symbol
  | Member Symbol Symbol
  deriving (Show, Eq, Ord)
-- | Expressions: function application to a list of values, or a bare value.
data Expr
  = App Symbol [Value]
  | EVal Value
  deriving (Show, Eq, Ord)
-- | Bind the result of an expression to a name.
data Binder = Binder Expr Symbol
  deriving (Show, Eq, Ord)
-- | Predicate application (predicate names start with an upper-case letter).
data Predicate = Predicate Symbol [Value]
  deriving (Show, Eq, Ord)
-- | Left-hand-side patterns of a rule.
data Pattern
  = PExpr Expr
  | PBind Binder
  | PPred Predicate
  deriving (Show, Eq, Ord)
-- | Right-hand-side actions; 'Mutate' is @obj.field <- expr@.
data Action
  = Mutate Symbol Symbol Expr
  | AExpr Expr
  | ABind Binder
  | APred Predicate
  deriving (Show, Eq, Ord)
data LHS = LHS [Pattern]
  deriving (Show, Eq, Ord)
-- | Expression rule RHS.
data ERHS = ERHS Expr
  deriving (Show, Eq, Ord)
-- | Mutation rule RHS.
data MRHS = MRHS [Action]
  deriving (Show, Eq, Ord)
-- | Rule arrow: @->@ for function rules, @~>@ for mutation rules.
data Arrow = FnArrow | MutArrow
  deriving (Eq, Ord)
instance Show Arrow where
  show FnArrow = "->"
  show MutArrow = "~>"
data Rule = ERule LHS ERHS | MRule LHS MRHS
  deriving (Show, Eq, Ord)
-- | A named definition: a name and its rules.
data Def = Def String [Rule]
  deriving (Eq, Ord)
instance Show Def where
  show (Def name rules) =
    unlines $ name : map ((" " ++) . show) rules
-- | Succeed only on parses that consume the entire input.
finish :: Parser a -> Parser a
finish p = p <* eof
  where
    eof = P $ \rest -> if null rest then [((), "")] else []
symbol = token identifier
-- Wrap 'reads' at type Int so it can be lifted into a 'Parser'.
readInt :: String -> [(Int, String)]
readInt = reads
-- | Values: number, plain reference, or member access.
-- NOTE(review): for input like @a.b@ both the 'Ref' and 'Member' parses
-- are produced; the nondeterministic parser keeps all alternatives.
value :: Parser Value
value = num <|> ref <|> member
  where
    num = Num <$> P readInt
    ref = Ref <$> identifier
    member = Member <$> identifier <*> (char '.' *> identifier)
-- One-or-more values separated by whitespace.
spaces = sepBy (many1 ws)
args = spaces value
-- | Expressions: a bare value, or @name arg1 arg2 …@.
expr :: Parser Expr
expr = (EVal <$> value) <|> appParser
  where
    appParser = App <$> identifier <*> (many1 ws *> args)
-- | Binder in either direction: @expr ) name@ or @name ( expr@.
-- NOTE(review): the bare ')' / '(' tokens look odd (cf. the module's
-- "wonky parser" TODO) — confirm the intended concrete syntax.
binder :: Parser Binder
binder = bindR <|> bindL
  where
    bindR = Binder <$> token expr <*> (token (char ')') *> identifier)
    bindL = flip Binder <$> token identifier <*> (token (char '(') *> expr)
-- | Predicate application, e.g. @Foo x y@.
predicate :: Parser Predicate
predicate = Predicate <$> predIdentifier <*> (many1 ws *> args)
pattern :: Parser Pattern
pattern = (PExpr <$> expr) <|> (PBind <$> binder) <|> (PPred <$> predicate)
-- TODO
-- | Actions; 'mutate' parses @obj.field <- expr@.
action :: Parser Action
action = (ABind <$> binder) <|> (AExpr <$> expr) <|> (APred <$> predicate)
  <|> mutate
  where
    mutate = Mutate <$> identifier <*> (char '.' *> token identifier)
                    <*> (token (string "<-") *> expr)
-- Comma-separated list of @x@, allowing whitespace before each comma.
commas x = sepBy (whitespace >> token (char ',')) x
lhs = LHS <$> commas (pattern)
erhs = ERHS <$> expr
mrhs = MRHS <$> commas (action)
earrow = (string "->" >> return FnArrow)
marrow = (string "~>" >> return MutArrow)
-- At least one newline, with optional surrounding non-newline whitespace.
nlws = whitespace' >> many1 newline >> whitespace'
-- Parse @x@ with whitespace stripped on both sides.
wslr x = whitespace *> x <* whitespace
-- A definition ends with a line containing a single '.'.
endDef = string "\n.\n"
-- | A definition: name, then newline-separated rules, then the terminator.
def = Def <$> identifier <*> (nlws *> sepBy nlws rule <* endDef)
-- | A rule is either an expression rule (->) or a mutation rule (~>).
rule = erule <|> mrule
  where
    erule = ERule <$> token lhs <*> (token earrow *> erhs)
    mrule = MRule <$> token lhs <*> (token marrow *> mrhs)
-- | A whole program: zero or more definitions with surrounding whitespace.
prog :: Parser [Def]
prog = wslr $ many def
-- Debug helpers for GHCi.
-- NOTE(review): 'chk'' uses partial 'head' — it crashes when the parse
-- yields no results.
chk' p = head . runParser p
chk p = runParser (finish p)
chn n p = take n . runParser (finish p)
| kovach/cards | src/Parse.hs | mit | 4,510 | 83 | 15 | 1,082 | 2,049 | 1,075 | 974 | 141 | 2 |
{-# LANGUAGE OverloadedStrings #-}
module Main where
import Data.ByteString (ByteString)
import qualified Data.ByteString.Char8 as BC
import Data.Time
import Data.Word
import Test.Tasty
import Test.Tasty.HUnit
-- IUT
import Data.OTP
-- | Shared 20-byte HOTP secret ("12345678901234567890", as in RFC 4226).
hotpSecret :: ByteString
hotpSecret = "12345678901234567890"
-- | One HOTP test case: a 6-digit SHA-1 HOTP at the given counter must
-- equal the expected code.
testHotp :: Word64 -> Word32 -> TestTree
testHotp key result = testCase (show result) $ do
  let h = hotp SHA1 hotpSecret key 6
  result @=? h
-- | Expected 6-digit HOTP codes for counters 0..9; these match the
-- published RFC 4226 Appendix D test vectors.
hotpResults :: [Word32]
hotpResults =
  [ 755224, 287082, 359152
  , 969429, 338314, 254676
  , 287922, 162583, 399871
  , 520489
  ]
-- | One TOTP test case: compute an 8-digit TOTP with a 30-second period
-- for the given secret/time/algorithm and compare against the expected
-- code.
testTotp :: (ByteString, UTCTime, HashAlgorithm, Word32) -> TestTree
testTotp (secret, time, algorithm, expected) =
  testCase label (expected @=? totp algorithm secret time 30 8)
  where
    label = show algorithm ++ " => " ++ show expected
-- Per-algorithm secrets: the RFC pattern "1234567890" cycled to 20, 32,
-- and 64 bytes for SHA-1, SHA-256 and SHA-512 respectively.
sha1Secr :: ByteString
sha1Secr = BC.pack $ take 20 $ cycle "12345678901234567890"
sha256Secr :: ByteString
sha256Secr = BC.pack $ take 32 $ cycle "12345678901234567890"
sha512Secr :: ByteString
sha512Secr = BC.pack $ take 64 $ cycle "12345678901234567890"
-- | TOTP test vectors (secret, time, algorithm, expected 8-digit code);
-- these match RFC 6238 Appendix B.
-- NOTE: @07081804@ is a plain decimal literal with a leading zero
-- (value 7081804), which is the numeric form of the 8-digit code
-- "07081804".
totpData :: [(ByteString, UTCTime, HashAlgorithm, Word32)]
totpData =
  [ (sha1Secr, read "1970-01-01 00:00:59 UTC", SHA1, 94287082)
  , (sha256Secr, read "1970-01-01 00:00:59 UTC", SHA256, 46119246)
  , (sha512Secr, read "1970-01-01 00:00:59 UTC", SHA512, 90693936)
  , (sha1Secr, read "2005-03-18 01:58:29 UTC", SHA1, 07081804)
  , (sha256Secr, read "2005-03-18 01:58:29 UTC", SHA256, 68084774)
  , (sha512Secr, read "2005-03-18 01:58:29 UTC", SHA512, 25091201)
  , (sha1Secr, read "2005-03-18 01:58:31 UTC", SHA1, 14050471)
  , (sha256Secr, read "2005-03-18 01:58:31 UTC", SHA256, 67062674)
  , (sha512Secr, read "2005-03-18 01:58:31 UTC", SHA512, 99943326)
  , (sha1Secr, read "2009-02-13 23:31:30 UTC", SHA1, 89005924)
  , (sha256Secr, read "2009-02-13 23:31:30 UTC", SHA256, 91819424)
  , (sha512Secr, read "2009-02-13 23:31:30 UTC", SHA512, 93441116)
  , (sha1Secr, read "2033-05-18 03:33:20 UTC", SHA1, 69279037)
  , (sha256Secr, read "2033-05-18 03:33:20 UTC", SHA256, 90698825)
  , (sha512Secr, read "2033-05-18 03:33:20 UTC", SHA512, 38618901)
  , (sha1Secr, read "2603-10-11 11:33:20 UTC", SHA1, 65353130)
  , (sha256Secr, read "2603-10-11 11:33:20 UTC", SHA256, 77737706)
  , (sha512Secr, read "2603-10-11 11:33:20 UTC", SHA512, 47863826)
  ]
-- | Run both groups of test vectors.
main :: IO ()
main = defaultMain (testGroup "test vectors" [hotpTests, totpTests])
  where
    hotpTests = testGroup "hotp" (zipWith testHotp [0 ..] hotpResults)
    totpTests = testGroup "totp" (map testTotp totpData)
| matshch/OTP | test/Test.hs | mit | 2,655 | 0 | 12 | 588 | 754 | 432 | 322 | 56 | 1 |
module Data.FacetedSpec where
import Control.Applicative
import Data.Faceted
import Test.Hspec
-- Simple faceted value: observers whose context satisfies the predicate
-- see 1, everyone else sees 0 ('?', '.:' come from Data.Faceted).
simple = (\x -> x > 0) ? 1 .: 0
-- Nested faceted value built with '??': both branches are themselves
-- faceted.
nested = (\x -> x <= 2) ?? ((\x -> x < 2) ? 1 .: 2) .: ((\x -> x < 4 ) ? 3 .: 4)
-- do-syntax test cases
-- Applicative-style: the two bound facets are independent.
ap_do = do
  a <- ((\x -> 0 < x && x < 3) ? 1 .: 2)
  b <- ((\x -> 1 < x && x < 4) ? 4 .: 8)
  return (a + b)
-- Monadic: the second facet depends on the first bound value, and the
-- final expression is itself faceted.
monad_do = do
  a <- ((\x -> 0 < x && x < 3) ? 1 .: 2)
  b <- ((\x -> 1 < x && x < 4) ? (a+4) .: (a+8))
  (\y -> y < 3) ? (10*b) .: (100*b)
-- | Observes each faceted fixture under a range of contexts and checks
-- the value revealed to that observer; the Functor/Applicative/Monad
-- groups additionally pin down the laws-level behaviour of composition.
spec :: Spec
spec = do
  describe "facete value can be declared in intuitive manner \"(\\x -> x > 0) ? 1 .: 0)\"" $ do
    it "its observation with context 0 should be 0." $
      observe simple 0 `shouldBe` 0
    it "its observation with context 1 should be 1." $
      observe simple 1 `shouldBe` 1
  describe "use (??) for nested facete value \"(\\x -> x <= 2) ?? ((\\x -> x < 2) ? 1 .: 2) .: ((\\x -> x < 4 ) ? 3 .: 4)\"" $ do
    it "its observation with context 1 should be 1." $
      observe nested 1 `shouldBe` 1
    it "its observation with context 2 should be 2." $
      observe nested 2 `shouldBe` 2
    it "its observation with context 3 should be 3." $
      observe nested 3 `shouldBe` 3
    it "its observation with context 4 should be 4." $
      observe nested 4 `shouldBe` 4
  describe "Functor: ((*3) `fmap` (\\x -> x > 0) ? 1 .: 0)) should be equivalent with < (x > 0) ? 1*3 : 0*3>." $ do
    it "observation with context 0 should be 0." $
      observe ((*3) `fmap` simple) 0 `shouldBe` 0
    it "observation with context 1 should be 3." $
      observe ((*3) `fmap` simple) 1 `shouldBe` 3
  describe ("Applicative: ((+) <$> ((\\x -> 0 < x && x < 3) ? 1 .: 2) <*> ((\\x -> 1 < x && x < 4) ? 4 .: 8))\n"
            ++"\tThis computation adds two faceted values. So in this case, 4 patterns of results can be observed.\n"
            ++"\tThe result should be equivalent with\n"
            ++"\t < (0 < x < 3) ? < (1 < x < 4) ? 1+4 : 1+8 >\n"
            ++"\t : < (1 < x < 4) ? 2+4 : 2+8 >>") $ do
    it "observation with context 1 should be 9 (= 1 + 8)." $
      observe ( (+) <$> ((\x -> 0 < x && x < 3) ? 1 .: 2) <*> ((\x -> 1 < x && x < 4) ? 4 .: 8)) 1 `shouldBe` 9
    it "observation with context 2 should be 5 (= 1 + 4)." $
      observe ( (+) <$> ((\x -> 0 < x && x < 3) ? 1 .: 2) <*> ((\x -> 1 < x && x < 4) ? 4 .: 8)) 2 `shouldBe` 5
    it "observation with context 3 should be 6 (= 2 + 4)." $
      observe ( (+) <$> ((\x -> 0 < x && x < 3) ? 1 .: 2) <*> ((\x -> 1 < x && x < 4) ? 4 .: 8)) 3 `shouldBe` 6
    it "observation with context 4 should be 10 (= 2 + 8)." $
      observe ( (+) <$> ((\x -> 0 < x && x < 3) ? 1 .: 2) <*> ((\x -> 1 < x && x < 4) ? 4 .: 8)) 4 `shouldBe` 10
  describe ("Applicative Do: \n"
            ++"\t do a <- ((\\x -> 0 < x && x < 3) ? 1 .: 2)\n"
            ++"\t    b <- ((\\x -> 1 < x && x < 4) ? 4 .: 8)\n"
            ++"\t    return a + b\n"
            ++"\tshould be equivalent with above.\n") $ do
    it "observation with context 1 should be 9 (= 1 + 8)." $
      observe ap_do 1 `shouldBe` 9
    it "observation with context 2 should be 5 (= 1 + 4)." $
      observe ap_do 2 `shouldBe` 5
    it "observation with context 3 should be 6 (= 2 + 4)." $
      observe ap_do 3 `shouldBe` 6
    it "observation with context 4 should be 10 (= 2 + 8)." $
      observe ap_do 4 `shouldBe` 10
  describe ("Bind: ((\\x -> 0 < x && x < 3) ? 1 .: 2) >>= (\\v -> ((\\x -> 1 < x && x < 4) ? (v+4) .: (v+8))\n"
            ++"\tshoule be equivalent with\n"
            ++"\t < (0 < x < 3) ? < (1 < x < 4) ? 1+4 : 1+8 >\n"
            ++"\t : < (1 < x < 4) ? 2+4 : 2+8 >>") $ do
    it "observation with context 1 should be 9 (= 1 + 8)." $
      observe (((\x -> 0 < x && x < 3) ? 1 .: 2) >>= \v -> ((\x -> 1 < x && x < 4) ? (v+4) .: (v+8))) 1 `shouldBe` 9
    it "observation with context 2 should be 5 (= 1 + 4)." $
      observe (((\x -> 0 < x && x < 3) ? 1 .: 2) >>= \v -> ((\x -> 1 < x && x < 4) ? (v+4) .: (v+8))) 2 `shouldBe` 5
    it "observation with context 3 should be 6 (= 2 + 4)." $
      observe (((\x -> 0 < x && x < 3) ? 1 .: 2) >>= \v -> ((\x -> 1 < x && x < 4) ? (v+4) .: (v+8))) 3 `shouldBe` 6
    it "observation with context 4 should be 10 (= 2 + 8)." $
      observe (((\x -> 0 < x && x < 3) ? 1 .: 2) >>= \v -> ((\x -> 1 < x && x < 4) ? (v+4) .: (v+8))) 4 `shouldBe` 10
  describe ("Do Syntax:\n"
            ++"\tdo a <- ((\\x -> 0 < x && x < 3) ? 1 .: 2)\n"
            ++"\t   b <- ((\\x -> 1 < x && x < 4) ? (a+4) .: (a+8))\n"
            ++"\t   (\\y -> y < 3) ? (10*b) .: (100*b)\n"
            ++"\tshoule be equivalent with\n"
            ++"\t < (0 < x < 3) ? < (1 < x < 4) ? <(y < 3)? 10*(1+4) : 100*(1+4)>\n"
            ++"\t : <(y < 3)? 10*(2+8) : 100*(2+8)>>\n"
            ++"\t : < (1 < x < 4) ? <(y < 3)? 10*(2+4) : 100*(2+4)>\n"
            ++"\t : <(y < 3)? 10*(2+8) : 100*(2+8)>>") $ do
    it "observation with context 1 should be 90 (= 10 * (1 + 8))." $
      observe monad_do 1 `shouldBe` 90
    it "observation with context 2 should be 50 (= 10 * (1 + 4))." $
      observe monad_do 2 `shouldBe` 50
    it "observation with context 3 should be 600 (= 100 * (2 + 4))." $
      observe monad_do 3 `shouldBe` 600
    it "observation with context 4 should be 1000 (= 100 * (2 + 8))." $
      observe monad_do 4 `shouldBe` 1000
| everpeace/faceted-values | test/Data/FacetedSpec.hs | mit | 5,471 | 0 | 23 | 1,819 | 1,695 | 891 | 804 | 88 | 1 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
-- | This is a Haskell port of the Hashids library by Ivan Akimov.
-- This is /not/ a cryptographic hashing algorithm. Hashids is typically
-- used to encode numbers to a format suitable for appearance in places
-- like urls.
--
-- See the official Hashids home page: <http://hashids.org>
--
-- Hashids is a small open-source library that generates short, unique,
-- non-sequential ids from numbers. It converts numbers like 347 into
-- strings like @yr8@, or a list of numbers like [27, 986] into @3kTMd@.
-- You can also decode those ids back. This is useful in bundling several
-- parameters into one or simply using them as short UIDs.
module Web.Hashids
( HashidsContext
-- * How to use
-- $howto
-- ** Encoding
-- $encoding
-- ** Decoding
-- $decoding
-- ** Randomness
-- $randomness
-- *** Repeating numbers
-- $repeating
-- *** Incrementing number sequence
-- $incrementing
-- ** Curses\! \#\$\%\@
-- $curses
-- * API
, version
-- ** Context object constructors
, createHashidsContext
, hashidsSimple
, hashidsMinimum
-- ** Encoding and decoding
, encodeHex
, decodeHex
, encode
, encodeList
, decode
-- ** Convenience wrappers
, encodeUsingSalt
, encodeListUsingSalt
, decodeUsingSalt
, encodeHexUsingSalt
, decodeHexUsingSalt
) where
import Data.ByteString ( ByteString )
import Data.Foldable ( toList )
import Data.List ( (\\), nub, intersect, foldl' )
import Data.List.Split ( chunksOf )
import Data.Sequence ( Seq )
import Numeric ( showHex, readHex )
import qualified Data.ByteString as BS
import qualified Data.ByteString.Char8 as C8
import qualified Data.Sequence as Seq
-- $howto
--
-- Note that most of the examples on this page require the OverloadedStrings extension.
-- $encoding
--
-- Unless you require a minimum length for the generated hash, create a
-- context using 'hashidsSimple' and then call 'encode' and 'decode' with
-- this object.
--
-- > {-# LANGUAGE OverloadedStrings #-}
-- >
-- > import Web.Hashids
-- >
-- > main :: IO ()
-- > main = do
-- > let context = hashidsSimple "oldsaltyswedishseadog"
-- > print $ encode context 42
--
-- This program will output
--
-- > "kg"
--
-- To specify a minimum hash length, use 'hashidsMinimum' instead.
--
-- > main = do
-- > let context = hashidsMinimum "oldsaltyswedishseadog" 12
-- > print $ encode context 42
--
-- The output will now be
--
-- > "W3xbdkgdy42v"
--
-- If you only need the context once, you can use one of the provided wrappers
-- to simplify things.
--
-- > main :: IO ()
-- > main = print $ encodeUsingSalt "oldsaltyswedishseadog" 42
--
-- On the other hand, if your implementation invokes the hashing algorithm
-- frequently without changing the configuration, it is probably better to
-- define partially applied versions of 'encode', 'encodeList', and 'decode'.
--
-- > import Web.Hashids
-- >
-- > context :: HashidsContext
-- > context = createHashidsContext "oldsaltyswedishseadog" 12 "abcdefghijklmnopqrstuvwxyz"
-- >
-- > encode' = encode context
-- > encodeList' = encodeList context
-- > decode' = decode context
-- >
-- > main :: IO ()
-- > main = print $ encode' 12345
--
-- Use a custom alphabet and 'createHashidsContext' if you want to make your
-- hashes \"unique\".
--
-- > main = do
-- > let context = createHashidsContext "oldsaltyswedishseadog" 0 "XbrNfdylm5qtnP19R"
-- > print $ encode context 1
--
-- The output is now
--
-- > "Rd"
--
-- To encode a list of numbers, use `encodeList`.
--
-- > let context = hashidsSimple "this is my salt" in encodeList context [0, 1, 2]
--
-- > "yJUWHx"
-- $decoding
--
-- Decoding a hash returns a list of numbers,
--
-- > let context = hashidsSimple "this is my salt"
-- > hash = decode context "rD" -- == [5]
--
-- Decoding will not work if the salt is changed:
--
-- > main = do
-- > let context = hashidsSimple "this is my salt"
-- > hash = encode context 5
-- >
-- > print $ decodeUsingSalt "this is my pepper" hash
--
-- When decoding fails, the empty list is returned.
--
-- > []
--
-- $randomness
--
-- Hashids is based on a modified version of the Fisher-Yates shuffle. The
-- primary purpose is to obfuscate ids, and it is not meant for security
-- purposes or compression. Having said that, the algorithm does try to make
-- hashes unguessable and unpredictable. See the official Hashids home page
-- for details: <http://hashids.org>
-- $repeating
--
-- > let context = hashidsSimple "this is my salt" in encodeList context $ replicate 4 5
--
-- There are no repeating patterns in the hash to suggest that four identical
-- numbers are used:
--
-- > "1Wc8cwcE"
--
-- The same is true for increasing numbers:
--
-- > let context = hashidsSimple "this is my salt" in encodeList context [1..10]
--
-- > "kRHnurhptKcjIDTWC3sx"
-- $incrementing
--
-- > let context = hashidsSimple "this is my salt" in map (encode context) [1..5]
--
-- > ["NV","6m","yD","2l","rD"]
-- $curses
--
-- The algorithm tries to avoid generating common curse words in English by
-- never placing the following letters next to each other:
--
-- > c, C, s, S, f, F, h, H, u, U, i, I, t, T
{-# INLINE (|>) #-}
-- | Reverse application: feed a value into a function pipeline.
(|>) :: a -> (a -> b) -> b
x |> f = f x
{-# INLINE splitOn #-}
-- | Split a ByteString at every byte that occurs in the delimiter set
-- (the first argument).
splitOn :: ByteString -> ByteString -> [ByteString]
splitOn delims = BS.splitWith (`BS.elem` delims)
-- | Opaque data type with various internals required for encoding and decoding.
data HashidsContext = Context
    { guards        :: !ByteString  -- ^ characters inserted to pad short hashes
    , seps          :: !ByteString  -- ^ separator characters placed between numbers
    , salt          :: !ByteString  -- ^ user-supplied salt driving the shuffles
    , minHashLength :: !Int         -- ^ minimum length of generated hashes
    , alphabet      :: !ByteString } -- ^ shuffled working alphabet
-- | Hashids version number.
--
-- NOTE(review): this is a hard-coded string; presumably it tracks the
-- upstream Hashids algorithm version — confirm when bumping.
version :: String
version = "1.0.2"
-- | Create a context object using the given salt, a minimum hash length, and
-- a custom alphabet. If you only need to supply the salt, or the first two
-- arguments, use 'hashidsSimple' or 'hashidsMinimum' instead.
--
-- Changing the alphabet is useful if you want to make your hashes unique,
-- i.e., create hashes different from those generated by other applications
-- relying on the same algorithm.
--
-- Calls 'error' when the alphabet has fewer than 16 unique characters or
-- contains a space.
createHashidsContext :: ByteString -- ^ Salt
                     -> Int        -- ^ Minimum required hash length
                     -> String     -- ^ Alphabet
                     -> HashidsContext
createHashidsContext salt minHashLen alphabet
    | length uniqueAlphabet < minAlphabetLength
        = error $ "alphabet must contain at least " ++ show minAlphabetLength ++ " unique characters"
    | ' ' `elem` uniqueAlphabet
        = error "alphabet cannot contain spaces"
      -- Too few separators relative to the alphabet: borrow characters
      -- from the alphabet to top up the separator set.
    | BS.null seps'' || fromIntegral (BS.length alphabet') / fromIntegral (BS.length seps'') > sepDiv
        = case sepsLength - BS.length seps'' of
            diff | diff > 0
                 -> res (BS.drop diff alphabet') (seps'' `BS.append` BS.take diff alphabet')
            _ -> res alphabet' (BS.take sepsLength seps'')
    | otherwise = res alphabet' seps''
  where
    -- Build the final context from the working alphabet and separators;
    -- when the shuffled alphabet is 3+ chars, guards are carved out of
    -- the alphabet instead of the separator set.
    res ab _seps =
        let shuffled = consistentShuffle ab salt
            guardCount = ceiling (fromIntegral (BS.length shuffled) / guardDiv)
            context = Context
                { guards = BS.take guardCount _seps
                , seps = BS.drop guardCount _seps
                , salt = salt
                , minHashLength = minHashLen
                , alphabet = shuffled }
        in if BS.length shuffled < 3
               then context
               else context{ guards = BS.take guardCount shuffled
                           , seps = _seps
                           , alphabet = BS.drop guardCount shuffled }
    -- Separators actually present in the alphabet, shuffled by the salt.
    seps' = C8.pack $ uniqueAlphabet `intersect` seps
    seps'' = consistentShuffle seps' salt
    sepsLength =
        case ceiling (fromIntegral (BS.length alphabet') / sepDiv) of
          1 -> 2
          n -> n
    uniqueAlphabet = nub alphabet
    -- Alphabet with the separator characters removed.
    alphabet' = C8.pack $ uniqueAlphabet \\ seps
    minAlphabetLength = 16
    sepDiv = 3.5
    guardDiv = 12
    -- Characters excluded from the alphabet to avoid accidental curse
    -- words (see module documentation).
    seps = "cfhistuCFHISTU"
-- | Alphabet used when the caller does not supply one: ASCII lower-case
-- letters, upper-case letters, then the digits.
defaultAlphabet :: String
defaultAlphabet = concat [['a' .. 'z'], ['A' .. 'Z'], "1234567890"]
-- | Create a context object using the default alphabet and the provided salt,
-- without any minimum required length.
hashidsSimple :: ByteString -- ^ Salt
              -> HashidsContext
hashidsSimple salt = createHashidsContext salt 0 defaultAlphabet
-- | Create a context object using the default alphabet and the provided salt.
-- The generated hashes will have a minimum length as specified by the second
-- argument.
hashidsMinimum :: ByteString -- ^ Salt
               -> Int        -- ^ Minimum required hash length
               -> HashidsContext
hashidsMinimum salt minLen = createHashidsContext salt minLen defaultAlphabet
-- | Decode a hash generated with 'encodeHex'.
--
-- /Example use:/
--
-- > decodeHex context "yzgwD"
--
decodeHex :: HashidsContext -- ^ A Hashids context object
          -> ByteString     -- ^ Hash
          -> String
decodeHex context hash = concatMap toHexDigits (decode context hash)
  where
    -- Render one decoded number and drop the leading guard digit that
    -- 'encodeHex' prepends to each chunk.
    toHexDigits n = drop 1 (showHex n "")
-- | Encode a hexadecimal number.
--
-- Returns the empty string when the input contains a non-hex character.
-- The input is split into 12-digit chunks, each prefixed with a guard
-- digit @1@ before encoding.
--
-- /Example use:/
--
-- > encodeHex context "ff83"
--
encodeHex :: HashidsContext -- ^ A Hashids context object
          -> String         -- ^ Hexadecimal number represented as a string
          -> ByteString
encodeHex context str
    | not (all hexChar str) = ""
    | otherwise = encodeList context $ map go $ chunksOf 12 str
  where
    -- The irrefutable match is safe here: 'readHex' always succeeds on a
    -- string that passed the 'hexChar' guard above.
    go str = let [(a,_)] = readHex ('1':str) in a
    hexChar c = c `elem` ("0123456789abcdefABCDEF" :: String)
-- | Decode a hash.
--
-- Returns @[]@ for an empty hash or when re-encoding the candidate
-- result does not reproduce the input (i.e. wrong salt or corrupt hash).
--
-- /Example use:/
--
-- > let context = hashidsSimple "this is my salt"
-- >     hash = decode context "rD" -- == [5]
--
decode :: HashidsContext -- ^ A Hashids context object
       -> ByteString     -- ^ Hash
       -> [Int]
decode ctx@Context{..} hash
    | BS.null hash = []
      -- Round-trip check: only accept the result if it encodes back to
      -- the original hash.
    | encodeList ctx res /= hash = []
    | otherwise = res
  where
    -- Split the payload on separators and unhash each chunk, evolving
    -- the alphabet the same way 'encodeList' does.
    res = splitOn seps tail
            |> foldl' go ([], alphabet)
            |> fst
            |> reverse
    hashArray = splitOn guards hash
    alphabetLength = BS.length alphabet
    -- Select the guarded segment holding the payload; the first byte is
    -- the "lottery" character chosen during encoding.
    -- NOTE(review): partial 'Just' match — fails on an empty segment;
    -- 'tail' here shadows Prelude.tail.
    Just str@(lottery, tail) =
        BS.uncons $ hashArray !! case length hashArray of
            0 -> error "Internal error."
            2 -> 1
            3 -> 1
            _ -> 0
    prefix = BS.cons lottery salt
    go (xs, ab) ssh =
        let buffer = prefix `BS.append` ab
            ab' = consistentShuffle ab buffer
        in (unhash ssh ab':xs, ab')
-- Fold a list of numbers into a single seed value: each number is
-- reduced modulo (100 + its position) and the remainders are summed.
numbersHashInt :: [Int] -> Int
numbersHashInt xs = sum (zipWith mod xs [100 ..])
-- | Encode a single number.
--
-- /Example use:/
--
-- > let context = hashidsSimple "this is my salt"
-- >     hash = encode context 5 -- == "rD"
--
encode :: HashidsContext -- ^ A Hashids context object
       -> Int            -- ^ Number to encode
       -> ByteString
encode context = encodeList context . (: [])
-- | Encode a list of numbers.
--
-- Calls 'error' on an empty list or on any negative number.
--
-- /Example use:/
--
-- > let context = hashidsSimple "this is my salt"
-- >     hash = encodeList context [2, 3, 5, 7, 11] -- == "EOurh6cbTD"
--
encodeList :: HashidsContext -- ^ A Hashids context object
           -> [Int]          -- ^ List of numbers
           -> ByteString
encodeList _ [] = error "encodeList: empty list"
encodeList Context{..} numbers =
    -- Pad with a guard at the front, then the back, then stretch with
    -- alphabet halves until 'minHashLength' is reached.
    res |> expand False |> BS.reverse
        |> expand True |> BS.reverse
        |> expand' alphabet'
  where
    (res, alphabet') = foldl' go (BS.singleton lottery, alphabet) (zip [0 .. ] numbers)
    -- Prepend one guard character chosen from the hash seed; 'rep'
    -- selects which byte of the current hash drives the choice.
    expand rep str
        | BS.length str < minHashLength
            = let ix = if rep then BS.length str - 3 else 0
                  jx = fromIntegral (BS.index str ix) + hashInt
              in BS.index guards (jx `mod` guardsLength) `BS.cons` str
        | otherwise = str
    -- Wrap the hash in halves of a reshuffled alphabet, trimming the
    -- middle section once the minimum length is exceeded.
    expand' ab str
        | BS.length str < minHashLength
            = let ab' = consistentShuffle ab ab
                  str' = BS.concat [BS.drop halfLength ab', str, BS.take halfLength ab']
              in expand' ab' $ case BS.length str' - minHashLength of
                   n | n > 0
                     -> BS.take minHashLength $ BS.drop (div n 2) str'
                   _ -> str'
        | otherwise = str
    hashInt = numbersHashInt numbers
    -- First output character, derived from all input numbers; it also
    -- seeds the per-number alphabet shuffles.
    lottery = alphabet `BS.index` (hashInt `mod` alphabetLength)
    prefix = BS.cons lottery salt
    numLast = length numbers - 1
    guardsLength = BS.length guards
    alphabetLength = BS.length alphabet
    halfLength = div alphabetLength 2
    -- Encode one number with the current alphabet, append a separator
    -- between numbers (not after the last), and reshuffle the alphabet.
    go (r, ab) (i, number)
        | number < 0 = error "all numbers must be non-negative"
        | otherwise =
            let shuffled = consistentShuffle ab (BS.append prefix ab)
                last = hash number shuffled
                n = number `mod` (fromIntegral (BS.head last) + i) `mod` BS.length seps
                suffix = if i < numLast
                           then BS.singleton (seps `BS.index` n)
                           else BS.empty
            in (BS.concat [r,last,suffix], shuffled)
-- Exchange (swap) the elements at positions i and j in a sequence.
-- Both reads come from the original sequence, so the two updates do not
-- interfere with each other.
exchange :: Int -> Int -> Seq a -> Seq a
exchange i j s =
    Seq.update j (Seq.index s i) (Seq.update i (Seq.index s j) s)
-- Salt-driven deterministic Fisher-Yates-style shuffle: walks the
-- alphabet from the last index down to 1, swapping each position with an
-- index derived from the cycled salt bytes. An empty salt leaves the
-- alphabet unchanged.
consistentShuffle :: ByteString -> ByteString -> ByteString
consistentShuffle alphabet salt
    | 0 == saltLength = alphabet
    | otherwise = BS.pack $ toList x
  where
    (_,x) = zip3 [len, pred len .. 1] xs ys |> foldl' go (0, toSeq alphabet)
    -- Cycled salt positions and the salt bytes at those positions.
    xs = cycle [0 .. saltLength - 1]
    ys = map (fromIntegral . saltLookup) xs
    saltLookup ix = BS.index salt (ix `mod` saltLength)
    saltLength = BS.length salt
    toSeq = BS.foldl' (Seq.|>) Seq.empty
    len = BS.length alphabet - 1
    -- NOTE(review): 'j' is defined in terms of 'p'' (the updated running
    -- sum) via laziness; the binding order in the 'let' is intentional.
    go (p, ab) (i, v, ch) =
        let shuffled = exchange i j ab
            p' = p + ch
            j = mod (ch + v + p') i
        in (p', shuffled)
-- Inverse of 'hash': interpret the input as digits in base
-- (length alphabet), where each byte's digit value is its index in the
-- alphabet.
-- NOTE(review): the 'Just' match is partial — a byte not present in the
-- alphabet crashes; callers ('decode') only pass alphabet characters.
unhash :: ByteString -> ByteString -> Int
unhash input alphabet = fst $ BS.foldl' go (0, pred $ BS.length input) input
  where
    go (num, i) w8 =
        let Just index = BS.elemIndex w8 alphabet
        in (num + index * alphabetLength ^ i, pred i)
    alphabetLength = BS.length alphabet
-- Encode a non-negative number in base (length alphabet), using the
-- alphabet's bytes as digits, most significant digit first. Zero maps to
-- the first alphabet character.
hash :: Int -> ByteString -> ByteString
hash input alphabet
    | input == 0 = BS.take 1 alphabet
    | otherwise  = BS.reverse (BS.unfoldr step input)
  where
    base = BS.length alphabet
    step 0 = Nothing
    step n = Just (alphabet `BS.index` (n `mod` base), n `div` base)
-- | Encode a number using the provided salt.
--
-- This convenience function creates a context with the default alphabet.
-- If the same context is used repeatedly, use 'encode' with one of the
-- constructors instead (building a context is comparatively expensive).
encodeUsingSalt :: ByteString -- ^ Salt
                -> Int        -- ^ Number
                -> ByteString
encodeUsingSalt = encode . hashidsSimple
-- | Encode a list of numbers using the provided salt.
--
-- This function wrapper creates a context with the default alphabet.
-- If the same context is used repeatedly, use 'encodeList' with one of the
-- constructors instead.
encodeListUsingSalt :: ByteString -- ^ Salt
                    -> [Int]      -- ^ Numbers
                    -> ByteString
encodeListUsingSalt = encodeList . hashidsSimple
-- | Decode a hash using the provided salt.
--
-- This convenience function creates a context with the default alphabet.
-- If the same context is used repeatedly, use 'decode' with one of the
-- constructors instead.
decodeUsingSalt :: ByteString -- ^ Salt
                -> ByteString -- ^ Hash
                -> [Int]
decodeUsingSalt = decode . hashidsSimple
-- | Shortcut for 'encodeHex'.
encodeHexUsingSalt :: ByteString -- ^ Salt
                   -> String     -- ^ Hexadecimal number represented as a string
                   -> ByteString
encodeHexUsingSalt = encodeHex . hashidsSimple
-- | Shortcut for 'decodeHex'.
decodeHexUsingSalt :: ByteString -- ^ Salt
                   -> ByteString -- ^ Hash
                   -> String
decodeHexUsingSalt = decodeHex . hashidsSimple
| tmcgilchrist/hashids-haskell | Web/Hashids.hs | mit | 16,507 | 0 | 19 | 4,720 | 2,984 | 1,671 | 1,313 | 237 | 4 |
module Graphics.UI.FLTK.LowLevel.FLTKHS
(
-- * Motivation
--
-- $Motivation
-- * Goals
--
-- $Goals
-- * Look And Feel
--
-- $LookAndFeel
-- * Obstacles
--
-- $Obstacles
-- * Installation #Installation#
--
-- $InstallationSummary
-- ** Build With Bundled FLTK #BundledBuild#
-- *** Linux & *BSD
--
-- $InstallationLinuxBundled
-- *** Mac (Yosemite, El Capitan, Sierra)
--
-- $InstallationMacBundled
-- *** Windows(7,8,10)(64-bit)
--
-- $InstallationWindowsBundled
-- ** Compile FLTK Yourself #SelfCompilation#
-- *** Linux & *BSD
--
-- $InstallationLinux
-- *** Mac (Yosemite & El Capitan)
--
-- $InstallationMac
-- *** Windows(7,8,10)(64-bit)
--
-- $InstallationWindows10
-- * Demos
--
-- $Demos
-- * Getting Started
--
-- $GettingStarted
-- * Fluid Support #FluidSupport#
--
-- $FluidSupport
-- * Stack Traces
--
-- $StackTrace
-- * API Guide
--
-- ** Guide to the Haddock Docs
--
-- $guidetothehaddockdocs
-- ** Widget Construction
--
-- $widgetconstruction
-- ** Widget Methods
--
-- $widgetmethods
-- ** Widget Hierachy
--
-- $widgethierarchyguide
-- ** Overriding C++ Methods (Creating Custom Widgets)
--
-- $overriding
-- ** Explicitly Calling Base Class Methods
--
-- $explicitbaseclasscalling
-- ** Overriding the Widget Destructor
--
-- $destructors
-- * Slow Compilation Issues
--
-- $Compilation
-- * Running in the REPL #RunningInTheREPL#
--
-- $REPL
-- * Core Types
module Graphics.UI.FLTK.LowLevel.Fl_Types,
-- * Widgets
module Graphics.UI.FLTK.LowLevel.Base.Adjuster,
module Graphics.UI.FLTK.LowLevel.Adjuster,
module Graphics.UI.FLTK.LowLevel.Ask,
module Graphics.UI.FLTK.LowLevel.BMPImage,
module Graphics.UI.FLTK.LowLevel.Bitmap,
module Graphics.UI.FLTK.LowLevel.Box,
module Graphics.UI.FLTK.LowLevel.Base.Browser,
module Graphics.UI.FLTK.LowLevel.Browser,
module Graphics.UI.FLTK.LowLevel.Base.Button,
module Graphics.UI.FLTK.LowLevel.Button,
module Graphics.UI.FLTK.LowLevel.Base.CheckButton,
module Graphics.UI.FLTK.LowLevel.CheckButton,
module Graphics.UI.FLTK.LowLevel.Base.Choice,
module Graphics.UI.FLTK.LowLevel.Choice,
module Graphics.UI.FLTK.LowLevel.Base.Clock,
module Graphics.UI.FLTK.LowLevel.Clock,
module Graphics.UI.FLTK.LowLevel.Base.ColorChooser,
module Graphics.UI.FLTK.LowLevel.ColorChooser,
module Graphics.UI.FLTK.LowLevel.CopySurface,
module Graphics.UI.FLTK.LowLevel.Base.Counter,
module Graphics.UI.FLTK.LowLevel.Counter,
module Graphics.UI.FLTK.LowLevel.Base.Dial,
module Graphics.UI.FLTK.LowLevel.Dial,
module Graphics.UI.FLTK.LowLevel.DoubleWindow,
module Graphics.UI.FLTK.LowLevel.Base.DoubleWindow,
module Graphics.UI.FLTK.LowLevel.Draw,
module Graphics.UI.FLTK.LowLevel.Base.FileBrowser,
module Graphics.UI.FLTK.LowLevel.FileBrowser,
module Graphics.UI.FLTK.LowLevel.Base.FileInput,
module Graphics.UI.FLTK.LowLevel.FileInput,
module Graphics.UI.FLTK.LowLevel.FillDial,
module Graphics.UI.FLTK.LowLevel.Base.FillSlider,
module Graphics.UI.FLTK.LowLevel.FillSlider,
module Graphics.UI.FLTK.LowLevel.GIFImage,
module Graphics.UI.FLTK.LowLevel.Base.Group,
module Graphics.UI.FLTK.LowLevel.Group,
module Graphics.UI.FLTK.LowLevel.Base.HorFillSlider,
module Graphics.UI.FLTK.LowLevel.HorFillSlider,
module Graphics.UI.FLTK.LowLevel.Base.HorNiceSlider,
module Graphics.UI.FLTK.LowLevel.HorNiceSlider,
module Graphics.UI.FLTK.LowLevel.Base.HorSlider,
module Graphics.UI.FLTK.LowLevel.HorSlider,
module Graphics.UI.FLTK.LowLevel.HorValueSlider,
module Graphics.UI.FLTK.LowLevel.Image,
module Graphics.UI.FLTK.LowLevel.ImageSurface,
module Graphics.UI.FLTK.LowLevel.Base.Input,
module Graphics.UI.FLTK.LowLevel.Input,
module Graphics.UI.FLTK.LowLevel.JPEGImage,
module Graphics.UI.FLTK.LowLevel.Base.LightButton,
module Graphics.UI.FLTK.LowLevel.LightButton,
module Graphics.UI.FLTK.LowLevel.LineDial,
module Graphics.UI.FLTK.LowLevel.Base.MenuBar,
module Graphics.UI.FLTK.LowLevel.MenuBar,
module Graphics.UI.FLTK.LowLevel.Base.MenuButton,
module Graphics.UI.FLTK.LowLevel.MenuButton,
module Graphics.UI.FLTK.LowLevel.Base.MenuItem,
module Graphics.UI.FLTK.LowLevel.MenuItem,
module Graphics.UI.FLTK.LowLevel.Base.MenuPrim,
module Graphics.UI.FLTK.LowLevel.MenuPrim,
module Graphics.UI.FLTK.LowLevel.MultiLabel,
module Graphics.UI.FLTK.LowLevel.NativeFileChooser,
module Graphics.UI.FLTK.LowLevel.Base.NiceSlider,
module Graphics.UI.FLTK.LowLevel.NiceSlider,
module Graphics.UI.FLTK.LowLevel.Base.Output,
module Graphics.UI.FLTK.LowLevel.Output,
module Graphics.UI.FLTK.LowLevel.OverlayWindow,
module Graphics.UI.FLTK.LowLevel.Base.OverlayWindow,
module Graphics.UI.FLTK.LowLevel.PNGImage,
module Graphics.UI.FLTK.LowLevel.PNMImage,
module Graphics.UI.FLTK.LowLevel.Base.Pack,
module Graphics.UI.FLTK.LowLevel.Pack,
module Graphics.UI.FLTK.LowLevel.Pixmap,
module Graphics.UI.FLTK.LowLevel.Base.Positioner,
module Graphics.UI.FLTK.LowLevel.Positioner,
module Graphics.UI.FLTK.LowLevel.Base.Progress,
module Graphics.UI.FLTK.LowLevel.Progress,
module Graphics.UI.FLTK.LowLevel.RGBImage,
module Graphics.UI.FLTK.LowLevel.Base.RadioLightButton,
module Graphics.UI.FLTK.LowLevel.RadioLightButton,
module Graphics.UI.FLTK.LowLevel.Base.RepeatButton,
module Graphics.UI.FLTK.LowLevel.RepeatButton,
module Graphics.UI.FLTK.LowLevel.Base.ReturnButton,
module Graphics.UI.FLTK.LowLevel.ReturnButton,
module Graphics.UI.FLTK.LowLevel.Base.Roller,
module Graphics.UI.FLTK.LowLevel.Roller,
module Graphics.UI.FLTK.LowLevel.Base.RoundButton,
module Graphics.UI.FLTK.LowLevel.RoundButton,
module Graphics.UI.FLTK.LowLevel.SVGImage,
module Graphics.UI.FLTK.LowLevel.Base.Scrollbar,
module Graphics.UI.FLTK.LowLevel.Scrollbar,
module Graphics.UI.FLTK.LowLevel.Base.Scrolled,
module Graphics.UI.FLTK.LowLevel.Scrolled,
module Graphics.UI.FLTK.LowLevel.SelectBrowser,
module Graphics.UI.FLTK.LowLevel.Base.SimpleTerminal,
module Graphics.UI.FLTK.LowLevel.SimpleTerminal,
module Graphics.UI.FLTK.LowLevel.Base.SingleWindow,
module Graphics.UI.FLTK.LowLevel.SingleWindow,
module Graphics.UI.FLTK.LowLevel.Base.Slider,
module Graphics.UI.FLTK.LowLevel.Slider,
module Graphics.UI.FLTK.LowLevel.Base.Spinner,
module Graphics.UI.FLTK.LowLevel.Spinner,
module Graphics.UI.FLTK.LowLevel.Base.SysMenuBar,
module Graphics.UI.FLTK.LowLevel.SysMenuBar,
module Graphics.UI.FLTK.LowLevel.Base.Table,
module Graphics.UI.FLTK.LowLevel.Table,
module Graphics.UI.FLTK.LowLevel.Base.TableRow,
module Graphics.UI.FLTK.LowLevel.TableRow,
module Graphics.UI.FLTK.LowLevel.Base.Tabs,
module Graphics.UI.FLTK.LowLevel.Tabs,
module Graphics.UI.FLTK.LowLevel.TextBuffer,
module Graphics.UI.FLTK.LowLevel.Base.TextDisplay,
module Graphics.UI.FLTK.LowLevel.TextDisplay,
module Graphics.UI.FLTK.LowLevel.Base.TextEditor,
module Graphics.UI.FLTK.LowLevel.TextEditor,
module Graphics.UI.FLTK.LowLevel.TextSelection,
module Graphics.UI.FLTK.LowLevel.Base.Tile,
module Graphics.UI.FLTK.LowLevel.Tile,
module Graphics.UI.FLTK.LowLevel.Base.ToggleButton,
module Graphics.UI.FLTK.LowLevel.ToggleButton,
module Graphics.UI.FLTK.LowLevel.Tooltip,
module Graphics.UI.FLTK.LowLevel.Base.Tree,
module Graphics.UI.FLTK.LowLevel.Tree,
module Graphics.UI.FLTK.LowLevel.TreeItem,
module Graphics.UI.FLTK.LowLevel.TreePrefs,
module Graphics.UI.FLTK.LowLevel.Base.Valuator,
module Graphics.UI.FLTK.LowLevel.Valuator,
module Graphics.UI.FLTK.LowLevel.Base.ValueInput,
module Graphics.UI.FLTK.LowLevel.ValueInput,
module Graphics.UI.FLTK.LowLevel.Base.ValueOutput,
module Graphics.UI.FLTK.LowLevel.ValueOutput,
module Graphics.UI.FLTK.LowLevel.Base.ValueSlider,
module Graphics.UI.FLTK.LowLevel.ValueSlider,
module Graphics.UI.FLTK.LowLevel.Widget,
module Graphics.UI.FLTK.LowLevel.Base.Widget,
module Graphics.UI.FLTK.LowLevel.Base.Window,
module Graphics.UI.FLTK.LowLevel.Window,
module Graphics.UI.FLTK.LowLevel.Base.Wizard,
module Graphics.UI.FLTK.LowLevel.Wizard,
module Graphics.UI.FLTK.LowLevel.XBMImage,
module Graphics.UI.FLTK.LowLevel.XPMImage,
-- * Machinery for static dispatch
module Graphics.UI.FLTK.LowLevel.Dispatch,
-- * Association of widgets and functions
module Graphics.UI.FLTK.LowLevel.Hierarchy
)
where
import Graphics.UI.FLTK.LowLevel.Base.Adjuster
import Graphics.UI.FLTK.LowLevel.Adjuster()
import Graphics.UI.FLTK.LowLevel.Ask
import Graphics.UI.FLTK.LowLevel.BMPImage
import Graphics.UI.FLTK.LowLevel.Bitmap
import Graphics.UI.FLTK.LowLevel.Box
import Graphics.UI.FLTK.LowLevel.Base.Browser
import Graphics.UI.FLTK.LowLevel.Browser()
import Graphics.UI.FLTK.LowLevel.Base.Button
import Graphics.UI.FLTK.LowLevel.Button()
import Graphics.UI.FLTK.LowLevel.Base.CheckButton
import Graphics.UI.FLTK.LowLevel.CheckButton()
import Graphics.UI.FLTK.LowLevel.Base.Choice
import Graphics.UI.FLTK.LowLevel.Choice()
import Graphics.UI.FLTK.LowLevel.Base.Clock
import Graphics.UI.FLTK.LowLevel.Clock()
import Graphics.UI.FLTK.LowLevel.Base.ColorChooser
import Graphics.UI.FLTK.LowLevel.ColorChooser()
import Graphics.UI.FLTK.LowLevel.CopySurface
import Graphics.UI.FLTK.LowLevel.Base.Counter
import Graphics.UI.FLTK.LowLevel.Counter()
import Graphics.UI.FLTK.LowLevel.Base.Dial
import Graphics.UI.FLTK.LowLevel.Dial()
import Graphics.UI.FLTK.LowLevel.Dispatch
import Graphics.UI.FLTK.LowLevel.DoubleWindow()
import Graphics.UI.FLTK.LowLevel.Base.DoubleWindow
import Graphics.UI.FLTK.LowLevel.Draw
import Graphics.UI.FLTK.LowLevel.Base.FileBrowser
import Graphics.UI.FLTK.LowLevel.FileBrowser()
import Graphics.UI.FLTK.LowLevel.Base.FileInput
import Graphics.UI.FLTK.LowLevel.FileInput()
import Graphics.UI.FLTK.LowLevel.FillDial
import Graphics.UI.FLTK.LowLevel.Base.FillSlider
import Graphics.UI.FLTK.LowLevel.FillSlider()
import Graphics.UI.FLTK.LowLevel.Fl_Types
import Graphics.UI.FLTK.LowLevel.GIFImage
import Graphics.UI.FLTK.LowLevel.Base.Group
import Graphics.UI.FLTK.LowLevel.Group()
import Graphics.UI.FLTK.LowLevel.Hierarchy
import Graphics.UI.FLTK.LowLevel.Base.HorFillSlider
import Graphics.UI.FLTK.LowLevel.HorFillSlider()
import Graphics.UI.FLTK.LowLevel.Base.HorNiceSlider
import Graphics.UI.FLTK.LowLevel.HorNiceSlider()
import Graphics.UI.FLTK.LowLevel.Base.HorSlider
import Graphics.UI.FLTK.LowLevel.HorSlider()
import Graphics.UI.FLTK.LowLevel.HorValueSlider
import Graphics.UI.FLTK.LowLevel.Image
import Graphics.UI.FLTK.LowLevel.ImageSurface
import Graphics.UI.FLTK.LowLevel.Base.Input
import Graphics.UI.FLTK.LowLevel.Input()
import Graphics.UI.FLTK.LowLevel.JPEGImage
import Graphics.UI.FLTK.LowLevel.Base.LightButton
import Graphics.UI.FLTK.LowLevel.LightButton()
import Graphics.UI.FLTK.LowLevel.LineDial
import Graphics.UI.FLTK.LowLevel.Base.MenuBar
import Graphics.UI.FLTK.LowLevel.MenuBar()
import Graphics.UI.FLTK.LowLevel.Base.MenuButton
import Graphics.UI.FLTK.LowLevel.MenuButton()
import Graphics.UI.FLTK.LowLevel.Base.MenuItem
import Graphics.UI.FLTK.LowLevel.MenuItem()
import Graphics.UI.FLTK.LowLevel.Base.MenuPrim
import Graphics.UI.FLTK.LowLevel.MenuPrim()
import Graphics.UI.FLTK.LowLevel.MultiLabel
import Graphics.UI.FLTK.LowLevel.NativeFileChooser
import Graphics.UI.FLTK.LowLevel.Base.NiceSlider
import Graphics.UI.FLTK.LowLevel.NiceSlider()
import Graphics.UI.FLTK.LowLevel.Base.Output
import Graphics.UI.FLTK.LowLevel.Output()
import Graphics.UI.FLTK.LowLevel.Base.OverlayWindow
import Graphics.UI.FLTK.LowLevel.OverlayWindow()
import Graphics.UI.FLTK.LowLevel.PNGImage
import Graphics.UI.FLTK.LowLevel.PNMImage
import Graphics.UI.FLTK.LowLevel.Base.Pack
import Graphics.UI.FLTK.LowLevel.Pack()
import Graphics.UI.FLTK.LowLevel.Pixmap
import Graphics.UI.FLTK.LowLevel.Base.Positioner
import Graphics.UI.FLTK.LowLevel.Positioner()
import Graphics.UI.FLTK.LowLevel.Base.Progress
import Graphics.UI.FLTK.LowLevel.Progress()
import Graphics.UI.FLTK.LowLevel.RGBImage
import Graphics.UI.FLTK.LowLevel.Base.RadioLightButton
import Graphics.UI.FLTK.LowLevel.RadioLightButton()
import Graphics.UI.FLTK.LowLevel.Base.RepeatButton
import Graphics.UI.FLTK.LowLevel.RepeatButton()
import Graphics.UI.FLTK.LowLevel.Base.ReturnButton
import Graphics.UI.FLTK.LowLevel.ReturnButton()
import Graphics.UI.FLTK.LowLevel.Base.Roller
import Graphics.UI.FLTK.LowLevel.Roller()
import Graphics.UI.FLTK.LowLevel.Base.RoundButton
import Graphics.UI.FLTK.LowLevel.RoundButton()
import Graphics.UI.FLTK.LowLevel.SVGImage
import Graphics.UI.FLTK.LowLevel.Base.Scrollbar
import Graphics.UI.FLTK.LowLevel.Scrollbar()
import Graphics.UI.FLTK.LowLevel.Base.Scrolled
import Graphics.UI.FLTK.LowLevel.Scrolled()
import Graphics.UI.FLTK.LowLevel.SelectBrowser
import Graphics.UI.FLTK.LowLevel.Base.SimpleTerminal
import Graphics.UI.FLTK.LowLevel.SimpleTerminal()
import Graphics.UI.FLTK.LowLevel.Base.SingleWindow
import Graphics.UI.FLTK.LowLevel.SingleWindow()
import Graphics.UI.FLTK.LowLevel.Base.Slider
import Graphics.UI.FLTK.LowLevel.Slider()
import Graphics.UI.FLTK.LowLevel.Base.Spinner
import Graphics.UI.FLTK.LowLevel.Spinner()
import Graphics.UI.FLTK.LowLevel.Base.SysMenuBar
import Graphics.UI.FLTK.LowLevel.SysMenuBar()
import Graphics.UI.FLTK.LowLevel.Base.Table
import Graphics.UI.FLTK.LowLevel.Table()
import Graphics.UI.FLTK.LowLevel.Base.TableRow
import Graphics.UI.FLTK.LowLevel.TableRow()
import Graphics.UI.FLTK.LowLevel.Base.Tabs
import Graphics.UI.FLTK.LowLevel.Tabs()
import Graphics.UI.FLTK.LowLevel.TextBuffer
import Graphics.UI.FLTK.LowLevel.Base.TextDisplay
import Graphics.UI.FLTK.LowLevel.TextDisplay()
import Graphics.UI.FLTK.LowLevel.Base.TextEditor
import Graphics.UI.FLTK.LowLevel.TextEditor()
import Graphics.UI.FLTK.LowLevel.TextSelection()
import Graphics.UI.FLTK.LowLevel.Base.Tile
import Graphics.UI.FLTK.LowLevel.Tile()
import Graphics.UI.FLTK.LowLevel.Base.ToggleButton
import Graphics.UI.FLTK.LowLevel.ToggleButton()
import Graphics.UI.FLTK.LowLevel.Tooltip
import Graphics.UI.FLTK.LowLevel.Base.Tree
import Graphics.UI.FLTK.LowLevel.Tree()
import Graphics.UI.FLTK.LowLevel.TreeItem
import Graphics.UI.FLTK.LowLevel.TreePrefs
import Graphics.UI.FLTK.LowLevel.Base.Valuator
import Graphics.UI.FLTK.LowLevel.Valuator()
import Graphics.UI.FLTK.LowLevel.Base.ValueInput
import Graphics.UI.FLTK.LowLevel.ValueInput()
import Graphics.UI.FLTK.LowLevel.Base.ValueOutput
import Graphics.UI.FLTK.LowLevel.ValueOutput()
import Graphics.UI.FLTK.LowLevel.Base.ValueSlider
import Graphics.UI.FLTK.LowLevel.ValueSlider()
import Graphics.UI.FLTK.LowLevel.Base.Widget
import Graphics.UI.FLTK.LowLevel.Widget()
import Graphics.UI.FLTK.LowLevel.Base.Window
import Graphics.UI.FLTK.LowLevel.Window()
import Graphics.UI.FLTK.LowLevel.Base.Wizard
import Graphics.UI.FLTK.LowLevel.Wizard()
import Graphics.UI.FLTK.LowLevel.XBMImage
import Graphics.UI.FLTK.LowLevel.XPMImage
-- $Module Documentation
-- This module re-exports all the available widgets and
-- their core types. The types and list of widgets is listed under the __Core
-- Types__ and __Widgets__ section below.
--
-- A general introduction to the library follows.
--
-- $Motivation
-- This library aims to make it easy for users to build native apps that work portably across platforms.
--
-- I'm also very interested in the user interface renaissance in the programming community,
-- whether the various kinds of functional reactive programming, meta-object protocol UIs,
-- or something like React.js.
--
-- The hope is that a low-cost, hassle-free way of getting a UI up and running
-- without having to deal with browser, authentication, and compilation issues
-- will make it more fun to play around with these great ideas using Haskell.
--
-- == Why a native toolkit?
-- Even in this era of web interfaces, it is still
-- useful to be able to make native apps. They are usually faster and have fewer
-- security issues.
--
-- == Why FLTK?
-- - I chose FLTK because it was small enough that one person could bind the whole thing in an initial
-- pass. Larger toolkits like QT, although much slicker, would require many man-years of effort.
-- - FLTK is quite featureful.
-- - FLTK is mature and maintained. The project is about 20 years old, and I have had good experiences with the community.
-- - FLTK comes with a simple but quite useful GUI builder, <https://en.wikipedia.org/wiki/FLUID Fluid> which is now able to
-- generate Haskell code. See the `Fluid Support` section for more details.
--
-- == What about HsQML\/WxHaskell/Gtk2Hs?
-- These are all great projects and produce really nice UIs, but they all fail
-- at least one of the criteria listed under the __Goals__ section below.
--
-- To my knowledge, as of the first quarter of 2019, no other package
-- in the Haskell ecosystem meets all those constraints.
--
-- $Goals
-- The goals of this library are to provide a low-level API to the <http://fltk.org FLTK> that:
--
-- (1) provides full coverage of the toolkit allowing the user to write GUIs in pure Haskell.
-- (2) feels like it has polymorphic dispatch, meaning a single function dispatches to the right implementation based on the type of widget it is given.
-- (3) is /not/ monolithic, meaning new widgets can be incorporated into the user's application without needing to recompile this library.
-- (4) is easy to install. This library has a minimum of dependencies and <http://fltk.org FLTK> itself compiles cleanly on most architectures.
-- And now there is a <https://hackage.haskell.org/package/fltkhs/docs/Graphics-UI-FLTK-LowLevel-FLTKHS.html#g:4 bundled option> where Cabal/Stack build FLTK for you behind the scenes.
-- (5) allows the user to produce statically linked binaries with a minimum of external dependencies.
-- (6) includes a lot of <https://github.com/deech/fltkhs-demos complete> <https://github.com/deech/fltkhs-fluid-demos working> demos so that you can get up and running faster.
-- (7) comes with <https://hackage.haskell.org/package/fltkhs/docs/Graphics-UI-FLTK-LowLevel-FLTKHS.html#g:15 GUI builder support> to alleviate the tedium of laying out widgets by hand.
--
-- $FluidSupport
--
-- This package also comes with a utility (fltkhs-fluidtohs) that takes a user
-- interface generated using the <https://en.wikipedia.org/wiki/FLUID Fluid GUI builder>
-- that ships with FLTK and generates Haskell code.
--
-- Now the user can drag and drop widgets into place instead of having to
-- calculate coordinates and sizes by hand. Additionally, arbitrary Haskell code
-- can be inserted into Fluid interfaces, allowing the user to do most of the callback
-- wiring directly from Fluid.
--
-- The quickest way to get started is to download the
-- <https://github.com/deech/fltkhs-fluid-hello-world Fluid/Haskell project template>.
-- The @Setup.hs@ that comes with the skeleton is configured to use
-- the 'fltkhs-fluidtohs' utility to automatically convert any Fluid in 'src'
-- directory into a Haskell module of the same name during the preprocess step.
-- This means using Fluid in a FLTKHS project is as simple as creating a Fluid
-- interface and running 'stack build --flag fltkhs:bundled' or 'stack install --flag fltkhs:bundled'.
--
-- Additionally, the <https://github.com/deech/fltkhs-fluid-demos fltkhs-fluid-demos> package
-- comes with a number of demos that show how Fluid integrates with FLTKS.
--
-- $LookAndFeel
-- Now FLTKHS has a [themes
-- package](https://hackage.haskell.org/package/fltkhs-themes/docs/Graphics-UI-FLTK-Theme-Light.html)
-- which considerably improves look and feel. The documentation for this package
-- still applies because the theme mostly just re-draws widgets to look a little
-- nicer so the fundamentals of the API are not touched.
-- $Obstacles
-- This section attempts to briefly highlight some possible dealbreakers users
-- might want to know about before proceeding. To be clear, building and deploying
-- portable static application binaries works well on all platforms which is why the
-- library is considered usable. And most of these issues are being aggressively
-- addressed but in the interests of full disclosure ...
--
-- == Compile Times
-- Currently a dense app with ~ 160-180 widgets crammed into the same window takes
-- 9-12 seconds to compile with GHC 7.10.3 on a 32GB quad-core machine.
-- The good news is that this is a <https://ghc.haskell.org/trac/ghc/ticket/12506 known issue>.
--
-- $StackTrace
--
-- In a traditional callback-heavy API such as FLTKHS, null pointers happen, which
-- is why FLTKHS supports partial stack traces. All FLTK functions throw an
-- error along with a stack trace when given a null 'Ref'.
--
-- For pre-7.10 GHCs, stack traces will only be shown if the
-- <https://wiki.haskell.org/Debugging#General_usage 'xc'> flag is used when
-- compiling FLTKHS.
--
-- If compiled with GHC > 7.10, a partial stack trace is transparently available
-- to the user. The recently minted
-- <https://hackage.haskell.org/package/base-4.8.1.0/docs/GHC-Stack.html#g:3 'CallStack'>
-- implicit parameter is used to get a trace of the function that
-- made the offending call along with a file name and line number. For
-- example, in the following code:
--
-- @
-- buttonCb :: Ref Button -> IO ()
-- buttonCb b' = do
-- FL.deleteWidget b'
-- l' <- getLabel b'
-- ...
--
-- main :: IO ()
-- main = do
-- window <- windowNew ...
-- begin window
-- b' <- buttonNew ...
-- setCallback b' buttonCb
-- ...
-- @
--
-- a button is placed inside a window in the main method, but the first time it is clicked, the callback will delete it and then try
-- to extract the label from the null 'Ref'.
-- The resulting stack trace will look something like:
--
-- @
-- Ref does not exist. ?loc, called at src\/Graphics\/UI\/FLTK\/LowLevel\/Fl_Types.chs:395:58 in fltkh_Cx8029B5VOwKjdT0OwMERC:Graphics.UI.FLTK.LowLevel.Fl_Types
-- toRefPtr, called at src\/Graphics\/UI\/FLTK\/LowLevel\/Fl_Types.chs:403:22 in fltkh_Cx8029B5VOwKjdT0OwMERC:Graphics.UI.FLTK.LowLevel.Fl_Types
-- withRef, called at src\/Graphics\/UI\/FLTK\/LowLevel\/Hierarchy.hs:1652:166 in fltkh_Cx8029B5VOwKjdT0OwMERC:Graphics.UI.FLTK.LowLevel.Hierarchy
-- getLabel, called at src\/Main.hs:11:10 in main:Main
-- @
--
-- It says that the null pointer was originally detected in the library function 'toRefPtr' function, which was called by the library function 'withRef', which
-- was called by 'getLabel' on line 11 of 'src/Main.hs'. Notice, however, that the trace stops there. It does not tell you 'getLabel' was invoked from 'buttonCb'.
-- For a more detailed trace, the 'CallStack' implicit parameter needs to be passed to each function in the chain like:
--
-- @
-- buttonCb :: (?loc :: CallStack) => Ref Button ...
-- ...
-- main :: IO ()
-- ...
-- @
--
-- $InstallationSummary
-- There are two ways to install FLTKHS, building with the bundled build
-- ("Graphics.UI.FLTK.LowLevel.FLTKHS#BundledBuild"), or compiling and
-- installing FLTK from scratch yourself
-- ("Graphics.UI.FLTK.LowLevel.FLTKHS#SelfCompilation"). The bundled way is by
-- far the easiest on all platforms. It is completely self-contained, you don't
-- need any sudo access to your system.
--
-- For now FLTKHS tracks the [1.4 version Github repo](https://github.com/fltk/fltk) instead
-- of the stable releases. The reason is that it's been quite a while since the FLTK
-- project cut an official release but the development branch is actually quite
-- stable and has acquired a lot of useful features including HiDPI and SVG
-- support which are exposed via these bindings.
--
-- NOTE: Since we are temporarily not using the stable releases, please don't install FLTK with your package manager.
--
-- $InstallationLinuxBundled
-- The steps are:
--
-- - Make sure to have OpenGL installed if you need it.
-- - Ensure that 'make', 'autoconf' and 'autoheader' are available on your system.
-- - Download & install <http://docs.haskellstack.org/en/stable/README/#how-to-install Stack>.
-- - Download & install the <https://github.com/deech/fltkhs-hello-world/archive/master.tar.gz FLTKHS hello world skeleton>.
-- - Verify the install by running `fltkhs-hello-world`.
--
-- == Download & Install Stack
-- Pick the <http://docs.haskellstack.org/en/stable/README/#how-to-install Stack installer> that matches your distribution and install according to the instructions.
--
-- == Download & Install the FLTKHS Hello World Skeleton
-- === Downloading Without Git
-- If 'git' is not installed, download the latest version of the fltkhs-hello-world application skeleton from <https://github.com/deech/fltkhs-hello-world/archive/master.tar.gz here>.
--
--
-- Extract and rename the archive:
--
-- @
-- > tar -zxvf fltkhs-hello-world-master.tar.gz
-- > mv fltkhs-hello-world-master fltkhs-hello-world
-- @
--
-- === Downloading With Git
-- If 'git' is available:
--
-- @
-- > git clone http://github.com/deech/fltkhs-hello-world
-- @
--
-- === Building
-- Build it with Stack:
--
-- @
-- > cd fltkhs-hello-world
-- > stack setup
-- > stack install --flag fltkhs:bundled
-- or if you need OpenGL support
-- > stack install --flag fltkhs:bundled --flag fltkhs:opengl
-- @
--
-- == Verify The Install
-- Test that the build completed successfully by invoking:
--
-- @
-- > stack exec fltkhs-hello-world
-- @
--
-- You will be greeted by an incredibly boring little window with a button that says "Hello world".
-- If you click it, it will change to "Goodbye world".
--
--
-- $InstallationLinux
-- The steps are:
--
-- - Make sure you have OpenGL installed.
-- - Download & install <http://docs.haskellstack.org/en/stable/README/#how-to-install Stack>.
-- - Download & install <https://github.com/fltk/fltk/archive/master.tar.gz FLTK 1.4>.
-- - Download & install the <https://github.com/deech/fltkhs-hello-world/archive/master.tar.gz FLTKHS hello world skeleton>.
-- - Verify the install by running `fltkhs-hello-world`.
--
-- == Download & Install Stack
-- Pick the <http://docs.haskellstack.org/en/stable/README/#how-to-install Stack installer> that matches your distribution and install according to the instructions.
--
-- == Download & Install FLTK-1.4
-- Please make sure to only download version <https://github.com/fltk/fltk/archive/master.tar.gz FLTK 1.4>.
-- It should build and install smoothly with the standard:
--
-- @
-- > ./configure --enable-shared --enable-localjpeg --enable-localzlib --enable-localpng --enable-xft
-- or if you need OpenGL support
-- > ./configure --enable-gl --enable-shared --enable-localjpeg --enable-localzlib --enable-localpng --enable-xft
-- > make
-- > sudo make install
-- @
--
--
-- If you didn't install FLTK from source, you can use the 'fltk-config' tool to ensure that version 1.4 is installed:
--
-- @
-- > fltk-config --version
-- 1.4
-- @
--
-- The FLTK headers should be in the include path, along with
-- the standard FLTK libraries, `fltk_images`, and `fltk_gl`. You will also need
-- the `make`, `autoconf`, and `autoheader` tools to build the Haskell bindings.
--
--
-- The reason we install from source is that some package managers seem to be
-- behind on versions (as of this writing Ubuntu 14.04 is still on 1.3.2) and
-- others put the headers and libraries in nonstandard locations, which will
-- cause the Haskell bindings to throw compilation errors.
--
--
-- == Download & Install the FLTKHS Hello World Skeleton
-- === Downloading Without Git
-- If 'git' is not installed, download the latest version of the `fltkhs-hello-world` application skeleton from <https://github.com/deech/fltkhs-hello-world/archive/master.tar.gz here>.
--
-- Extract and enter the archive:
--
-- @
-- > tar -zxvf fltkhs-hello-world-master.tar.gz
-- > mv fltkhs-hello-world-master fltkhs-hello-world
-- @
--
-- === Downloading With Git
-- If 'git' is available:
--
-- @
-- > git clone http://github.com/deech/fltkhs-hello-world
-- @
--
-- === Building
-- Build it with Stack:
--
-- @
-- > cd fltkhs-hello-world
-- > stack setup
-- > stack install
-- or if you need OpenGL support
-- > stack install --flag fltkhs:opengl
-- @
--
-- __Note:__ If the `install` step produces a flood of `undefined reference` errors,
-- please ensure that you have the right version of FLTK (1.4) installed and
-- that the headers are in the expected locations. Some package
-- managers put the libraries and headers in nonstandard places, so it
-- is best to build from source.
--
-- == Verify The Install
-- Test that the build completed successfully by invoking:
--
-- @
-- > stack exec fltkhs-hello-world
-- @
--
-- You will be greeted by an incredibly boring little window with a button that says "Hello world".
-- If you click it, it will change to "Goodbye world."
-- $InstallationMacBundled
-- Mac versions older than El Capitan and Yosemite are not supported.
--
-- The general steps are:
--
-- - Brew Install Stack.
-- - Download & install the <https://github.com/deech/fltkhs-hello-world/archive/master.tar.gz FLTKHS hello world skeleton>.
-- - Verify the install by running `fltkhs-hello-world`.
--
-- == Brew Install Stack
-- This should be as simple as:
--
-- @
-- > brew install haskell-stack
-- @
--
-- == Brew Install Autoconf
-- @
-- > brew install autoconf
-- @
--
--
-- == Download & Install the FLTKHS Hello World Skeleton
-- === Downloading Without Git
-- If 'git' is not installed, download the latest version of the fltkhs-hello-world application skeleton from <https://github.com/deech/fltkhs-hello-world/archive/master.tar.gz here>.
--
-- Extract the archive:
--
-- @
-- > cd \/Users\/\<username\>/Downloads\/
-- > tar -zxvf fltkhs-hello-world-master.tar.gz
-- > mv fltkhs-hello-world-master fltkhs-hello-world
-- @
--
-- === Downloading With Git
-- If 'git' is available:
--
-- @
-- > git clone http://github.com/deech/fltkhs-hello-world
-- @
--
-- === Building
-- Build it with Stack:
--
-- @
-- > cd fltkhs-hello-world
-- > stack setup
-- > stack install --flag fltkhs:bundled
-- or if you need OpenGL support
-- > stack install --flag fltkhs:bundled --flag fltkhs:opengl
-- @
--
-- == Verify The Install
-- Test that the build completed successfully by invoking:
--
-- @
-- > stack exec fltkhs-hello-world
-- @
--
-- You will be greeted by an incredibly boring little window with a button that says "Hello world",
-- if you click it, it will change to "Goodbye world."
-- $InstallationMac
-- Unfortunately Mac versions older than El Capitan and Yosemite are not supported.
--
-- The general steps are:
--
-- - Brew Install Stack.
-- - Download & install the <https://github.com/deech/fltkhs-hello-world/archive/master.tar.gz FLTKHS hello world skeleton>.
-- - Verify the install by running `fltkhs-hello-world`.
--
-- == Brew Install Stack
-- This should be as simple as:
--
-- @
-- > brew install haskell-stack
-- @
--
-- == Brew Install Autoconf
-- @
-- > brew install autoconf
-- @
--
-- == Compile & Install FLTK from Source.
-- The `brew` package for the current stable release of FLTK is broken. Fortunately installing from source is pretty
-- quick and painless.
--
--
-- @
-- > wget https://github.com/fltk/fltk/archive/master.tar.gz
-- > tar -zxf master.tar.gz
-- > cd fltk-master
-- > ./configure --enable-shared --enable-localjpeg --enable-localzlib --enable-localpng --enable-xft
-- or if you need OpenGL support
-- > ./configure --enable-gl --enable-shared --enable-localjpeg --enable-localzlib --enable-localpng --enable-xft
-- > make
-- > sudo make install
-- > fltk-config --version
-- 1.4
-- @
--
-- == Download & Install the FLTKHS Hello World Skeleton
-- === Downloading Without Git
-- If 'git' is not installed, download the latest version of the fltkhs-hello-world application skeleton from <https://github.com/deech/fltkhs-hello-world/archive/master.tar.gz here>.
--
--
-- Extract the archive:
--
-- @
-- > cd \/Users\/\<username\>/Downloads\/
-- > tar -zxvf fltkhs-hello-world-master.tar.gz
-- > mv fltkhs-hello-world-master fltkhs-hello-world
-- @
--
-- === Downloading With Git
-- If 'git' is available:
--
-- @
-- > git clone http://github.com/deech/fltkhs-hello-world
-- @
--
-- === Building
-- Build it with Stack:
--
-- @
-- > cd fltkhs-hello-world
-- > stack setup
-- > stack install
-- or if you need OpenGL support
-- > stack install --flag fltkhs:opengl
-- @
--
-- == Verify The Install
-- Test that the build completed successfully by invoking:
--
-- @
-- > stack exec fltkhs-hello-world
-- @
--
-- You will be greeted by an incredibly boring little window with a button that says "Hello world".
-- If you click it, it will change to "Goodbye world".
-- $InstallationWindowsBundled
--
-- This install guide has been tested on a Windows 7, 8 and 10.
--
-- == Install Stack
-- Downloading and following the default instructions for the standard <https://www.stackage.org/stack/windows-x86_64-installer Windows installer> should be enough.
-- If the install succeeded 'stack' should be on the PATH. To test run 'cmd.exe' and do:
--
-- @
-- > stack --version
-- @
--
-- Now download and set up the latest GHC via 'stack':
--
-- @
-- > stack setup
-- @
--
-- From this point on we can live in the MSYS2 shell that comes with Stack. It is a far superior environment to the command prompt. To open the MSYS2 shell do:
--
-- @
-- > stack exec mintty
-- @
--
-- == Install Necessary Utilities via Pacman
-- In the MSYS2 shell prompt update and upgrade the MSYS2 installation:
--
-- @
-- > pacman -Syy
-- > pacman -Syu
-- @
--
-- ... install packages for download and extracting packages:
--
-- @
-- > pacman -S wget
-- > pacman -S tar
-- > pacman -S unzip
-- > pacman -S zip
-- > pacman -S man
-- @
--
-- ... and building C/C++ programs:
--
-- @
-- > pacman -S autoconf
-- > pacman -S make
-- > pacman -S automake
-- @
--
--
-- == Download And Install The FLTKHS Hello World Skeleton
-- The <https://github.com/deech/fltkhs-hello-world fltkhs-hello-world> skeleton is a simple Hello World GUI which provides the base structure for FLTKHS applications. Please see the 'Demos' section of this document for examples of apps that show off more complex uses of the API.
--
-- @
-- > wget --no-check-certificate https://github.com/deech/fltkhs-hello-world/archive/master.zip
-- > unzip master.zip
-- > mv fltkhs-hello-world-master fltkhs-hello-world
-- > cd fltkhs-hello-world
-- @
--
-- And install with:
--
-- @
-- > stack install --flag fltkhs:bundled
-- or if you need OpenGL support
-- > stack install --flag fltkhs:bundled --flag fltkhs:opengl
-- @
--
-- To test the installation:
--
-- @
-- > stack exec fltkhs-hello-world
-- @
--
-- You will be greeted by an incredibly boring little window with a button that says "Hello world",
-- if you click it, it will change to "Goodbye world."
--
-- == Packaging A Windows Executable
--
-- While the 'fltkhs-hello-world' application is mostly stand-alone, the MSYS2 environment bundled with 'stack' seems to require 3 runtime DLLs. The DLLs are bundled with 'stack' so it's easy to zip them up with the executable and deploy. The required DLLs are: 'libstdc++-6.dll', 'libgcc_s_seh-1.dll' and 'libwinpthread-1.dll'.
--
--
--
-- First create the directory that will contain the executable and DLLs:
--
-- @
-- > mkdir \/tmp\/fltkhs-hello-world
-- @
--
-- Copy the executable over to that directory:
--
-- @
-- > cp `which fltkhs-hello-world` \/tmp\/fltkhs-hello-world
-- @
--
-- Copy over the DLLs. They are usually located in '../<ghc-version>/mingw/bin', but to make the process slightly less fragile we specify the directory relative to whatever 'ghc' is currently in Stack's context:
--
-- @
-- > cp `dirname $(which ghc)`..\/mingw\/bin\/libstdc++-6.dll \/tmp\/fltkhs-hello-world
-- > cp `dirname $(which ghc)`..\/mingw\/bin\/libgcc_s_seh-1.dll \/tmp\/fltkhs-hello-world
-- > cp `dirname $(which ghc)`..\/mingw\/bin\/libwinpthread-1.dll \/tmp\/fltkhs-hello-world
-- @
--
-- Zip up archive:
--
-- @
-- > cd /tmp
-- > zip fltkhs-hello-world.zip fltkhs-hello-world/*
-- @
--
-- And that's it! Any Windows 10 user should now be able to extract 'fltkhs-hello-world.zip' and run 'fltkhs-hello-world.exe'.
--
-- $InstallationWindows10
--
-- This install guide has been tested on a Windows 7, 8 and 10.
--
-- == Install Stack
-- Downloading and following the default instructions for the standard <https://www.stackage.org/stack/windows-x86_64-installer Windows installer> should be enough.
-- If the install succeeded 'stack' should be on the PATH. To test run 'cmd.exe' and do:
--
-- @
-- > stack --version
-- @
--
-- Now download and set up the latest GHC via 'stack':
--
-- @
-- > stack setup
-- @
--
-- From this point on we can live in the MSYS2 shell that comes with Stack. It is a far superior environment to the command prompt. To open the MSYS2 shell do:
--
-- @
-- > stack exec mintty
-- @
--
-- == Install Necessary Utilities via Pacman
-- In the MSYS2 shell prompt update and upgrade the MSYS2 installation:
--
-- @
-- > pacman -Syy
-- > pacman -Syu
-- @
--
-- ... install packages for download and extracting packages:
--
-- @
-- > pacman -S wget
-- > pacman -S tar
-- > pacman -S unzip
-- > pacman -S zip
-- > pacman -S man
-- @
--
-- ... and building C/C++ programs:
--
-- @
-- > pacman -S autoconf
-- > pacman -S make
-- > pacman -S automake
-- @
--
-- == Download and Install FLTK
--
-- Download the latest stable build of FLTK:
--
-- @
-- > wget --no-check-certificate https://github.com/fltk/fltk/archive/master.tar.gz
-- @
--
-- Untar the FLTK archive and enter the directory:
--
-- @
-- > tar -zxf master.tar.gz
-- > cd fltk-master
-- @
--
-- Configure, make and install:
--
-- @
-- > ./configure --enable-shared --enable-localjpeg --enable-localzlib --enable-localpng --enable-xft
-- or if you need OpenGL support
-- > ./configure --enable-gl --enable-shared --enable-localjpeg --enable-localzlib --enable-localpng --enable-xft
-- > make
-- > make install
-- @
--
-- You can test your installation by running:
--
-- @
-- > fltk-config --version
-- 1.4
-- @
--
-- == Download And Install The FLTKHS Hello World Skeleton
-- The <https://github.com/deech/fltkhs-hello-world fltkhs-hello-world> skeleton is a simple Hello World GUI which provides the base structure for FLTKHS applications. Please see the 'Demos' section of this document for examples of apps that show off more complex uses of the API.
--
-- @
-- > wget --no-check-certificate https://github.com/deech/fltkhs-hello-world/archive/master.zip
-- > unzip master.zip
-- > mv fltkhs-hello-world-master fltkhs-hello-world
-- > cd fltkhs-hello-world
-- @
--
-- And install with:
--
-- @
-- > stack install
-- or if you need OpenGL support
-- > stack install --flag fltkhs:opengl
-- @
--
-- To test the installation:
--
-- @
-- > stack exec fltkhs-hello-world
-- @
--
-- You will be greeted by an incredibly boring little window with a button that says "Hello world".
-- If you click it, it will change to "Goodbye world".
--
-- == Packaging A Windows Executable #PackagingAWindowsExecutable#
--
-- While the 'fltkhs-hello-world' application can mostly stand alone, the MSYS2 environment bundled with 'stack' seems to require 3 runtime DLLs. The DLLs are bundled with 'stack', so you can zip them up with the executable and deploy. The required DLLs are: 'libstdc++-6.dll', 'libgcc_s_seh-1.dll' and 'libwinpthread-1.dll'.
--
--
--
-- First create the directory that will contain the executable and DLLs:
--
-- @
-- > mkdir \/tmp\/fltkhs-hello-world
-- @
--
-- Copy the executable over to that directory:
--
-- @
-- > cp `which fltkhs-hello-world` \/tmp\/fltkhs-hello-world
-- @
--
-- Copy over the DLLs. They are usually located in '../<ghc-version>/mingw/bin', but to make the process slightly less fragile we specify the directory relative to whatever 'ghc' is currently in Stack's context:
--
-- @
-- > cp `dirname $(which ghc)`..\/mingw\/bin\/libstdc++-6.dll \/tmp\/fltkhs-hello-world
-- > cp `dirname $(which ghc)`..\/mingw\/bin\/libgcc_s_seh-1.dll \/tmp\/fltkhs-hello-world
-- > cp `dirname $(which ghc)`..\/mingw\/bin\/libwinpthread-1.dll \/tmp\/fltkhs-hello-world
-- @
--
-- Zip up the archive:
--
-- @
-- > cd /tmp
-- > zip fltkhs-hello-world.zip fltkhs-hello-world/*
-- @
--
-- And that's it! Any Windows 10 user should now be able to extract 'fltkhs-hello-world.zip' and run 'fltkhs-hello-world.exe'.
--
-- $Demos
--
-- FLTKHS has almost 25 end-to-end demo applications to help you get started. They are
-- split into two sets: those <http://github.com/deech/fltkhs-demos written manually> and those
-- that <http://github.com/deech/fltkhs-fluid-demos show how to use FLUID>.
--
-- The READMEs in the repos have installation instructions, but they assume that you have
-- successfully installed FLTK and the 'fltkhs-hello-world' app (see platform specific instructions above).
--
-- $GettingStarted
--
-- By this point, I assume that you have successfully installed <https://github.com/deech/fltkhs-hello-world hello world>
-- (see above) or one of the <https://github.com/deech/fltkhs-demos demo> <https://github.com/deech/fltkhs-fluid-demos packages>.
--
--
-- = Quick Start
-- The quickest way to get started is to look at the source for the
-- <http://github.com/deech/fltkhs-hello-world FLTKHS project skeleton>. Though it is a
-- simple app, it shows the basics of widget creation and
-- callbacks.
--
-- Other <https://github.com/deech/fltkhs-demos demo> <https://github.com/deech/fltkhs-fluid-demos packages> show more complicated usage of the API.
--
-- Since the API is a low-level binding, code using it takes on the imperative
-- style of the underlying toolkit. Fortunately, it should look pretty familiar
-- to those who have used object-oriented GUI toolkits before.
--
--
-- $guidetothehaddockdocs
--
-- Convenient access to the underlying C++ is achieved using typeclasses and
-- type-level programming to emulate OO classes and multiple dispatch. This approach makes
-- Haddock very unhappy and the generated documentation is frequently unhelpful.
-- For instance, I urge newcomers to this library not to look at
-- "Graphics.UI.FLTK.LowLevel.Dispatch" or
-- "Graphics.UI.FLTK.LowLevel.Hierarchy". The purpose of this guide is to point
-- you in a more useful direction.
--
--
-- The documentation provided with this API is not yet self-contained and is
-- meant to be used in tandem with the <http://www.fltk.org/doc-1.4/classes.html C++ documentation>.
-- The rest of this section is about how the Haskell
-- functions and datatypes map to the C++ ones and how to, in some limited cases, override a C++ function
-- with a Haskell implementations.
-- $widgetconstruction
-- Each widget has its own module, all of which are listed
-- below under the __Widgets__ heading. Most modules include a function named
-- `<widgetName>New` that returns a reference to that widget. Although you
-- do not have to deal with raw pointers directly, it might help to understand
-- that this reference is a pointer to a void pointer to a C++ object.
--
-- For instance, 'windowNew' creates a 'Ref' 'Window', which is a pointer to a
-- C++ object of type <http://www.fltk.org/doc-1.4/classFl__Window.html `Fl_Window`>, the FLTK class that knows how to draw,
-- display, and handle window events.
--
-- This value of type 'Ref' 'Window' is then passed along to various functions
-- which transparently extract the pointer and pass it to the
-- appropriate <http://www.fltk.org/doc-1.4/classFl__Window.html `Fl_Window`> instance method.
--
-- $widgetmethods
--
-- The Haskell functions that bind to the instance methods of an FLTK class are
-- listed under the __Functions__ heading in that widget's module. It's worth
-- remembering that these type signatures associated with the functions listed
-- under the __Functions__ heading are not the real ones but are artificially
-- generated because they are much more helpful to users. For instance, the
-- actual type of 'activate' exposes all the type level arithmetic required so
-- it can be used by subclasses of 'Widget' but is unhelpful as a
-- reference compared to the artificial type under __Functions__ heading of
-- "Graphics.UI.FLTK.LowLevel.Widget".
--
-- Unfortunately to see this more helpful type signature the poor reader has to
-- navigate to the corresponding widget's module, find the __Functions__ header
-- and scroll down to the desired function. Haddock, unfortunately, does not
-- support anchors that link to a named point in the page. I'm /very/
-- open to ideas on how to make this easier.
--
-- Carrying on the previous example from the __Widget Creation__ section, the
-- methods on a 'Ref' 'Window' widget are documented in
-- "Graphics.UI.FLTK.LowLevel.Window" under __Functions__. Each function takes
-- the 'Ref' 'Window' reference as its first argument followed by whatever else
-- it needs and delegates it appropriately.
--
-- As this is a low-level binding, the Haskell functions are kept as close as
-- possible in name and argument list to the underlying C++. This allows users
-- familiar with the FLTK API to use this library with less learning overhead
-- and it lets newcomers to FLTK take advantage of the already extensive
-- <http://www.fltk.org/doc-1.4/classes.html C++ documentation>.
--
-- Functions are named to make it as easy as possible to find the corresponding
-- C++ function, however there are some naming conventions to keep in mind:
--
-- (1) Setters and getters are prefixed with /set/ and /get/ respectively. In
-- C++ both have the same name; the setter takes an argument while the getter
-- does not. Since Haskell does not support overloading, this convention is used.
--
-- (2) In many cases C++ uses overloading to provide default values to
-- arguments. Since Haskell does not support overloading, these arguments are
-- 'Maybe' types, e.g., the `hotspot` function in
-- "Graphics.UI.FLTK.LowLevel.Window". In other cases, where the common use case
-- leaves the default arguments unspecified, the binding provides two functions:
-- a longer less-convenient-to-type one that takes the default argument, and a
-- short one that does not, e.g., `drawBox` and `drawBoxWithBoxtype`, also in
-- "Graphics.UI.FLTK.LowLevel.Window".
--
-- (3) Error codes are 'Either' types.
--
-- (4) Function arguments that are pointers to be filled are not exposed to the
-- API user. For instance, a common C++ idiom is to return a string by taking a
-- pointer to some initialized but empty chunk of memory and filling it up. The
-- corresponding Haskell function just returns a 'Data.Text'.
--
-- (5) Widget destructors can be called explicitly with 'destroy'. The reason it
-- is called 'destroy' instead of 'delete' to match C++ is that it is a mistake
-- and it's too late to change it now.
--
--
-- It is hoped that until the documentation becomes more self-sustaining the
-- user can use these heuristics (and the type signatures) along with the
-- official FLTK documentation to "guess" what the binding functions do.
--
-- $widgethierarchyguide
-- Every widget module in the API has a __Hierarchy__ heading that shows all its parents.
--
-- The design of the API makes all the parent functions transparently available
-- to that widget. This is also the reason why the actual type signatures are so
-- complicated requiring the manual generation of artificial type signatures.
--
-- For instance, the __Functions__ section under
-- "Graphics.UI.FLTK.LowLevel.Window" shows that a 'Ref' 'Window' can be passed
-- to @getModal@ to check if the window is modal, but it can also be passed to
-- @children@ in "Graphics.UI.FLTK.LowLevel.Group" which counts up the number of
-- widgets inside the 'Window' and @getX@ in "Graphics.UI.FLTK.LowLevel.Widget"
-- which returns the X coordinate of the 'Window''s top-left hand corner.
--
-- The hierarchy corresponds almost exactly to the underlying C++ class
-- hierarchy so, again, you should be able to take advantage of the
-- <http://www.fltk.org/doc-1.4/classes.html C++ documentation> to use the
-- binding API.
--
-- $overriding
--
-- The binding API allows a limited but powerful form of "inheritance" allowing
-- users to override certain key FLTK methods with Haskell functions. All GUI
-- elements that derive from the C++ base class
-- <http://www.fltk.org/doc-1.4/classFl__Widget.html Fl_Widget> and the Haskell
-- analog
-- <https://hackage.haskell.org/package/fltkhs/docs/Graphics-UI-FLTK-LowLevel-Base-Widget.html WidgetBase> now allow Haskell
-- <https://hackage.haskell.org/package/fltkhs/docs/Graphics-UI-FLTK-LowLevel-Base-Widget.html#g:2 functions> to be passed at widget construction time that give Haskell
-- complete control over
-- <https://hackage.haskell.org/package/fltkhs/docs/Graphics-UI-FLTK-LowLevel-Base-Widget.html#v:widgetCustom drawing>,
-- <https://hackage.haskell.org/package/fltkhs/docs/Graphics-UI-FLTK-LowLevel-Base-Widget.html#t:CustomWidgetFuncs handling, resizing and other key functions>. This means that the Haskell user
-- can control the look and feel as well as the event loop. The
-- <https://github.com/deech/fltkhs-demos/blob/master/src/Examples/table-as-container.hs#L105 table> demos are an example of drawing in Haskell. An example of taking over
-- the event loop is an FLTKHS <https://github.com/deech/fltkhs-reflex-host proof-of-concept> that
-- <https://github.com/deech/fltkhs-reflex-host/blob/master/src/reflex-host.hs#L33 overrides> the FLTKHS event loop with the
-- <https://hackage.haskell.org/package/reflex Reflex FRP> allowing native
-- functional reactive programming. The sky is the limit!
--
-- When providing custom methods, the object constructor is no longer
-- `<widgetName>New` but `<widgetName>Custom`, which, in addition to the parameters
-- taken by `<widgetName>New` also takes records of Haskell functions which are
-- then passed to the C++ side.
--
-- Much like a callback, the Haskell functions are passed as function pointers
-- to the C++ side and called whenever the event loop deems appropriate. Unlike
-- callbacks, they can be set only on object instantiation.
--
-- An example of this is "Graphics.UI.FLTK.LowLevel.Base.Widget" which, since it is a
-- base class for most widgets and doesn't have much functionality of its own,
-- only allows custom construction using 'widgetCustom'. This constructor takes
-- a 'CustomWidgetFuncs' datatype which is a record of functions which tells a
-- "Graphics.UI.FLTK.LowLevel.Base.Widget" how to handle events and draw, resize and
-- display itself.
--
-- Again "Graphics.UI.FLTK.LowLevel.Base.Window" can be used as a motivating example.
-- Its custom constructor 'windowCustom', in fact, takes two records: a
-- 'CustomWidgetFuncs' which allows you to override methods in its
-- "Graphics.UI.FLTK.LowLevel.Base.Widget" parent class, and also a
-- 'CustomWindowFuncs' record which allows you to override @flush@, a
-- method on the Window class which tells the window how to force a redraw. For
-- example, the demo /src\/Examples\/doublebuffer.hs/ (which corresponds to the
-- executable 'ftlkhs-doublebuffer') tells both windows how to draw themselves
-- in a Haskell function that uses low-level FLTK drawing routines by overriding
-- the draw function of their "Graphics.UI.FLTK.LowLevel.Base.Widget" parent.
--
-- Every widget that supports customizing also provides a default function
-- record that can be passed to the constructor. For example,
-- "Graphics.UI.FLTK.LowLevel.Base.Widget" provides 'defaultCustomWidgetFuncs' and
-- "Graphics.UI.FLTK.LowLevel.Base.Window" has 'defaultCustomWindowFuncs'. In the
-- demo mentioned above, the 'singleWindowCustom' function is given
-- 'defaultCustomWidgetFuncs' but with an overridden 'drawCustom'.
--
-- Another case where customization comes up a lot is when using
-- "Graphics.UI.FLTK.LowLevel.Base.Table" which is a low-level table widget that
-- needs to be told, for example, how to draw its cells. The demo
-- /src\/Examples\/table-simple.hs/ (corresponding to the executable
-- 'fltkhs-table-simple') shows this in action.
--
-- Hopefully the demos just mentioned and others included with this library show
-- that, even though customizing is limited, it is possible to do a lot.
-- $explicitbaseclasscalling
-- A common pattern when overriding parent class methods is to augment them:
-- some logic followed by an explicit call to the parent method. In C++
-- this is done explicitly by annotating the call with the parent's class name:
--
-- @
-- void Child::f() {
-- ... some code
-- Parent::f();
-- }
-- @
--
-- In this binding the widget methods that can be overridden have a corresponding
-- explicit call to the parent class method in that widget's module. For example,
-- the <https://www.fltk.org/doc-1.4/classFl__Widget.html#a9cb17cc092697dfd05a3fab55856d218 handle> method
-- can be overridden by <https://hackage.haskell.org/package/fltkhs/docs/Graphics-UI-FLTK-LowLevel-Base-Widget.html#t:CustomWidgetFuncs handleCustom>
-- but you can still call the base class 'handle' with <https://hackage.haskell.org/package/fltkhs/Graphics-UI-FLTK-LowLevel-Base-Widget.html#v:handleWidgetBase handleWidgetBase> so
-- a custom handler that just prints console when a widget is minimized but delegates to the parent for all other events could look something like:
--
-- @
-- myHandle :: Ref Widget -> Event -> IO (Either UnknownEvent ())
-- myHandle w e = do
-- case e of
-- Hide -> print "widget has been hidden"
-- _ -> return ()
-- handleWidgetBase (safeCast w) e
-- @
--
-- The 'safeCast' is needed to explicitly cast a widget to its parent, in this case casting 'Widget' to 'WidgetBase'.
-- The cast is safe because it is statically restricted to only classes in the hierarchy.
--
-- $destructors
-- Most of the <https://hackage.haskell.org/package/fltkhs/docs/Graphics-UI-FLTK-LowLevel-Base-Widget.html#t:CustomWidgetFuncs overrideable methods> correspond to
-- some method in FLTK. <https://hackage.haskell.org/package/fltkhs/docs/Graphics-UI-FLTK-LowLevel-Base-Widget.html#t:CustomWidgetFuncs resizeCustom>, for instance
-- overrides <https://www.fltk.org/doc-1.4/classFl__Widget.html#aca98267e7a9b94f699ebd27d9f59e8bb resize>, but 'destroyCallbacksCustom' does not. This function is called
-- in the widget's C++ destructor and can be used for any Haskell side clean up but exists specifically to release function pointers given to the C++ side by the GHC runtime.
-- This is necessary because any resources closed over by the Haskell function to which we generate a pointer are ignored by the garbage collector until that pointer is
-- explicitly freed. Over time this could cause significant memory bloat. Normally the binding does this for you freeing callbacks set with 'setCallback' and the overriding functions
-- themselves but occasionally there are function pointers the binding does not know about.
--
-- For example <https://hackage.haskell.org/package/fltkhs/Graphics-UI-FLTK-LowLevel-FL.html#v:addTimeout adding a timer>
-- entails passing in a function pointer to a closure that will be invoked at some specified frequency but the binding has no idea when that needs to be cleaned up
-- so that becomes your responsibility. A custom 'destroyCallbacksCustom' might look something like:
--
-- @
-- myDestroyCallbacks :: FunPtr (IO ()) -> Ref Widget -> [Maybe (FunPtr (IO ()))] -> IO ()
-- myDestroyCallbacks myFunPtr w cbs = do
--   freeHaskellFunPtr myFunPtr
-- defaultDestroyWidgetCallbacks w cbs
-- @
--
-- The function takes 'myFunPtr', a pointer to the timer's closure, and a widget 'w' and that widget's associated callbacks, 'myFunPtr' is then freed with 'freeHaskellFunPtr'
-- and control passes to 'defaultDestroyWidgetCallbacks' which frees the rest of them. Passing control to 'defaultDestroyWidgetCallbacks' is critical otherwise those callbacks
-- will never be freed.
--
-- $Compilation
--
-- As described above, the API emulates multiple dispatch using type-level
-- programming, closed type families, and typeclasses. While this makes for a
-- nice API, it has also
-- slowed down compilation of executables much more than expected.
--
-- To clarify, the time taken to compile the library itself has not changed, but
-- applications that use the library to create executables are taking a lot
-- longer to compile. To further emphasize, there do not appear to be any
-- runtime performance issues. This is only a compile time problem.
--
-- To preserve your and my sanity, a flag `fastCompile` has been
-- introduced to the <https://github.com/deech/fltkhs-hello-world skeleton>, the <https://github.com/deech/fltkhs-fluid-hello-world projects>, the <https://github.com/deech/fltkhs-demos fltkhs-demos>, and
-- the <https://github.com/deech/fltkhs-fluid-demos fltkhs-fluid-demos>.
-- This flag, which tells the compiler to skip some steps when
-- compiling executables, dramatically decreases compile time but also bloats
-- the resulting executable size and probably makes runtime performance much
-- slower. In this package and <https://github.com/deech/fltkhs-fluid-demos fltkhs-fluid-demos>
-- it is enabled by default since the executables are
-- demos that are not meant to show off performance. To disable this flag, tell
-- Stack to ignore it during the `build` step:
--
-- @
-- > stack build --flag fltkhs:bundled --flag fltkhs-demos:-fastCompile
-- @
--
-- In the <https://github.com/deech/fltkhs-hello-world fltkhs> and the
-- <https://github.com/deech/fltkhs-fluid-hello-world fltkhs-fluid> project
-- skeletons, this flag is /disabled/ by default to provide the best runtime
-- performance. To enable the flag for a smoother development workflow, tell
-- Stack to enable it during the `configure` step:
--
-- @
-- > stack build --flag fltkhs:bundled --flag fltkhs-hello-world:fastCompile
-- @
--
-- =File Layout
-- @
-- Root
-- - c-src -- The C bindings
-- - c-examples -- demos written using the C bindings (not installed)
-- - fltk-\<version\>.tar.gz -- The bundled FLTK library
-- - src
-- - TestPrograms -- Haskell test programs
-- - Fluid -- The Fluid file to Haskell conversion utility
-- - Graphics
-- - UI
-- - FLTK
-- - LowLevel -- Haskell bindings
-- - scripts -- various helper scripts (probably not interesting to anyone but myself)
-- @
-- $REPL
-- Running GUIs in GHCi is fully supported. Using the <https://github.com/deech/fltkhs-hello-world hello world skeleton> as
-- an example the following steps will run it in the REPL:
--
-- @
-- > git clone http://github.com/deech/fltkhs-hello-world
-- > cd fltkhs-hello-world
-- > stack build --flag fltkhs:bundled
-- > stack ghci --flag fltkhs:bundled fltkhs-hello-world:exe:fltkhs-hello-world
-- [1 of 1] Compiling Main ...
-- Ok, modules loaded: Main ...
-- Loaded GHCi configuration ...
-- Prelude Main> replMain
-- @
--
-- Unfortunately since FLTKHS is hybrid Haskell/C++ there are limitations compared to
-- running a normal Haskell library on the REPL:
--
-- 1. The 'stack build ...' is an essential first step before running 'stack
-- ghci ...'. The reason is the REPL uses '-fobject-code' to link in all the C++
-- libraries which must be built first.
-- 2. The use of 'replMain' instead of just ':main' as you might expect. This
-- is because
--
-- (1) it allows closing the GUI to correctly return control to
-- the REPL prompt and
-- (2) typing 'Ctrl-C' also correctly hands control back to the REPL.
--
-- With just ':main' (1) works but (2) results in a "ghosted" UI where the
-- GUI window is still visible but unable to accept any keyboard/mouse
-- input. The reason for the ghosted GUI is that ':main' delegates to the
-- FLTK C++ event loop which is unable to listen for user interrupts on
-- the Haskell side and so has no way of knowing that it should destroy
-- itself. 'replMain' emulates the event loop on the Haskell side allowing
-- it to stop, clean up and return control when it 'catch'es a
-- 'UserInterrupt'. Thus the 'replMain' is slower than the optimized C++
-- event loop but hopefully that's not too big an impediment for REPL
-- work.
| deech/fltkhs | src/Graphics/UI/FLTK/LowLevel/FLTKHS.hs | mit | 61,697 | 0 | 5 | 10,031 | 3,849 | 3,217 | 632 | 288 | 0 |
{-# LANGUAGE DeriveGeneric, TypeSynonymInstances, TypeOperators,
FlexibleInstances, FlexibleContexts, OverlappingInstances #-}
module Network.Google.ApiIO.GenericParams where
import Control.Applicative ((<*>), (<$>), (<|>), pure)
import GHC.Generics
import Data.DList (DList, toList, empty)
import Data.Monoid (mappend)
import Network.Google.ApiIO.Common
import qualified Data.Text as T
-- | Types that can be rendered to a plain 'String' for use as an HTTP
-- query-parameter value.
class ToString a where
    toString :: a -> String

instance ToString String where
    toString = id

instance ToString Int where
    toString = show

instance ToString Bool where
    toString = show

instance ToString T.Text where
    toString = T.unpack

-- | 'Nothing' renders as the empty string; 'Just' delegates to the payload.
instance (ToString s) => ToString (Maybe s) where
    toString (Just v) = toString v
    toString Nothing = ""
-- | A single query parameter: (name, rendered value).
type Pair = (String, String)

-- | Derive a list of query-parameter pairs from any record with a
-- 'Generic' instance, using 'Options' to control field naming and
-- the omission of 'Nothing' fields.
genericParams :: (Generic a, GEntity (Rep a)) => Options -> a -> [(String, String)]
genericParams opts = extractParams opts . from
-- | Generic walker over a 'Rep' structure that extracts query-parameter pairs.
class GEntity f where
    extractParams :: Options -> f a -> [(String, String)]

-- Unwrap any meta-information node and recurse.
instance (GEntity f) => GEntity (M1 i c f) where
    extractParams opts = extractParams opts . unM1

-- A leaf value renders via its payload's 'ToString' instance.
instance (ToString a) => ToString (K1 i a p) where
    toString = toString . unK1

-- A constructor with no fields contributes no parameters.
instance GEntity U1 where
    extractParams _ _ = []
-- | Controls how record fields are turned into query parameters.
data Options = Options { fieldLabelModifier :: String -> String
                         -- ^ Applied to each record selector name to
                         -- produce the parameter name.
                       , omitNothingFields :: Bool
                         -- ^ When 'True', fields whose value is 'Nothing'
                         -- are dropped from the output.
                       }
-- | Default options: field names are used verbatim and 'Nothing'
-- fields are omitted from the parameter list.
defaultOptions :: Options
defaultOptions = Options id True
-- Options whose field-name modifier strips the given prefix and
-- lower-cases the first remaining character via 'removePrefixLCFirst'
-- (defined in Network.Google.ApiIO.Common; presumably the prefix is a
-- String — no type signature here to confirm).
removePrefixLCFirstOpts prefix = defaultOptions { fieldLabelModifier = removePrefixLCFirst prefix }
-- A constructor node: collect all of its record fields as pairs.
-- (Note: "recordToPairs opts. unM1" is function composition despite
-- the unusual spacing around the dot.)
instance (RecordToPairs f) => GEntity (C1 c f) where
    extractParams opts = toList . recordToPairs opts. unM1
-- | Turn the record part of a generic representation into a 'DList' of pairs.
class RecordToPairs f where
    recordToPairs :: Options -> f a -> DList Pair

-- Products: concatenate the pairs from both halves.
instance (RecordToPairs a, RecordToPairs b) => RecordToPairs (a :*: b) where
    recordToPairs opts (a :*: b) = recordToPairs opts a `mappend`
                                   recordToPairs opts b

-- A plain selector always yields exactly one pair.
instance (Selector s, ToString c) => RecordToPairs (S1 s (K1 i c)) where
    recordToPairs = fieldToPair

-- A Maybe-typed selector is skipped entirely when it is 'Nothing'
-- and 'omitNothingFields' is set; otherwise it renders like any field.
instance (Selector s, ToString c) => RecordToPairs (S1 s (K1 i (Maybe c))) where
    recordToPairs opts (M1 (K1 Nothing)) | omitNothingFields opts = empty
    recordToPairs opts m1 = fieldToPair opts m1
-- Render one selector as (modified field name, stringified value).
-- No signature in the original; its inferred type carries 'Selector'
-- and 'ToString' constraints on the wrapped field.
fieldToPair opts m1 = pure ( fieldLabelModifier opts $ selName m1
                           , toString ( unM1 m1 )
                           )
| georgeee/haskell-google-apiIO | Network/Google/ApiIO/GenericParams.hs | mit | 2,402 | 0 | 11 | 584 | 766 | 408 | 358 | 51 | 1 |
-- Web API part of the code
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TypeOperators #-}
module StarServer where
import Prelude ()
import Prelude.Compat
import Control.Monad.Trans.Except
import Control.Monad.Trans.Either
import Control.Monad.Except
import Control.Monad.Reader
import Data.Aeson.Compat
import Data.Aeson.Types
import Data.Attoparsec.ByteString
import Data.ByteString (ByteString)
import Data.List
import Data.Maybe
import Data.String.Conversions
import Data.Time.Calendar
import Data.Vector.V3
import GHC.Generics
import Lucid
import Network.HTTP.Media ((//), (/:))
import Network.Wai
import Network.Wai.Handler.Warp
import Servant
import Servant.API
import Servant.API.ContentTypes
import Servant.JS
import System.Directory
import System.FilePath
import Text.Blaze
import Text.Blaze.Html.Renderer.Utf8
import qualified Data.Aeson.Parser
import qualified Text.Blaze.Html
-- StarData imports
import StarData
import StarTree
-- File to output javascript interface to
-- | Name of the generated JavaScript client file (written into the
-- script directory by 'writeJSInterface').
ifaceFile :: FilePath
ifaceFile = "StarMap.js"
-- | Query endpoints of the star map service. Every parameter is a
-- 'QueryParam' and therefore reaches the handlers as a 'Maybe'.
type StarsAPI =
       -- All stars within @radius@ of the point (pointX, pointY, pointZ).
       "starsInRadius"
       :> QueryParam "radius" Double
       :> QueryParam "pointX" Double
       :> QueryParam "pointY" Double
       :> QueryParam "pointZ" Double
       :> Get '[JSON] [StarDataRec]
  :<|>
       -- Stars visible from the given point with minimum luminosity @minLum@.
       "visibleStars"
       :> QueryParam "minLum" Double
       :> QueryParam "pointX" Double
       :> QueryParam "pointY" Double
       :> QueryParam "pointZ" Double
       :> Get '[JSON] [StarDataRec]
  :<|>
       -- As visibleStars, with an additional blur radius parameter.
       "visibleStarsMagic"
       :> QueryParam "minLum" Double
       :> QueryParam "blurRad" Double
       :> QueryParam "pointX" Double
       :> QueryParam "pointY" Double
       :> QueryParam "pointZ" Double
       :> Get '[JSON] [StarDataRec]
-- | Static file serving (everything not matched by 'StarsAPI').
type FilesAPI =
   Raw

-- | The complete API: star queries first, static files as fallback.
type StarMapAPI =
   StarsAPI :<|> FilesAPI

starsAPI :: Proxy StarsAPI
starsAPI = Proxy

starMapAPI :: Proxy StarMapAPI
starMapAPI = Proxy
-- | Assemble the full server: the three star-query handlers over the
-- given tree, plus a static file server rooted at @scriptDir@.
genStarServer :: StarTree StarDataAbbrev -> String -> Server StarMapAPI
genStarServer tree scriptDir = ((starsInRadius tree) :<|>
                                (visStars tree) :<|>
                                (visStarsM tree)) :<|>
                               (serveDirectory scriptDir)
-- | Return every star within @radius@ of the query point.
--
-- All four query parameters are required. The original definition was
-- partial: a request missing any parameter crashed the handler with a
-- pattern-match failure. Missing parameters now produce a 400 response.
starsInRadius :: StarTree StarDataAbbrev ->
                 Maybe Double ->
                 Maybe Double -> Maybe Double -> Maybe Double ->
                 ExceptT ServantErr IO [StarDataRec]
starsInRadius tree (Just radius) (Just px) (Just py) (Just pz) =
   let starList = inRadius (Vector3 px py pz) radius tree
   in return $ map starDataRecFromStarData starList
starsInRadius _ _ _ _ _ =
   throwError err400 { errBody = "starsInRadius: query parameters radius, pointX, pointY and pointZ are all required" }
-- | Return the stars reported by 'visibleStars' for the query point and
-- minimum luminosity @minLum@.
--
-- All four query parameters are required. The original definition was
-- partial: a request missing any parameter crashed the handler with a
-- pattern-match failure. Missing parameters now produce a 400 response.
visStars :: StarTree StarDataAbbrev ->
            Maybe Double ->
            Maybe Double -> Maybe Double -> Maybe Double ->
            ExceptT ServantErr IO [StarDataRec]
visStars tree (Just minLum) (Just px) (Just py) (Just pz) =
   let starList = visibleStars tree (Vector3 px py pz) minLum
   in return $ map starDataRecFromStarData starList
visStars _ _ _ _ _ =
   throwError err400 { errBody = "visibleStars: query parameters minLum, pointX, pointY and pointZ are all required" }
-- | Return the stars reported by 'visibleStarsMagic' for the query
-- point, minimum luminosity @minLum@ and blur radius @blurRad@.
--
-- All five query parameters are required. The original definition was
-- partial: a request missing any parameter crashed the handler with a
-- pattern-match failure. Missing parameters now produce a 400 response.
visStarsM :: StarTree StarDataAbbrev ->
             Maybe Double ->
             Maybe Double ->
             Maybe Double -> Maybe Double -> Maybe Double ->
             ExceptT ServantErr IO [StarDataRec]
visStarsM
   tree (Just minLum) (Just blurRad) (Just px) (Just py) (Just pz) =
   let starList = visibleStarsMagic tree (Vector3 px py pz) minLum blurRad
   in return $ map starDataRecFromStarData starList
visStarsM _ _ _ _ _ _ =
   throwError err400 { errBody = "visibleStarsMagic: query parameters minLum, blurRad, pointX, pointY and pointZ are all required" }
-- | Write a vanilla-JS client for 'StarsAPI' into @scriptDir@ under the
-- name 'ifaceFile'.
writeJSInterface :: FilePath -> IO ()
writeJSInterface scriptDir =
   writeJSForAPI starsAPI vanillaJS (scriptDir </> ifaceFile)
-- | WAI application serving the star map API over the given tree, with
-- static assets taken from @staticDir@.
starServerApp :: StarTree StarDataAbbrev -> String -> Application
starServerApp tree staticDir =
   serve starMapAPI (genStarServer tree staticDir)
| j3camero/galaxyatlas | OldCrap/haskell/server/src/StarServer.hs | mit | 3,731 | 0 | 25 | 760 | 983 | 518 | 465 | -1 | -1 |
module Doppler.Tag.Syntax (
parseTag
) where
import Doppler.Tag.Types
import Text.Parsec
import Text.Parsec.String (Parser)
import Control.Monad (void)
-- Generic tag structure parser.
-- | Parse a sequence of tags given sub-parsers for names, attribute
-- keys, attribute values, tag contents and whitespace. Produces three
-- tag shapes: explicitly self-closed tags (ShortTag), properly closed
-- tags (FullTag) and "dangling" tags that are implicitly closed by
-- their parent's end tag (DanglingTag).
parseTag :: Monoid v =>
            Parser TagName ->
            -- ^ Parser for tag names.
            Parser k ->
            -- ^ Parser for attribute keys.
            (Quote -> k -> Parser v) ->
            -- ^ Parser for attribute values.
            (TagName -> Parser c) ->
            -- ^ Parser for tag contents.
            Parser Char ->
            -- ^ Parser for whitespace characters.
            Parser [Tag (k, v) c]
            -- ^ Tag structure parser.
parseTag tagName attrName attrValue content whitespace =
   many whitespace *> tag
   where
      tag = do
         _ <- char '<'
         name <- tagName <* many whitespace
         attributes <- parseAttribute whitespace attrName attrValue `sepEndBy` many1 whitespace
         closing <- string "/>" <|> string ">"

         -- Make a short tag if it is explicitly closed.
         if closing == "/>" then
            return [ShortTag name attributes]

         -- Parse full or dangling tag structures.
         else do
            -- Children/content up to this tag's end tag, or to EOF.
            rest <- manyTill (tagOrContent name) $ try (void (lookAhead endTagName) <|> eof)
            endName <- optionMaybe $ lookAhead endTagName

            -- Make a full tag if it is closed properly. Consume end tag
            -- name because this tag is closed.
            if maybe False (== name) endName then
               endTagName *> return [FullTag name attributes $ concat rest]

            -- Make a dangling tag if it is implicitly short. Leave end tag
            -- because parent tag closes it.
            else
               return $ DanglingTag name attributes : concat rest

      -- A child tag, or a run of raw content for the current tag.
      tagOrContent name =
         tag <|> many (parseContent content name)

      -- An end tag like "</name>", allowing trailing whitespace in the name.
      endTagName =
         between (string "</") (char '>') (tagName <* many whitespace)
-- | Parse one @key[=value]@ attribute. A key with no @=value@ part
-- yields 'mempty' as its value. Values may be double-quoted,
-- single-quoted or unquoted; the detected quoting style is passed to
-- the value parser.
parseAttribute :: Monoid v => Parser Char -> Parser k -> (Quote -> k -> Parser v) -> Parser (k, v)
parseAttribute whitespace key value = do
   k <- key <* many whitespace
   equal <- optionMaybe $ char '=' <* many whitespace
   v <- maybe (return mempty)
              (const $ between doubleQuote doubleQuote (many $ value DoubleQuotes k)
                   <|> between singleQuote singleQuote (many $ value SingleQuotes k)
                   <|> many (value Unquoted k))
              equal
   return (k, mconcat v)
   where
      singleQuote = char '\''
      doubleQuote = char '"'
-- | Run the content parser for the given tag name and wrap its result
-- in a 'Content' node.
parseContent :: (TagName -> Parser b) -> TagName -> Parser (Tag a b)
parseContent content name = fmap Content (content name)
| oinuar/doppler-html | src/Doppler/Tag/Syntax.hs | mit | 2,690 | 0 | 19 | 885 | 697 | 346 | 351 | 47 | 3 |
import Data.Digest.SHA2
import qualified Data.ByteString as B
import System.IO
-- | Hash the contents of @/tmp/data.0512@ @counter@ times, re-reading
-- the file on every iteration, and return 'True' when done.
--
-- The original bound the digest lazily and never used it, so the thunk
-- could remain unevaluated and the loop would measure file I/O only;
-- 'seq' now forces the digest on every iteration. The redundant
-- @do return True@ and the commented-out debug lines were removed.
doManyHashes :: Int -> IO Bool
doManyHashes 0 = return True
doManyHashes counter = do
    contents <- B.readFile "/tmp/data.0512"
    let hash = sha256 (B.unpack contents)
    hash `seq` doManyHashes (counter - 1)
-- | Run the benchmark: 500,000 hash iterations over the fixture file.
-- The Bool result of 'doManyHashes' is discarded explicitly so that
-- 'main' has the conventional @IO ()@ type.
main :: IO ()
main = do
    _ <- doManyHashes 500000
    return ()
-- putStrLn "" | adizere/nifty-tree | playground/sha2-dd.hs | mit | 486 | 0 | 12 | 117 | 112 | 58 | 54 | 12 | 1 |
{-# LANGUAGE TemplateHaskell #-}
module Language.Sampler where
import Language.Syntax
import Language.Type
import Language.Inter
import Autolib.Util.Zufall
import Autolib.Util.Wort ( alle )
import Autolib.Reader
import Autolib.ToDoc
import Autolib.Set
import Data.Typeable
import Data.List
-- | Configuration for generating sample words of a language.
data Sampler =
    Sampler { language :: Language.Syntax.Type
            , num_samples :: Int -- ^ number of samples
            , min_sample_length :: Int -- ^ minimum length of the samples
            , max_sample_length :: Int -- ^ maximum length of the samples
            }
    deriving ( Eq, Typeable )
$(derives [makeReader, makeToDoc] [''Sampler])
-- | A default configuration: 50 samples of length 4..40 over the
-- 'Lukas' language. (Added the missing top-level type signature.)
example :: Sampler
example = Sampler
    { language = Lukas
    , num_samples = 50
    , min_sample_length = 4
    , max_sample_length = 40
    }
-- | Generate sample words for the configured language and partition
-- them into (members, non-members) with 'contains'. Deterministic for
-- a given seed via 'randomly'.
create :: Integral s
       => Sampler
       -> s -- ^ random seed
       -> Maybe Int -- ^ if present, create some words longer than this
       -> ( [String], [String] ) -- ^ (yeah, noh)
create i seed large = randomly ( fromIntegral seed ) $ do
    let l = inter $ language i
        w = min_sample_length i
        n = num_samples i
    -- the small words should also be quick to test
    let klein = take 40 $ do
          n <- [0 .. ]
          alle ( setToList $ alphabet l ) n
    -- watch argument ordering! -- TODO: use records instead
    here <- samples l n w
    there <- anti_samples l n w
    -- upper bound on word length; pushed past 'large' when requested
    let top = case large of
          Nothing -> max_sample_length i
          Just lrg -> 5 + max lrg ( max_sample_length i )
    farout <- samples l 10 top
    return $ partition (contains l)
           $ nub
           $ filter ( \ w -> length w <= top )
           $ klein ++ here ++ there ++ farout
-- local variables:
-- mode: haskell
-- end:
| florianpilz/autotool | src/Language/Sampler.hs | gpl-2.0 | 1,786 | 0 | 17 | 556 | 463 | 248 | 215 | 46 | 2 |
module Chap05.Exercise01 (module Chap05.Data.BatchedDeque) where
import Chap05.Data.BatchedDeque (BatchedDeque(..))
| stappit/okasaki-pfds | src/Chap05/Exercise01.hs | gpl-3.0 | 117 | 0 | 6 | 9 | 30 | 20 | 10 | 2 | 0 |
{-# LANGUAGE FlexibleInstances #-}
module Lib where
-- | Identity functor: fmap applies the function to the wrapped value.
newtype Identity a = Identity a deriving (Eq, Show)

instance Functor Identity where
  fmap f (Identity x) = Identity (f x)

-- | Homogeneous pair; fmap touches both components.
data Pair a = Pair a a deriving (Eq, Show)

instance Functor Pair where
  fmap f (Pair x y) = Pair (f x) (f y)

-- | fmap maps only the second (last) type argument.
data Two a b = Two a b deriving (Eq, Show)

instance Functor (Two a) where
  fmap f (Two x y) = Two x (f y)

-- | fmap maps only the last slot.
data Three a b c = Three a b c deriving (Eq, Show)

instance Functor (Three a b) where
  fmap f (Three x y z) = Three x y (f z)

-- | Both @b@ slots are mapped.
data Three' a b = Three' a b b deriving (Eq, Show)

instance Functor (Three' a) where
  fmap f (Three' x y y') = Three' x (f y) (f y')
-- Can you implement a Functor instance for this type? Why? Why not?
-- No: Trivial has kind *, while Functor requires kind * -> *,
-- so no Functor instance can be defined.
data Trivial = Trivial
-- Write a Functor instance for a datatype identical to Maybe. We’ll use
-- our own datatype because Maybe already has a Functor instance and
-- we cannot make a duplicate one.
-- | A hand-rolled 'Maybe': 'LolNope' plays 'Nothing', 'Yeppers' plays
-- 'Just'. (Maybe already has a Functor instance, so we use our own type.)
data Possibly a =
    LolNope
  | Yeppers a
  deriving (Eq, Show)

-- | Map over the 'Yeppers' payload; 'LolNope' passes through unchanged.
instance Functor Possibly where
  fmap _ LolNope     = LolNope        -- wildcard: the function is unused here
  fmap f (Yeppers a) = Yeppers (f a)
-- 1. Write a Functor instance for a datatype identical to Either. We’ll
-- use our own datatype because Either has a Functor instance.
-- | An Either clone; only the 'Second' (right) slot is functorial.
data Sum a b =
    First a
  | Second b
  deriving (Eq, Show)

-- | Mirrors Either's Functor: 'First' values pass through untouched.
instance Functor (Sum a) where
  fmap g s = case s of
    First x  -> First x
    Second y -> Second (g y)
-- Rearrange the arguments to the type constructor of the datatype
-- so the Functor instance works.
-- 1.
-- | Like 'Sum' but with the constructors' slots swapped, so the
-- functorial position is in 'First''.
data Sum' a b =
    First' b
  | Second' a

-- | Maps the 'First'' payload; 'Second'' passes through.
instance Functor (Sum' e) where
  fmap f (First' b)  = First' (f b)
  fmap _ (Second' a) = Second' a      -- wildcard: the function is unused here

-- 2.
-- | Only 'Something' carries the mapped slot; 'DeepBlue' is constant.
data Company a b c =
    DeepBlue a b
  | Something c

instance Functor (Company e e') where
  fmap f (Something c)  = Something (f c)
  fmap _ (DeepBlue a b) = DeepBlue a b
-- 3.
-- | Both constructors mix the two type parameters; fmap maps every
-- @b@ occurrence and leaves the @a@ occurrences alone.
data More a b =
    L b a b
  | R a b a
  deriving (Eq, Show)

instance Functor (More x) where
  fmap g m = case m of
    L y x y' -> L (g y) x (g y')
    R x y x' -> R x (g y) x'
-- Write Functor instances for the following datatypes.
-- 1.
-- | Three-way sum; only 'Bloor' holds the functorial slot.
data Quant a b =
    Finance
  | Desk a
  | Bloor b

instance Functor (Quant x) where
  fmap g q = case q of
    Finance -> Finance
    Desk a  -> Desk a
    Bloor b -> Bloor (g b)

-- 2. A constant functor: the stored value has the *first* type
-- parameter, so fmap cannot reach it and is a structural no-op.
data K a b =
    K a

instance Functor (K a) where
  fmap _ (K x) = K x
-- 3.
-- | Swap the two type arguments of a bifunctor-shaped type.
newtype Flip f a b =
  Flip (f b a)
  deriving (Eq, Show)

newtype K' a b =
  K' a

-- | Because the arguments are flipped, fmap now reaches the value
-- stored in K''s first slot.
instance Functor (Flip K' x) where
  fmap g (Flip (K' v)) = Flip (K' (g v))
-- should remind you of an
-- instance you've written before
-- instance Functor (Flip K' a) where
-- fmap f (Flip K' a b) = Flip $ K' (f a) b
-- 4.
-- | The payload lives in the second type parameter only.
data EvilGoateeConst a b =
  GoatyConst b

instance Functor (EvilGoateeConst a) where
  fmap g (GoatyConst x) = GoatyConst (g x)

-- 5. Requires a Functor constraint on the inner @f@.
data LiftItOut f a =
  LiftItOut (f a)

instance (Functor f) => Functor (LiftItOut f) where
  fmap g (LiftItOut inner) = LiftItOut (g <$> inner)

-- 6. Map the same function through both wrapped functors.
data Parappa f g a =
  DaWrappa (f a) (g a)

instance (Functor f, Functor g) => Functor (Parappa f g) where
  fmap fn (DaWrappa l r) = DaWrappa (fn <$> l) (fn <$> r)

-- 7. Only the g-side holds the mapped type; the f-side needs no
-- Functor constraint.
data IgnoreOne f g a b =
  IgnoringSomething (f a) (g b)

instance (Functor g) => Functor (IgnoreOne f g a) where
  fmap fn (IgnoringSomething fa gb) = IgnoringSomething fa (fn <$> gb)

-- 8. Three wrapped slots; only the last one is functorial.
data Notorious g o a t =
  Notorious (g o) (g a) (g t)

instance (Functor g) => Functor (Notorious g o a) where
  fmap fn (Notorious go ga gt) = Notorious go ga (fn <$> gt)
-- 9. You’ll need to use recursion.
-- | A hand-rolled cons list.
data List a =
    Nil
  | Cons a (List a)

-- | Map over every element, preserving the spine (recursive).
instance Functor List where
  fmap _ Nil         = Nil                      -- wildcard: f unused on Nil
  fmap f (Cons a ls) = Cons (f a) (fmap f ls)
-- 10. A tree of goats forms a Goat-Lord, fearsome poly-creature.
-- | A ternary tree of goats.
data GoatLord a =
    NoGoat
  | OneGoat a
  | MoreGoats (GoatLord a)
              (GoatLord a)
              (GoatLord a)

-- | Structure-preserving map over every goat in the tree.
instance Functor GoatLord where
  fmap _ NoGoat            = NoGoat
  fmap g (OneGoat x)       = OneGoat (g x)
  fmap g (MoreGoats l m r) = MoreGoats (fmap g l) (fmap g m) (fmap g r)
-- 11. You’ll use an extra functor for this one, although your solution
-- might do it monomorphically without using fmap. Keep in
-- mind that you will probably not be able to validate this one in
-- the usual manner. Do your best to make it work.
-- | A tiny command type; 'Read' stores a function, so fmap composes
-- onto its result (contrast with mapping a stored value).
data TalkToMe a =
    Halt
  | Print String a
  | Read (String -> a)

instance Functor TalkToMe where
  fmap g t = case t of
    Halt      -> Halt
    Print s a -> Print s (g a)
    Read fn   -> Read (g . fn)
| nirvinm/Solving-Exercises-in-Haskell-Programming-From-First-Principles | Functor/src/Lib.hs | gpl-3.0 | 4,860 | 0 | 10 | 1,325 | 1,739 | 922 | 817 | 114 | 0 |
-- | Render a demo table of HTML elements labelled 1, 2, ...
-- NOTE(review): the original let-binding named @cells@ shadowed the
-- function parameter, so the argument was silently ignored. The unused
-- parameter is kept (renamed @_cells@) for call-site compatibility.
table _cells =
 do
  H.h1 "Html Elements"
  -- element constructors to demo; the div gets inline blue styling
  let elements = [H.button,(H.! A.style "background-color:blue") . H.div,H.textarea,H.h1,H.li,H.select.H.option]
  -- infinite stream of numeric labels; zipWith truncates it
  let contents = map (fromString . show) [1..]
  let cellRows = [zipWith ($) elements contents]
  hTable cellRows H.! A.border "1"
| xpika/interpreter-haskell | Plugins/examples/Html.hs | gpl-3.0 | 284 | 1 | 14 | 51 | 139 | 70 | 69 | 7 | 1 |
-- | n factorial. The first guard makes every n <= 1 (including
-- negatives) yield 1, so the recursion always terminates.
factorial :: Integer -> Integer
factorial n
  | n <= 1 = 1
  | otherwise = n * factorial (pred n)

-- | Binomial coefficient C(n, k) via factorials.
-- Yields 0 when k > n; the n == k case short-circuits to 1.
pascal :: Integer -> Integer -> Integer
pascal n k
  | n == k = 1
  | n > k = factorial n `div` ((factorial k) * (factorial (n - k)))
  | otherwise = 0
-- | Rows 0..k of Pascal's triangle; row n holds C(n,0) .. C(n,n).
triangle k = map row [0 .. k]
  where row n = map (pascal n) [0 .. n]
-- | Render each row as a single space-separated line of numbers.
prettyOut = map (unwords . map show)
-- | Read a row count from stdin and print that many rows of
-- Pascal's triangle.
main :: IO ()
main = do
  input <- getLine
  let rows = (read input) :: Integer
  mapM_ putStrLn (prettyOut (triangle rows))
| icot/hackerrank | funprog/recursion/pascals-triangle.hs | gpl-3.0 | 545 | 0 | 12 | 172 | 294 | 144 | 150 | 18 | 1 |
{-# LANGUAGE OverloadedStrings #-}
module Database.Hedsql.Tests.TableManipulations
( tests
) where
--------------------------------------------------------------------------------
-- IMPORTS
--------------------------------------------------------------------------------
import Data.Monoid
import Data.Text.Lazy ()
import Database.Hedsql.Examples.Create
import Database.Hedsql.Examples.Drop
import qualified Database.Hedsql.SqLite as S
import qualified Database.Hedsql.PostgreSQL as P
import Test.Framework (Test, testGroup)
import Test.Framework.Providers.HUnit (testCase)
import Test.HUnit hiding (Test)
--------------------------------------------------------------------------------
-- PRIVATE
--------------------------------------------------------------------------------
----------------------------------------
-- PostgreSQL
----------------------------------------
--------------------
-- Full examples
--------------------
-- | Full "Countries" CREATE TABLE rendered for PostgreSQL: serial PK,
-- NOT NULL + UNIQUE column, plain integer columns.
testCountriesPostgreSQL :: Test
testCountriesPostgreSQL =
    testCase "Create table \"Countries\" for PostgreSQL" assertCreate
    where
        assertCreate :: Assertion
        assertCreate = assertEqual
            "Create table \"Countries\" is incorrect for PostgreSQL"
            ( "CREATE TABLE \"Countries\" ("
            <> "\"countryId\" serial PRIMARY KEY, "
            <> "\"name\" varchar(256) NOT NULL, UNIQUE, "
            <> "\"size\" integer, "
            <> "\"inhabitants\" integer)"
            )
            (P.codeGen countries)

-- | Full "People" CREATE TABLE for PostgreSQL: defaults, CHECK,
-- UNIQUE, self- and foreign-key REFERENCES.
testPeoplePostgreSQL :: Test
testPeoplePostgreSQL =
    testCase "Create table \"People\" for PostgreSQL" assertCreate
    where
        assertCreate :: Assertion
        assertCreate = assertEqual
            "Create table \"People\" is incorrect for PostgreSQL"
            ( "CREATE TABLE \"People\" ("
            <> "\"personId\" serial PRIMARY KEY, "
            <> "\"title\" char(2) DEFAULT('Ms'), "
            <> "\"firstName\" varchar(256) NOT NULL, "
            <> "\"lastName\" varchar(256) NOT NULL, "
            <> "\"age\" integer CHECK (\"age\" > -1), "
            <> "\"married\" boolean DEFAULT(FALSE), NOT NULL, "
            <> "\"passportNo\" varchar(256) UNIQUE, "
            <> "\"father\" integer REFERENCES \"People\"(\"personId\"), "
            <> "\"countryId\" integer REFERENCES \"Countries\"(\"countryId\"))"
            )
            (P.codeGen people)
--------------------
-- Primary key
--------------------
-- | Single-column PRIMARY KEY rendered for PostgreSQL.
testPrimaryKeyPostgreSQL :: Test
testPrimaryKeyPostgreSQL =
    testCase "Create table with primary key" $
        assertEqual
            "Create table with a primary key is incorrect for PostgreSQL"
            "CREATE TABLE \"People\" (\"personId\" integer PRIMARY KEY)"
            (P.codeGen primaryKeyCol)
----------------------------------------
-- SQLite
----------------------------------------
--------------------
-- Full examples
--------------------
-- | Full "Countries" CREATE TABLE rendered for SqLite (note the
-- INTEGER PRIMARY KEY AUTOINCREMENT and upper-cased type names).
testCountriesSqLite :: Test
testCountriesSqLite =
    testCase "Create table \"Countries\" for SqLite" assertCreate
    where
        assertCreate :: Assertion
        assertCreate = assertEqual
            "Create table \"Countries\" is incorrect for SqLite"
            ( "CREATE TABLE \"Countries\" ("
            <> "\"countryId\" INTEGER PRIMARY KEY AUTOINCREMENT, "
            <> "\"name\" VARCHAR(256) NOT NULL, UNIQUE, "
            <> "\"size\" INTEGER, "
            <> "\"inhabitants\" INTEGER)"
            )
            (S.codeGen countries)

-- | Full "People" CREATE TABLE for SqLite; booleans default to 0
-- rather than FALSE.
testPeopleSqLite :: Test
testPeopleSqLite = testCase "Create table \"People\" for SqLite" assertCreate
    where
        assertCreate :: Assertion
        assertCreate = assertEqual
            "Create table \"People\" is incorrect for SqLite"
            ( "CREATE TABLE \"People\" ("
            <> "\"personId\" INTEGER PRIMARY KEY AUTOINCREMENT, "
            <> "\"title\" CHARACTER(2) DEFAULT('Ms'), "
            <> "\"firstName\" VARCHAR(256) NOT NULL, "
            <> "\"lastName\" VARCHAR(256) NOT NULL, "
            <> "\"age\" INTEGER CHECK (\"age\" > -1), "
            <> "\"married\" BOOLEAN DEFAULT(0), NOT NULL, "
            <> "\"passportNo\" VARCHAR(256) UNIQUE, "
            <> "\"father\" INTEGER REFERENCES \"People\"(\"personId\"), "
            <> "\"countryId\" INTEGER REFERENCES \"Countries\"(\"countryId\"))"
            )
            (S.codeGen people)
--------------------
-- Primary key
--------------------
-- | Single-column PRIMARY KEY rendered for SqLite.
testPrimaryKeySqLite :: Test
testPrimaryKeySqLite =
    testCase "Create table with primary key" assertCreate
    where
        assertCreate :: Assertion
        assertCreate = assertEqual
            "Create table with a primary key is incorrect for SqLite"
            "CREATE TABLE \"People\" (\"personId\" INTEGER PRIMARY KEY)"
            (S.codeGen primaryKeyCol)

-- | Auto-incrementing primary key: SqLite spells it AUTOINCREMENT.
testPrimaryKeyAutoSqLite :: Test
testPrimaryKeyAutoSqLite =
    testCase "Create table with primary key and auto increment" assertCreate
    where
        assertCreate :: Assertion
        assertCreate = assertEqual
            ( "Create table with a primary key with an auto increment"
            <> "is incorrect for SqLite"
            )
            ( "CREATE TABLE \"People\" (\"personId\" INTEGER PRIMARY KEY "
            <> "AUTOINCREMENT)"
            )
            (S.codeGen primaryKeyColAuto)

-- | Composite (table-level) primary key over two columns.
testPrimaryKeyTableSqLite :: Test
testPrimaryKeyTableSqLite = testCase "Create table with primary key" assertCreate
  where
    assertCreate :: Assertion
    assertCreate = assertEqual
        "Create table with a primary key is incorrect for SqLite"
        ("CREATE TABLE \"People\" ("
        <> "\"firstName\" VARCHAR(256), "
        <> "\"lastName\" VARCHAR(256), "
        <> "CONSTRAINT \"pk\" PRIMARY KEY (\"firstName\", \"lastName\"))")
        (S.codeGen primaryKeyTable)

-- | Auto-incrementing primary key: PostgreSQL spells it serial.
testPrimaryKeyAutoPostgreSQL :: Test
testPrimaryKeyAutoPostgreSQL =
    testCase "Create table with primary key with auto increment" assertCreate
    where
        assertCreate :: Assertion
        assertCreate = assertEqual
            ("Create table with a primary key with auto increment"
            <> "is incorrect for PostgreSQL")
            "CREATE TABLE \"People\" (\"personId\" serial PRIMARY KEY)"
            (P.codeGen primaryKeyColAuto)

-- | Column DEFAULT clause rendering.
testDefaultValSqLite :: Test
testDefaultValSqLite = testCase "Create table with a default value" assertCreate
  where
    assertCreate :: Assertion
    assertCreate = assertEqual
        "Create table with a default value"
        "CREATE TABLE \"People\" (\"country\" INTEGER DEFAULT(1))"
        (S.codeGen defaultVal)
-- | NOT NULL constraints, one named via CONSTRAINT and one anonymous.
testNoNullsSqLite :: Test
testNoNullsSqLite =
    testCase "Create table with not null constraints" assertCreate
  where
    assertCreate :: Assertion
    assertCreate = assertEqual
        "Create table with not null constraints"
        ("CREATE TABLE \"People\" ("
        <> "\"firstName\" VARCHAR(256) CONSTRAINT \"no_null\" NOT NULL, "
        <> "\"lastName\" VARCHAR(256) NOT NULL)")
        (S.codeGen noNulls)

-- | Column-level CHECK constraint.
testCreateCheckSqLite :: Test
testCreateCheckSqLite = testCase "Create table with check" assertCreate
  where
    assertCreate :: Assertion
    assertCreate = assertEqual
        "Check constraint in table statement is incorrect"
        "CREATE TABLE \"People\" (\"age\" INTEGER CHECK (\"age\" > -1))"
        (S.codeGen createCheck)

-- | Several checks combined into one named table-level constraint.
testCreateChecksSqLite :: Test
testCreateChecksSqLite =
    testCase "Create table with many checks" assertCreate
  where
    assertCreate :: Assertion
    assertCreate = assertEqual
        "Check constraints in table statement are incorrect"
        ("CREATE TABLE \"People\" ("
        <> "\"lastName\" VARCHAR(256), \"age\" INTEGER, "
        <> "CONSTRAINT \"checks\" CHECK (\"age\" > -1 AND \"lastName\" <> '')"
        <> ")")
        (S.codeGen createChecks)

-- | Column-level REFERENCES (foreign key).
testCreateFKSqLite :: Test
testCreateFKSqLite =
    testCase "Create table with a foreign key" assertCreate
    where
        assertCreate :: Assertion
        assertCreate = assertEqual
            "Foreign key in table statement is incorrect"
            ("CREATE TABLE \"People\" "
            <> "(\"countryId\" INTEGER REFERENCES \"Countries\"(\"countryId\"))")
            (S.codeGen createFK)

-- | Minimal one-column CREATE TABLE.
testCreateTableSqLite :: Test
testCreateTableSqLite = testCase "Create table" assertCreate
    where
        assertCreate :: Assertion
        assertCreate = assertEqual
            "Create table statement is incorrect"
            "CREATE TABLE \"People\" (\"firstName\" VARCHAR(256))"
            (S.codeGen simpleTable)

-- | Column-level UNIQUE constraint.
testCreateUniqueSqLite :: Test
testCreateUniqueSqLite =
    testCase "Create table with unique constraint" assertCreate
    where
        assertCreate :: Assertion
        assertCreate = assertEqual
            "Create table with unique constraint is incorrect"
            "CREATE TABLE \"People\" (\"passportNo\" VARCHAR(256) UNIQUE)"
            (S.codeGen createUnique)

-- | Table-level UNIQUE constraint over two columns.
testCreateUniqueTSqLite :: Test
testCreateUniqueTSqLite =
    testCase "Create table with unique constraint on two columns" assertCreate
    where
        assertCreate :: Assertion
        assertCreate = assertEqual
            "Create table with unique constraint on two columns is incorrect"
            ("CREATE TABLE \"People\" (\"firstName\" VARCHAR(256), "
            <> "\"lastName\" VARCHAR(256), UNIQUE (\"firstName\", \"lastName\"))")
            (S.codeGen createUniqueT)
--------------------
-- DROP statements
--------------------
-- | Plain DROP TABLE rendering for SqLite.
testDropTable :: Test
testDropTable =
    testCase "Drop a table" $
        assertEqual
            "Drop table is incorrect for SqLite"
            "DROP TABLE \"People\""
            (S.codeGen dropTableStmt)

-- | DROP TABLE guarded by IF EXISTS.
testDropTableIfExists :: Test
testDropTableIfExists =
    testCase "Drop a table if it exists" $
        assertEqual
            "Drop table if table exists is incorrect for SqLite"
            "DROP TABLE IF EXISTS \"People\""
            (S.codeGen dropTableIfExistsStmt)
--------------------------------------------------------------------------------
-- PUBLIC
--------------------------------------------------------------------------------
-- | Gather all tests.
-- | Gather all table-manipulation tests: a PostgreSQL-specific group,
-- a vendor-neutral group rendered via SqLite, and the DROP statements.
tests :: Test
tests = testGroup "Table manipulations"
    [ testGroup "PostgreSQL"
        [ testGroup "Full examples"
            [ testPeoplePostgreSQL
            , testCountriesPostgreSQL
            ]
        , testGroup "Create tables"
            [ testPrimaryKeyAutoPostgreSQL
            , testPrimaryKeyPostgreSQL
            ]
        ]
    , testGroup "All vendors"
        [ testGroup "Full examples"
            [ testCountriesSqLite
            , testPeopleSqLite
            ]
        , testGroup "Create tables"
            [ testCreateCheckSqLite
            , testCreateChecksSqLite
            , testCreateFKSqLite
            , testCreateTableSqLite
            , testCreateUniqueSqLite
            , testCreateUniqueTSqLite
            , testDefaultValSqLite
            , testNoNullsSqLite
            , testPrimaryKeySqLite
            , testPrimaryKeyAutoSqLite
            , testPrimaryKeyTableSqLite
            ]
        , testGroup "Drop statements"
            [ testDropTable
            , testDropTableIfExists
            ]
        ]
    ]
| momomimachli/Hedsql-tests | src/Database/Hedsql/Tests/TableManipulations.hs | gpl-3.0 | 11,538 | 0 | 17 | 3,123 | 1,245 | 691 | 554 | 223 | 1 |
{-# LANGUAGE FlexibleContexts #-}
module OhBool.Evaluation where
import OhBool.Common
import qualified Data.Map as M
import qualified Data.Set as S
import Control.Monad.Reader (runReader)
import Control.Monad.Reader.Class (MonadReader, ask)
import Control.Monad (liftM)
import Data.Bits ((.|.), (.&.), xor)
{-| Evaluate an expression tree to a 'Bool', reading variable bindings
    from the surrounding 'MonadReader' environment. -}
evaluate :: (MonadReader Vars m) => Expression -> m Bool
evaluate expr = case expr of
  BoolValue b -> return b
  Not inner -> fmap not (evaluate inner)
  BinaryExpression op lhs rhs -> do
    a <- evaluate lhs
    b <- evaluate rhs
    return (performOperation op a b)
  BoolChain op parts ->
    fmap (foldl1 (performOperation op)) (mapM evaluate parts)
  Variable v -> do
    bindings <- ask
    case getValue v bindings of
      Just b -> return b
      Nothing -> error $ "Variable not found " ++ show v
{-| Map an operator constructor onto its concrete Bool implementation
    (using the Bits instance of Bool). -}
performOperation :: BinaryOperator -> Bool -> Bool -> Bool
performOperation Or  a b = a .|. b
performOperation And a b = a .&. b
performOperation Xor a b = a `xor` b
-- | Collect every variable occurrence, left to right, duplicates kept.
getVariables' :: Expression -> [Var]
getVariables' (Variable v) = [v]
getVariables' (Not ex) = getVariables' ex
getVariables' (BinaryExpression _ ex1 ex2) = getVariables' ex1 ++ getVariables' ex2
getVariables' (BoolChain _ xs) = concatMap getVariables' xs
getVariables' (BoolValue _) = []

-- | The set of distinct variables of an expression.
getVariables :: Expression -> S.Set Var
getVariables = S.fromList . getVariables'
-- | Build the full truth table of an expression: every assignment of
-- its variables (taken in ascending order) mapped to its evaluation.
constructTruthTable :: Expression -> TruthTable
constructTruthTable ex = TruthTable ex eval
  where variables = S.toAscList $ getVariables ex
        states = allPossibilities variables
        -- evaluate the expression once per assignment via the Reader
        eval = M.fromList $ map (\vars -> (vars, runReader (evaluate ex) vars)) states
-- | All 2^n assignments of the given variables to Bool: each fold step
-- extends every partial assignment with v=False and v=True.
allPossibilities :: [Var] -> [Vars]
allPossibilities vars = map M.fromList $ foldl (\ls v -> concatMap (\l -> [(v,False):l,(v,True):l]) ls) [[]] vars
| RomainGehrig/OhBool | src/OhBool/Evaluation.hs | gpl-3.0 | 1,903 | 0 | 14 | 328 | 689 | 362 | 327 | 43 | 2 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.GamesConfiguration.AchievementConfigurations.Insert
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Insert a new achievement configuration in this application.
--
-- /See:/ <https://developers.google.com/games/services Google Play Game Services Publishing API Reference> for @gamesConfiguration.achievementConfigurations.insert@.
module Network.Google.Resource.GamesConfiguration.AchievementConfigurations.Insert
(
-- * REST Resource
AchievementConfigurationsInsertResource
-- * Creating a Request
, achievementConfigurationsInsert
, AchievementConfigurationsInsert
-- * Request Lenses
, aciPayload
, aciApplicationId
) where
import Network.Google.GamesConfiguration.Types
import Network.Google.Prelude
-- NOTE: this module is auto-generated (see module header); the servant
-- style alias below mirrors the REST path segment by segment.
-- | A resource alias for @gamesConfiguration.achievementConfigurations.insert@ method which the
-- 'AchievementConfigurationsInsert' request conforms to.
type AchievementConfigurationsInsertResource =
     "games" :>
       "v1configuration" :>
         "applications" :>
           Capture "applicationId" Text :>
             "achievements" :>
               QueryParam "alt" AltJSON :>
                 ReqBody '[JSON] AchievementConfiguration :>
                   Post '[JSON] AchievementConfiguration

-- | Insert a new achievement configuration in this application.
--
-- /See:/ 'achievementConfigurationsInsert' smart constructor.
data AchievementConfigurationsInsert = AchievementConfigurationsInsert'
    { _aciPayload :: !AchievementConfiguration -- ^ request body to insert
    , _aciApplicationId :: !Text -- ^ Play developer console application ID
    } deriving (Eq,Show,Data,Typeable,Generic)

-- | Creates a value of 'AchievementConfigurationsInsert' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'aciPayload'
--
-- * 'aciApplicationId'
achievementConfigurationsInsert
    :: AchievementConfiguration -- ^ 'aciPayload'
    -> Text -- ^ 'aciApplicationId'
    -> AchievementConfigurationsInsert
achievementConfigurationsInsert pAciPayload_ pAciApplicationId_ =
    AchievementConfigurationsInsert'
    { _aciPayload = pAciPayload_
    , _aciApplicationId = pAciApplicationId_
    }
-- Boilerplate lens accessors, one per record field.
-- | Multipart request metadata.
aciPayload :: Lens' AchievementConfigurationsInsert AchievementConfiguration
aciPayload
  = lens _aciPayload (\ s a -> s{_aciPayload = a})

-- | The application ID from the Google Play developer console.
aciApplicationId :: Lens' AchievementConfigurationsInsert Text
aciApplicationId
  = lens _aciApplicationId
      (\ s a -> s{_aciApplicationId = a})

instance GoogleRequest
         AchievementConfigurationsInsert where
        type Rs AchievementConfigurationsInsert =
             AchievementConfiguration
        type Scopes AchievementConfigurationsInsert =
             '["https://www.googleapis.com/auth/androidpublisher"]
        -- RecordWildCards binds the _aci* fields for the call below.
        requestClient AchievementConfigurationsInsert'{..}
          = go _aciApplicationId (Just AltJSON) _aciPayload
              gamesConfigurationService
          where go
                  = buildClient
                      (Proxy ::
                         Proxy AchievementConfigurationsInsertResource)
                      mempty
| rueshyna/gogol | gogol-games-configuration/gen/Network/Google/Resource/GamesConfiguration/AchievementConfigurations/Insert.hs | mpl-2.0 | 3,900 | 0 | 14 | 814 | 386 | 232 | 154 | 67 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.AppEngine.Apps.AuthorizedCertificates.Get
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Gets the specified SSL certificate.
--
-- /See:/ <https://cloud.google.com/appengine/docs/admin-api/ App Engine Admin API Reference> for @appengine.apps.authorizedCertificates.get@.
module Network.Google.Resource.AppEngine.Apps.AuthorizedCertificates.Get
(
-- * REST Resource
AppsAuthorizedCertificatesGetResource
-- * Creating a Request
, appsAuthorizedCertificatesGet
, AppsAuthorizedCertificatesGet
-- * Request Lenses
, aacgAuthorizedCertificatesId
, aacgXgafv
, aacgUploadProtocol
, aacgAccessToken
, aacgUploadType
, aacgAppsId
, aacgView
, aacgCallback
) where
import Network.Google.AppEngine.Types
import Network.Google.Prelude
-- NOTE: this module is auto-generated (see module header).
-- | A resource alias for @appengine.apps.authorizedCertificates.get@ method which the
-- 'AppsAuthorizedCertificatesGet' request conforms to.
type AppsAuthorizedCertificatesGetResource =
     "v1" :>
       "apps" :>
         Capture "appsId" Text :>
           "authorizedCertificates" :>
             Capture "authorizedCertificatesId" Text :>
               QueryParam "$.xgafv" Xgafv :>
                 QueryParam "upload_protocol" Text :>
                   QueryParam "access_token" Text :>
                     QueryParam "uploadType" Text :>
                       QueryParam "view" AppsAuthorizedCertificatesGetView
                         :>
                         QueryParam "callback" Text :>
                           QueryParam "alt" AltJSON :>
                             Get '[JSON] AuthorizedCertificate

-- | Gets the specified SSL certificate.
--
-- /See:/ 'appsAuthorizedCertificatesGet' smart constructor.
data AppsAuthorizedCertificatesGet =
  AppsAuthorizedCertificatesGet'
    { _aacgAuthorizedCertificatesId :: !Text
    , _aacgXgafv :: !(Maybe Xgafv)
    , _aacgUploadProtocol :: !(Maybe Text)
    , _aacgAccessToken :: !(Maybe Text)
    , _aacgUploadType :: !(Maybe Text)
    , _aacgAppsId :: !Text
    , _aacgView :: !(Maybe AppsAuthorizedCertificatesGetView)
    , _aacgCallback :: !(Maybe Text)
    }
  deriving (Eq, Show, Data, Typeable, Generic)

-- | Creates a value of 'AppsAuthorizedCertificatesGet' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'aacgAuthorizedCertificatesId'
--
-- * 'aacgXgafv'
--
-- * 'aacgUploadProtocol'
--
-- * 'aacgAccessToken'
--
-- * 'aacgUploadType'
--
-- * 'aacgAppsId'
--
-- * 'aacgView'
--
-- * 'aacgCallback'
appsAuthorizedCertificatesGet
    :: Text -- ^ 'aacgAuthorizedCertificatesId'
    -> Text -- ^ 'aacgAppsId'
    -> AppsAuthorizedCertificatesGet
appsAuthorizedCertificatesGet pAacgAuthorizedCertificatesId_ pAacgAppsId_ =
  AppsAuthorizedCertificatesGet'
    { _aacgAuthorizedCertificatesId = pAacgAuthorizedCertificatesId_
    , _aacgXgafv = Nothing
    , _aacgUploadProtocol = Nothing
    , _aacgAccessToken = Nothing
    , _aacgUploadType = Nothing
    , _aacgAppsId = pAacgAppsId_
    , _aacgView = Nothing
    , _aacgCallback = Nothing
    }
-- Boilerplate lens accessors, one per record field.
-- | Part of \`name\`. See documentation of \`appsId\`.
aacgAuthorizedCertificatesId :: Lens' AppsAuthorizedCertificatesGet Text
aacgAuthorizedCertificatesId
  = lens _aacgAuthorizedCertificatesId
      (\ s a -> s{_aacgAuthorizedCertificatesId = a})

-- | V1 error format.
aacgXgafv :: Lens' AppsAuthorizedCertificatesGet (Maybe Xgafv)
aacgXgafv
  = lens _aacgXgafv (\ s a -> s{_aacgXgafv = a})

-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
aacgUploadProtocol :: Lens' AppsAuthorizedCertificatesGet (Maybe Text)
aacgUploadProtocol
  = lens _aacgUploadProtocol
      (\ s a -> s{_aacgUploadProtocol = a})

-- | OAuth access token.
aacgAccessToken :: Lens' AppsAuthorizedCertificatesGet (Maybe Text)
aacgAccessToken
  = lens _aacgAccessToken
      (\ s a -> s{_aacgAccessToken = a})

-- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
aacgUploadType :: Lens' AppsAuthorizedCertificatesGet (Maybe Text)
aacgUploadType
  = lens _aacgUploadType
      (\ s a -> s{_aacgUploadType = a})

-- | Part of \`name\`. Name of the resource requested. Example:
-- apps\/myapp\/authorizedCertificates\/12345.
aacgAppsId :: Lens' AppsAuthorizedCertificatesGet Text
aacgAppsId
  = lens _aacgAppsId (\ s a -> s{_aacgAppsId = a})

-- | Controls the set of fields returned in the GET response.
aacgView :: Lens' AppsAuthorizedCertificatesGet (Maybe AppsAuthorizedCertificatesGetView)
aacgView = lens _aacgView (\ s a -> s{_aacgView = a})

-- | JSONP
aacgCallback :: Lens' AppsAuthorizedCertificatesGet (Maybe Text)
aacgCallback
  = lens _aacgCallback (\ s a -> s{_aacgCallback = a})
instance GoogleRequest AppsAuthorizedCertificatesGet
         where
        type Rs AppsAuthorizedCertificatesGet =
             AuthorizedCertificate
        type Scopes AppsAuthorizedCertificatesGet =
             '["https://www.googleapis.com/auth/appengine.admin",
               "https://www.googleapis.com/auth/cloud-platform",
               "https://www.googleapis.com/auth/cloud-platform.read-only"]
        -- RecordWildCards binds the _aacg* fields for the call below.
        requestClient AppsAuthorizedCertificatesGet'{..}
          = go _aacgAppsId _aacgAuthorizedCertificatesId
              _aacgXgafv
              _aacgUploadProtocol
              _aacgAccessToken
              _aacgUploadType
              _aacgView
              _aacgCallback
              (Just AltJSON)
              appEngineService
          where go
                  = buildClient
                      (Proxy ::
                         Proxy AppsAuthorizedCertificatesGetResource)
                      mempty
| brendanhay/gogol | gogol-appengine/gen/Network/Google/Resource/AppEngine/Apps/AuthorizedCertificates/Get.hs | mpl-2.0 | 6,351 | 0 | 19 | 1,444 | 865 | 503 | 362 | 132 | 1 |
-- This file is part of purebred
-- Copyright (C) 2017-2019 Róman Joost and Fraser Tweedale
--
-- purebred is free software: you can redistribute it and/or modify
-- it under the terms of the GNU Affero General Public License as published by
-- the Free Software Foundation, either version 3 of the License, or
-- (at your option) any later version.
--
-- This program is distributed in the hope that it will be useful,
-- but WITHOUT ANY WARRANTY; without even the implied warranty of
-- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU Affero General Public License for more details.
--
-- You should have received a copy of the GNU Affero General Public License
-- along with this program. If not, see <http://www.gnu.org/licenses/>.
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE TypeApplications #-}
module Purebred.UI.GatherHeaders.Keybindings where
import qualified Graphics.Vty as V
import Purebred.Types
import Purebred.UI.Actions
import qualified Brick.Types as T
-- | Keys handled while the compose "From" prompt is focused (entered
-- from the thread list): Esc or C-g aborts back to the thread list,
-- Enter accepts and moves focus to the "To" prompt.
gatherFromKeybindings :: [Keybinding 'Threads 'ComposeFrom]
gatherFromKeybindings =
    [ Keybinding (V.EvKey V.KEsc []) (abort `focus` continue @'Threads @'ListOfThreads)
    , Keybinding (V.EvKey (V.KChar 'g') [V.MCtrl]) (abort `focus` continue @'Threads @'ListOfThreads)
    , Keybinding (V.EvKey V.KEnter []) (noop `focus` continue @'Threads @'ComposeTo)
    ]

-- | Same pattern for the "To" prompt; Enter advances to the subject.
gatherToKeybindings :: [Keybinding 'Threads 'ComposeTo]
gatherToKeybindings =
    [ Keybinding (V.EvKey V.KEsc []) (abort `focus` continue @'Threads @'ListOfThreads)
    , Keybinding (V.EvKey (V.KChar 'g') [V.MCtrl]) (abort `focus` continue @'Threads @'ListOfThreads)
    , Keybinding (V.EvKey V.KEnter []) (noop `focus` continue @'Threads @'ComposeSubject)
    ]

-- | Subject prompt: abort keys return to the thread list; Enter
-- invokes the external editor, targeting the compose attachment list.
gatherSubjectKeybindings :: [Keybinding 'Threads 'ComposeSubject]
gatherSubjectKeybindings =
    [ Keybinding (V.EvKey V.KEsc []) (abort `focus` continue @'Threads @'ListOfThreads)
    , Keybinding (V.EvKey (V.KChar 'g') [V.MCtrl]) (abort `focus` continue @'Threads @'ListOfThreads)
    , Keybinding (V.EvKey V.KEnter []) (
        noop
        `focus` (
            invokeEditor Threads ListOfThreads
            :: Action 'ComposeView 'ComposeListOfAttachments (T.Next AppState))
        )
    ]
| purebred-mua/purebred | src/Purebred/UI/GatherHeaders/Keybindings.hs | agpl-3.0 | 2,222 | 0 | 13 | 385 | 558 | 309 | 249 | 26 | 1 |
module Syntax_Test where
-- see: https://leiffrenzel.de/papers/getting-started-with-hunit.html
import Test.HUnit
import Syntax
-- The structure of a test case is always this:
-- 1. create some input,
-- 2. run the tested code on that input,
-- 3. make some assertions over the results.
-- 4. group them by test lists, and label them
-- TestCase :: Assertion -> Test
-- assertEqual :: (Eq a, Show a) => String -> a -> a -> Assertion
-- TestList :: [Test] -> Test
-- | Lucky-number case: 7 must yield the celebratory message.
testSeven :: Test
testSeven = TestCase
    (assertEqual "Should get seven" "LUCKY NUMBER SEVEN!" (lucky 7))

-- | Labelled group of simple cases.
simpleCases :: Test
simpleCases = TestLabel "Simple cases: " (TestList [testSeven])

-- | Any non-7 input must produce the apology message.
testEdge :: (Integral a) => a -> Test
testEdge n = TestCase
    (assertEqual "Should say sorry" "Sorry, you're out of luck, pal!" (lucky n))

-- | Values adjacent to 7 exercise the fall-through equation.
borderCases :: Test
borderCases = TestLabel "Border cases: " (TestList [testEdge 6, testEdge 8])

-- | Run every group (runTestTT :: Test -> IO Counts).
main :: IO Counts
main = runTestTT (TestList [simpleCases, borderCases])
module Poset.A334231Spec (main, spec) where
import Test.Hspec
import Poset.A334231 (a334231)
main :: IO ()
main = hspec spec

-- | Check a334231 against the first 20 published terms of A334231.
spec :: Spec
spec = describe "A334231" $
  it "correctly computes the first 20 elements" $
    map a334231 [1..20] `shouldBe`
      [1,2,2,3,3,3,4,4,6,4,5,5,15,5,5,6,6,6,6,15]
| peterokagey/haskellOEIS | test/Poset/A334231Spec.hs | apache-2.0 | 339 | 0 | 8 | 57 | 154 | 92 | 62 | 10 | 1 |
{-# OPTIONS -fglasgow-exts #-}
-----------------------------------------------------------------------------
{-| Module : QStyleOptionTab.hs
Copyright : (c) David Harley 2010
Project : qtHaskell
Version : 1.1.4
Modified : 2010-09-02 17:02:36
Warning : this file is machine generated - do not modify.
--}
-----------------------------------------------------------------------------
module Qtc.Enums.Gui.QStyleOptionTab (
QStyleOptionTabStyleOptionType
, QStyleOptionTabStyleOptionVersion
, QStyleOptionTabTabPosition
, QStyleOptionTabSelectedPosition
, CornerWidget, CornerWidgets, eNoCornerWidgets, fNoCornerWidgets, eLeftCornerWidget, fLeftCornerWidget, eRightCornerWidget, fRightCornerWidget
)
where
import Qtc.Classes.Base
import Qtc.ClassTypes.Core (QObject, TQObject, qObjectFromPtr)
import Qtc.Core.Base (Qcs, connectSlot, qtc_connectSlot_int, wrapSlotHandler_int)
import Qtc.Enums.Base
import Qtc.Enums.Classes.Core
-- Machine-generated enum wrappers (see file header: do not modify by
-- hand): each C++ enum gets a phantom carrier type, a QEnum alias, and
-- an Int injection function.
data CQStyleOptionTabStyleOptionType a = CQStyleOptionTabStyleOptionType a
type QStyleOptionTabStyleOptionType = QEnum(CQStyleOptionTabStyleOptionType Int)
ieQStyleOptionTabStyleOptionType :: Int -> QStyleOptionTabStyleOptionType
ieQStyleOptionTabStyleOptionType x = QEnum (CQStyleOptionTabStyleOptionType x)

instance QEnumC (CQStyleOptionTabStyleOptionType Int) where
  qEnum_toInt (QEnum (CQStyleOptionTabStyleOptionType x)) = x
  qEnum_fromInt x = QEnum (CQStyleOptionTabStyleOptionType x)
  withQEnumResult x
    = do
        ti <- x
        return $ qEnum_fromInt $ fromIntegral ti
  withQEnumListResult x
    = do
        til <- x
        return $ map qEnum_fromInt til

-- Connects a Qt int-carrying signal to a Haskell handler receiving the
-- value as this enum; the wrapper frees its stable/function pointers
-- once the receiving QObject is gone.
instance Qcs (QObject c -> QStyleOptionTabStyleOptionType -> IO ()) where
  connectSlot _qsig_obj _qsig_nam _qslt_obj _qslt_nam _handler
    = do
        funptr <- wrapSlotHandler_int slotHandlerWrapper_int
        stptr <- newStablePtr (Wrap _handler)
        withObjectPtr _qsig_obj $ \cobj_sig ->
          withCWString _qsig_nam $ \cstr_sig ->
            withObjectPtr _qslt_obj $ \cobj_slt ->
              withCWString _qslt_nam $ \cstr_slt ->
                qtc_connectSlot_int cobj_sig cstr_sig cobj_slt cstr_slt (toCFunPtr funptr) (castStablePtrToPtr stptr)
        return ()
    where
      slotHandlerWrapper_int :: Ptr fun -> Ptr () -> Ptr (TQObject c) -> CInt -> IO ()
      slotHandlerWrapper_int funptr stptr qobjptr cint
        = do qobj <- qObjectFromPtr qobjptr
             let hint = fromCInt cint
             -- dead receiver: release the wrapper pointers; otherwise
             -- convert the int back to the enum and dispatch
             if (objectIsNull qobj)
              then do when (stptr/=ptrNull)
                        (freeStablePtr (castPtrToStablePtr stptr))
                      when (funptr/=ptrNull)
                        (freeHaskellFunPtr (castPtrToFunPtr funptr))
              else _handler qobj (qEnum_fromInt hint)
             return ()

instance QeType QStyleOptionTabStyleOptionType where
  eType
    = ieQStyleOptionTabStyleOptionType $ 3

-- Same generated pattern for QStyleOptionTab::StyleOptionVersion.
data CQStyleOptionTabStyleOptionVersion a = CQStyleOptionTabStyleOptionVersion a
type QStyleOptionTabStyleOptionVersion = QEnum(CQStyleOptionTabStyleOptionVersion Int)
ieQStyleOptionTabStyleOptionVersion :: Int -> QStyleOptionTabStyleOptionVersion
ieQStyleOptionTabStyleOptionVersion x = QEnum (CQStyleOptionTabStyleOptionVersion x)
instance QEnumC (CQStyleOptionTabStyleOptionVersion Int) where
qEnum_toInt (QEnum (CQStyleOptionTabStyleOptionVersion x)) = x
qEnum_fromInt x = QEnum (CQStyleOptionTabStyleOptionVersion x)
withQEnumResult x
= do
ti <- x
return $ qEnum_fromInt $ fromIntegral ti
withQEnumListResult x
= do
til <- x
return $ map qEnum_fromInt til
instance Qcs (QObject c -> QStyleOptionTabStyleOptionVersion -> IO ()) where
connectSlot _qsig_obj _qsig_nam _qslt_obj _qslt_nam _handler
= do
funptr <- wrapSlotHandler_int slotHandlerWrapper_int
stptr <- newStablePtr (Wrap _handler)
withObjectPtr _qsig_obj $ \cobj_sig ->
withCWString _qsig_nam $ \cstr_sig ->
withObjectPtr _qslt_obj $ \cobj_slt ->
withCWString _qslt_nam $ \cstr_slt ->
qtc_connectSlot_int cobj_sig cstr_sig cobj_slt cstr_slt (toCFunPtr funptr) (castStablePtrToPtr stptr)
return ()
where
slotHandlerWrapper_int :: Ptr fun -> Ptr () -> Ptr (TQObject c) -> CInt -> IO ()
slotHandlerWrapper_int funptr stptr qobjptr cint
= do qobj <- qObjectFromPtr qobjptr
let hint = fromCInt cint
if (objectIsNull qobj)
then do when (stptr/=ptrNull)
(freeStablePtr (castPtrToStablePtr stptr))
when (funptr/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr))
else _handler qobj (qEnum_fromInt hint)
return ()
instance QeVersion QStyleOptionTabStyleOptionVersion where
eVersion
= ieQStyleOptionTabStyleOptionVersion $ 1
data CQStyleOptionTabTabPosition a = CQStyleOptionTabTabPosition a
type QStyleOptionTabTabPosition = QEnum(CQStyleOptionTabTabPosition Int)
ieQStyleOptionTabTabPosition :: Int -> QStyleOptionTabTabPosition
ieQStyleOptionTabTabPosition x = QEnum (CQStyleOptionTabTabPosition x)
instance QEnumC (CQStyleOptionTabTabPosition Int) where
qEnum_toInt (QEnum (CQStyleOptionTabTabPosition x)) = x
qEnum_fromInt x = QEnum (CQStyleOptionTabTabPosition x)
withQEnumResult x
= do
ti <- x
return $ qEnum_fromInt $ fromIntegral ti
withQEnumListResult x
= do
til <- x
return $ map qEnum_fromInt til
instance Qcs (QObject c -> QStyleOptionTabTabPosition -> IO ()) where
connectSlot _qsig_obj _qsig_nam _qslt_obj _qslt_nam _handler
= do
funptr <- wrapSlotHandler_int slotHandlerWrapper_int
stptr <- newStablePtr (Wrap _handler)
withObjectPtr _qsig_obj $ \cobj_sig ->
withCWString _qsig_nam $ \cstr_sig ->
withObjectPtr _qslt_obj $ \cobj_slt ->
withCWString _qslt_nam $ \cstr_slt ->
qtc_connectSlot_int cobj_sig cstr_sig cobj_slt cstr_slt (toCFunPtr funptr) (castStablePtrToPtr stptr)
return ()
where
slotHandlerWrapper_int :: Ptr fun -> Ptr () -> Ptr (TQObject c) -> CInt -> IO ()
slotHandlerWrapper_int funptr stptr qobjptr cint
= do qobj <- qObjectFromPtr qobjptr
let hint = fromCInt cint
if (objectIsNull qobj)
then do when (stptr/=ptrNull)
(freeStablePtr (castPtrToStablePtr stptr))
when (funptr/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr))
else _handler qobj (qEnum_fromInt hint)
return ()
instance QeBeginning QStyleOptionTabTabPosition where
eBeginning
= ieQStyleOptionTabTabPosition $ 0
instance QeMiddle QStyleOptionTabTabPosition where
eMiddle
= ieQStyleOptionTabTabPosition $ 1
instance QeEnd QStyleOptionTabTabPosition where
eEnd
= ieQStyleOptionTabTabPosition $ 2
instance QeOnlyOneTab QStyleOptionTabTabPosition where
eOnlyOneTab
= ieQStyleOptionTabTabPosition $ 3
data CQStyleOptionTabSelectedPosition a = CQStyleOptionTabSelectedPosition a
type QStyleOptionTabSelectedPosition = QEnum(CQStyleOptionTabSelectedPosition Int)
ieQStyleOptionTabSelectedPosition :: Int -> QStyleOptionTabSelectedPosition
ieQStyleOptionTabSelectedPosition x = QEnum (CQStyleOptionTabSelectedPosition x)
instance QEnumC (CQStyleOptionTabSelectedPosition Int) where
qEnum_toInt (QEnum (CQStyleOptionTabSelectedPosition x)) = x
qEnum_fromInt x = QEnum (CQStyleOptionTabSelectedPosition x)
withQEnumResult x
= do
ti <- x
return $ qEnum_fromInt $ fromIntegral ti
withQEnumListResult x
= do
til <- x
return $ map qEnum_fromInt til
instance Qcs (QObject c -> QStyleOptionTabSelectedPosition -> IO ()) where
connectSlot _qsig_obj _qsig_nam _qslt_obj _qslt_nam _handler
= do
funptr <- wrapSlotHandler_int slotHandlerWrapper_int
stptr <- newStablePtr (Wrap _handler)
withObjectPtr _qsig_obj $ \cobj_sig ->
withCWString _qsig_nam $ \cstr_sig ->
withObjectPtr _qslt_obj $ \cobj_slt ->
withCWString _qslt_nam $ \cstr_slt ->
qtc_connectSlot_int cobj_sig cstr_sig cobj_slt cstr_slt (toCFunPtr funptr) (castStablePtrToPtr stptr)
return ()
where
slotHandlerWrapper_int :: Ptr fun -> Ptr () -> Ptr (TQObject c) -> CInt -> IO ()
slotHandlerWrapper_int funptr stptr qobjptr cint
= do qobj <- qObjectFromPtr qobjptr
let hint = fromCInt cint
if (objectIsNull qobj)
then do when (stptr/=ptrNull)
(freeStablePtr (castPtrToStablePtr stptr))
when (funptr/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr))
else _handler qobj (qEnum_fromInt hint)
return ()
instance QeNotAdjacent QStyleOptionTabSelectedPosition where
eNotAdjacent
= ieQStyleOptionTabSelectedPosition $ 0
instance QeNextIsSelected QStyleOptionTabSelectedPosition where
eNextIsSelected
= ieQStyleOptionTabSelectedPosition $ 1
instance QePreviousIsSelected QStyleOptionTabSelectedPosition where
ePreviousIsSelected
= ieQStyleOptionTabSelectedPosition $ 2
data CCornerWidget a = CCornerWidget a
type CornerWidget = QEnum(CCornerWidget Int)
ieCornerWidget :: Int -> CornerWidget
ieCornerWidget x = QEnum (CCornerWidget x)
instance QEnumC (CCornerWidget Int) where
qEnum_toInt (QEnum (CCornerWidget x)) = x
qEnum_fromInt x = QEnum (CCornerWidget x)
withQEnumResult x
= do
ti <- x
return $ qEnum_fromInt $ fromIntegral ti
withQEnumListResult x
= do
til <- x
return $ map qEnum_fromInt til
instance Qcs (QObject c -> CornerWidget -> IO ()) where
connectSlot _qsig_obj _qsig_nam _qslt_obj _qslt_nam _handler
= do
funptr <- wrapSlotHandler_int slotHandlerWrapper_int
stptr <- newStablePtr (Wrap _handler)
withObjectPtr _qsig_obj $ \cobj_sig ->
withCWString _qsig_nam $ \cstr_sig ->
withObjectPtr _qslt_obj $ \cobj_slt ->
withCWString _qslt_nam $ \cstr_slt ->
qtc_connectSlot_int cobj_sig cstr_sig cobj_slt cstr_slt (toCFunPtr funptr) (castStablePtrToPtr stptr)
return ()
where
slotHandlerWrapper_int :: Ptr fun -> Ptr () -> Ptr (TQObject c) -> CInt -> IO ()
slotHandlerWrapper_int funptr stptr qobjptr cint
= do qobj <- qObjectFromPtr qobjptr
let hint = fromCInt cint
if (objectIsNull qobj)
then do when (stptr/=ptrNull)
(freeStablePtr (castPtrToStablePtr stptr))
when (funptr/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr))
else _handler qobj (qEnum_fromInt hint)
return ()
data CCornerWidgets a = CCornerWidgets a
type CornerWidgets = QFlags(CCornerWidgets Int)
ifCornerWidgets :: Int -> CornerWidgets
ifCornerWidgets x = QFlags (CCornerWidgets x)
instance QFlagsC (CCornerWidgets Int) where
qFlags_toInt (QFlags (CCornerWidgets x)) = x
qFlags_fromInt x = QFlags (CCornerWidgets x)
withQFlagsResult x
= do
ti <- x
return $ qFlags_fromInt $ fromIntegral ti
withQFlagsListResult x
= do
til <- x
return $ map qFlags_fromInt til
instance Qcs (QObject c -> CornerWidgets -> IO ()) where
connectSlot _qsig_obj _qsig_nam _qslt_obj _qslt_nam _handler
= do
funptr <- wrapSlotHandler_int slotHandlerWrapper_int
stptr <- newStablePtr (Wrap _handler)
withObjectPtr _qsig_obj $ \cobj_sig ->
withCWString _qsig_nam $ \cstr_sig ->
withObjectPtr _qslt_obj $ \cobj_slt ->
withCWString _qslt_nam $ \cstr_slt ->
qtc_connectSlot_int cobj_sig cstr_sig cobj_slt cstr_slt (toCFunPtr funptr) (castStablePtrToPtr stptr)
return ()
where
slotHandlerWrapper_int :: Ptr fun -> Ptr () -> Ptr (TQObject c) -> CInt -> IO ()
slotHandlerWrapper_int funptr stptr qobjptr cint
= do qobj <- qObjectFromPtr qobjptr
let hint = fromCInt cint
if (objectIsNull qobj)
then do when (stptr/=ptrNull)
(freeStablePtr (castPtrToStablePtr stptr))
when (funptr/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr))
else _handler qobj (qFlags_fromInt hint)
return ()
-- Enum values (e*) and the corresponding QFlags values (f*) for
-- QStyleOptionTab's CornerWidget/CornerWidgets. The numeric values mirror
-- Qt's C++ enum; this file is machine generated (see header), so these
-- should only change when the bindings are regenerated.
eNoCornerWidgets :: CornerWidget
eNoCornerWidgets
  = ieCornerWidget $ 0

eLeftCornerWidget :: CornerWidget
eLeftCornerWidget
  = ieCornerWidget $ 1

eRightCornerWidget :: CornerWidget
eRightCornerWidget
  = ieCornerWidget $ 2

fNoCornerWidgets :: CornerWidgets
fNoCornerWidgets
  = ifCornerWidgets $ 0

fLeftCornerWidget :: CornerWidgets
fLeftCornerWidget
  = ifCornerWidgets $ 1

fRightCornerWidget :: CornerWidgets
fRightCornerWidget
  = ifCornerWidgets $ 2
| uduki/hsQt | Qtc/Enums/Gui/QStyleOptionTab.hs | bsd-2-clause | 12,522 | 0 | 18 | 2,659 | 3,235 | 1,575 | 1,660 | 279 | 1 |
-- | Settings are centralized, as much as possible, into this file. This
-- includes database connection settings, static file locations, etc.
-- In addition, you can configure a number of different aspects of Yesod
-- by overriding methods in the Yesod typeclass. That instance is
-- declared in the Foundation.hs file.
module Settings
( widgetFile
, PersistConfig
, staticRoot
, staticDir
, Extra (..)
, parseExtra
) where
import Text.Hamlet
import Data.Default (def)
import Prelude
import Text.Shakespeare.Text (st)
import Language.Haskell.TH.Syntax
import Database.Persist.MongoDB (MongoConf)
import Yesod.Default.Config
import Yesod.Default.Util
import Data.Text (Text)
import Data.Yaml
import Control.Applicative
import Settings.Development
-- | Which Persistent backend this site is using.
type PersistConfig = MongoConf
-- Static setting below. Changing these requires a recompile
-- | The location of static files on your system. This is a file system
-- path. The default value works properly with your scaffolded site.
staticDir :: FilePath
staticDir = "static"  -- NB: a relative path; changing it requires a recompile (see note above)
-- | The base URL for your static files. As you can see by the default
-- value, this can simply be "static" appended to your application root.
-- A powerful optimization can be serving static files from a separate
-- domain name. This allows you to use a web server optimized for static
-- files, more easily set expires and cache values, and avoid possibly
-- costly transference of cookies on static files. For more information,
-- please see:
-- http://code.google.com/speed/page-speed/docs/request.html#ServeFromCookielessDomain
--
-- If you change the resource pattern for StaticR in Foundation.hs, you will
-- have to make a corresponding change here.
--
-- To see how this value is used, see urlRenderOverride in Foundation.hs
staticRoot :: AppConfig DefaultEnv x -> Text
-- Splices the configured approot, yielding "<approot>/static".
staticRoot conf = [st|#{appRoot conf}/static|]
-- | Settings for 'widgetFile', such as which template languages to support and
-- default Hamlet settings.
-- | Default widget-file settings, with Hamlet forced to always emit
-- newlines between tags.
widgetFileSettings :: WidgetFileSettings
widgetFileSettings =
  def { wfsHamletSettings = defaultHamletSettings { hamletNewlines = AlwaysNewlines } }
-- The rest of this file contains settings which rarely need changing by a
-- user.
-- | Load a widget file; templates are reloaded at runtime only in
-- development builds.
widgetFile :: String -> Q Exp
widgetFile = loader widgetFileSettings
  where
    loader | development = widgetFileReload
           | otherwise   = widgetFileNoReload
-- | Site-specific settings parsed from the config file (see 'parseExtra').
data Extra = Extra
    { extraCopyright :: Text -- ^ copyright notice
    , extraAnalytics :: Maybe Text -- ^ Google Analytics
    , extraHatenaStar :: Maybe Text -- ^ Hatena Star (read from the "hatenastar" key)
    , extraAdmins :: [Text]
    , extraTitle :: Text
    , extraDescription :: Text
    , extraMarkup :: Maybe String
    , extraMailAddress :: Maybe Text
    , extraGoogleCSE :: Maybe Text -- ^ Google Custom Search Engine
    , extraReCAPTCHA :: Maybe (Text, Text) -- ^ (public key, private key); see 'parseExtra'
    } deriving Show
-- Field order below must match the 'Extra' constructor exactly.
-- The ReCAPTCHA pair is 'Just' only when BOTH keys are present
-- ('liftA2' (,) over the two 'Maybe' results).
parseExtra :: DefaultEnv -> Object -> Parser Extra
parseExtra _ o = Extra
    <$> o .: "copyright"
    <*> o .:? "analytics"
    <*> o .:? "hatenastar"
    <*> o .: "admins"
    <*> o .: "title"
    <*> o .: "description"
    <*> o .:? "markup"
    <*> o .:? "admin-mail"
    <*> o .:? "google-cse"
    <*> (liftA2 (,) <$> o .:? "recaptcha-public-key"
                    <*> o .:? "recaptcha-private-key")
| konn/Yablog | Settings.hs | bsd-2-clause | 3,325 | 0 | 23 | 707 | 465 | 281 | 184 | -1 | -1 |
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE StandaloneDeriving #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE PatternGuards #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE EmptyDataDecls #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE ImpredicativeTypes #-}
{-# LANGUAGE UndecidableInstances #-}
{-# LANGUAGE MultiParamTypeClasses #-}
-- | A simple API for /routing/, using a custom exchange type.
module Control.Distributed.Process.Platform.Execution.Exchange.Router
( -- * Types
HeaderName
, Binding(..)
, Bindable
, BindingSelector
, RelayType(..)
-- * Starting a Router
, router
, supervisedRouter
, supervisedRouterRef
-- * Client (Publishing) API
, route
, routeMessage
-- * Routing via message/binding keys
, messageKeyRouter
, bindKey
-- * Routing via message headers
, headerContentRouter
, bindHeader
) where
import Control.DeepSeq (NFData)
import Control.Distributed.Process
( Process
, ProcessMonitorNotification(..)
, ProcessId
, monitor
, handleMessage
, unsafeWrapMessage
)
import qualified Control.Distributed.Process as P
import Control.Distributed.Process.Serializable (Serializable)
import Control.Distributed.Process.Platform.Execution.Exchange.Internal
( startExchange
, startSupervised
, configureExchange
, Message(..)
, Exchange
, ExchangeType(..)
, post
, postMessage
, applyHandlers
)
import Control.Distributed.Process.Platform.Internal.Primitives
( deliver
, Resolvable(..)
)
import Control.Distributed.Process.Platform.Supervisor (SupervisorPid)
import Data.Binary
import Data.Foldable (forM_)
import Data.Hashable
import Data.HashMap.Strict (HashMap)
import qualified Data.HashMap.Strict as Map
import Data.HashSet (HashSet)
import qualified Data.HashSet as Set
import Data.Typeable (Typeable)
import GHC.Generics
type HeaderName = String
-- | The binding key used by the built-in key and header based
-- routers.
data Binding =
BindKey { bindingKey :: !String }
| BindHeader { bindingKey :: !String
, headerName :: !HeaderName
}
| BindNone
deriving (Typeable, Generic, Eq, Show)
instance Binary Binding where
instance NFData Binding where
instance Hashable Binding where
-- | Things that can be used as binding keys in a router.
class (Hashable k, Eq k, Serializable k) => Bindable k
instance (Hashable k, Eq k, Serializable k) => Bindable k
-- | Used to convert a 'Message' into a 'Bindable' routing key.
type BindingSelector k = (Message -> Process k)
-- | Given to a /router/ to indicate whether clients should
-- receive 'Message' payloads only, or the whole 'Message' object
-- itself.
data RelayType = PayloadOnly | WholeMessage
-- | Run-time state of a router exchange.
data State k = State { bindings :: !(HashMap k (HashSet ProcessId)) -- ^ binding -> subscribed processes
                     , selector :: !(BindingSelector k) -- ^ derives a binding from each 'Message'
                     , relayType :: !RelayType -- ^ deliver whole 'Message' or payload only
                     }
type Router k = ExchangeType (State k)
--------------------------------------------------------------------------------
-- Starting/Running the Exchange --
--------------------------------------------------------------------------------
-- | A router that matches on a 'Message' 'key'. To bind a client @Process@ to
-- such an exchange, use the 'bindKey' function.
messageKeyRouter :: RelayType -> Process Exchange
messageKeyRouter t = router t (return . BindKey . key)
-- | A router that matches on a specific (named) header. To bind a client
-- @Process@ to such an exchange, use the 'bindHeader' function.
headerContentRouter :: RelayType -> HeaderName -> Process Exchange
headerContentRouter t n = router t (checkHeaders n)
  where
    -- Build the binding as @BindHeader value name@ so it matches the
    -- bindings registered via 'bindHeader', which stores
    -- bindingKey = header value and headerName = header name.
    -- The two arguments were previously swapped here, so bindings
    -- created by clients could never match the derived binding.
    checkHeaders hn Message{..} =
      case Map.lookup hn (Map.fromList headers) of
        Nothing -> return BindNone
        Just hv -> return $ BindHeader hv hn
-- | Defines a /router/ exchange. The 'BindingSelector' is used to construct
-- a binding (i.e., an instance of the 'Bindable' type @k@) for each incoming
-- 'Message'. Such bindings are matched against bindings stored in the exchange.
-- Clients of a /router/ exchange are identified by a binding, mapped to
-- one or more 'ProcessId's.
--
-- The format of the bindings, nature of their storage and mechanism for
-- submitting new bindings is implementation dependent (i.e., will vary by
-- exchange type). For example, the 'messageKeyRouter' and 'headerContentRouter'
-- implementations both use the 'Binding' data type, which can represent a
-- 'Message' key or a 'HeaderName' and content. As with all custom exchange
-- types, bindings should be submitted by evaluating 'configureExchange' with
-- a suitable data type.
--
router :: (Bindable k) => RelayType -> BindingSelector k -> Process Exchange
router t s = startExchange =<< routerT t s
-- | Like 'supervisedRouter', but also yields the resolved 'ProcessId'
-- and the exchange wrapped as a raw message.
supervisedRouterRef :: Bindable k
                    => RelayType
                    -> BindingSelector k
                    -> SupervisorPid
                    -> Process (ProcessId, P.Message)
supervisedRouterRef rt sel sup = do
  ex <- supervisedRouter rt sel sup
  Just pid <- resolve ex  -- pattern-match failure if the exchange can't be resolved
  return (pid, unsafeWrapMessage ex)
-- | Defines a /router/ that can be used in a supervision tree.
-- | Defines a /router/ that can be used in a supervision tree.
supervisedRouter :: Bindable k
                 => RelayType
                 -> BindingSelector k
                 -> SupervisorPid
                 -> Process Exchange
supervisedRouter t sel spid = do
  exType <- routerT t sel
  startSupervised exType spid
-- | Build the 'ExchangeType' for a router with an initially empty
-- binding table.
routerT :: Bindable k
        => RelayType
        -> BindingSelector k
        -> Process (Router k)
routerT t s =
  return ExchangeType { name        = "Router"
                      , state       = State Map.empty s t
                      , configureEx = apiConfigure
                      , routeEx     = apiRoute
                      }
--------------------------------------------------------------------------------
-- Client Facing API --
--------------------------------------------------------------------------------
-- | Add a binding (for the calling process) to a 'messageKeyRouter' exchange.
bindKey :: String -> Exchange -> Process ()
bindKey k ex = P.getSelfPid >>= \us -> configureExchange ex (us, BindKey k)
-- | Add a binding (for the calling process) to a 'headerContentRouter' exchange.
bindHeader :: HeaderName -> String -> Exchange -> Process ()
bindHeader n v ex = do
  us <- P.getSelfPid
  -- NB: field order is bindingKey = header value, headerName = header name
  configureExchange ex (us, BindHeader v n)
-- | Send a 'Serializable' message to the supplied 'Exchange'. The given datum
-- will be converted to a 'Message', with the 'key' set to @""@ and the
-- 'headers' to @[]@.
--
-- The routing behaviour will be dependent on the choice of 'BindingSelector'
-- given when initialising the /router/.
route :: Serializable m => Exchange -> m -> Process ()
route = post  -- synonym for 'post': wraps the datum in a 'Message' with empty key/headers
-- | Send a 'Message' to the supplied 'Exchange'.
-- The routing behaviour will be dependent on the choice of 'BindingSelector'
-- given when initialising the /router/.
routeMessage :: Exchange -> Message -> Process ()
routeMessage = postMessage  -- synonym for 'postMessage'
--------------------------------------------------------------------------------
-- Exchage Definition/State & API Handlers --
--------------------------------------------------------------------------------
-- | Route one 'Message': derive its binding via the exchange's selector
-- and relay it to every process bound under that key. The state is
-- returned unchanged.
apiRoute :: forall k. Bindable k
         => State k
         -> Message
         -> Process (State k)
apiRoute st@State{..} msg = do
  key' <- selector msg
  case Map.lookup key' bindings of
    Nothing   -> return ()
    Just pids -> forM_ pids dispatch
  return st
  where
    -- Hand over the whole 'Message' or just its payload, depending on
    -- how the router was configured.
    dispatch = case relayType of
                 WholeMessage -> deliver msg
                 PayloadOnly  -> P.forward (payload msg)
-- TODO: implement 'unbind' ???
-- TODO: apiConfigure currently leaks memory if clients die (we don't cleanup)
-- | Handle configuration messages: either a new (pid, binding) pair or a
-- monitor notification for a subscriber that died.
apiConfigure :: forall k. Bindable k
             => State k
             -> P.Message
             -> Process (State k)
apiConfigure st msg = do
  applyHandlers st msg $ [ \m -> handleMessage m (createBinding st)
                         , \m -> handleMessage m (handleMonitorSignal st)
                         ]
  where
    -- Register pid under the binding; a monitor is established only the
    -- first time the binding key is seen.
    createBinding s@State{..} (pid, bind) = do
      case Map.lookup bind bindings of
        Nothing -> do _ <- monitor pid
                      return $ s { bindings = newBind bind pid bindings }
        Just ps -> return $ s { bindings = addBind bind pid bindings ps }

    newBind b p bs = Map.insert b (Set.singleton p) bs
    addBind b' p' bs' ps = Map.insert b' (Set.insert p' ps) bs'

    -- On subscriber death, remove its pid from every binding's set.
    -- NOTE(review): binding keys whose sets become empty are never pruned,
    -- which is the leak flagged in the TODO above.
    handleMonitorSignal s@State{..} (ProcessMonitorNotification _ p _) =
      let bs = bindings
          bs' = Map.foldlWithKey' (\a k v -> Map.insert k (Set.delete p v) a) bs bs
      in return $ s { bindings = bs' }
| haskell-distributed/distributed-process-platform | src/Control/Distributed/Process/Platform/Execution/Exchange/Router.hs | bsd-3-clause | 9,019 | 0 | 17 | 2,158 | 1,720 | 940 | 780 | -1 | -1 |
{-------------------------------------------------------------------------------
DSem.VectorSpace
Vector space model interface
(c) 2013 Jan Snajder <[email protected]>
-------------------------------------------------------------------------------}
{-# LANGUAGE MultiParamTypeClasses, FunctionalDependencies, FlexibleInstances #-}
module DSem.VectorSpace
( existsTarget
, module DSem.Vector
, getTargetVector
, Model (..)
, ModelIO
, ModelPure
, runModelIO
, runModelIO2
, runModelPure
, Targetable (..) ) where
import Control.Applicative
import Control.Monad
import Control.Monad.Reader
import Control.Monad.State.Strict
import Data.Maybe
import qualified DSem.Vector
import DSem.Vector (Vector)
class (Monad m, Vector v) =>
Model m t c v | m -> v, m -> t, m -> c where
getVector :: t -> m (Maybe v)
getDim :: m (Int,Int)
getTargets :: m [t]
getContexts :: m [c]
class Targetable a t where
toTarget :: a -> t
fromTarget :: t -> a
-- | Look up the vector for any value convertible to a target.
getTargetVector :: (Model m t c v, Targetable a t) => a -> m (Maybe v)
getTargetVector x = getVector (toTarget x)
type ModelIO a = StateT a IO
type ModelPure a = Reader a
runModelPure :: a -> ModelPure a b -> b
runModelPure model act = runReader act model
runModelIO :: a -> ModelIO a b -> IO b
runModelIO model act = evalStateT act model
runModelIO2 :: a -> ReaderT a IO b -> IO b
runModelIO2 model act = runReaderT act model
-- | True iff the model holds a vector for the given target.
existsTarget :: Model m t c v => t -> m Bool
existsTarget t = getVector t >>= return . isJust
{-
targetMarginals :: (VectModel m t v, Ord t) => m -> M.Map t Double
targetMarginals m =
M.fromList [ (t, sum $ V.toList v) | (t,v) <- toList m]
contextMarginals :: VectModel m t v => m -> v
contextMarginals = V.sum . map snd . toList
-- todo: conflate into a single function, with LMI | PMI ...
{-
lmiWeighting :: (Ord t, DModel m t v) => m -> m
lmiWeighting m = fromList . map f $ toList m
where tm = targetMarginals m
cm = contextMarginals m
n = sum $ V.toList cm
f (t,v) = (t,vzip (\fx fxy ->
lmi n fx (M.findWithDefault 0 t tm) fxy) cm v)
-}
lmi :: Double -> Double -> Double -> Double -> Double
lmi n fx fy fxy
| n * fx * fy * fxy == 0 = 0
| otherwise = fxy * (log fxy + log n - log fx - log fy)
-}
| jsnajder/dsem | src/DSem/VectorSpace.hs | bsd-3-clause | 2,231 | 0 | 10 | 499 | 425 | 237 | 188 | 40 | 1 |
module Main where
import Multiarg.Examples.Grover
import System.Environment
main :: IO ()
main = do
  argv <- getArgs
  print (parseGrover argv)
| massysett/multiarg | tests/grover-main.hs | bsd-3-clause | 153 | 0 | 8 | 29 | 51 | 27 | 24 | 7 | 1 |
{-
(c) The University of Glasgow, 2004-2006
Module
~~~~~~~~~~
Simply the name of a module, represented as a FastString.
These are Uniquable, hence we can build Maps with Modules as
the keys.
-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE RecordWildCards #-}
module Module
(
-- * The ModuleName type
ModuleName,
pprModuleName,
moduleNameFS,
moduleNameString,
moduleNameSlashes, moduleNameColons,
moduleStableString,
mkModuleName,
mkModuleNameFS,
stableModuleNameCmp,
-- * The UnitId type
UnitId,
fsToUnitId,
unitIdFS,
stringToUnitId,
unitIdString,
stableUnitIdCmp,
-- * Wired-in UnitIds
-- $wired_in_packages
primUnitId,
integerUnitId,
baseUnitId,
rtsUnitId,
thUnitId,
dphSeqUnitId,
dphParUnitId,
mainUnitId,
thisGhcUnitId,
holeUnitId, isHoleModule,
interactiveUnitId, isInteractiveModule,
wiredInUnitIds,
-- * The Module type
Module(Module),
moduleUnitId, moduleName,
pprModule,
mkModule,
stableModuleCmp,
HasModule(..),
ContainsModule(..),
-- * The ModuleLocation type
ModLocation(..),
addBootSuffix, addBootSuffix_maybe, addBootSuffixLocn,
-- * Module mappings
ModuleEnv,
elemModuleEnv, extendModuleEnv, extendModuleEnvList,
extendModuleEnvList_C, plusModuleEnv_C,
delModuleEnvList, delModuleEnv, plusModuleEnv, lookupModuleEnv,
lookupWithDefaultModuleEnv, mapModuleEnv, mkModuleEnv, emptyModuleEnv,
moduleEnvKeys, moduleEnvElts, moduleEnvToList,
unitModuleEnv, isEmptyModuleEnv,
foldModuleEnv, extendModuleEnvWith, filterModuleEnv,
-- * ModuleName mappings
ModuleNameEnv,
-- * Sets of Modules
ModuleSet,
emptyModuleSet, mkModuleSet, moduleSetElts, extendModuleSet, elemModuleSet
) where
import Config
import Outputable
import Unique
import UniqFM
import FastString
import Binary
import Util
import Data.List
import Data.Ord
import {-# SOURCE #-} Packages
import GHC.PackageDb (BinaryStringRep(..))
import Control.DeepSeq
import Data.Coerce
import Data.Data
import Data.Map (Map)
import qualified Data.Map as Map
import qualified FiniteMap as Map
import Data.Set (Set)
import qualified Data.Set as Set
import System.FilePath
-- Note [The identifier lexicon]
-- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-- Package keys, installed package IDs, ABI hashes, package names,
-- versions, there are a *lot* of different identifiers for closely
-- related things. What do they all mean? Here's what. (See also
-- https://ghc.haskell.org/trac/ghc/wiki/Commentary/Packages/Concepts )
--
-- THE IMPORTANT ONES
--
-- ComponentId: An opaque identifier provided by Cabal, which should
-- uniquely identify such things as the package name, the package
-- version, the name of the component, the hash of the source code
-- tarball, the selected Cabal flags, GHC flags, direct dependencies of
-- the component. These are very similar to InstalledPackageId, but
-- an 'InstalledPackageId' implies that it identifies a package, while
-- a package may install multiple components with different
-- 'ComponentId's.
-- - Same as Distribution.Package.ComponentId
--
-- UnitId: A ComponentId + a mapping from hole names (ModuleName) to
-- Modules. This is how the compiler identifies instantatiated
-- components, and also is the main identifier by which GHC identifies
-- things.
-- - When Backpack is not being used, UnitId = ComponentId.
-- this means a useful fiction for end-users is that there are
-- only ever ComponentIds, and some ComponentIds happen to have
-- more information (UnitIds).
-- - Same as Language.Haskell.TH.Syntax:PkgName, see
-- https://ghc.haskell.org/trac/ghc/ticket/10279
-- - The same as PackageKey in GHC 7.10 (we renamed it because
-- they don't necessarily identify packages anymore.)
-- - Same as -this-package-key/-package-name flags
--
-- Module: A UnitId + ModuleName. This is how the compiler identifies
-- modules (e.g. a Name is a Module + OccName)
-- - Same as Language.Haskell.TH.Syntax:Module
--
-- THE LESS IMPORTANT ONES
--
-- PackageName: The "name" field in a Cabal file, something like "lens".
-- - Same as Distribution.Package.PackageName
-- - DIFFERENT FROM Language.Haskell.TH.Syntax:PkgName, see
-- https://ghc.haskell.org/trac/ghc/ticket/10279
-- - DIFFERENT FROM -package-name flag
-- - DIFFERENT FROM the 'name' field in an installed package
-- information. This field could more accurately be described
-- as a munged package name: when it's for the main library
-- it is the same as the package name, but if it's an internal
-- library it's a munged combination of the package name and
-- the component name.
--
-- LEGACY ONES
--
-- InstalledPackageId: This is what we used to call ComponentId.
-- It's a still pretty useful concept for packages that have only
-- one library; in that case the logical InstalledPackageId =
-- ComponentId. Also, the Cabal nix-local-build continues to
-- compute an InstalledPackageId which is then forcibly used
-- for all components in a package. This means that if a dependency
-- from one component in a package changes, the InstalledPackageId
-- changes: you don't get as fine-grained dependency tracking,
-- but it means your builds are hermetic. Eventually, Cabal will
-- deal completely in components and we can get rid of this.
--
-- PackageKey: This is what we used to call UnitId. We ditched
-- "Package" from the name when we realized that you might want to
-- assign different "PackageKeys" to components from the same package.
-- (For a brief, non-released period of time, we also called these
-- UnitKeys).
{-
************************************************************************
* *
\subsection{Module locations}
* *
************************************************************************
-}
-- | Where a module lives on the file system: the actual locations
-- of the .hs, .hi and .o files, if we have them
data ModLocation
= ModLocation {
ml_hs_file :: Maybe FilePath,
-- The source file, if we have one. Package modules
-- probably don't have source files.
ml_hi_file :: FilePath,
-- Where the .hi file is, whether or not it exists
-- yet. Always of form foo.hi, even if there is an
-- hi-boot file (we add the -boot suffix later)
ml_obj_file :: FilePath
-- Where the .o file is, whether or not it exists yet.
-- (might not exist either because the module hasn't
-- been compiled yet, or because it is part of a
-- package with a .a file)
} deriving Show
instance Outputable ModLocation where
ppr = text . show
{-
For a module in another package, the hs_file and obj_file
components of ModLocation are undefined.
The locations specified by a ModLocation may or may not
correspond to actual files yet: for example, even if the object
file doesn't exist, the ModLocation still contains the path to
where the object file will reside if/when it is created.
-}
addBootSuffix :: FilePath -> FilePath
-- ^ Append the @-boot@ suffix used for .hs-boot related files
addBootSuffix = (++ "-boot")
addBootSuffix_maybe :: Bool -> FilePath -> FilePath
-- ^ Add the @-boot@ suffix only when the flag is @True@
addBootSuffix_maybe is_boot path
  | is_boot   = path ++ "-boot"   -- inlined 'addBootSuffix'
  | otherwise = path
addBootSuffixLocn :: ModLocation -> ModLocation
-- ^ Add the @-boot@ suffix to every file path in the location
addBootSuffixLocn loc =
  loc { ml_hs_file  = fmap suffixed (ml_hs_file loc)
      , ml_hi_file  = suffixed (ml_hi_file loc)
      , ml_obj_file = suffixed (ml_obj_file loc) }
  where
    suffixed = addBootSuffix
{-
************************************************************************
* *
\subsection{The name of a module}
* *
************************************************************************
-}
-- | A ModuleName is essentially a simple string, e.g. @Data.List@.
newtype ModuleName = ModuleName FastString
deriving Typeable
instance Uniquable ModuleName where
getUnique (ModuleName nm) = getUnique nm
instance Eq ModuleName where
nm1 == nm2 = getUnique nm1 == getUnique nm2
instance Ord ModuleName where
nm1 `compare` nm2 = stableModuleNameCmp nm1 nm2
instance Outputable ModuleName where
ppr = pprModuleName
instance Binary ModuleName where
put_ bh (ModuleName fs) = put_ bh fs
get bh = do fs <- get bh; return (ModuleName fs)
instance BinaryStringRep ModuleName where
fromStringRep = mkModuleNameFS . mkFastStringByteString
toStringRep = fastStringToByteString . moduleNameFS
instance Data ModuleName where
-- don't traverse?
toConstr _ = abstractConstr "ModuleName"
gunfold _ _ = error "gunfold"
dataTypeOf _ = mkNoRepType "ModuleName"
instance NFData ModuleName where
rnf x = x `seq` ()
stableModuleNameCmp :: ModuleName -> ModuleName -> Ordering
-- ^ Compare module names lexically (by their 'FastString's), not by 'Unique'
stableModuleNameCmp n1 n2 = compare (moduleNameFS n1) (moduleNameFS n2)
pprModuleName :: ModuleName -> SDoc
-- Code style emits the z-encoded name (the form that appears in generated
-- symbols); any other style prints the name verbatim.
pprModuleName (ModuleName nm) =
    getPprStyle $ \ sty ->
    if codeStyle sty
        then ztext (zEncodeFS nm)
        else ftext nm
moduleNameFS :: ModuleName -> FastString
moduleNameFS (ModuleName mod) = mod
moduleNameString :: ModuleName -> String
moduleNameString (ModuleName mod) = unpackFS mod
-- | Get a string representation of a 'Module' that's unique and stable
-- across recompilations.
-- eg. "$aeson_70dylHtv1FFGeai1IoxcQr$Data.Aeson.Types.Internal"
moduleStableString :: Module -> String
moduleStableString (Module uid modname) =
  "$" ++ unitIdString uid ++ "$" ++ moduleNameString modname
mkModuleName :: String -> ModuleName
mkModuleName s = ModuleName (mkFastString s)
mkModuleNameFS :: FastString -> ModuleName
mkModuleNameFS s = ModuleName s
-- |Returns the string version of the module name, with dots replaced by slashes.
--
moduleNameSlashes :: ModuleName -> String
moduleNameSlashes = map slash . moduleNameString
  where slash '.' = pathSeparator
        slash c   = c
-- |Returns the string version of the module name, with dots replaced by underscores.
--
moduleNameColons :: ModuleName -> String
moduleNameColons = map colon . moduleNameString
  where colon '.' = ':'
        colon c   = c
{-
************************************************************************
* *
\subsection{A fully qualified module}
* *
************************************************************************
-}
-- | A Module is a pair of a 'UnitId' and a 'ModuleName'.
data Module = Module {
moduleUnitId :: !UnitId, -- pkg-1.0
moduleName :: !ModuleName -- A.B.C
}
deriving (Eq, Ord, Typeable)
instance Uniquable Module where
getUnique (Module p n) = getUnique (unitIdFS p `appendFS` moduleNameFS n)
instance Outputable Module where
ppr = pprModule
instance Binary Module where
put_ bh (Module p n) = put_ bh p >> put_ bh n
get bh = do p <- get bh; n <- get bh; return (Module p n)
instance Data Module where
-- don't traverse?
toConstr _ = abstractConstr "Module"
gunfold _ _ = error "gunfold"
dataTypeOf _ = mkNoRepType "Module"
instance NFData Module where
rnf x = x `seq` ()
-- | This gives a stable ordering, as opposed to the Ord instance which
-- gives an ordering based on the 'Unique's of the components, which may
-- not be stable from run to run of the compiler.
stableModuleCmp :: Module -> Module -> Ordering
stableModuleCmp (Module p1 n1) (Module p2 n2) =
  -- unit id first; fall through to the module name only on a tie
  case stableUnitIdCmp p1 p2 of
    EQ    -> stableModuleNameCmp n1 n2
    other -> other
-- | Construct a 'Module' from a package id and a module name.
mkModule :: UnitId -> ModuleName -> Module
mkModule = Module
-- | Pretty-print a module, optionally prefixed by its package
-- (see 'pprPackagePrefix').
pprModule :: Module -> SDoc
pprModule mod@(Module p n) =
  pprPackagePrefix p mod <> pprModuleName n
-- | The package prefix printed before a module name; what is printed
-- depends on the current pretty-printing style.
pprPackagePrefix :: UnitId -> Module -> SDoc
pprPackagePrefix p mod = getPprStyle doc
 where
   doc sty
     | codeStyle sty =
        if p == mainUnitId
                then empty -- never qualify the main package in code
                else ztext (zEncodeFS (unitIdFS p)) <> char '_'
     | qualModule sty mod = ppr (moduleUnitId mod) <> char ':'
                -- the PrintUnqualified tells us which modules have to
                -- be qualified with package names
     | otherwise = empty
-- | Things from which a 'Module' can be extracted directly.
class ContainsModule t where
    extractModule :: t -> Module
-- | Monads that know which 'Module' is currently being compiled.
class HasModule m where
    getModule :: m Module
{-
************************************************************************
* *
\subsection{UnitId}
* *
************************************************************************
-}
-- | A string which uniquely identifies a package. For wired-in packages,
-- it is just the package name, but for user compiled packages, it is a hash.
-- ToDo: when the key is a hash, we can do more clever things than store
-- the hex representation and hash-cons those strings.
newtype UnitId = PId FastString deriving( Eq, Typeable )
  -- here to avoid module loops with PackageConfig
instance Uniquable UnitId where
  getUnique pid = getUnique (unitIdFS pid)
-- | Ordering is the stable, lexical one (see 'stableUnitIdCmp'),
-- not the nondeterministic 'Unique'-based one.
instance Ord UnitId where
  nm1 `compare` nm2 = stableUnitIdCmp nm1 nm2
-- | Deliberately abstract: generic traversals do not look inside a UnitId.
instance Data UnitId where
  -- don't traverse?
  toConstr _ = abstractConstr "UnitId"
  gunfold _ _ = error "gunfold"
  dataTypeOf _ = mkNoRepType "UnitId"
-- | The wrapped 'FastString' is already forced when the newtype is in WHNF.
instance NFData UnitId where
  rnf x = x `seq` ()
-- | Compare package ids lexically by their underlying 'FastString's,
-- rather than by their 'Unique's; stable across compiler runs.
stableUnitIdCmp :: UnitId -> UnitId -> Ordering
stableUnitIdCmp a b = compare (unitIdFS a) (unitIdFS b)
-- | Prints the source package id when the 'DynFlags' can resolve one,
-- optionally suffixed with @\@unit-id@; otherwise falls back to the raw id.
instance Outputable UnitId where
   ppr pk = getPprStyle $ \sty -> sdocWithDynFlags $ \dflags ->
    case unitIdPackageIdString dflags pk of
      Nothing -> ftext (unitIdFS pk)
      Just pkg -> text pkg
           -- Don't bother qualifying if it's wired in!
           <> (if qualPackage sty pk && not (pk `elem` wiredInUnitIds)
                then char '@' <> ftext (unitIdFS pk)
                else empty)
-- | Serialised as the bare 'FastString'.
instance Binary UnitId where
  put_ bh pid = put_ bh (unitIdFS pid)
  get bh = do { fs <- get bh; return (fsToUnitId fs) }
instance BinaryStringRep UnitId where
  fromStringRep = fsToUnitId . mkFastStringByteString
  toStringRep = fastStringToByteString . unitIdFS
-- | Wrap a 'FastString' as a unit id (no validation performed).
fsToUnitId :: FastString -> UnitId
fsToUnitId = PId
-- | The underlying 'FastString' of a unit id.
unitIdFS :: UnitId -> FastString
unitIdFS (PId fs) = fs
-- | Intern a 'String' as a unit id.
stringToUnitId :: String -> UnitId
stringToUnitId = fsToUnitId . mkFastString
-- | Unpack a unit id back to a 'String'.
unitIdString :: UnitId -> String
unitIdString = unpackFS . unitIdFS
-- -----------------------------------------------------------------------------
-- $wired_in_packages
-- Certain packages are known to the compiler, in that we know about certain
-- entities that reside in these packages, and the compiler needs to
-- declare static Modules and Names that refer to these packages. Hence
-- the wired-in packages can't include version numbers, since we don't want
-- to bake the version numbers of these packages into GHC.
--
-- So here's the plan. Wired-in packages are still versioned as
-- normal in the packages database, and you can still have multiple
-- versions of them installed. However, for each invocation of GHC,
-- only a single instance of each wired-in package will be recognised
-- (the desired one is selected via @-package@\/@-hide-package@), and GHC
-- will use the unversioned 'UnitId' below when referring to it,
-- including in .hi files and object file symbols. Unselected
-- versions of wired-in packages will be ignored, as will any other
-- package that depends directly or indirectly on it (much as if you
-- had used @-ignore-package@).
-- Make sure you change 'Packages.findWiredInPackages' if you add an entry here
-- Unversioned ids for the wired-in packages; see the commentary above
-- ($wired_in_packages) for why these carry no version.
integerUnitId, primUnitId,
  baseUnitId, rtsUnitId,
  thUnitId, dphSeqUnitId, dphParUnitId,
  mainUnitId, thisGhcUnitId, interactiveUnitId :: UnitId
primUnitId = fsToUnitId (fsLit "ghc-prim")
-- The integer package id depends on which integer library GHC was built
-- against (cIntegerLibraryType is a build-time constant).
integerUnitId = fsToUnitId (fsLit n)
  where
    n = case cIntegerLibraryType of
        IntegerGMP -> "integer-gmp"
        IntegerSimple -> "integer-simple"
baseUnitId = fsToUnitId (fsLit "base")
rtsUnitId = fsToUnitId (fsLit "rts")
thUnitId = fsToUnitId (fsLit "template-haskell")
dphSeqUnitId = fsToUnitId (fsLit "dph-seq")
dphParUnitId = fsToUnitId (fsLit "dph-par")
thisGhcUnitId = fsToUnitId (fsLit "ghc")
interactiveUnitId = fsToUnitId (fsLit "interactive")
-- | This is the package Id for the current program. It is the default
-- package Id if you don't specify a package name. We don't add this prefix
-- to symbol names, since there can be only one main package per program.
mainUnitId = fsToUnitId (fsLit "main")
-- | This is a fake package id used to provide identities to any un-implemented
-- signatures. The set of hole identities is global over an entire compilation.
holeUnitId :: UnitId
holeUnitId = fsToUnitId (fsLit "hole")
-- | True iff the module lives in the special @interactive@ package.
isInteractiveModule :: Module -> Bool
isInteractiveModule m = moduleUnitId m == interactiveUnitId
-- | True iff the module lives in the fake @hole@ package used for
-- un-implemented signatures.
isHoleModule :: Module -> Bool
isHoleModule m = moduleUnitId m == holeUnitId
-- | All unit ids the compiler has wired-in knowledge of.
-- Note: 'mainUnitId', 'interactiveUnitId' and 'holeUnitId' are deliberately
-- not in this list.
wiredInUnitIds :: [UnitId]
wiredInUnitIds = [ primUnitId,
                   integerUnitId,
                   baseUnitId,
                   rtsUnitId,
                   thUnitId,
                   thisGhcUnitId,
                   dphSeqUnitId,
                   dphParUnitId ]
{-
************************************************************************
* *
\subsection{@ModuleEnv@s}
* *
************************************************************************
-}
-- | A map keyed off of 'Module's.  Internally keyed by 'NDModule' so that
-- lookups use the fast nondeterministic ordering; see
-- Note [ModuleEnv performance and determinism].
newtype ModuleEnv elt = ModuleEnv (Map NDModule elt)
{-
Note [ModuleEnv performance and determinism]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
To prevent accidental reintroduction of nondeterminism the Ord instance
for Module was changed to not depend on Unique ordering and to use the
lexicographic order. This is potentially expensive, but when measured
there was no difference in performance.
To be on the safe side and not pessimize ModuleEnv uses nondeterministic
ordering on Module and normalizes by doing the lexicographic sort when
turning the env to a list.
See Note [Unique Determinism] for more information about the source of
nondeterminism and Note [Deterministic UniqFM] for explanation of why
it matters for maps.
-}
-- | Wrapper giving 'Module' a fast but nondeterministic 'Ord'.
newtype NDModule = NDModule { unNDModule :: Module }
  deriving Eq
-- A wrapper for Module with faster nondeterministic Ord.
-- Don't export, See [ModuleEnv performance and determinism]
instance Ord NDModule where
  compare (NDModule (Module p1 n1)) (NDModule (Module p2 n2)) =
    (getUnique p1 `compare` getUnique p2) `thenCmp`
    (getUnique n1 `compare` getUnique n2)
-- | Keep only the entries whose key/value satisfy the predicate.
filterModuleEnv :: (Module -> a -> Bool) -> ModuleEnv a -> ModuleEnv a
filterModuleEnv f (ModuleEnv e) =
  ModuleEnv (Map.filterWithKey (f . unNDModule) e)
-- | Is the module a key of the environment?
elemModuleEnv :: Module -> ModuleEnv a -> Bool
elemModuleEnv m (ModuleEnv e) = Map.member (NDModule m) e
-- | Insert, overwriting any existing entry for the module.
extendModuleEnv :: ModuleEnv a -> Module -> a -> ModuleEnv a
extendModuleEnv (ModuleEnv e) m x = ModuleEnv (Map.insert (NDModule m) x e)
-- | Insert, combining with any existing entry using the given function
-- (new value is the first argument to the combiner).
extendModuleEnvWith :: (a -> a -> a) -> ModuleEnv a -> Module -> a
                    -> ModuleEnv a
extendModuleEnvWith f (ModuleEnv e) m x =
  ModuleEnv (Map.insertWith f (NDModule m) x e)
-- | Insert many entries, overwriting duplicates.
extendModuleEnvList :: ModuleEnv a -> [(Module, a)] -> ModuleEnv a
extendModuleEnvList (ModuleEnv e) xs =
  ModuleEnv (Map.insertList [(NDModule k, v) | (k,v) <- xs] e)
-- | Insert many entries, combining duplicates with the given function.
extendModuleEnvList_C :: (a -> a -> a) -> ModuleEnv a -> [(Module, a)]
                      -> ModuleEnv a
extendModuleEnvList_C f (ModuleEnv e) xs =
  ModuleEnv (Map.insertListWith f [(NDModule k, v) | (k,v) <- xs] e)
-- | Union of two environments, combining clashing entries with the function.
plusModuleEnv_C :: (a -> a -> a) -> ModuleEnv a -> ModuleEnv a -> ModuleEnv a
plusModuleEnv_C f (ModuleEnv e1) (ModuleEnv e2) =
  ModuleEnv (Map.unionWith f e1 e2)
-- | Delete all the given modules from the environment.
delModuleEnvList :: ModuleEnv a -> [Module] -> ModuleEnv a
delModuleEnvList (ModuleEnv e) ms =
  ModuleEnv (Map.deleteList (map NDModule ms) e)
-- | Delete a single module from the environment.
delModuleEnv :: ModuleEnv a -> Module -> ModuleEnv a
delModuleEnv (ModuleEnv e) m = ModuleEnv (Map.delete (NDModule m) e)
-- | Left-biased union of two environments.
plusModuleEnv :: ModuleEnv a -> ModuleEnv a -> ModuleEnv a
plusModuleEnv (ModuleEnv e1) (ModuleEnv e2) = ModuleEnv (Map.union e1 e2)
-- | Look up a module's entry, if present.
lookupModuleEnv :: ModuleEnv a -> Module -> Maybe a
lookupModuleEnv (ModuleEnv e) m = Map.lookup (NDModule m) e
-- | Look up a module's entry, falling back to the given default.
lookupWithDefaultModuleEnv :: ModuleEnv a -> a -> Module -> a
lookupWithDefaultModuleEnv (ModuleEnv e) x m =
  Map.findWithDefault x (NDModule m) e
-- | Map a function over every element of the environment.
mapModuleEnv :: (a -> b) -> ModuleEnv a -> ModuleEnv b
mapModuleEnv f (ModuleEnv e) = ModuleEnv (Map.mapWithKey (\_ v -> f v) e)
-- | Build an environment from a list; later entries overwrite earlier ones.
mkModuleEnv :: [(Module, a)] -> ModuleEnv a
mkModuleEnv xs = ModuleEnv (Map.fromList [(NDModule k, v) | (k,v) <- xs])
emptyModuleEnv :: ModuleEnv a
emptyModuleEnv = ModuleEnv Map.empty
-- | All keys, sorted with the stable lexical 'Ord' on 'Module'.
moduleEnvKeys :: ModuleEnv a -> [Module]
moduleEnvKeys (ModuleEnv e) = sort $ map unNDModule $ Map.keys e
  -- See Note [ModuleEnv performance and determinism]
-- | All elements, in the deterministic order of 'moduleEnvToList'.
moduleEnvElts :: ModuleEnv a -> [a]
moduleEnvElts e = map snd $ moduleEnvToList e
  -- See Note [ModuleEnv performance and determinism]
-- | All entries, sorted deterministically by key.
moduleEnvToList :: ModuleEnv a -> [(Module, a)]
moduleEnvToList (ModuleEnv e) =
  sortBy (comparing fst) [(m, v) | (NDModule m, v) <- Map.toList e]
  -- See Note [ModuleEnv performance and determinism]
-- | Environment with exactly one entry.
unitModuleEnv :: Module -> a -> ModuleEnv a
unitModuleEnv m x = ModuleEnv (Map.singleton (NDModule m) x)
isEmptyModuleEnv :: ModuleEnv a -> Bool
isEmptyModuleEnv (ModuleEnv e) = Map.null e
-- | Right fold over the elements (keys are ignored).
foldModuleEnv :: (a -> b -> b) -> b -> ModuleEnv a -> b
foldModuleEnv f x (ModuleEnv e) = Map.foldRightWithKey (\_ v -> f v) x e
-- | A set of 'Module's.  Stored as 'NDModule' for the fast nondeterministic
-- ordering; 'moduleSetElts' re-sorts for a deterministic result.
type ModuleSet = Set NDModule
mkModuleSet :: [Module] -> ModuleSet
extendModuleSet :: ModuleSet -> Module -> ModuleSet
emptyModuleSet :: ModuleSet
moduleSetElts :: ModuleSet -> [Module]
elemModuleSet :: Module -> ModuleSet -> Bool
emptyModuleSet = Set.empty
-- 'coerce' converts [Module] <-> [NDModule] at zero cost.
mkModuleSet = Set.fromList . coerce
extendModuleSet s m = Set.insert (NDModule m) s
-- Sorted with the stable 'Ord' on 'Module' for determinism.
moduleSetElts = sort . coerce . Set.toList
elemModuleSet = Set.member . coerce
{-
A ModuleName has a Unique, so we can build mappings of these using
UniqFM.
-}
-- | A map keyed off of 'ModuleName's (actually, their 'Unique's).
-- Backed by 'UniqFM', so iteration order follows the keys' 'Unique's.
type ModuleNameEnv elt = UniqFM elt
| GaloisInc/halvm-ghc | compiler/basicTypes/Module.hs | bsd-3-clause | 23,424 | 0 | 19 | 5,392 | 4,025 | 2,198 | 1,827 | 321 | 2 |
module Graphics.Renderer where
import Graphics.Types
import Graphics.QuadRenderer
import Graphics.TextRenderer
import Graphics.Rendering.OpenGL
import Data.List (intercalate)
-- | Query the OpenGL implementation and print vendor, renderer,
-- version, GLSL version and the extension list to stdout.
printGraphicStats :: IO ()
printGraphicStats = do
    vendorInfo <- get vendor
    rendererInfo <- get renderer
    versionInfo <- get glVersion
    extensionList <- get glExtensions
    glslVersion <- get shadingLanguageVersion
    -- one line per item; printing each line is byte-identical to
    -- joining with '\n' and printing once
    mapM_ putStrLn
        [ "Vendor:" ++ vendorInfo
        , "Renderer:" ++ rendererInfo
        , "OpenGL Version:" ++ versionInfo
        , "GLSL Version:" ++ glslVersion
        , "Extensions:\n [ " ++ intercalate "\n , " extensionList ++ "\n ]"
        ]
-- | Set the GL state this renderer assumes: alpha blending on,
-- standard over-compositing blend function, depth testing off.
setGraphicDefaults :: IO ()
setGraphicDefaults = do
    blend $= Enabled
    blendFunc $= (SrcAlpha, OneMinusSrcAlpha)
    depthFunc $= Nothing
-- | Build the renderer: report GL info, set defaults, then initialise the
-- quad and text sub-renderers.  The 'FilePath' is passed to the text
-- renderer (presumably a font resource — confirm at the call site).
initRenderer :: FilePath -> IO Renderer
initRenderer fp = do
    printGraphicStats
    setGraphicDefaults
    quadRndr <- initQuadRenderer
    textRndr <- initTextRenderer fp
    -- screen size starts at (0,0); callers are expected to update it later
    return $ Renderer { _screenSize = (0,0)
                      , _quadRndr = quadRndr
                      , _textRndr = textRndr
                      }
| schell/blocks | src/Graphics/Renderer.hs | bsd-3-clause | 1,351 | 0 | 12 | 503 | 277 | 141 | 136 | 32 | 1 |
-- Copyright (c) 2012, Christoph Pohl
-- BSD License (see http://www.opensource.org/licenses/BSD-3-Clause)
-------------------------------------------------------------------------------
--
-- Project Euler Problem 6
--
-- The sum of the squares of the first ten natural numbers is,
-- 1² + 2² + ... + 10² = 385
--
-- The square of the sum of the first ten natural numbers is,
-- (1 + 2 + ... + 10)² = 55² = 3025
--
-- Hence the difference between the sum of the squares of the first ten natural
-- numbers and the square of the sum is 3025 − 385 = 2640.
--
-- Find the difference between the sum of the squares of the first one hundred
-- natural numbers and the square of the sum.
module Main where
-- | Print the answer to Project Euler problem 6.
main :: IO ()
main = print result

-- | Difference between the square of the sum and the sum of the squares
-- of the first one hundred natural numbers (= 25164150).
-- Explicit 'Integer' signatures are given so the code does not rely on
-- type defaulting (the values are well within 'Integer' range anyway).
result :: Integer
result = squareOfSum - sumOfSquares

-- | @(1 + 2 + ... + 100)^2@
squareOfSum :: Integer
squareOfSum = (sum [1..100])^2

-- | @1^2 + 2^2 + ... + 100^2@
sumOfSquares :: Integer
sumOfSquares = sum (map (^2) [1..100])
| Psirus/euler | src/euler006.hs | bsd-3-clause | 856 | 0 | 8 | 157 | 93 | 59 | 34 | 6 | 1 |
{-# LANGUAGE DeriveGeneric #-}
module Package (Package(..)) where
import GHC.Generics
import qualified Data.Yaml as Yaml
import Description
-- | Package metadata as parsed from a YAML document.
data Package = Package
  { version :: String -- ^ version string, kept verbatim
  -- , description :: Description
  }
  deriving (Show, Generic)
-- | Decoding comes for free from the 'Generic' instance.
instance Yaml.FromJSON Package
| angerman/stackage2nix | src/Package.hs | bsd-3-clause | 284 | 0 | 8 | 48 | 66 | 41 | 25 | 9 | 0 |
module Data.Shapefile.Types where
| tolysz/shapefile2json | src/Data/Shapefile/Types.hs | bsd-3-clause | 35 | 0 | 3 | 4 | 7 | 5 | 2 | 1 | 0 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RankNTypes #-}
module Test.Golden where
import Prelude ()
import Prelude.Compat
import Control.Exception (try)
import Control.Lens ((&), (.~))
import Control.Monad.IO.Class (MonadIO)
import Data.ByteString (ByteString)
import Data.ByteString.Lazy (fromStrict)
import qualified Data.ByteString.Lazy as LByteString
import Data.Foldable (fold)
import Data.Monoid ((<>))
import Pipes (Pipe, Producer, (>->))
import Pipes.Prelude (mapFoldable, toListM)
import System.Directory
(Permissions, emptyPermissions, removeFile, setOwnerReadable,
setOwnerSearchable, setOwnerWritable, setPermissions)
import System.IO (IOMode(WriteMode), hClose, openBinaryFile)
import System.IO.Error (isPermissionError)
import Test.Tasty (TestTree, testGroup, withResource)
import Test.Tasty.Golden (goldenVsString)
import Highlight.Common.Monad (Output(OutputStderr, OutputStdout))
import Highlight.Common.Options
(CommonOptions, IgnoreCase(IgnoreCase), Recursive(Recursive),
defaultCommonOptions, ignoreCaseLens, inputFilenamesLens,
rawRegexLens, recursiveLens)
import Highlight.Highlight.Monad (HighlightM, runHighlightM)
import Highlight.Highlight.Options
(ColorGrepFilenames(ColorGrepFilenames), Options,
colorGrepFilenamesLens, defaultOptions)
import Highlight.Highlight.Run (highlightOutputProducer)
import Highlight.Hrep.Monad (HrepM, runHrepM)
import Highlight.Hrep.Run (hrepOutputProducer)
import Highlight.Pipes (fromFileLines, stdinLines)
-- | Run the @highlight@ pipeline against the given stdin producer, keep
-- only what the filter pipe passes through, and concatenate the output.
-- Calls 'error' if the highlight monad reports a failure.
runHighlightTestWithStdin
  :: Options
  -> Producer ByteString HighlightM ()
  -> (forall m. Monad m => Pipe Output ByteString m ())
  -> IO LByteString.ByteString
runHighlightTestWithStdin opts stdinPipe filterPipe = do
  eitherByteStrings <- runHighlightM opts $ do
    outputProducer <- highlightOutputProducer stdinPipe
    toListM $ outputProducer >-> filterPipe
  case eitherByteStrings of
    Left err -> error $ "unexpected error: " <> show err
    Right byteStrings -> return . fromStrict $ fold byteStrings
-- | 'runHighlightTestWithStdin' specialised to the real stdin producer.
runHighlightTest
  :: Options
  -> (forall m. Monad m => Pipe Output ByteString m ())
  -> IO LByteString.ByteString
runHighlightTest opts =
  runHighlightTestWithStdin opts stdinLines
-- | Run the @hrep@ pipeline against the given stdin producer; mirrors
-- 'runHighlightTestWithStdin'.
runHrepTestWithStdin
  :: CommonOptions
  -> (Producer ByteString HrepM ())
  -> (forall m. Monad m => Pipe Output ByteString m ())
  -> IO LByteString.ByteString
runHrepTestWithStdin opts stdinPipe filterPipe = do
  eitherByteStrings <- runHrepM opts $ do
    outputProducer <- hrepOutputProducer stdinPipe
    toListM $ outputProducer >-> filterPipe
  case eitherByteStrings of
    Left err -> error $ "unexpected error: " <> show err
    Right byteStrings -> return . fromStrict $ fold byteStrings
-- | 'runHrepTestWithStdin' specialised to the real stdin producer.
runHrepTest
  :: CommonOptions
  -> (forall m. Monad m => Pipe Output ByteString m ())
  -> IO LByteString.ByteString
runHrepTest opts =
  runHrepTestWithStdin opts stdinLines
-- | Pass through only the stdout portion of the output stream.
filterStdout :: Monad m => Pipe Output ByteString m ()
filterStdout = mapFoldable pick
  where
    pick :: Output -> Maybe ByteString
    pick (OutputStdout byteString) = Just byteString
    pick (OutputStderr _) = Nothing
-- | Pass through only the stderr portion of the output stream.
filterStderr :: Monad m => Pipe Output ByteString m ()
filterStderr = mapFoldable pick
  where
    pick :: Output -> Maybe ByteString
    pick (OutputStderr byteString) = Just byteString
    pick (OutputStdout _) = Nothing
-- | Open a file as a line producer, turning any 'IOError' into a call
-- to 'error' (acceptable in test code).
getFileOutputProducer
  :: MonadIO m
  => FilePath -> IO (Producer ByteString m ())
getFileOutputProducer filePath = do
  eitherProducer <- fromFileLines filePath
  case eitherProducer of
    Left ioerr ->
      error $
        "ERROR: following error occured when trying to read \"" <>
        filePath <> "\": " <> show ioerr
    Right producer -> return producer
-- | Build a two-test group comparing the runner's stderr and stdout
-- against golden files @path.stderr@ and @path.stdout@.
testStderrAndStdout
  :: String
  -> FilePath
  -> ( ( forall m. Monad m => Pipe Output ByteString m ())
       -> IO LByteString.ByteString
     )
  -> TestTree
testStderrAndStdout msg path runner =
  testGroup
    msg
    [ goldenVsString "stderr" (path <> ".stderr") (runner filterStderr)
    , goldenVsString "stdout" (path <> ".stdout") (runner filterStdout)
    ]
-- | All golden tests; the unreadable fixture file is created before the
-- group runs and removed afterwards via 'withResource'.
goldenTests :: TestTree
goldenTests =
  withResource createUnreadableFile (const deleteUnreadableFile) $
    const (testGroup "golden tests" [highlightGoldenTests, hrepGoldenTests])
-- | Create 'unreadableFilePath' with no permissions at all, tolerating
-- the case where a previous run left it behind.
createUnreadableFile :: IO ()
createUnreadableFile = do
  eitherHandle <- try $ openBinaryFile unreadableFilePath WriteMode
  case eitherHandle of
    Right handle -> do
      hClose handle
      makeFileUnreadable unreadableFilePath
    Left ioerr
      | isPermissionError ioerr ->
          -- assume that the file already exists, and just try to make sure that
          -- the permissions are null
          makeFileUnreadable unreadableFilePath
      | otherwise ->
          -- we shouldn't have gotten an error here, so just rethrow it
          ioError ioerr
-- | Strip every permission bit from the file.
makeFileUnreadable :: FilePath -> IO ()
makeFileUnreadable filePath = setPermissions filePath emptyPermissions
-- | Restore owner read, write and search permissions on the file
-- (used before deleting the unreadable fixture).
makeFileReadable :: FilePath -> IO ()
makeFileReadable filePath =
  setPermissions filePath ownerPermissions
  where
    -- the three setters each flip an independent flag, so order is irrelevant
    ownerPermissions :: Permissions
    ownerPermissions =
      setOwnerReadable True
        (setOwnerSearchable True
          (setOwnerWritable True emptyPermissions))
-- | Make the fixture deletable again, then remove it; rethrows any
-- 'IOError' from the removal.
deleteUnreadableFile :: IO ()
deleteUnreadableFile = do
  makeFileReadable unreadableFilePath
  eitherRes <- try $ removeFile unreadableFilePath
  either ioError return eitherRes
-- | Location of the permission-less fixture file used by the tests.
unreadableFilePath :: FilePath
unreadableFilePath = "test/golden/test-files/dir2/unreadable-file"
---------------------
-- Highlight Tests --
---------------------
-- | Golden tests for the @highlight@ executable.
highlightGoldenTests :: TestTree
highlightGoldenTests =
  testGroup
    "highlight"
    [ testHighlightSingleFile
    , testHighlightMultiFile
    , testHighlightFromGrep
    ]
-- | Highlight a single known file and compare against golden output.
testHighlightSingleFile :: TestTree
testHighlightSingleFile =
  let opts =
        defaultOptions
          & rawRegexLens .~ "or"
          & inputFilenamesLens .~ ["test/golden/test-files/file1"]
  in testStderrAndStdout
       "`highlight or 'test/golden/test-files/file1'`"
       "test/golden/golden-files/highlight/single-file"
       (runHighlightTest opts)
-- | Recursive, case-insensitive highlight over two directories and an
-- empty file; exercises the unreadable-file error path on stderr.
testHighlightMultiFile :: TestTree
testHighlightMultiFile =
  let opts =
        defaultOptions
          & rawRegexLens .~ "and"
          & ignoreCaseLens .~ IgnoreCase
          & recursiveLens .~ Recursive
          & inputFilenamesLens .~
              [ "test/golden/test-files/dir1"
              , "test/golden/test-files/empty-file"
              , "test/golden/test-files/dir2"
              ]
      testName =
        "`touch 'test/golden/test-files/dir2/unreadable-file' ; " <>
        "chmod 0 'test/golden/test-files/dir2/unreadable-file' ; " <>
        "highlight --ignore-case --recursive and " <>
        "'test/golden/test-files/dir1' " <>
        "'test/golden/test-files/empty-file' " <>
        "'test/golden/test-files/dir2' ; " <>
        "rm -rf 'test/golden/test-files/dir2/unreadable-file'`"
  in testStderrAndStdout
       testName
       "test/golden/golden-files/highlight/multi-file"
       (runHighlightTest opts)
-- | Feed pre-recorded @grep@ output through @highlight --from-grep@ and
-- compare against golden output.
testHighlightFromGrep :: TestTree
testHighlightFromGrep =
  let opts =
        defaultOptions
          & rawRegexLens .~ "and"
          & colorGrepFilenamesLens .~ ColorGrepFilenames
      testName =
        "`cat test/golden/test-files/from-grep | " <>
        "highlight --from-grep and`"
  in testStderrAndStdout
       testName
       "test/golden/golden-files/highlight/from-grep"
       (go opts)
  where
    -- Runs the highlight pipeline with the recorded grep output standing
    -- in for stdin.
    go
      :: Options
      -> (forall m. Monad m => Pipe Output ByteString m ())
      -> IO LByteString.ByteString
    go opts outputPipe = do
      -- This is the output file from @grep@ to use for the test
      -- 'testHighlightFromGrep'.
      --
      -- This file was created with the following command:
      -- > $ grep --recursive and 'test/golden/test-files/dir1'
      let grepOutputTestFile = "test/golden/test-files/from-grep"
      grepOutputProducer <- getFileOutputProducer grepOutputTestFile
      runHighlightTestWithStdin opts grepOutputProducer outputPipe
----------------
-- Hrep Tests --
----------------
-- | Golden tests for the @hrep@ executable.
hrepGoldenTests :: TestTree
hrepGoldenTests =
  testGroup
    "hrep"
    [ testHrepSingleFile
    , testHrepMultiFile
    , testHrepFromStdin
    ]
-- | Search a single known file and compare against golden output.
testHrepSingleFile :: TestTree
testHrepSingleFile =
  let opts =
        defaultCommonOptions
          & rawRegexLens .~ "another"
          & inputFilenamesLens .~ ["test/golden/test-files/file1"]
  in testStderrAndStdout
       "`hrep another 'test/golden/test-files/file1'`"
       "test/golden/golden-files/hrep/single-file"
       (runHrepTest opts)
-- | Recursive, case-insensitive search over two directories and an empty
-- file; exercises the unreadable-file error path on stderr.
testHrepMultiFile :: TestTree
testHrepMultiFile =
  let opts =
        defaultCommonOptions
          & rawRegexLens .~ "as"
          & ignoreCaseLens .~ IgnoreCase
          & recursiveLens .~ Recursive
          & inputFilenamesLens .~
              [ "test/golden/test-files/dir1"
              , "test/golden/test-files/empty-file"
              , "test/golden/test-files/dir2"
              ]
      testName =
        "`touch 'test/golden/test-files/dir2/unreadable-file' ; " <>
        "chmod 0 'test/golden/test-files/dir2/unreadable-file' ; " <>
        "hrep --ignore-case --recursive as " <>
        "'test/golden/test-files/dir1' " <>
        "'test/golden/test-files/empty-file' " <>
        "'test/golden/test-files/dir2' ; " <>
        "rm -rf 'test/golden/test-files/dir2/unreadable-file'`"
  in testStderrAndStdout
       testName
       "test/golden/golden-files/hrep/multi-file"
       (runHrepTest opts)
-- | Feed a known file to @hrep@ as if it were stdin and compare against
-- golden output.
testHrepFromStdin :: TestTree
testHrepFromStdin =
  let opts =
        defaultCommonOptions & rawRegexLens .~ "co."
      stdinInputFile = "test/golden/test-files/file2"
      testName =
        "`cat '" <> stdinInputFile <> "' | hrep 'co.'`"
  in testStderrAndStdout
       testName
       "test/golden/golden-files/hrep/from-stdin"
       (go opts stdinInputFile)
  where
    -- Runs the hrep pipeline with the file contents standing in for stdin.
    go
      :: CommonOptions
      -> FilePath
      -> (forall m. Monad m => Pipe Output ByteString m ())
      -> IO LByteString.ByteString
    go opts stdinInputFile outputPipe = do
      stdinProducer <- getFileOutputProducer stdinInputFile
      runHrepTestWithStdin opts stdinProducer outputPipe
| cdepillabout/highlight | test/Test/Golden.hs | bsd-3-clause | 10,201 | 0 | 16 | 2,157 | 2,094 | 1,092 | 1,002 | 257 | 2 |
{-# LANGUAGE OverloadedStrings #-}
import TPG.WebAPI
import TPG.Structured
import System.Directory
import System.Environment
import System.IO
import qualified Data.ByteString.Lazy as BS
import Cfg
import Control.Monad
import Control.Monad.Loops
import Data.Either
getDepartureList :: String -> [([String],[Thermometer])] -> IO [(Departure,[String],[Thermometer])]
getDepartureList key stopCodesPaired = do
thisNextDepartures <- mapM (\p -> do
nd <- getNextDepartures key ((head . fst) p)
return (nd, fst p, snd p)) stopCodesPaired
let successfulNexts = thisNextDepartures
let ndList nd = case nd of
(Nothing,_,_) -> []
(Just dpts,sts,ts) -> map (\d -> (d,sts,ts)) (departures dpts)
let mapped = map ndList successfulNexts
return (join mapped)
nonEmptyPair :: ([a],[b]) -> Bool
nonEmptyPair ([],[]) = False
nonEmptyPair _ = True
wrapGetThermometer :: String -> (Departure,[String],[Thermometer]) -> IO ([String],[Thermometer])
wrapGetThermometer key (d,sts,prevThermometers) = do
tres <- getThermometer key (show $ departureCode d)
case tres of
Nothing -> return ([],[])
Just tres -> return (sts,tres:prevThermometers)
getThermometerList :: String -> [(Departure,[String],[Thermometer])] -> IO [([String],[Thermometer])]
getThermometerList key departures = do
thisFullResultThermometers <- mapM (wrapGetThermometer key) departures
return (filter nonEmptyPair $ thisFullResultThermometers)
-- Prepends a next possible stop to the current route
-- the stop is only possible if it leads to another line
performStopStep :: [([String],[Thermometer])] -> [([String],[Thermometer])]
performStopStep routes = performStep routes isChangeStop
-- Given the current routes, filter out those that end in one of the given
-- destinations
intersectWithDestinationCodes :: [([String],[Thermometer])] -> [String] -> [([String],[Thermometer])]
intersectWithDestinationCodes routes destinationCodes =
performStep routes (\_ -> stopMatchesDestinations destinationCodes)
stopMatchesDestinations :: [String] -> Stop -> Bool
stopMatchesDestinations destinations stop = any (\c -> stopCode stop == c) destinations
performStep :: [([String],[Thermometer])] -> (String -> Stop -> Bool) -> [([String],[Thermometer])]
performStep pairs filterGen =
join $ map (\q ->
case q of
(sts,ts) ->
let t = head ts in
let currentLineCode = lineCodeThermometer t in
map (\st -> ((stopCode st):sts,ts)) $ filter (filterGen currentLineCode) $ map stopStep $ steps $ t) pairs
calculate_route :: String -> [([String],[Thermometer])] -> [String] -> Int -> IO [([String],[Thermometer])]
calculate_route key fromStopCodeList toStopCodeList maxIter = do
dList <- getDepartureList key fromStopCodeList
let extractedDestinations = map (\t -> case t of
(ds,_,_) -> ds) dList
let departureCodes = map (show . departureCode) extractedDestinations
thermometers <- getThermometerList key dList
let nextStep = performStopStep thermometers
let currentStepResults = (intersectWithDestinationCodes thermometers toStopCodeList)
if maxIter <= 0 then
return currentStepResults
else do
furtherResults <- calculate_route key nextStep toStopCodeList (maxIter - 1)
return (currentStepResults ++ furtherResults)
calculate_route_with_names :: String -> String -> String -> IO ()
calculate_route_with_names key fromStopName toStopName = do
mFromStop <- getStops key fromStopName
mToStop <- getStops key toStopName
case (mFromStop,mToStop) of
(Just fromStop,Just toStop) -> do
let fromStopCodeList = stopCodeList fromStop
let fromStopCodesPaired = map (\sc -> ([sc],[])) fromStopCodeList
let toStopCodeList = stopCodeList toStop
routes <- calculate_route key fromStopCodesPaired toStopCodeList 2 -- max 5 changes
putStrLn (show (map fst routes))
putStrLn (show toStopCodeList)
_ -> putStrLn "Could not match from/to"
main = do
args <- getArgs
if length args < 2 then
do
putStrLn "Usage: from_to fromStationName toStationName"
else
do
home_directory <- getHomeDirectory
config_handle <- openFile (home_directory ++ "/.tpg_tests") ReadMode
contents <- BS.hGetContents config_handle
let key = getApiKeyFromConfigString contents
case key of
Nothing -> error "Did not find API key"
Just key -> calculate_route_with_names key (head args) (head (tail args))
hClose config_handle
| sebug/tpg_sandbox | from_to.hs | bsd-3-clause | 4,601 | 0 | 26 | 926 | 1,506 | 788 | 718 | 91 | 3 |
module Market.Board
( module Market.Board.Types
) where
import Market.Board.Types
| s9gf4ult/market | Market/Board.hs | bsd-3-clause | 97 | 0 | 5 | 24 | 21 | 14 | 7 | 3 | 0 |
{-# LANGUAGE DeriveFunctor, DeriveFoldable, DeriveTraversable #-}
{-# LANGUAGE ScopedTypeVariables #-}
module Language.Angler.ScopedTable
( ScopedTable, Scope
, tab_stack
-- basic
, emptyScope, emptyWithIndefinable, empty
, lookup , elem, elemInCurrentScope
, insertWith, safeInsert
, adjust, replace
-- scope handling
, enterScopeWith, enterScope
, topScope, exitScope
, toList
, fromFoldable, safeFromFoldable
-- on keys
, mapKeys
, filterByKey
) where
import Language.Angler.Error
import PrettyShow
import Control.Lens hiding (op)
import Data.Foldable (msum)
import qualified Data.Map.Strict as Map
import Prelude hiding (elem, lookup)
import qualified Prelude as P (elem)
-- | A single scope: names mapped to symbols.
type Scope sym = Map.Map String sym
-- interface for a scoped symbol table
-- | A stack of scopes (innermost first) plus a list of names that may
-- never be (re)defined.
data ScopedTable sym
  = ScopedTable
        { _tab_stack :: [ Scope sym ]
        , _indefinable :: [ String ]
        }
  deriving (Show, Functor, Foldable, Traversable)
makeLenses ''ScopedTable
-- | A scope with no bindings.
emptyScope :: Scope sym
emptyScope = Map.empty
-- | A table with one empty scope and the given indefinable names.
emptyWithIndefinable :: [String] -> ScopedTable sym
emptyWithIndefinable = ScopedTable [emptyScope]
-- | A table with one empty scope and no indefinable names.
empty :: ScopedTable sym
empty = emptyWithIndefinable []
-- | Push the given scope as the new innermost scope.
enterScopeWith :: Scope sym -> ScopedTable sym -> ScopedTable sym
enterScopeWith up = over tab_stack (cons up)
-- | Push a fresh empty scope.
enterScope :: ScopedTable sym -> ScopedTable sym
enterScope = enterScopeWith emptyScope
-- | The innermost scope.  NOTE(review): partial if the stack is empty;
-- the constructors above always start with one scope.
topScope :: ScopedTable sym -> Scope sym
topScope = views tab_stack head
-- | Drop the innermost scope.
exitScope :: ScopedTable sym -> ScopedTable sym
exitScope = over tab_stack tail
-- | Look a name up, searching from the innermost scope outwards.
lookup :: String -> ScopedTable sym -> Maybe sym
lookup str = views tab_stack (msum . fmap (Map.lookup str))
-- | Whether the name is bound in the given single scope.
-- Uses 'Map.member' (O(log n)) instead of scanning the full key list.
scopeElem :: String -> Scope sym -> Bool
scopeElem = Map.member
-- | Whether the name is bound in any scope of the table.
elem :: String -> ScopedTable sym -> Bool
elem str = views tab_stack (any (scopeElem str))
-- | Whether the name is bound in the innermost scope only.
-- NOTE(review): uses 'head', so it relies on the stack never being empty.
elemInCurrentScope :: String -> ScopedTable sym -> Bool
elemInCurrentScope str = views tab_stack (scopeElem str . head)
-- | Insert into the innermost scope, combining with any existing binding.
insertWith :: (sym -> sym -> sym) -> String -> sym -> ScopedTable sym -> ScopedTable sym
insertWith join str sym = over (tab_stack._head) (Map.insertWith join str sym)
-- | Insert into the innermost scope, failing if the name is indefinable
-- or already bound in the current scope.
safeInsert :: String -> sym -> ScopedTable sym -> Either Error (ScopedTable sym)
safeInsert str sym tab = if P.elem str (view indefinable tab) || elemInCurrentScope str tab
    then (Left . CheckError . CErrAlreadyInSymbolTable) str
    else Right (insert str sym tab)
-- overwrites the symbol in the top scope
insert :: String -> sym -> ScopedTable sym -> ScopedTable sym
insert = insertWith const
-- looks for the symbol and adjusts it in the appropiate scope
-- (the innermost scope that binds the name; no-op if unbound)
adjust :: forall sym . (sym -> sym) -> String -> ScopedTable sym -> ScopedTable sym
adjust f str = over tab_stack adjust'
    where
        adjust' :: [Scope sym] -> [Scope sym]
        adjust' scopes = case scopes of
                sc : scs -> if scopeElem str sc
                        then Map.adjust f str sc : scs
                        else sc : adjust' scs
                [] -> []
-- | Replace the symbol bound to the name, wherever it is bound.
replace :: String -> sym -> ScopedTable sym -> ScopedTable sym
replace str sym = adjust (const sym) str
-- | All visible bindings as a list; a name bound in an inner scope
-- shadows the same name in outer scopes.
toList :: ScopedTable sym -> [(String, sym)]
toList = views tab_stack (dedupe . concatMap Map.toList)
  where
    -- inner scopes come first in the concatenation; folding from the
    -- right makes the leftmost (innermost) binding win
    dedupe :: [(String, sym)] -> [(String, sym)]
    dedupe = Map.toList . foldr (uncurry Map.insert) emptyScope
-- | Build a table from bindings; later bindings overwrite earlier ones.
fromFoldable :: Foldable f => f (String, sym) -> ScopedTable sym
fromFoldable = foldl (flip (uncurry insert)) empty
-- | Like 'fromFoldable' but fails on the first duplicate binding.
safeFromFoldable :: Foldable f => f (String, sym) -> Either Error (ScopedTable sym)
safeFromFoldable = foldl (\act (str,sym) -> act >>= safeInsert str sym) (Right empty)
-- | Rename every key in every scope.
-- NOTE(review): if the function is not injective, colliding keys are
-- merged by 'Map.mapKeys' — confirm callers only pass injective renamings.
mapKeys :: (String -> String) -> ScopedTable sym -> ScopedTable sym
mapKeys f = over (tab_stack.traverse) (Map.mapKeys f)
-- | Keep only the bindings whose key satisfies the predicate.
filterByKey :: (String -> Bool) -> ScopedTable sym -> ScopedTable sym
filterByKey f = over (tab_stack.traverse) (Map.filterWithKey (\s _ -> f s))
-- | Pretty-prints the visible symbols, one per line.
instance PrettyShow sym => PrettyShow (ScopedTable sym) where
  pshow = pshows line . (map snd . toList)
| angler-lang/angler-lang | src/Language/Angler/ScopedTable.hs | bsd-3-clause | 4,239 | 0 | 13 | 1,053 | 1,325 | 694 | 631 | -1 | -1 |
-----------------------------------------------------------------------------
-- |
-- Module : HJScript.DOM
-- Copyright : (c) Joel Bjornson 2008
-- License : BSD-style
-- Maintainer : Joel Bjornson [email protected]
-- Niklas Broberg [email protected]
-- Stability : experimental
-----------------------------------------------------------------------------
module HJScript.DOM
(
module HJScript.DOM.NodeTypes,
module HJScript.DOM.Node,
module HJScript.DOM.Document,
module HJScript.DOM.ElementNode,
module HJScript.DOM.AttributeNode,
module HJScript.DOM.TextNode,
module HJScript.DOM.Window,
module HJScript.DOM.XHTML
) where
import HJScript.DOM.NodeTypes (NodeType(..), nodeTypeVal)
import HJScript.DOM.Node
import HJScript.DOM.Document
import HJScript.DOM.ElementNode
import HJScript.DOM.AttributeNode
import HJScript.DOM.TextNode
import HJScript.DOM.Window
import HJScript.DOM.XHTML
| seereason/HJScript | src/HJScript/DOM.hs | bsd-3-clause | 968 | 0 | 6 | 143 | 133 | 94 | 39 | 18 | 0 |
module Main where
import Control.Exception (bracket)
import Data.Time.Clock.POSIX (POSIXTime, getPOSIXTime)
import qualified Data.Map as Map
import qualified Data.Vector as Vector
import Minecraft.Anvil (ChunkX, ChunkZ, ChunkData, ChunkMap, compressChunkData, writeChunkMap)
import Minecraft.Core (BlockId(..), toNBT)
import Minecraft.Chunk (Chunk(..), Section(..), emptyChunk, emptySection)
import System.IO (IOMode(WriteMode), withFile)
-- | A section filled entirely with block id 20 (4096 = 16x16x16 blocks).
section0 :: Section
section0 =
    emptySection { _Blocks = Vector.replicate 4096 (BlockId 20)
                 }
-- | A chunk containing just the single filled section.
chunk0 :: Chunk
chunk0 = emptyChunk { _Sections = [section0] }
-- | The chunk serialised to NBT and compressed for the region file.
chunkData0 :: ChunkData
chunkData0 = compressChunkData (toNBT chunk0)
-- | A one-entry chunk map: the test chunk at coordinates (0,0),
-- stamped with the given time.
chunkMap :: POSIXTime -> ChunkMap
chunkMap now = Map.singleton (0, 0) (chunkData0, now)
-- | Write the single-chunk map to an Anvil region file, using the
-- current time as the chunk timestamp.
main :: IO ()
main =
  withFile "test-r.0.0.mca" WriteMode $ \h ->
  do now <- getPOSIXTime
     writeChunkMap h (chunkMap now)
| stepcut/minecraft-data | utils/GenWorld.hs | bsd-3-clause | 915 | 0 | 11 | 147 | 295 | 176 | 119 | 24 | 1 |
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE MultiWayIf #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
--
-- Copyright © 2013-2015 Anchor Systems, Pty Ltd and Others
--
-- The code in this file, and the program it is a part of, is
-- made available to you by its authors as open source software:
-- you can redistribute it and/or modify it under the terms of
-- the 3-clause BSD licence.
--
--
-- This module defines a convenient interface for clients
-- of Ceilometer.
--
-- For flexibility use the Collector and Fold modules.
--
module Ceilometer.Client
( -- * Interface
decodeFold
, decodeFold_
-- * Re-exports
, module X
) where
import Control.Applicative
import Control.Foldl
import Control.Lens
import Control.Monad
import Data.Monoid ((<>))
import qualified Data.Traversable as T
import Pipes
import qualified Pipes.Prelude as P
import System.IO (hPutStrLn, stderr)
import System.IO.Unsafe (unsafePerformIO)
import Ceilometer.Fold as X
import Ceilometer.Tags as X
import Ceilometer.Types as X
import Vaultaire.Types as X
-- | Inspect the 'SourceDict' inside the 'Env' to work out which concrete
-- payload type the stream contains (metric name first; volumes, flavors
-- and images need further source-dict keys), then run 'decodeFold_' at
-- that type. Produces 'Nothing' when the source dict does not match any
-- known resource.
decodeFold
  :: (Monad m, Applicative m)
  => Env                        -- ^ @SourceDict@ to infer the resource type.
  -> Producer SimplePoint m ()  -- ^ The raw data points to parse and aggregate.
  -> m (Maybe FoldResult)       -- ^ Result
decodeFold env@(Env _ sd _ _ _) raw = do
  let x = do
        name <- lookupMetricName sd
        if | name == valCPU
             -> return (decodeFold_ (undefined :: proxy PDCPU) env raw)
           | name == valDiskReads
             -> return (decodeFold_ (undefined :: proxy PDDiskRead) env raw)
           | name == valDiskWrites
             -> return (decodeFold_ (undefined :: proxy PDDiskWrite) env raw)
           | name == valNeutronIn
             -> return (decodeFold_ (undefined :: proxy PDNeutronRx) env raw)
           | name == valNeutronOut
             -> return (decodeFold_ (undefined :: proxy PDNeutronTx) env raw)
           -- Volumes are split into block/fast by a second source-dict key.
           | name == valVolume -> do
               voltype <- lookupVolumeType sd
               if | voltype == valVolumeBlock
                    -> return (decodeFold_ (undefined :: proxy PDVolume) env raw)
                  | voltype == valVolumeFast
                    -> return (decodeFold_ (undefined :: proxy PDSSD) env raw)
                  | otherwise -> mzero
           -- Flavors only count when marked compound and not an event.
           | name == valInstanceFlavor -> do
               compound <- lookupCompound sd
               event <- lookupEvent sd
               if | compound == valTrue && event == valFalse
                    -> return (decodeFold_ (undefined :: proxy PDInstanceFlavor) env raw)
                  | otherwise -> mzero
           | name == valInstanceVCPU
             -> return (decodeFold_ (undefined :: proxy PDInstanceVCPU) env raw)
           | name == valInstanceRAM
             -> return (decodeFold_ (undefined :: proxy PDInstanceRAM) env raw)
           -- Images come in an event-based and a pollster-based variant.
           | name == valImage ->
               if | isEvent sd
                    -> return (decodeFold_ (undefined :: proxy PDImage) env raw)
                  | otherwise
                    -> return (decodeFold_ (undefined :: proxy PDImagePollster) env raw)
           | name == valSnapshot
             -> return (decodeFold_ (undefined :: proxy PDSnapshot) env raw)
           | name == valIP
             -> return (decodeFold_ (undefined :: proxy PDIP) env raw)
           | otherwise -> mzero
  -- x :: Maybe (m FoldResult); sequence runs the selected fold (if any).
  T.sequence x
-- | Decode and fold a raw point stream at one statically-known payload
-- type @a@, selected by the (value-irrelevant) proxy argument.
decodeFold_
  :: forall proxy a m . (Known a, Applicative m, Monad m)
  => proxy a
  -> Env
  -> Producer SimplePoint m ()
  -> m FoldResult
decodeFold_ _ env raw
  = foldDecoded env (raw >-> (decode env :: Pipe SimplePoint (Timed a) m ()))
-- | Decode each 'SimplePoint' into a 'Timed' payload via the prism that
-- 'mkPrism' selects for this 'Env'. Points that fail to decode are
-- reported on stderr and dropped instead of aborting the pipeline.
decode
  :: (Known a, Monad m)
  => Env
  -> Pipe SimplePoint (Timed a) m ()
decode env = forever $ do
  p@(SimplePoint _ (TimeStamp t) v) <- await
  let x = T.sequence $ Timed t $ v ^? clonePrism (mkPrism env)
  case x of
    Nothing -> do
      -- Originally this would call error on Nothing, instead we print an angry
      -- message and try to keep going. This *really* shouldn't happen for any
      -- billing runs after August 2015, but due to a terrible recovery of data
      -- we have some invalid points that are hard to get rid of.
      -- NOTE(review): the write is smuggled into a pure pipe via
      -- 'unsafePerformIO' and forced with ($!) so it actually runs.
      let msg = "This shouldn't happen after August 2015, could not decode point:\n\t"
             <> show p
      return $! unsafePerformIO $ hPutStrLn stderr msg
    Just x' -> yield x'
-- | Run the type-specific fold ('mkFold' for this 'Env') over the
-- already-decoded stream, lifting the pure fold into the monad.
foldDecoded
  :: (Known a, Monad m)
  => Env
  -> Producer (Timed a) m ()
  -> m FoldResult
foldDecoded env = impurely P.foldM (generalize $ mkFold env)
| anchor/ceilometer-common | lib/Ceilometer/Client.hs | bsd-3-clause | 4,830 | 0 | 22 | 1,543 | 1,201 | 615 | 586 | 99 | 17 |
{-# LANGUAGE PatternSynonyms #-}
--------------------------------------------------------------------------------
-- |
-- Module : Graphics.GL.ARB.DrawBuffers
-- Copyright : (c) Sven Panne 2019
-- License : BSD3
--
-- Maintainer : Sven Panne <[email protected]>
-- Stability : stable
-- Portability : portable
--
--------------------------------------------------------------------------------
module Graphics.GL.ARB.DrawBuffers (
-- * Extension Support
glGetARBDrawBuffers,
gl_ARB_draw_buffers,
-- * Enums
pattern GL_DRAW_BUFFER0_ARB,
pattern GL_DRAW_BUFFER10_ARB,
pattern GL_DRAW_BUFFER11_ARB,
pattern GL_DRAW_BUFFER12_ARB,
pattern GL_DRAW_BUFFER13_ARB,
pattern GL_DRAW_BUFFER14_ARB,
pattern GL_DRAW_BUFFER15_ARB,
pattern GL_DRAW_BUFFER1_ARB,
pattern GL_DRAW_BUFFER2_ARB,
pattern GL_DRAW_BUFFER3_ARB,
pattern GL_DRAW_BUFFER4_ARB,
pattern GL_DRAW_BUFFER5_ARB,
pattern GL_DRAW_BUFFER6_ARB,
pattern GL_DRAW_BUFFER7_ARB,
pattern GL_DRAW_BUFFER8_ARB,
pattern GL_DRAW_BUFFER9_ARB,
pattern GL_MAX_DRAW_BUFFERS_ARB,
-- * Functions
glDrawBuffersARB
) where
import Graphics.GL.ExtensionPredicates
import Graphics.GL.Tokens
import Graphics.GL.Functions
| haskell-opengl/OpenGLRaw | src/Graphics/GL/ARB/DrawBuffers.hs | bsd-3-clause | 1,214 | 0 | 5 | 165 | 137 | 91 | 46 | 25 | 0 |
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE DerivingStrategies #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TemplateHaskell #-}
-- |
-- Module : Pact.Types.Runtime
-- Copyright : (C) 2019 Stuart Popejoy
-- License : BSD-style (see the file LICENSE)
-- Maintainer : Stuart Popejoy <[email protected]>
--
-- Meta data and its types
--
module Pact.Types.ChainMeta
( -- * types
Address(..)
, PrivateMeta(..)
, PublicMeta(..)
, HasPlafMeta(..)
, PublicData(..)
, EntityName(..)
, TTLSeconds(..)
, TxCreationTime(..)
-- * optics
, aFrom, aTo
, pmAddress, pmChainId, pmSender, pmGasLimit, pmGasPrice, pmTTL, pmCreationTime
, pdPublicMeta, pdBlockHeight, pdBlockTime, pdPrevBlockHash
, getCurrentCreationTime
) where
import GHC.Generics
import Control.DeepSeq (NFData)
import Control.Lens (makeLenses)
import Data.Aeson
import Data.Default (Default, def)
import Data.Hashable (Hashable)
import Data.Int (Int64)
import Data.Serialize (Serialize)
import Data.Set (Set)
import Data.String (IsString)
import Data.Text
import Pact.Time (getCurrentTime, toPosixTimestampMicros)
import Data.Word (Word64)
-- internal pact modules
import Pact.Parse
import Pact.Types.ChainId (ChainId)
import Pact.Types.Gas
import Pact.Types.Util (AsString, lensyToJSON, lensyParseJSON)
-- | Name of "entity", ie confidential counterparty in an encrypted exchange, in privacy-supporting platforms.
newtype EntityName = EntityName Text
  deriving stock (Eq, Ord, Generic)
  deriving newtype (Show, NFData, Hashable, Serialize, Default, ToJSON, FromJSON, IsString, AsString)
-- | Wrapper for 'PublicMeta' ttl field in seconds since offset
--
-- Numeric/JSON behaviour is borrowed from 'ParsedInteger' via newtype deriving.
newtype TTLSeconds = TTLSeconds ParsedInteger
  deriving stock (Eq, Ord, Generic)
  deriving newtype (Show, Num, NFData, ToJSON, FromJSON, Serialize)
-- | Wrapper for 'PublicMeta' creation time field in seconds since POSIX epoch
--
-- Numeric/JSON behaviour is borrowed from 'ParsedInteger' via newtype deriving.
newtype TxCreationTime = TxCreationTime ParsedInteger
  deriving stock (Eq, Ord, Generic)
  deriving newtype (Show, Num, NFData, ToJSON, FromJSON, Serialize)
-- | Get current time as TxCreationTime
getCurrentCreationTime :: IO TxCreationTime
getCurrentCreationTime = do
  now <- getCurrentTime
  -- The POSIX timestamp arrives in microseconds; creation time is in
  -- whole seconds, so divide the precision away before wrapping.
  let seconds = toPosixTimestampMicros now `div` 1000000
  pure (TxCreationTime (fromIntegral seconds))
-- | Confidential/Encrypted addressing info, for use in metadata on privacy-supporting platforms.
data Address = Address
  { _aFrom :: EntityName
    -- ^ originating entity
  , _aTo :: Set EntityName
    -- ^ counterparty entities
  } deriving (Eq,Show,Ord,Generic)
instance NFData Address
instance Serialize Address
-- The numeric argument to lensyToJSON/lensyParseJSON is the length of
-- the field prefix ("_a") stripped when forming JSON keys.
instance ToJSON Address where toJSON = lensyToJSON 2
instance FromJSON Address where parseJSON = lensyParseJSON 2
makeLenses ''Address
-- | Private-blockchain specific metadata.
newtype PrivateMeta = PrivateMeta { _pmAddress :: Maybe Address }
  deriving (Eq,Show,Generic)
makeLenses ''PrivateMeta
instance Default PrivateMeta where def = PrivateMeta def
-- 3 strips the "_pm" field prefix for JSON keys.
instance ToJSON PrivateMeta where toJSON = lensyToJSON 3
instance FromJSON PrivateMeta where parseJSON = lensyParseJSON 3
instance NFData PrivateMeta
instance Serialize PrivateMeta
-- | Allows user to specify execution parameters specific to public-chain
-- execution, namely gas parameters, TTL, creation time, chain identifier.
data PublicMeta = PublicMeta
  { _pmChainId :: !ChainId
    -- ^ platform-specific chain identifier, e.g. "0"
  , _pmSender :: !Text
    -- ^ sender gas account key
  , _pmGasLimit :: !GasLimit
    -- ^ gas limit (maximum acceptable gas units for tx)
  , _pmGasPrice :: !GasPrice
    -- ^ per-unit gas price
  , _pmTTL :: !TTLSeconds
    -- ^ TTL in seconds
  , _pmCreationTime :: !TxCreationTime
    -- ^ Creation time in seconds since UNIX epoch
  } deriving (Eq, Show, Generic)
makeLenses ''PublicMeta
-- Default: empty chain id/sender, all numeric fields zero.
instance Default PublicMeta where def = PublicMeta "" "" 0 0 0 0
-- JSON (de)serialisation uses explicit camelCase keys rather than the
-- lensy prefix-stripping convention used elsewhere in this module.
instance ToJSON PublicMeta where
  toJSON (PublicMeta cid s gl gp ttl ct) = object
    [ "chainId" .= cid
    , "sender" .= s
    , "gasLimit" .= gl
    , "gasPrice" .= gp
    , "ttl" .= ttl
    , "creationTime" .= ct
    ]
instance FromJSON PublicMeta where
  parseJSON = withObject "PublicMeta" $ \o -> PublicMeta
    <$> o .: "chainId"
    <*> o .: "sender"
    <*> o .: "gasLimit"
    <*> o .: "gasPrice"
    <*> o .: "ttl"
    <*> o .: "creationTime"
instance NFData PublicMeta
instance Serialize PublicMeta
-- | Uniform access to both halves of platform metadata; carriers that
-- lack one half return its 'Default'.
class HasPlafMeta a where
  getPrivateMeta :: a -> PrivateMeta
  getPublicMeta :: a -> PublicMeta
instance HasPlafMeta PrivateMeta where
  getPrivateMeta = id
  getPublicMeta = const def
instance HasPlafMeta PublicMeta where
  getPrivateMeta = const def
  getPublicMeta = id
instance HasPlafMeta () where
  getPrivateMeta = const def
  getPublicMeta = const def
-- | "Public chain" data with immutable block data
-- height, hash, creation time
data PublicData = PublicData
  { _pdPublicMeta :: !PublicMeta
    -- ^ 'PublicMeta' data from request
  , _pdBlockHeight :: !Word64
    -- ^ block height as specified by platform.
  , _pdBlockTime :: !Int64
    -- ^ block creation time, micros since UNIX epoch
  , _pdPrevBlockHash :: !Text
    -- ^ block hash of preceding block
  }
  deriving (Show, Eq, Generic)
makeLenses ''PublicData
-- 3 strips the "_pd" field prefix for JSON keys.
instance ToJSON PublicData where toJSON = lensyToJSON 3
instance FromJSON PublicData where parseJSON = lensyParseJSON 3
instance Default PublicData where def = PublicData def def def def
| kadena-io/pact | src/Pact/Types/ChainMeta.hs | bsd-3-clause | 5,386 | 0 | 19 | 962 | 1,185 | 668 | 517 | 138 | 1 |
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE TupleSections #-}
{-# LANGUAGE TypeFamilies #-}
module Cardano.Faucet.Types.Recaptcha
( CaptchaSecret(..)
, CaptchaRequest(..), secret, response
, CaptchaResponse(..), success, challengeTS, hostname, errorCodes
, ReadRecaptchaSecretError(..)
, readCaptchaSecret
, captchaRequest
) where
import Control.Exception.Safe (Exception, throwIO)
import Control.Lens (makeLenses, makeWrapped, _Wrapped)
import Data.String (IsString)
import Network.Wreq (FormParam (..))
import qualified Network.Wreq as Wreq
-- import Data.Proxy
import Data.Aeson
import qualified Data.Text as Text
import qualified Data.Text.IO as Text
import Data.Time.Clock (UTCTime)
import Data.Typeable (Typeable)
import GHC.Generics (Generic)
import Universum
import Cardano.Faucet.Types.API
--------------------------------------------------------------------------------
-- | Server-side reCAPTCHA secret (the key shared with Google).
newtype CaptchaSecret = CaptchaSecret Text deriving (Show, Eq, IsString)
makeWrapped ''CaptchaSecret
--------------------------------------------------------------------------------
-- | Request for sending to google to validate recaptcha
data CaptchaRequest = CaptchaRequest {
    -- | The secret given by google
    _secret :: CaptchaSecret
    -- | The "g-recaptcha-response" field sent by the form
  , _response :: GCaptchaResponse
  } deriving (Generic)
makeLenses ''CaptchaRequest
--------------------------------------------------------------------------------
-- | Error thrown if recaptcha secret isn't a single line in a file
data ReadRecaptchaSecretError =
    -- | Carries the offending file path; thrown by 'readCaptchaSecret'
    -- when the secret file is not exactly one line.
    MoreThanOneLine FilePath
    deriving (Eq, Show, Typeable)
instance Exception ReadRecaptchaSecretError
--------------------------------------------------------------------------------
-- | Response from google to being sent a 'CaptchaRequest'
data CaptchaResponse = CaptchaResponse {
    -- | Was the recaptcha validated as not coming from a bot
    _success :: Bool
    -- | The time of the challenge
    --
    -- (Maybe because this isn't present if there are errors)
  , _challengeTS :: Maybe UTCTime
    -- | The hostname serving the form
    --
    -- (Maybe because this isn't present if there are errors)
  , _hostname :: Maybe Text
    -- | Any errors present
  , _errorCodes :: [Text]
  } deriving (Eq, Show)
makeLenses ''CaptchaResponse
-- Field keys follow Google's verify-API JSON; a missing "error-codes"
-- key is treated as the empty list.
instance FromJSON CaptchaResponse where
  parseJSON = withObject "CaptchaResponse" $ \v -> CaptchaResponse
    <$> v .: "success"
    <*> v .:? "challenge_ts"
    <*> v .:? "hostname"
    <*> (fromMaybe [] <$> v .:? "error-codes")
-- | Reads a CaptchaSecret out of a file
readCaptchaSecret :: FilePath -> IO CaptchaSecret
readCaptchaSecret path = do
  contents <- Text.readFile path
  -- The file must contain the secret on exactly one line.
  case Text.lines contents of
    [single] -> pure (CaptchaSecret single)
    _        -> throwIO (MoreThanOneLine path)
-- | Makes the 'CaptchaRequest' to google
-- POSTs the secret and the user's response token as form parameters to
-- Google's siteverify endpoint and decodes the JSON reply.
captchaRequest :: CaptchaRequest -> IO CaptchaResponse
captchaRequest cr = do
  resp <- Wreq.asJSON =<< (Wreq.post "https://www.google.com/recaptcha/api/siteverify"
                      [ "secret" := cr ^. secret . _Wrapped
                      , "response" := cr ^. response . _Wrapped])
  return $ resp ^. Wreq.responseBody
| input-output-hk/pos-haskell-prototype | faucet/src/Cardano/Faucet/Types/Recaptcha.hs | mit | 3,617 | 0 | 14 | 813 | 602 | 351 | 251 | -1 | -1 |
f x = y 0
where y z | z > 10 = 10
| otherwise = (10 + 20)
q = 20
p = 10
| itchyny/vim-haskell-indent | test/where/where_paren.in.hs | mit | 73 | 1 | 8 | 27 | 57 | 28 | 29 | -1 | -1 |
{-# LANGUAGE OverloadedStrings #-}
module Tests.Writers.LaTeX (tests) where
import Test.Framework
import Text.Pandoc.Builder
import Text.Pandoc
import Tests.Helpers
import Tests.Arbitrary()
-- | Render to LaTeX with syntax highlighting enabled.
latex :: (ToString a, ToPandoc a) => a -> String
latex = writeLaTeX def{ writerHighlight = True } . toPandoc
-- | Render to LaTeX using the listings package for code blocks.
latexListing :: (ToString a, ToPandoc a) => a -> String
latexListing = writeLaTeX def{ writerListings = True } . toPandoc
{-
"my test" =: X =?> Y
is shorthand for
test latex "my test" $ X =?> Y
which is in turn shorthand for
test latex "my test" (X,Y)
-}
infix 4 =:
-- | @"name" =: input =?> expected@ builds a test that renders @input@
-- with 'latex' and compares the result against @expected@.
(=:) :: (ToString a, ToPandoc a)
     => String -> (a, String) -> Test
(=:) = test latex
-- | All LaTeX writer tests, grouped by the feature being rendered.
-- Expected strings are exact writer output, so escapes matter.
tests :: [Test]
tests = [ testGroup "code blocks"
          [ "in footnotes" =: note (para "hi" <> codeBlock "hi") =?>
            "\\footnote{hi\n\n\\begin{Verbatim}\nhi\n\\end{Verbatim}\n}"
          , test latexListing "identifier" $ codeBlockWith ("id",[],[]) "hi" =?>
            ("\\begin{lstlisting}[label=id]\nhi\n\\end{lstlisting}" :: String)
          , test latexListing "no identifier" $ codeBlock "hi" =?>
            ("\\begin{lstlisting}\nhi\n\\end{lstlisting}" :: String)
          ]
        , testGroup "definition lists"
          [ "with internal link" =: definitionList [(link "#go" "" (str "testing"),
             [plain (text "hi there")])] =?>
            "\\begin{description}\n\\tightlist\n\\item[\\protect\\hyperlink{go}{testing}]\nhi there\n\\end{description}"
          ]
        , testGroup "math"
          [ "escape |" =: para (math "\\sigma|_{\\{x\\}}") =?>
            "\\(\\sigma|_{\\{x\\}}\\)"
          ]
        , testGroup "headers"
          [ "unnumbered header" =:
            headerWith ("foo",["unnumbered"],[]) 1
              (text "Header 1" <> note (plain $ text "note")) =?>
            "\\section*{\\texorpdfstring{Header 1\\footnote{note}}{Header 1}}\\label{foo}\n\\addcontentsline{toc}{section}{Header 1}\n"
          , "in list item" =:
            bulletList [header 2 (text "foo")] =?>
            "\\begin{itemize}\n\\item ~\n \\subsection{foo}\n\\end{itemize}"
          , "in definition list item" =:
            definitionList [(text "foo", [header 2 (text "bar"),
                            para $ text "baz"])] =?>
            "\\begin{description}\n\\item[foo] ~ \n\\subsection{bar}\n\nbaz\n\\end{description}"
          , "containing image" =:
            header 1 (image "imgs/foo.jpg" "" (text "Alt text")) =?>
            "\\section{\\texorpdfstring{\\protect\\includegraphics{imgs/foo.jpg}}{Alt text}}"
          ]
        , testGroup "inline code"
          [ "struck out and highlighted" =:
            strikeout (codeWith ("",["haskell"],[]) "foo" <> space
              <> str "bar") =?>
            "\\sout{\\mbox{\\VERB|\\NormalTok{foo}|} bar}"
          , "struck out and not highlighted" =:
            strikeout (code "foo" <> space
              <> str "bar") =?>
            "\\sout{\\texttt{foo} bar}"
          , "single quotes" =:
            code "dog's" =?> "\\texttt{dog\\textquotesingle{}s}"
          ]
        ]
| alexvong1995/pandoc | tests/Tests/Writers/LaTeX.hs | gpl-2.0 | 3,087 | 0 | 16 | 862 | 652 | 349 | 303 | 56 | 1 |
{-| Definition of the data collectors used by MonD.
-}
{-
Copyright (C) 2014 Google Inc.
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
1. Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-}
module Ganeti.DataCollectors( collectors ) where
import Data.Map (findWithDefault)
import Data.Monoid (mempty)
import qualified Ganeti.DataCollectors.CPUload as CPUload
import qualified Ganeti.DataCollectors.Diskstats as Diskstats
import qualified Ganeti.DataCollectors.Drbd as Drbd
import qualified Ganeti.DataCollectors.InstStatus as InstStatus
import qualified Ganeti.DataCollectors.Lv as Lv
import qualified Ganeti.DataCollectors.XenCpuLoad as XenCpuLoad
import Ganeti.DataCollectors.Types (DataCollector(..),ReportBuilder(..))
import Ganeti.JSON (GenericContainer(..))
import Ganeti.Objects
import Ganeti.Types
-- | The list of available builtin data collectors.
collectors :: [DataCollector]
collectors =
  [ cpuLoadCollector
  , xenCpuLoadCollector
  , diskStatsCollector
  , drbdCollector
  , instStatusCollector
  , lvCollector
  ]
  where
    -- Pointwise conjunction of two binary predicates.
    f .&&. g = \x y -> f x y && g x y
    -- True for the Xen-based hypervisors.
    xenHypervisor = flip elem [XenPvm, XenHvm]
    -- Is any Xen hypervisor enabled on the cluster?
    xenCluster _ cfg =
      any xenHypervisor . clusterEnabledHypervisors $ configCluster cfg
    -- Per-collector configuration from the cluster, defaulting to mempty
    -- when the collector has no entry.
    collectorConfig name cfg =
      let config = fromContainer . clusterDataCollectors $ configCluster cfg
      in findWithDefault mempty name config
    updateInterval name cfg = dataCollectorInterval $ collectorConfig name cfg
    activeConfig name cfg = dataCollectorActive $ collectorConfig name cfg
    diskStatsCollector =
      DataCollector Diskstats.dcName Diskstats.dcCategory
        Diskstats.dcKind (StatelessR Diskstats.dcReport) Nothing activeConfig
        updateInterval
    -- Renamed from "drdbCollector": the module it wraps is DRBD.
    drbdCollector =
      DataCollector Drbd.dcName Drbd.dcCategory Drbd.dcKind
        (StatelessR Drbd.dcReport) Nothing activeConfig updateInterval
    -- Instance status is only meaningful on Xen clusters, hence the
    -- combined activity predicate.
    instStatusCollector =
      DataCollector InstStatus.dcName InstStatus.dcCategory
        InstStatus.dcKind (StatelessR InstStatus.dcReport) Nothing
        (xenCluster .&&. activeConfig) updateInterval
    lvCollector =
      DataCollector Lv.dcName Lv.dcCategory Lv.dcKind
        (StatelessR Lv.dcReport) Nothing activeConfig updateInterval
    cpuLoadCollector =
      DataCollector CPUload.dcName CPUload.dcCategory CPUload.dcKind
        (StatefulR CPUload.dcReport) (Just CPUload.dcUpdate) activeConfig
        updateInterval
    xenCpuLoadCollector =
      DataCollector XenCpuLoad.dcName XenCpuLoad.dcCategory XenCpuLoad.dcKind
        (StatefulR XenCpuLoad.dcReport) (Just XenCpuLoad.dcUpdate) activeConfig
        updateInterval
| dimara/ganeti | src/Ganeti/DataCollectors.hs | bsd-2-clause | 3,774 | 0 | 12 | 633 | 543 | 300 | 243 | 52 | 1 |
{-# LANGUAGE BangPatterns #-}
-- | In-place quicksort.
module QuickSort (quickSort) where
import Control.Monad
import Control.Monad.Primitive
import Control.Monad.ST
import Data.List (sort)
import Data.Vector (Vector)
import qualified Data.Vector as V (toList)
import qualified Data.Vector.Generic as V(freeze)
import Data.Vector.Mutable (MVector)
import qualified Data.Vector.Mutable as V
import Prelude hiding (read)
import Test.QuickCheck
--------------------------------------------------------------------------------
-- Implementation
-- | In-place quicksort the given vector in some mutation-permitting
-- monad.
quickSort :: (PrimMonad m,Ord a) => MVector (PrimState m) a -> m ()
quickSort vec =
  qsort vec 0 (V.length vec)
  where qsort array begin end =
          when (end > begin)
               (do let startP = begin + ((end - begin) `div` 2)
                   -- ^ The pivot is simply the middle element of the
                   -- range (note: not a true median), no cleverness or
                   -- randomness here.
                   pivot <- partition array begin end startP
                   -- The condition below is recommended by Sedgewick:
                   --
                   -- To make sure at most O(log n) space is used,
                   -- recurse first into the smaller side of the
                   -- partition, then use a tail call to recurse into
                   -- the other.
                   if pivot - begin > end - pivot + 1
                     then do qsort array (pivot + 1) end
                             qsort array begin pivot
                     else do qsort array begin pivot
                             qsort array (pivot + 1) end)
-- | Swap elements in the array until all elements <pivot are to the
-- left of pivot.
-- Lomuto-style scheme: the pivot is parked at (end - 1), the range is
-- scanned once moving elements <= pivot to the front, and finally the
-- pivot is swapped into its resting slot, whose index is returned.
partition :: (PrimMonad m,Ord a) => V.MVector (PrimState m) a -> Int -> Int -> Int -> m Int
partition array begin end pivot =
  do piv <- V.read array pivot
     V.swap array pivot (end - 1)
     store <- for begin (end - 1) begin
                  (\ix !store ->
                     do v <- V.read array ix
                        if v <= piv
                           then do V.swap array store ix
                                   return (store + 1)
                           else return store)
     V.swap array (end - 1) store
     return store
  -- Strict indexed fold over [from, to) threading an accumulator.
  where for from to state m = go from state
          where go i state =
                  if i < to
                     then do state' <- m i state
                             go (i + 1) state'
                     else return state
--------------------------------------------------------------------------------
-- Tests
-- | Test that sorting some list of ints is equivalent to @sort xs@.
quickSortProp :: [Int] -> Bool
quickSortProp xs =
  -- Oracle: Data.List.sort on the same input.
  sort xs ==
  V.toList (runST (do arr <- thaw xs
                      quickSort arr
                      freeze arr))
--------------------------------------------------------------------------------
-- Example
-- | Works in either the IO or ST monad!
-- Runs the QuickCheck property, then demonstrates the same sort in
-- both the IO and ST monads on a fixed example list.
main :: IO ()
main =
  do quickCheck quickSortProp
     ioVector <- do arr <- thaw [1,7,2,4,1,8,5,2]
                    quickSort arr
                    freeze arr
     print ioVector
     print (runST (do arr <- thaw [1,7,2,4,1,8,5,2]
                      quickSort arr
                      freeze arr))
-- | Handy function to construct a mutable vector from a list.
thaw :: (PrimMonad m)
     => [Int] -> m (MVector (PrimState m) Int)
thaw xs =
  do mv <- V.new (length xs)
     -- Write each list element at its position.
     zipWithM_ (\i v -> V.write mv i (v :: Int)) [0 ..] xs
     return mv
-- | More specific type for freezing.
-- 'V.freeze' is the safe variant: it yields an immutable copy, leaving
-- the mutable vector usable afterwards.
freeze :: (PrimMonad m)
       => MVector (PrimState m) a -> m (Vector a)
freeze = V.freeze
| QuinnSurkamer/sorting | src/Quicksort.hs | bsd-3-clause | 3,850 | 0 | 18 | 1,392 | 1,008 | 524 | 484 | 70 | 3 |
module Distribution.Solver.Modular.Explore
( backjump
, backjumpAndExplore
) where
import Data.Foldable as F
import Data.List as L (foldl')
import Data.Map as M
import Distribution.Solver.Modular.Assignment
import Distribution.Solver.Modular.Dependency
import Distribution.Solver.Modular.Log
import Distribution.Solver.Modular.Message
import qualified Distribution.Solver.Modular.PSQ as P
import qualified Distribution.Solver.Modular.ConflictSet as CS
import Distribution.Solver.Modular.Tree
import Distribution.Solver.Types.PackagePath
import Distribution.Solver.Types.Settings (EnableBackjumping(..), CountConflicts(..))
import qualified Distribution.Solver.Types.Progress as P
-- | This function takes the variable we're currently considering, an
-- initial conflict set and a
-- list of children's logs. Each log yields either a solution or a
-- conflict set. The result is a combined log for the parent node that
-- has explored a prefix of the children.
--
-- We can stop traversing the children's logs if we find an individual
-- conflict set that does not contain the current variable. In this
-- case, we can just lift the conflict set to the current level,
-- because the current level cannot possibly have contributed to this
-- conflict, so no other choice at the current level would avoid the
-- conflict.
--
-- If any of the children might contain a successful solution, we can
-- return it immediately. If all children contain conflict sets, we can
-- take the union as the combined conflict set.
--
-- The initial conflict set corresponds to the justification that we
-- have to choose this goal at all. There is a reason why we have
-- introduced the goal in the first place, and this reason is in conflict
-- with the (virtual) option not to choose anything for the current
-- variable. See also the comments for 'avoidSet'.
--
backjump :: EnableBackjumping -> Var QPN
         -> ConflictSet QPN -> P.PSQ k (ConflictMap -> ConflictSetLog a)
         -> ConflictMap -> ConflictSetLog a
backjump (EnableBackjumping enableBj) var initial xs =
    F.foldr combine logBackjump xs initial
  where
    -- Explore one child's log; on failure either backjump past this
    -- level (conflict set does not mention 'var') or accumulate the
    -- conflict set and continue with the remaining children.
    combine :: (ConflictMap -> ConflictSetLog a)
            -> (ConflictSet QPN -> ConflictMap -> ConflictSetLog a)
            -> ConflictSet QPN -> ConflictMap -> ConflictSetLog a
    combine x f csAcc cm =
      let l = x cm
      in case l of
        P.Done d -> P.Done d
        P.Fail (cs, cm')
          | enableBj && not (var `CS.member` cs) -> logBackjump cs cm'
          | otherwise -> f (csAcc `CS.union` cs) cm'
        -- Intermediate log messages are passed through unchanged.
        P.Step m ms ->
          let l' = combine (\ _ -> ms) f csAcc cm
          in P.Step m l'
    -- Record the backjump itself as a failure carrying the lifted set.
    logBackjump :: ConflictSet QPN -> ConflictMap -> ConflictSetLog a
    logBackjump cs cm = failWith (Failure cs Backjump) (cs, cm)
-- | Log whose failure case carries the conflict set plus the updated
-- conflict-count map.
type ConflictSetLog = P.Progress Message (ConflictSet QPN, ConflictMap)
-- | How often each variable has appeared in a conflict so far.
type ConflictMap = Map (Var QPN) Int
-- | Pick the goal whose variable has participated in the most conflicts
-- so far, according to the conflict-count map (0 for unseen variables).
getBestGoal :: ConflictMap -> P.PSQ (Goal QPN) a -> (Goal QPN, a)
getBestGoal cm = P.maximumBy conflictCount
  where
    conflictCount (Goal v _) = M.findWithDefault 0 v cm
-- | Commit to the first goal in the queue; the queue being empty is an
-- internal invariant violation.
getFirstGoal :: P.PSQ (Goal QPN) a -> (Goal QPN, a)
getFirstGoal ts =
  P.casePSQ ts
    (error "getFirstGoal: empty goal choice") -- empty goal choice is an internal error
    (\ k v _xs -> (k, v)) -- commit to the first goal choice
-- | Record one more conflict occurrence for every variable in the
-- given conflict set.
updateCM :: ConflictSet QPN -> ConflictMap -> ConflictMap
updateCM cs cm = L.foldl' step cm (CS.toList cs)
  where
    step acc v = M.alter bump v acc
    bump Nothing  = Just 1
    bump (Just n) = Just $! n + 1  -- keep counts evaluated
-- | A tree traversal that simultaneously propagates conflict sets up
-- the tree from the leaves and creates a log.
exploreLog :: EnableBackjumping -> CountConflicts -> Tree QGoalReason
           -> (Assignment -> ConflictMap -> ConflictSetLog (Assignment, RevDepMap))
exploreLog enableBj (CountConflicts countConflicts) = cata go
  where
    -- Goal-ordering heuristic: most-conflicted-first when conflict
    -- counting is enabled, otherwise plain queue order.
    getBestGoal' :: P.PSQ (Goal QPN) a -> ConflictMap -> (Goal QPN, a)
    getBestGoal'
      | countConflicts = \ ts cm -> getBestGoal cm ts
      | otherwise = \ ts _ -> getFirstGoal ts
    -- One algebra case per tree constructor; each produces a function
    -- from the assignment so far and the conflict map to a log.
    go :: TreeF QGoalReason (Assignment -> ConflictMap -> ConflictSetLog (Assignment, RevDepMap))
       -> (Assignment -> ConflictMap -> ConflictSetLog (Assignment, RevDepMap))
    go (FailF c fr) _ = \ cm -> let failure = failWith (Failure c fr)
                                in if countConflicts
                                     then failure (c, updateCM c cm)
                                     else failure (c, cm)
    go (DoneF rdm) a = \ _ -> succeedWith Success (a, rdm)
    -- Package choice: extend the package assignment per child.
    go (PChoiceF qpn gr ts) (A pa fa sa) =
      backjump enableBj (P qpn) (avoidSet (P qpn) gr) $ -- try children in order,
      P.mapWithKey -- when descending ...
        (\ i@(POption k _) r cm ->
          let l = r (A (M.insert qpn k pa) fa sa) cm
          in tryWith (TryP qpn i) l
        )
        ts
    -- Flag choice: extend the flag assignment per child.
    go (FChoiceF qfn gr _ _ ts) (A pa fa sa) =
      backjump enableBj (F qfn) (avoidSet (F qfn) gr) $ -- try children in order,
      P.mapWithKey -- when descending ...
        (\ k r cm ->
          let l = r (A pa (M.insert qfn k fa) sa) cm
          in tryWith (TryF qfn k) l
        )
        ts
    -- Stanza choice: extend the stanza assignment per child.
    go (SChoiceF qsn gr _ ts) (A pa fa sa) =
      backjump enableBj (S qsn) (avoidSet (S qsn) gr) $ -- try children in order,
      P.mapWithKey -- when descending ...
        (\ k r cm ->
          let l = r (A pa fa (M.insert qsn k sa)) cm
          in tryWith (TryS qsn k) l
        )
        ts
    go (GoalChoiceF ts) a = \ cm ->
      let (k, v) = getBestGoal' ts cm
          l = v a cm
      in continueWith (Next k) l
-- | Build a conflict set corresponding to the (virtual) option not to
-- choose a solution for a goal at all.
--
-- In the solver, the set of goals is not statically determined, but depends
-- on the choices we make. Therefore, when dealing with conflict sets, we
-- always have to consider that we could perhaps make choices that would
-- avoid the existence of the goal completely.
--
-- Whenever we actual introduce a choice in the tree, we have already established
-- that the goal cannot be avoided. This is tracked in the "goal reason".
-- The choice to avoid the goal therefore is a conflict between the goal itself
-- and its goal reason. We build this set here, and pass it to the 'backjump'
-- function as the initial conflict set.
--
-- This has two effects:
--
-- - In a situation where there are no choices available at all (this happens
-- if an unknown package is requested), the initial conflict set becomes the
-- actual conflict set.
--
-- - In a situation where we backjump past the current node, the goal reason
-- of the current node will be added to the conflict set.
--
avoidSet :: Var QPN -> QGoalReason -> ConflictSet QPN
avoidSet var gr =
  -- The variable itself, plus the variables that caused it to exist.
  CS.fromList (var : goalReasonToVars gr)
-- | Interface: explore the search tree from an empty assignment and an
-- empty conflict map, converting the conflict-set log to a plain 'Log'
-- (conflict information is dropped from the failure case).
backjumpAndExplore :: EnableBackjumping
                   -> CountConflicts
                   -> Tree QGoalReason -> Log Message (Assignment, RevDepMap)
backjumpAndExplore enableBj countConflicts t =
    toLog $ (exploreLog enableBj countConflicts t (A M.empty M.empty M.empty)) M.empty
  where
    toLog :: P.Progress step fail done -> Log step done
    toLog = P.foldProgress P.Step (const (P.Fail ())) P.Done
| kolmodin/cabal | cabal-install/Distribution/Solver/Modular/Explore.hs | bsd-3-clause | 7,516 | 0 | 20 | 2,068 | 1,797 | 954 | 843 | 102 | 7 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- Module : Network.AWS.CloudFront.DeleteCloudFrontOriginAccessIdentity
-- Copyright : (c) 2013-2014 Brendan Hay <[email protected]>
-- License : This Source Code Form is subject to the terms of
-- the Mozilla Public License, v. 2.0.
-- A copy of the MPL can be found in the LICENSE file or
-- you can obtain it at http://mozilla.org/MPL/2.0/.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : experimental
-- Portability : non-portable (GHC extensions)
--
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- | Delete an origin access identity.
--
-- <http://docs.aws.amazon.com/AmazonCloudFront/latest/APIReference/DeleteCloudFrontOriginAccessIdentity.html>
module Network.AWS.CloudFront.DeleteCloudFrontOriginAccessIdentity
(
-- * Request
DeleteCloudFrontOriginAccessIdentity
-- ** Request constructor
, deleteCloudFrontOriginAccessIdentity
-- ** Request lenses
, dcfoaiId
, dcfoaiIfMatch
-- * Response
, DeleteCloudFrontOriginAccessIdentityResponse
-- ** Response constructor
, deleteCloudFrontOriginAccessIdentityResponse
) where
import Network.AWS.Prelude
import Network.AWS.Request.RestXML
import Network.AWS.CloudFront.Types
import qualified GHC.Exts
-- NOTE: generated from the AWS service description (see module header);
-- keep edits in sync with the generator.
data DeleteCloudFrontOriginAccessIdentity = DeleteCloudFrontOriginAccessIdentity
    { _dcfoaiId      :: Text
    , _dcfoaiIfMatch :: Maybe Text
    } deriving (Eq, Ord, Read, Show)
-- | 'DeleteCloudFrontOriginAccessIdentity' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'dcfoaiId' @::@ 'Text'
--
-- * 'dcfoaiIfMatch' @::@ 'Maybe' 'Text'
--
deleteCloudFrontOriginAccessIdentity :: Text -- ^ 'dcfoaiId'
                                     -> DeleteCloudFrontOriginAccessIdentity
deleteCloudFrontOriginAccessIdentity p1 = DeleteCloudFrontOriginAccessIdentity
    { _dcfoaiId      = p1
    , _dcfoaiIfMatch = Nothing
    }
-- | The origin access identity's id.
dcfoaiId :: Lens' DeleteCloudFrontOriginAccessIdentity Text
dcfoaiId = lens _dcfoaiId (\s a -> s { _dcfoaiId = a })
-- | The value of the ETag header you received from a previous GET or PUT request.
-- For example: E2QWRUHAPOMQZL.
dcfoaiIfMatch :: Lens' DeleteCloudFrontOriginAccessIdentity (Maybe Text)
dcfoaiIfMatch = lens _dcfoaiIfMatch (\s a -> s { _dcfoaiIfMatch = a })
-- | Empty response: a successful delete returns no body.
data DeleteCloudFrontOriginAccessIdentityResponse = DeleteCloudFrontOriginAccessIdentityResponse
    deriving (Eq, Ord, Read, Show, Generic)
-- | 'DeleteCloudFrontOriginAccessIdentityResponse' constructor.
deleteCloudFrontOriginAccessIdentityResponse :: DeleteCloudFrontOriginAccessIdentityResponse
deleteCloudFrontOriginAccessIdentityResponse = DeleteCloudFrontOriginAccessIdentityResponse
-- Request path embeds the identity id after the API-version prefix.
instance ToPath DeleteCloudFrontOriginAccessIdentity where
    toPath DeleteCloudFrontOriginAccessIdentity{..} = mconcat
        [ "/2014-11-06/origin-access-identity/cloudfront/"
        , toText _dcfoaiId
        ]
instance ToQuery DeleteCloudFrontOriginAccessIdentity where
    toQuery = const mempty
-- Optional ETag precondition is carried in the If-Match header.
instance ToHeaders DeleteCloudFrontOriginAccessIdentity where
    toHeaders DeleteCloudFrontOriginAccessIdentity{..} = mconcat
        [ "If-Match" =: _dcfoaiIfMatch
        ]
instance ToXMLRoot DeleteCloudFrontOriginAccessIdentity where
    toXMLRoot = const (namespaced ns "DeleteCloudFrontOriginAccessIdentity" [])
instance ToXML DeleteCloudFrontOriginAccessIdentity
instance AWSRequest DeleteCloudFrontOriginAccessIdentity where
    type Sv DeleteCloudFrontOriginAccessIdentity = CloudFront
    type Rs DeleteCloudFrontOriginAccessIdentity = DeleteCloudFrontOriginAccessIdentityResponse
    request  = delete
    response = nullResponse DeleteCloudFrontOriginAccessIdentityResponse
| kim/amazonka | amazonka-cloudfront/gen/Network/AWS/CloudFront/DeleteCloudFrontOriginAccessIdentity.hs | mpl-2.0 | 4,234 | 0 | 9 | 778 | 448 | 267 | 181 | 56 | 1 |
{- | Client inner-loop

This function is generally only needed if you are adding a new communication channel.

NOTE(review): overall structure — commands are queued on @cmdQueue@ and
written by 'actorThread'; a 'listener' thread decodes responses and hands
each one to the matching per-command callback; @ccTMV@ holds the current
(channel, callback-queue, listener-thread) triple; 'handleReconnect' tears
all of that down and re-establishes the channel on any exception other
than 'ThreadKilled'.
-}
processRemoteState :: IsAcidic st =>
                      IO CommChannel        -- ^ (re-)connect function
                   -> IO (AcidState st)
processRemoteState reconnect
    = do cmdQueue <- atomically newTQueue
         ccTMV <- atomically newEmptyTMVar
         isClosed <- newIORef False
         -- actor: enqueue one Command and hand back the MVar its Response
         -- will eventually be delivered to.  Fails fast once 'shutdown'
         -- has flipped isClosed.
         let actor :: Command -> IO (MVar Response)
             actor command =
                 do debugStrLn "actor: begin."
                    readIORef isClosed >>= flip when (throwIO AcidStateClosed)
                    ref <- newEmptyMVar
                    atomically $ writeTQueue cmdQueue (command, ref)
                    debugStrLn "actor: end."
                    return ref
             -- expireQueue: drain all pending callbacks, answering each
             -- with ConnectionError so callers blocked on their MVar are
             -- released rather than deadlocked.
             expireQueue listenQueue =
                 do mCallback <- atomically $ tryReadTQueue listenQueue
                    case mCallback of
                      Nothing -> return ()
                      (Just callback) ->
                          do callback ConnectionError
                             expireQueue listenQueue
             -- handleReconnect: shared exception handler for the listener
             -- and the actor's send.  ThreadKilled means deliberate
             -- shutdown, so no reconnect; anything else closes the old
             -- channel, expires its callbacks and dials again.  Taking
             -- ccTMV doubles as a lock so only one reconnect runs at a time.
             handleReconnect :: SomeException -> IO ()
             handleReconnect e
                 = case fromException e of
                     (Just ThreadKilled) ->
                         do debugStrLn "handleReconnect: ThreadKilled. Not attempting to reconnect."
                            return ()
                     _ ->
                         do debugStrLn $ "handleReconnect begin."
                            tmv <- atomically $ tryTakeTMVar ccTMV
                            case tmv of
                              Nothing ->
                                  do debugStrLn $ "handleReconnect: error handling already in progress."
                                     debugStrLn $ "handleReconnect end."
                                     return ()
                              (Just (oldCC, oldListenQueue, oldListenerTID)) ->
                                  do thisTID <- myThreadId
                                     -- don't kill ourselves if we *are* the listener
                                     when (thisTID /= oldListenerTID) (killThread oldListenerTID)
                                     ccClose oldCC
                                     expireQueue oldListenQueue
                                     cc <- reconnect
                                     listenQueue <- atomically $ newTQueue
                                     listenerTID <- forkIO $ listener cc listenQueue
                                     atomically $ putTMVar ccTMV (cc, listenQueue, listenerTID)
                                     debugStrLn $ "handleReconnect end."
                                     return ()
             -- listener: repeatedly run the incremental 'get' parser over
             -- bytes pulled from the channel; each complete Response is
             -- handed to the next queued callback (callbacks are enqueued
             -- in send order by actorThread, so order matches).
             listener :: CommChannel -> TQueue (Response -> IO ()) -> IO ()
             listener cc listenQueue
                 = getResponse Strict.empty `catch` handleReconnect
                 where
                   getResponse leftover =
                       do debugStrLn $ "listener: listening for Response."
                          let go inp = case inp of
                                         Fail msg _ -> error msg
                                         Partial cont -> do debugStrLn $ "listener: ccGetSome"
                                                            bs <- ccGetSome cc 1024
                                                            go (cont bs)
                                         Done resp rest -> do debugStrLn $ "listener: getting callback"
                                                              callback <- atomically $ readTQueue listenQueue
                                                              debugStrLn $ "listener: passing Response to callback"
                                                              callback (resp :: Response)
                                                              return rest
                          rest <- go (runGetPartial get leftover) -- `catch` (\e -> do handleReconnect e
                                                                  --                   throwIO e
                                                                  --        )
                          getResponse rest
             -- actorThread: single writer.  Atomically dequeues a command,
             -- registers its callback with the *current* listener queue and
             -- reads the current channel, then sends the encoded command.
             actorThread :: IO ()
             actorThread = forever $
                 do debugStrLn "actorThread: waiting for something to do."
                    (cc, cmd) <- atomically $
                                 do (cmd, ref) <- readTQueue cmdQueue
                                    (cc, listenQueue, _) <- readTMVar ccTMV
                                    writeTQueue listenQueue (putMVar ref)
                                    return (cc, cmd)
                    debugStrLn "actorThread: sending command."
                    ccPut cc (encode cmd) `catch` handleReconnect
                    debugStrLn "actorThread: sent."
                    return ()
             -- shutdown: mark closed, stop both threads, flush pending
             -- callbacks with ConnectionError and close the channel.
             shutdown :: ThreadId -> IO ()
             shutdown actorTID =
                 do debugStrLn "shutdown: update isClosed IORef to True."
                    writeIORef isClosed True
                    debugStrLn "shutdown: killing actor thread."
                    killThread actorTID
                    debugStrLn "shutdown: taking ccTMV."
                    (cc, listenQueue, listenerTID) <- atomically $ takeTMVar ccTMV -- FIXME: or should this be tryTakeTMVar
                    debugStrLn "shutdown: killing listener thread."
                    killThread listenerTID
                    debugStrLn "shutdown: expiring listen queue."
                    expireQueue listenQueue
                    debugStrLn "shutdown: closing connection."
                    ccClose cc
                    return ()
         -- initial connection plus the two worker threads
         cc <- reconnect
         listenQueue <- atomically $ newTQueue
         actorTID <- forkIO $ actorThread
         listenerTID <- forkIO $ listener cc listenQueue
         atomically $ putTMVar ccTMV (cc, listenQueue, listenerTID)
         return (toAcidState $ RemoteState actor (shutdown actorTID))
| bitemyapp/apply-refact | tests/examples/Remote.hs | bsd-3-clause | 5,693 | 0 | 24 | 2,719 | 1,055 | 476 | 579 | 97 | 6 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE BangPatterns #-}
module Network.Wai.Handler.Warp.ResponseHeader (composeHeader) where
import Control.Monad
import Data.ByteString (ByteString)
import qualified Data.ByteString as S
import Data.ByteString.Internal (create)
import qualified Data.CaseInsensitive as CI
import Data.List (foldl')
import Data.Word (Word8)
import Foreign.Ptr
import GHC.Storable
import qualified Network.HTTP.Types as H
import Network.Wai.Handler.Warp.Buffer (copy)
----------------------------------------------------------------
-- | Render a complete HTTP response head (status line, headers, blank
-- line) into a single strict 'ByteString' in one allocation.
composeHeader :: H.HttpVersion -> H.Status -> H.ResponseHeaders -> IO ByteString
composeHeader !httpversion !status !responseHeaders = create len $ \ptr -> do
    ptr1 <- copyStatus ptr httpversion status
    ptr2 <- copyHeaders ptr1 responseHeaders
    void $ copyCRLF ptr2
  where
    -- Fixed overhead of 17 bytes: "HTTP/1.x " (9) + 3 status digits
    -- + 1 space + CRLF after the status line + the final CRLF.
    !len = 17 + slen + foldl' fieldLength 0 responseHeaders
    -- Each header costs key + ": " (2) + value + CRLF (2).
    fieldLength !l !(k,v) = l + S.length (CI.original k) + S.length v + 4
    !slen = S.length $ H.statusMessage status
-- Pre-rendered version prefixes for the status line (trailing space
-- included); both are exactly 9 bytes, matching the length budget in
-- 'composeHeader'.
httpVer11 :: ByteString
httpVer11 = "HTTP/1.1 "
httpVer10 :: ByteString
httpVer10 = "HTTP/1.0 "
{-# INLINE copyStatus #-}
-- | Write the status line (\"HTTP\/1.x NNN message\\r\\n\") at @ptr@ and
-- return the pointer just past it.
copyStatus :: Ptr Word8 -> H.HttpVersion -> H.Status -> IO (Ptr Word8)
copyStatus !ptr !httpversion !status = do
    ptr1 <- copy ptr httpVer
    -- three ASCII digits: hundreds (r2), tens (r1), ones (r0), then a space
    writeWord8OffPtr ptr1 0 (zero + fromIntegral r2)
    writeWord8OffPtr ptr1 1 (zero + fromIntegral r1)
    writeWord8OffPtr ptr1 2 (zero + fromIntegral r0)
    writeWord8OffPtr ptr1 3 spc
    ptr2 <- copy (ptr1 `plusPtr` 4) (H.statusMessage status)
    copyCRLF ptr2
  where
    -- anything that isn't exactly HTTP/1.1 is rendered as HTTP/1.0
    httpVer
      | httpversion == H.HttpVersion 1 1 = httpVer11
      | otherwise = httpVer10
    (q0,r0) = H.statusCode status `divMod` 10
    (q1,r1) = q0 `divMod` 10
    r2 = q1 `mod` 10
{-# INLINE copyHeaders #-}
-- | Write each header in list order, threading the destination pointer
-- through; returns the pointer just past the last header written.
copyHeaders :: Ptr Word8 -> [H.Header] -> IO (Ptr Word8)
copyHeaders !dst hdrs =
    case hdrs of
        []       -> return dst
        (h:rest) -> do
            dst' <- copyHeader dst h
            copyHeaders dst' rest
{-# INLINE copyHeader #-}
-- | Write one \"Key: value\\r\\n\" field at @ptr@ and return the pointer
-- just past it (key is rendered with its original casing).
copyHeader :: Ptr Word8 -> H.Header -> IO (Ptr Word8)
copyHeader !ptr (k,v) = do
    ptr1 <- copy ptr (CI.original k)
    writeWord8OffPtr ptr1 0 colon
    writeWord8OffPtr ptr1 1 spc
    ptr2 <- copy (ptr1 `plusPtr` 2) v
    copyCRLF ptr2
{-# INLINE copyCRLF #-}
-- | Write the two-byte CR LF terminator and return the advanced pointer.
copyCRLF :: Ptr Word8 -> IO (Ptr Word8)
copyCRLF !ptr = do
    writeWord8OffPtr ptr 0 cr
    writeWord8OffPtr ptr 1 lf
    return $! ptr `plusPtr` 2
-- ASCII code points used by the byte-level writers above.
zero :: Word8
zero = 48   -- '0'
spc :: Word8
spc = 32    -- ' '
colon :: Word8
colon = 58  -- ':'
cr :: Word8
cr = 13     -- '\r'
lf :: Word8
lf = 10     -- '\n'
| frontrowed/wai | warp/Network/Wai/Handler/Warp/ResponseHeader.hs | mit | 2,513 | 0 | 13 | 491 | 872 | 444 | 428 | 72 | 1 |
{-# LANGUAGE Trustworthy #-}
{-# LANGUAGE CPP, NoImplicitPrelude, MagicHash, UnboxedTuples #-}
-----------------------------------------------------------------------------
-- |
-- Module : Data.IORef
-- Copyright : (c) The University of Glasgow 2001
-- License : BSD-style (see the file libraries/base/LICENSE)
--
-- Maintainer : [email protected]
-- Stability : experimental
-- Portability : portable
--
-- Mutable references in the IO monad.
--
-----------------------------------------------------------------------------
module Data.IORef
(
-- * IORefs
IORef, -- abstract, instance of: Eq, Typeable
newIORef,
readIORef,
writeIORef,
modifyIORef,
modifyIORef',
atomicModifyIORef,
atomicModifyIORef',
atomicWriteIORef,
#if !defined(__PARALLEL_HASKELL__)
mkWeakIORef,
#endif
-- ** Memory Model
-- $memmodel
) where
import GHC.Base
import GHC.STRef
import GHC.IORef hiding (atomicModifyIORef)
import qualified GHC.IORef
#if !defined(__PARALLEL_HASKELL__)
import GHC.Weak
#endif
#if !defined(__PARALLEL_HASKELL__)
-- |Make a 'Weak' pointer to an 'IORef', using the second argument as a finalizer
-- to run when 'IORef' is garbage-collected
--
-- Implemented directly on the underlying @MutVar#@ (@r#@) so the weak
-- pointer is keyed on the mutable cell itself, with the 'IORef' wrapper
-- as the carried value.
mkWeakIORef :: IORef a -> IO () -> IO (Weak (IORef a))
mkWeakIORef r@(IORef (STRef r#)) f = IO $ \s ->
    case mkWeak# r# r f s of (# s1, w #) -> (# s1, Weak w #)
#endif
-- |Mutate the contents of an 'IORef'.
--
-- Be warned that 'modifyIORef' does not apply the function strictly. This
-- means if the program calls 'modifyIORef' many times, but seldom uses the
-- value, thunks will pile up in memory resulting in a space leak. This is a
-- common mistake made when using an IORef as a counter. For example, the
-- following will likely produce a stack overflow:
--
-- >ref <- newIORef 0
-- >replicateM_ 1000000 $ modifyIORef ref (+1)
-- >readIORef ref >>= print
--
-- To avoid this problem, use 'modifyIORef'' instead.
modifyIORef :: IORef a -> (a -> a) -> IO ()
modifyIORef ref f = readIORef ref >>= writeIORef ref . f
-- |Strict version of 'modifyIORef': the new value is forced (to WHNF)
-- before it is written back, so thunks cannot accumulate in the cell.
--
-- @since 4.6.0.0
modifyIORef' :: IORef a -> (a -> a) -> IO ()
modifyIORef' ref f =
    readIORef ref >>= \old -> writeIORef ref $! f old
-- |Atomically modifies the contents of an 'IORef'.
--
-- This function is useful for using 'IORef' in a safe way in a multithreaded
-- program. If you only have one 'IORef', then using 'atomicModifyIORef' to
-- access and modify it will prevent race conditions.
--
-- Extending the atomicity to multiple 'IORef's is problematic, so it
-- is recommended that if you need to do anything more complicated
-- then using 'Control.Concurrent.MVar.MVar' instead is a good idea.
--
-- 'atomicModifyIORef' does not apply the function strictly. This is important
-- to know even if all you are doing is replacing the value. For example, this
-- will leak memory:
--
-- >ref <- newIORef '1'
-- >forever $ atomicModifyIORef ref (\_ -> ('2', ()))
--
-- Use 'atomicModifyIORef'' or 'atomicWriteIORef' to avoid this problem.
--
-- (Re-exported unchanged from "GHC.IORef", which this module otherwise
-- hides.)
atomicModifyIORef :: IORef a -> (a -> (a,b)) -> IO b
atomicModifyIORef = GHC.IORef.atomicModifyIORef
-- | Strict version of 'atomicModifyIORef'. This forces both the value stored
-- in the 'IORef' as well as the value returned.
--
-- @since 4.6.0.0
atomicModifyIORef' :: IORef a -> (a -> (a,b)) -> IO b
atomicModifyIORef' ref f = do
    -- force the new cell contents (a') as part of the swap ...
    b <- atomicModifyIORef ref $ \a ->
            case f a of
                v@(a',_) -> a' `seq` v
    -- ... and force the returned value before handing it back
    b `seq` return b
-- | Variant of 'writeIORef' with the \"barrier to reordering\" property that
-- 'atomicModifyIORef' has.
--
-- Implemented as an 'atomicModifyIORef' that ignores the old value; the
-- final 'seq' on the returned unit forces the modify to have happened.
--
-- @since 4.6.0.0
atomicWriteIORef :: IORef a -> a -> IO ()
atomicWriteIORef ref a = do
    x <- atomicModifyIORef ref (\_ -> (a, ()))
    x `seq` return ()
{- $memmodel
In a concurrent program, 'IORef' operations may appear out-of-order
to another thread, depending on the memory model of the underlying
processor architecture. For example, on x86, loads can move ahead
of stores, so in the following example:
> maybePrint :: IORef Bool -> IORef Bool -> IO ()
> maybePrint myRef yourRef = do
> writeIORef myRef True
> yourVal <- readIORef yourRef
> unless yourVal $ putStrLn "critical section"
>
> main :: IO ()
> main = do
> r1 <- newIORef False
> r2 <- newIORef False
> forkIO $ maybePrint r1 r2
> forkIO $ maybePrint r2 r1
> threadDelay 1000000
it is possible that the string @"critical section"@ is printed
twice, even though there is no interleaving of the operations of the
two threads that allows that outcome. The memory model of x86
allows 'readIORef' to happen before the earlier 'writeIORef'.
The implementation is required to ensure that reordering of memory
operations cannot cause type-correct code to go wrong. In
particular, when inspecting the value read from an 'IORef', the
memory writes that created that value must have occurred from the
point of view of the current thread.
'atomicModifyIORef' acts as a barrier to reordering. Multiple
'atomicModifyIORef' operations occur in strict program order. An
'atomicModifyIORef' is never observed to take place ahead of any
earlier (in program order) 'IORef' operations, or after any later
'IORef' operations.
-}
| jtojnar/haste-compiler | libraries/ghc-7.10/base/Data/IORef.hs | bsd-3-clause | 5,368 | 0 | 14 | 1,126 | 584 | 340 | 244 | 41 | 1 |
module Q2 where
import Q
-- NOTE(review): orphan 'Show' instance for @IO a@ with an undefined body —
-- presumably a fixture exercising orphan-instance handling (verify against
-- the enclosing test); calling 'show' on it will diverge.
instance Show (IO a) where
  show = undefined
| olsner/ghc | testsuite/tests/cabal/T12733/q/Q2.hs | bsd-3-clause | 73 | 0 | 7 | 17 | 26 | 15 | 11 | 4 | 0 |
-----------------------------------------------------------------------------
-- |
-- Module : Data.AFSM.Auto
-- Copyright : (c) Hanzhong Xu, Meng Meng 2016,
-- License : MIT License
--
-- Maintainer : [email protected]
-- Stability : experimental
-- Portability : portable
-----------------------------------------------------------------------------
-- {-# LANGUAGE ExistentialQuantification #-}
-- {-# LANGUAGE FlexibleInstances, UndecidableInstances #-}
{-# LANGUAGE RankNTypes, ImpredicativeTypes, ExistentialQuantification #-}
module Data.AFSM.Auto where
import Control.Category
import Control.Arrow
import Control.Monad
-- | Control.Arrow.Transformer.Automaton
-- data Auto f a b = forall f. (Arrow f) => Auto (f a (Auto f a b, b))
-- data Auto f a b = Auto (f a (Auto f a b, b))
-- | Control.Auto
-- data Auto m a b = Auto (a -> m (Auto m a b, b))
{-
class Auto z where
build :: (a -> (z a b, b)) -> z a b
step :: (z a b) -> a -> (z a b, b)
-}
-- | A concrete automaton: a transition from input @a@ to an output @b@
-- paired with a successor automaton, whose concrete type is hidden
-- behind the existential 'Auto' constraint.
data AUTO a b = forall z. (Auto z) => AUTO (a -> (z a b, b))
-- | Types that can be stepped one input at a time, yielding the next
-- state together with an output.
class Auto z where
  step :: (z a b) -> a -> (z a b, b)
{-
instance Auto (->) where
  step f a = (f, f a)
-}
-- Stepping an 'AUTO' runs the wrapped transition, then re-wraps the
-- successor's own 'step' so the existential stays hidden.
instance Auto AUTO where
  step (AUTO t) a = (AUTO (step z), b)
    where
      (z, b) = t a
{-
instance Auto z => Category z where
id = idAuto
(.) = composeAuto
idAuto :: Auto z => z a a
idAuto = build (\a -> (idAuto, a))
composeAuto :: Auto z => z b c -> z a b -> z a c
composeAuto zbc zab = build f
where
f :: a -> (z a c, c)
f a = (composeAuto zbc' zab', c)
where
(zab', b) = step zab a
(zbc', c) = step zbc b
-}
| PseudoPower/AFSM | src/Data/AFSM/Auto.hs | mit | 1,629 | 0 | 10 | 399 | 183 | 110 | 73 | 11 | 0 |
module ReplacementExperiment where
-- | Ignore whatever argument is supplied and always yield the letter @'p'@.
replaceWithP :: b -> Char
replaceWithP _ = 'p'
-- | Sample value with three functor layers (list, 'Maybe', 'String')
-- used by the lifted variants below.
lms :: [Maybe [Char]]
lms = [Just "Ave", Nothing, Just "woohoo"]
-- Just making the argument more specific
replaceWithP' :: [Maybe [Char]] -> Char
replaceWithP' = replaceWithP
-- One 'fmap': replace each element of the outer functor.
liftedReplace :: Functor f => f a -> f Char
liftedReplace = fmap replaceWithP
-- Same function, specialised to the shape of 'lms'.
liftedReplace' :: [Maybe [Char]] -> [Char]
liftedReplace' = liftedReplace
-- Two composed 'fmap's: reach inside two functor layers.
twiceLifted :: (Functor f1, Functor f) => f (f1 a) -> f (f1 Char)
twiceLifted = (fmap . fmap) replaceWithP
twiceLifted' :: [Maybe [Char]] -> [Maybe Char]
twiceLifted' = twiceLifted
-- Three composed 'fmap's: reach all the way into the inner strings.
thriceLifted :: (Functor f2, Functor f1, Functor f) => f (f1 (f2 a)) -> f (f1 (f2 Char))
thriceLifted = (fmap . fmap . fmap) replaceWithP
thriceLifted' :: [Maybe [Char]] -> [Maybe [Char]]
thriceLifted' = thriceLifted
-- | Print each replacement variant applied to 'lms', labelled, so the
-- output shows how every extra 'fmap' pushes the substitution one
-- functor layer deeper.
--
-- Fix: the final line previously had repository-metadata text fused onto
-- it, which is not valid Haskell; the statement itself is unchanged.
main :: IO ()
main = do
  putStr "replaceWithP lms:   "
  print (replaceWithP lms)
  putStr "replaceWithP' lms:  "
  print (replaceWithP' lms)
  putStr "liftedReplace lms:  "
  print (liftedReplace lms)
  putStr "liftedReplace' lms: "
  print (liftedReplace' lms)
  putStr "twiceLifted lms:    "
  print (twiceLifted lms)
  putStr "twiceLifted' lms:   "
  print (twiceLifted' lms)
  putStr "thriceLifted lms:   "
  print (thriceLifted lms)
  putStr "thriceLifted' lms:  "
  print (thriceLifted' lms)
{-# LANGUAGE ConstraintKinds, DataKinds, DefaultSignatures, FlexibleContexts,
FlexibleInstances, MultiParamTypeClasses, OverlappingInstances,
RankNTypes, ScopedTypeVariables, TupleSections, TypeFamilies,
TypeOperators, UndecidableInstances #-}
{- |
Module : Control.Monad.Levels.Definitions
Description : Specific levels of monad transformers
Copyright : (c) Ivan Lazar Miljenovic
License : MIT
Maintainer : [email protected]
-}
module Control.Monad.Levels.Definitions where
import Control.Applicative (Applicative, WrappedMonad)
import Data.Coerce (Coercible, coerce)
import Data.Constraint ((:-) (..), Class (..), Constraint, Dict (..),
trans, weaken1, weaken2, (\\))
import Data.Constraint.Forall (Forall, inst)
import Data.Proxy (Proxy (..))
import Control.Monad.Trans.Cont (ContT (..))
import Control.Monad.Trans.Except (ExceptT (..), runExceptT)
import Control.Monad.Trans.Identity (IdentityT (..))
import Control.Monad.Trans.List (ListT (..))
import Control.Monad.Trans.Maybe (MaybeT (..))
import Control.Monad.Trans.Reader (ReaderT (..))
import qualified Control.Monad.Trans.RWS.Lazy as LRWS
import qualified Control.Monad.Trans.RWS.Strict as SRWS
import qualified Control.Monad.Trans.State.Lazy as LSt
import qualified Control.Monad.Trans.State.Strict as SSt
import qualified Control.Monad.Trans.Writer.Lazy as LW
import qualified Control.Monad.Trans.Writer.Strict as SW
import Data.Functor.Identity (Identity (..))
import Control.Arrow (first)
import Data.Monoid (Monoid, mempty)
-- -----------------------------------------------------------------------------
-- | Monads in a monadic stack.
--
-- For monads that are /not/ instances of 'MonadicLevel_' then it
-- suffices to say @instance MonadTower_ MyMonad@; for levels it is
-- required to define 'BaseMonad' (typically recursively).
--
-- You should use 'MonadTower' in any constraints rather than this
-- class. This includes when writing instances of 'MonadTower_' for
-- monadic transformers.
class (Applicative m, Monad m) => MonadTower_ m where
  -- | The monad at the bottom of the transformer stack; for
  -- non-transformer monads it defaults to the monad itself.
  type BaseMonad m :: * -> *
  type BaseMonad m = m
-- | This is 'MonadTower_' with additional sanity constraints to
-- ensure that applying 'BaseMonad' is idempotent.
type MonadTower m = (MonadTower_ m, IsBaseMonad (BaseMonad m))
-- -----------------------------------------------------------------------------
-- Plain monads are their own base (default 'BaseMonad').
instance MonadTower_ []
instance MonadTower_ Maybe
instance MonadTower_ IO
instance MonadTower_ (Either e)
instance MonadTower_ ((->) r)
instance (Monad m) => MonadTower_ (WrappedMonad m)
instance MonadTower_ Identity
-- Transformers delegate 'BaseMonad' to the monad they wrap, so the
-- type family recurses down to the bottom of the stack.
instance (MonadTower m) => MonadTower_ (ContT r m) where
  type BaseMonad (ContT r m) = BaseMonad m
instance (MonadTower m) => MonadTower_ (ExceptT e m) where
  type BaseMonad (ExceptT e m) = BaseMonad m
instance (MonadTower m) => MonadTower_ (IdentityT m) where
  type BaseMonad (IdentityT m) = BaseMonad m
instance (MonadTower m) => MonadTower_ (ListT m) where
  type BaseMonad (ListT m) = BaseMonad m
instance (MonadTower m) => MonadTower_ (MaybeT m) where
  type BaseMonad (MaybeT m) = BaseMonad m
instance (MonadTower m) => MonadTower_ (ReaderT r m) where
  type BaseMonad (ReaderT r m) = BaseMonad m
instance (Monoid w, MonadTower m) => MonadTower_ (LRWS.RWST r w s m) where
  type BaseMonad (LRWS.RWST r w s m) = BaseMonad m
instance (Monoid w, MonadTower m) => MonadTower_ (SRWS.RWST r w s m) where
  type BaseMonad (SRWS.RWST r w s m) = BaseMonad m
instance (MonadTower m) => MonadTower_ (LSt.StateT s m) where
  type BaseMonad (LSt.StateT s m) = BaseMonad m
instance (MonadTower m) => MonadTower_ (SSt.StateT s m) where
  type BaseMonad (SSt.StateT s m) = BaseMonad m
instance (Monoid w, MonadTower m) => MonadTower_ (LW.WriterT w m) where
  type BaseMonad (LW.WriterT w m) = BaseMonad m
instance (Monoid w, MonadTower m) => MonadTower_ (SW.WriterT w m) where
  type BaseMonad (SW.WriterT w m) = BaseMonad m
-- -----------------------------------------------------------------------------
-- | How to handle wrappers around existing 'MonadTower' instances.
--
-- For newtype wrappers (e.g. 'IdentityT'), it is sufficient to only
-- define 'LowerMonad'.
--
-- You should use 'MonadLevel' rather than this class in
-- constraints.
class (IsSubTowerOf (LowerMonad m) m, CanAddInternalM m) => MonadLevel_ m where
  -- | The monad this level directly wraps.
  type LowerMonad m :: * -> *
  -- | How the value is represented internally; defaults to @a@.
  type InnerValue m a :: *
  type InnerValue m a = a
  -- | An instance of 'AddInternalM'; this is defined so as to be able
  -- to make it easier to add constraints rather than solely relying
  -- upon its value within 'Unwrapper'.
  type WithLower_ m :: (* -> *) -> * -> *
  type WithLower_ m = AddIdent
  -- | Within the continuation for 'wrap' for @m a@, we can unwrap any
  -- @m b@ if @AllowOtherValues m ~ True@; otherwise, we can only
  -- unwrap @m a@. Defaults to @True@.
  type AllowOtherValues m :: Bool
  type AllowOtherValues m = True
  -- | By default, should all constraints be allowed through this
  -- level? Defaults to @True@.
  type DefaultAllowConstraints m :: Bool
  type DefaultAllowConstraints m = True
  -- | A continuation-based approach to create a value of this level.
  --
  -- A default is provided for newtype wrappers around existing
  -- 'MonadTower' instances, provided that - with the exception of
  -- 'LowerMonad' - all associated types are left as their defaults.
  wrap :: (CanUnwrap m a b) => Proxy a
          -> (Unwrapper m a (LowerMonadValue m b)) -> m b
  default wrap :: (Forall (IsCoercible m), Forall (InnerSame m)
                  , WithLower_ m ~ AddIdent, AllowOtherValues m ~ True, DefaultAllowConstraints m ~ True)
                  => Proxy a -> (Unwrapper m a (LowerMonadValue m b)) -> m b
  wrap = coerceWrap
-- | Default 'wrap' for newtype levels: the wrapping/unwrapping is a
-- zero-cost 'coerce', with the needed per-type dictionaries obtained
-- from the 'Forall' proofs via '\\'.
coerceWrap :: forall m a b. (MonadLevel_ m, Forall (IsCoercible m), Forall (InnerSame m)
              , WithLower_ m ~ AddIdent, AllowOtherValues m ~ True, DefaultAllowConstraints m ~ True)
              => Proxy a -> (Unwrapper m a (LowerMonadValue m b)) -> m b
coerceWrap _ f = pack (f unpack AddIdent)
  where
    pack :: LowerMonadValue m b -> m b
    pack = coerce \\ (inst :: Forall (InnerSame m) :- InnerSame m b)
                  \\ (inst :: Forall (IsCoercible m) :- IsCoercible m b)
    unpack :: m c -> LowerMonadValue m c
    unpack = coerceUnwrap
{-# INLINE coerceWrap #-}
-- | Companion to 'coerceWrap': unwrap any @m c@ by coercion.
coerceUnwrap :: forall m c. (MonadLevel_ m, Forall (IsCoercible m), Forall (InnerSame m))
                => m c -> LowerMonadValue m c
coerceUnwrap = coerce \\ (inst :: Forall (InnerSame m) :- InnerSame m c)
                      \\ (inst :: Forall (IsCoercible m) :- IsCoercible m c)
{-# INLINE coerceUnwrap #-}
-- | Levels whose wrapped and unwrapped representations are mutually
-- 'Coercible' (i.e. newtype-like levels).
class (MonadLevel_ m, Coercible (m a) (LowerMonadValue m a), Coercible (LowerMonadValue m a) (m a))
      => IsCoercible m a
instance (MonadLevel_ m, Coercible (m a) (LowerMonadValue m a), Coercible (LowerMonadValue m a) (m a))
      => IsCoercible m a
type CanUnwrap_ m a b = CheckOtherAllowed (AllowOtherValues m) a b
-- Resolves to no constraint when other values are allowed, and to
-- @a ~ b@ when they are not.
type family CheckOtherAllowed (allowed::Bool) a b :: Constraint where
  CheckOtherAllowed True a b = ()
  CheckOtherAllowed False a b = (a ~ b)
-- | If we're dealing overall with @m a@, then this allows us to
-- specify those @b@ values for which we can also manipulate @m b@.
--
-- If @'AllowOtherValues' m ~ False@ then we require that @a ~ b@;
-- otherwise, any @b@ is accepted.
class (MonadLevel_ m, CanUnwrap_ m a b) => CanUnwrap m a b
instance (MonadLevel_ m, CanUnwrap_ m a b) => CanUnwrap m a b
-- | Used to ensure that for all monad levels, @CanUnwrap m a a@ is
-- satisfied.
class (MonadLevel_ m, CanUnwrap m a a) => CanUnwrapSelf m a
instance (MonadLevel_ m, CanUnwrap m a a) => CanUnwrapSelf m a
instance (MonadLevel_ m) => Class (MonadLevel_ m, CanUnwrap m a a) (CanUnwrapSelf m a) where
  cls = Sub Dict
-- | Entailment proof extracting @CanUnwrap m a a@ from 'MonadLevel';
-- each 'trans' step peels one layer of the constraint tuple/Forall.
getUnwrapSelfProof :: (MonadLevel m) => MonadLevel m :- CanUnwrap m a a
getUnwrapSelfProof = trans weaken2 -- CanUnwrap
                       ( trans cls -- Undo CanUnwrapSelf
                          (trans inst -- Undo Forall
                            (trans weaken1 -- Get Forall
                               weaken2 -- Remove MonadLevel_
                            )
                          )
                       )
-- | This is 'MonadLevel_' with some additional sanity constraints.
type MonadLevel m = (MonadLevel_ m, (Forall (CanUnwrapSelf m), WithLowerC m))
-- | The value contained within the actual level (e.g. for
-- @'LSt.StateT' s m a@, this is equivalent to @m (a,s)@).
type LowerMonadValue m a = LowerMonad m (InnerValue m a)
-- | A continuation function to produce a value of type @t@.
type Unwrapper m a t = (forall b. (CanUnwrap m a b) => m b -> LowerMonadValue m b)
                       -> (WithLower m a)
                       -> t
-- | The level's 'WithLower_' helper applied to the level itself.
type WithLower m = WithLower_ m m
-- | Any extra constraint the level's 'WithLower_' helper imposes.
type WithLowerC m = AddConstraint (WithLower_ m) m
-- Shorthands for the three helper capability classes below.
type CanAddInternalM m = AddInternalM (WithLower_ m)
type CanAddInternal m = AddInternal (WithLower_ m)
type CanGetInternal m = GetInternal (WithLower_ m)
-- | Helpers that can inject a lower-monad computation into a level's
-- internal representation.
class AddInternalM (ai :: (* -> *) -> * -> *) where
  type AddConstraint ai (m :: * -> *) :: Constraint
  type AddConstraint ai m = ()
  addInternalM :: (MonadLevel m, WithLower_ m ~ ai, CanUnwrap m a b)
                  => ai m a -> LowerMonad m b
                  -> LowerMonadValue m b
-- | Helpers that can additionally inject and map over plain values.
class (AddInternalM ai) => AddInternal ai where
  addInternal :: (MonadLevel m, WithLower_ m ~ ai, CanUnwrap m a b)
                 => ai m a -> b -> InnerValue m b
  mapInternal :: (MonadLevel m, WithLower_ m ~ ai, CanUnwrap m a b, CanUnwrap m a c)
                 => ai m a -> (b -> c) -> InnerValue m b -> InnerValue m c
-- | Helpers that can also eliminate the internal representation.
class (AddInternal ai) => GetInternal ai where
  -- | This is like a lifted 'maybe' function that applies to
  -- 'InnerValue' values rather than just 'Maybe's.
  getInternal :: (MonadLevel m, WithLower_ m ~ ai, CanUnwrap m a b)
                 => ai m a -> c -> (b -> c) -> InnerValue m b -> c
-- | Used for monad transformers like 'ContT' where it is not possible
-- to manipulate the internal value without considering the monad
-- that it is within.
newtype AddIM m a = AddIM { addIMFunc :: forall b. (CanUnwrap m a b)
                                         => LowerMonad m b -> LowerMonadValue m b }
instance AddInternalM AddIM where
  addInternalM = addIMFunc
-- | In most cases you will want to use 'AddIG' instead of this; this
-- is defined for cases like 'ListT' where it may not be possible to
-- obtain either zero or one value for use with 'getInternal'.
data AddI m a = AddI { setIFunc :: forall b. (CanUnwrap m a b) => b -> InnerValue m b
                     , mapIFunc :: forall b c. (CanUnwrap m a b, CanUnwrap m a c)
                                    => (b -> c) -> InnerValue m b -> InnerValue m c
                     }
instance AddInternalM AddI where
  addInternalM ai = fmap (setIFunc ai)
-- | Obtain the self-unwrap proof from any helper value.
addIntProof :: (MonadLevel m, AddInternalM ai) => ai m a -> MonadLevel m :- CanUnwrap m a a
addIntProof _ = getUnwrapSelfProof
instance AddInternal AddI where
  addInternal = setIFunc
  mapInternal = mapIFunc
-- | Used for monad transformers where it is possible to consider the
-- 'InnerValue' in isolation. If @InnerValue m a ~ a@ then use
-- 'AddIdent' instead.
data AddIG m a = AddIG { setIUFunc :: forall b. (CanUnwrap m a b) => b -> InnerValue m b
                       , mapIUFunc :: forall b c. (CanUnwrap m a b, CanUnwrap m a c)
                                       => (b -> c) -> InnerValue m b -> InnerValue m c
                       , getIUFunc :: forall b c. (CanUnwrap m a b)
                                       => c -> (b -> c) -> InnerValue m b -> c
                       }
instance AddInternalM AddIG where
  addInternalM ai = fmap (setIUFunc ai)
instance AddInternal AddIG where
  addInternal = setIUFunc
  mapInternal = mapIUFunc
instance GetInternal AddIG where
  getInternal = getIUFunc
-- | Used for monad transformers where @'InnerValue' m a ~ a@.
data AddIdent (m :: * -> *) a = AddIdent
instance AddInternalM AddIdent where
  type AddConstraint AddIdent m = Forall (InnerSame m)
  -- The lower-monad value already has the right type; the proof just
  -- convinces GHC that @InnerValue m b ~ b@.
  addInternalM ai m = m \\ addIdentProof ai (proxyFromM m)
-- | Levels whose internal representation is the value itself.
class (MonadLevel_ m, InnerValue m a ~ a) => InnerSame m a
instance (MonadLevel_ m, InnerValue m a ~ a) => InnerSame m a
-- | Instantiate the 'Forall' proof at a particular type @b@.
addIdentProof :: AddIdent m a -> Proxy b -> Forall (InnerSame m) :- InnerSame m b
addIdentProof _ _ = inst
-- | A 'Proxy' carrying the type of the supplied value.
proxyFrom :: a -> Proxy a
proxyFrom = const Proxy

-- | A 'Proxy' carrying the element type of a monadic value.
proxyFromM :: m a -> Proxy a
proxyFromM = const Proxy

-- | A 'Proxy' carrying a function's argument type.
proxyFromF1 :: (a -> b) -> Proxy a
proxyFromF1 = const Proxy

-- | A 'Proxy' carrying a function's result type.
proxyFromF2 :: (a -> b) -> Proxy b
proxyFromF2 = const Proxy
-- Values and functions pass through untouched; the proofs discharge
-- @InnerValue m _ ~ _@ at each needed type.
instance AddInternal AddIdent where
  addInternal ai b = b \\ addIdentProof ai (proxyFrom b)
  mapInternal ai f = f \\ addIdentProof ai (proxyFromF2 f)
                       \\ addIdentProof ai (proxyFromF1 f)
instance GetInternal AddIdent where
  getInternal ai _ f lb = f lb \\ addIdentProof ai (proxyFromF1 f)
-- -----------------------------------------------------------------------------
-- ContT's "inner value" is the final result type @r@, and the
-- continuation cannot be escaped, hence AddIM and the restrictions.
instance (MonadTower m) => MonadLevel_ (ContT r m) where
  type LowerMonad (ContT r m) = m
  type InnerValue (ContT r m) a = r
  type WithLower_ (ContT r m) = AddIM
  type AllowOtherValues (ContT r m) = False
  type DefaultAllowConstraints (ContT r m) = False
  wrap _ f = ContT $ \ cont -> f (`runContT` cont) (AddIM (>>= cont))
instance (MonadTower m) => MonadLevel_ (ExceptT e m) where
  type LowerMonad (ExceptT e m) = m
  type InnerValue (ExceptT e m) a = Either e a
  type WithLower_ (ExceptT e m) = AddIG
  wrap _ f = ExceptT $ f runExceptT (AddIG Right fmap (either . const))
instance (MonadTower m) => MonadLevel_ (IdentityT m) where
  type LowerMonad (IdentityT m) = m
  -- Using default coerce-based implementation as a test.
  -- wrap _ f = IdentityT $ f runIdentityT AddIdent
instance (MonadTower m) => MonadLevel_ (ListT m) where
  type LowerMonad (ListT m) = m
  type InnerValue (ListT m) a = [a]
  type WithLower_ (ListT m) = AddI
  type DefaultAllowConstraints (ListT m) = False
  wrap _ f = ListT $ f runListT (AddI (:[]) map)
  -- Can't define getInternal as that would require length <= 1
instance (MonadTower m) => MonadLevel_ (MaybeT m) where
  type LowerMonad (MaybeT m) = m
  type InnerValue (MaybeT m) a = Maybe a
  type WithLower_ (MaybeT m) = AddIG
  wrap _ f = MaybeT $ f runMaybeT (AddIG Just fmap maybe)
instance (MonadTower m) => MonadLevel_ (ReaderT r m) where
  type LowerMonad (ReaderT r m) = m
  wrap _ f = ReaderT $ \ r -> f (`runReaderT` r) AddIdent
-- | Apply a function to the first component of a triple, leaving the
-- other two untouched.
map1 :: (a -> a') -> (a,b,c) -> (a',b,c)
map1 f (x, y, z) = (f x, y, z)
{-# INLINE map1 #-}
-- | Project the first component of a triple.
get1 :: (a,b,c) -> a
get1 (x, _, _) = x
{-# INLINE get1 #-}
-- RWST carries (value, state, log); new values get the current state
-- and an 'mempty' log, mapping/projection go via map1/get1.
instance (Monoid w, MonadTower m) => MonadLevel_ (LRWS.RWST r w s m) where
  type LowerMonad (LRWS.RWST r w s m) = m
  type InnerValue (LRWS.RWST r w s m) a = (a,s,w)
  type WithLower_ (LRWS.RWST r w s m) = AddIG
  wrap _ f = LRWS.RWST $ \ r s -> f (\m -> LRWS.runRWST m r s) (AddIG (,s,mempty) map1 (const (. get1)))
instance (Monoid w, MonadTower m) => MonadLevel_ (SRWS.RWST r w s m) where
  type LowerMonad (SRWS.RWST r w s m) = m
  type InnerValue (SRWS.RWST r w s m) a = (a,s,w)
  type WithLower_ (SRWS.RWST r w s m) = AddIG
  wrap _ f = SRWS.RWST $ \ r s -> f (\m -> SRWS.runRWST m r s) (AddIG (,s,mempty) map1 (const (. get1)))
-- StateT pairs the value with the current state.
instance (MonadTower m) => MonadLevel_ (LSt.StateT s m) where
  type LowerMonad (LSt.StateT s m) = m
  type InnerValue (LSt.StateT s m) a = (a,s)
  type WithLower_ (LSt.StateT s m) = AddIG
  wrap _ f = LSt.StateT $ \ s -> f (`LSt.runStateT` s) (AddIG (,s) first (const (. fst)))
instance (MonadTower m) => MonadLevel_ (SSt.StateT s m) where
  type LowerMonad (SSt.StateT s m) = m
  type InnerValue (SSt.StateT s m) a = (a,s)
  type WithLower_ (SSt.StateT s m) = AddIG
  wrap _ f = SSt.StateT $ \ s -> f (`SSt.runStateT` s) (AddIG (,s) first (const (. fst)))
-- WriterT pairs the value with an 'mempty' log for injected values.
instance (Monoid w, MonadTower m) => MonadLevel_ (LW.WriterT w m) where
  type LowerMonad (LW.WriterT w m) = m
  type InnerValue (LW.WriterT w m) a = (a,w)
  type WithLower_ (LW.WriterT w m) = AddIG
  wrap _ f = LW.WriterT $ f LW.runWriterT (AddIG (,mempty) first (const (. fst)))
instance (Monoid w, MonadTower m) => MonadLevel_ (SW.WriterT w m) where
  type LowerMonad (SW.WriterT w m) = m
  type InnerValue (SW.WriterT w m) a = (a,w)
  type WithLower_ (SW.WriterT w m) = AddIG
  wrap _ f = SW.WriterT $ f SW.runWriterT (AddIG (,mempty) first (const (. fst)))
-- -----------------------------------------------------------------------------
-- | Monads that sit at the bottom of a tower: applying 'BaseMonad' is
-- the identity (stated in both directions for the solver's benefit).
class (MonadTower_ m, m ~ BaseMonad m, BaseMonad m ~ m) => IsBaseMonad m
instance (MonadTower_ m, m ~ BaseMonad m, BaseMonad m ~ m) => IsBaseMonad m
-- Closed type family: type-level equality test on monads.
type family SameMonad (m :: * -> *) (n :: * -> *) where
  SameMonad m m = True
  SameMonad m n = False
-- -----------------------------------------------------------------------------
-- | When considering whether a particular monad within a 'MonadTower'
-- stack satisfies a constraint, we need to be able to determine
-- this at the type level.
--
-- This is achieved with the 'ConstraintSatisfied' associated type:
-- it should be equated to a closed type family with the result
-- being 'True' for all monads for which the constraint is satisfied
-- and 'False' for all others.
--
-- (This is defined as a type class rather than just a type family
-- so that we can explicitly state that this needs to be defined.)
class ValidConstraint (c :: (* -> *) -> Constraint) where
  type ConstraintSatisfied c (m :: * -> *) :: Bool
instance ValidConstraint IsBaseMonad where
  type ConstraintSatisfied IsBaseMonad m = SameMonad (BaseMonad m) m
-- -----------------------------------------------------------------------------
-- | @IsSubTowerOf s m@ denotes that @s@ is a part of the @m@
-- @MonadTower@.
class (MonadTower_ s, MonadTower_ m, BaseMonad s ~ BaseMonad m) => IsSubTowerOf s m
-- Reflexive case, plus the inductive case that walks down one level.
instance (MonadTower m) => IsSubTowerOf m m
instance (MonadTower s, MonadLevel_ m, IsSubTowerOf s (LowerMonad m)) => IsSubTowerOf s m
instance (MonadTower s) => ValidConstraint (IsSubTowerOf s) where
  type ConstraintSatisfied (IsSubTowerOf s) m = SameMonad s m
| ivan-m/monad-levels | Control/Monad/Levels/Definitions.hs | mit | 18,679 | 0 | 14 | 4,534 | 5,617 | 3,037 | 2,580 | -1 | -1 |
{-|
Description : Parse string into parse tree
Parsec applicative style.
-}
module Uroboro.Parser
(
-- * Parsing Uroboro
parseFile
, parseExpression
, parse
-- * Individual parsers
, parseDef
, parseExp
, Parser
, pq
) where
import Control.Applicative ((<*>), (*>))
import Control.Monad (liftM)
import Text.Parsec hiding (parse)
import Text.Parsec.Error (errorMessages, showErrorMessages)
import Uroboro.Error
import Uroboro.Token
import Uroboro.Tree
(
PExp(..)
, PP(..)
, PQ(..)
, PT(..)
, PTCon(..)
, PTDes(..)
, PTRule(..)
, Type(..)
)
-- | Parse a complete source file into its list of definitions.
parseFile :: FilePath -> String -> Either Error [PT]
parseFile path source = parse parseDef path source
-- | Parse a single expression.
parseExpression :: FilePath -> String -> Either Error PExp
parseExpression path source = parse parseExp path source
-- |Parse a parenthesised, comma-separated argument list "(p, ...)".
args :: Parser a -> Parser [a]
args = parens . commaSep
-- |Apply a list of functions to a start value, rightmost function first:
-- @fold x [f, g] == f (g x)@, and @fold x [] == x@.
--
-- This is exactly a right fold of function application, so it is
-- expressed with 'foldr' instead of hand-rolled recursion (same
-- behaviour, standard idiom).
fold :: a -> [a -> a] -> a
fold x fs = foldr ($) x fs
-- |Variant of 'liftM' that also records the current source location:
-- grab the location first, then run the parser, and combine the two
-- results with @make@.
liftLoc :: (Location -> a -> b) -> Parser a -> Parser b
liftLoc make parser = getLocation >>= \loc -> liftM (make loc) parser
-- |Parse "a.name(b, ...)...": a head parser @a@ followed by one or more
-- dot-separated selectors, each an identifier with an argument list,
-- combined with @make@.
dotNotation :: (Location -> String -> [b] -> a -> a) -> Parser a -> Parser b -> Parser a
dotNotation make a b = liftM fold_ a <*> (dot *> sepBy1 name dot)
  where
    -- One ".name(args)" selector, partially applied: it still awaits the
    -- value it selects from.
    name = liftLoc make identifier <*> args b
    -- Selectors are parsed left-to-right, but 'fold' applies its list
    -- right-to-left, so reverse first to keep the source order.
    fold_ x l = fold x (reverse l) -- TODO make fold into foldr.
-- |Parse expression: destructor chain, application, or variable.
pexp :: Parser PExp
pexp = choice [des, app, var] <?> "expression"
  where
    -- Destructor chain; wrapped in 'try' so that a failed dot-notation
    -- parse backtracks and the plain alternatives below get a chance.
    des = try $ dotNotation PDes (app <|> var <?> "function or variable") pexp
    -- Function application "f(e, ...)"; 'try' because it shares its
    -- identifier prefix with a bare variable.
    app = try $ liftLoc PApp identifier <*> args pexp
    var = liftLoc PVar identifier
-- |Parse exactly one expression (the whole input must be consumed).
parseExp :: Parser PExp
parseExp = exactly pexp
-- |Parse pattern: constructor pattern or variable pattern.
pp :: Parser PP
pp = choice [con, var] <?> "pattern"
  where
    -- Constructor pattern "c(p, ...)"; 'try' because it shares its
    -- identifier prefix with a bare variable pattern.
    con = try $ liftLoc PPCon identifier <*> args pp
    var = liftLoc PPVar identifier
-- |Parse copattern: either a destructor chain on an application, or a
-- bare application.
pq :: Parser PQ
pq = choice [des, app] <?> "copattern"
  where
    -- "f(p, ...).d(p, ...)..."; 'try' so that the absence of a dot
    -- backtracks to the plain application case.
    des = try $ dotNotation PQDes (app <?> "function") pp
    app = liftLoc PQApp identifier <*> args pp
-- |Parse whole file: any number of data, codata, and function
-- definitions, consuming all input.
parseDef :: Parser [PT]
parseDef = exactly $ many (choice [pos, neg, fun])
  where
    -- data T where <constructor signatures>
    pos = definition "data" PTPos <*> where1 con
    -- codata T where <destructor signatures>
    neg = definition "codata" PTNeg <*> where1 des
    -- function f(T, ...): T where <rules>
    fun = liftLoc PTFun (reserved "function" *> identifier) <*>
        args typ <*> (colon *> typ) <*> where1 rul
    -- Constructor signature "c(T, ...): T".
    con = liftLoc (flip3 PTCon) identifier <*> args typ <*> (colon *> typ)
    -- Destructor signature "T.d(T, ...): T".
    des = liftLoc (flip4 PTDes) typ <*>
        (dot *> identifier) <*> args typ <*> (colon *> typ)
    -- Rewrite rule "lhs = rhs".
    rul = liftLoc PTRule pq <*> (symbol "=" *> pexp)

-- |Parse a type name.
typ :: Parser Type
typ = liftM Type identifier

-- Reorder parsed components into the argument order the tree
-- constructors expect (location stays first).
flip3 f loc a b c = f loc c a b
flip4 f loc a b c d = f loc d b c a

-- |Parse a "data T"/"codata T" definition header.
definition :: String -> (Location -> Type -> a) -> Parser a
definition kind make = liftLoc make (reserved kind *> typ)

-- |Parse "where" followed by items.  Despite the name, zero items are
-- accepted (it uses 'many', not @many1@).
where1 :: Parser a -> Parser [a]
where1 a = reserved "where" *> many a
-- | Convert a Parsec 'SourcePos' into our custom 'Location' type.
convertLocation :: SourcePos -> Location
convertLocation pos =
  MakeLocation (sourceName pos) (sourceLine pos) (sourceColumn pos)
-- | Convert a Parsec 'ParseError' into our custom 'Error' type,
-- rendering the error messages with Parsec's standard English phrasing.
convertError :: ParseError -> Error
convertError err =
  let loc  = convertLocation (errorPos err)
      msgs = showErrorMessages
               "or" "unknown parse error" "expecting"
               "unexpected" "end of input"
               (errorMessages err)
  in MakeError loc msgs
| lordxist/uroboro | src/Uroboro/Parser.hs | mit | 3,798 | 0 | 13 | 952 | 1,238 | 653 | 585 | 89 | 1 |
-- N-Point Crossover
-- http://www.codewars.com/kata/57339a5226196a7f90001bcf
module Kata.NPointCrossover where
import Data.List (nub, sort)
import Data.Tuple (swap)
import Control.Arrow ((***))
-- | Perform an n-point crossover of two parent sequences.  The crossover
-- points are sorted and de-duplicated first; the segments between
-- consecutive points are then taken alternately from each parent,
-- starting with the original orientation.
crossover :: [Int] -> [a] -> [a] -> ([a],[a])
crossover ns xs ys = unzip (go (nub (sort ns)) 0 (zip xs ys))
  where
    -- Walk the remaining cut points; 'parity' records whether the current
    -- segment keeps the pairs as-is (even) or swapped (odd).
    go [] parity pairs = map (orient parity) pairs
    go (n:rest) parity pairs =
      let (before, after) = splitAt n pairs
          -- Remaining cut points were measured from the start of the full
          -- list, so shift them back by the prefix just consumed.
          shifted = map (subtract n) rest
      in map (orient parity) before ++ go shifted (succ parity) after
    orient parity = if even parity then id else swap
| gafiatulin/codewars | src/6 kyu/NPointCrossover.hs | mit | 494 | 0 | 17 | 115 | 259 | 146 | 113 | 9 | 2 |
module Thirty where
import Control.Exception
-- | Thrown when an argument is not divisible by three; carries the
-- offending value.
data NotDivThree =
  NotDivThree Int
  deriving (Eq, Show)

instance Exception NotDivThree
-- | Thrown when an argument is odd; carries the offending value.
data NotEven =
  NotEven Int
  deriving (Eq, Show)

instance Exception NotEven
-- | Return the argument unchanged when it is both even and divisible by
-- three; otherwise throw the matching exception.  The divisibility-by-
-- three check runs first, so a value failing both (e.g. 5) raises
-- 'NotDivThree', not 'NotEven'.
evenAndThreeDiv :: Int -> IO Int
evenAndThreeDiv i =
  if rem i 3 /= 0
    then throwIO (NotDivThree i)
    else if odd i
      then throwIO (NotEven i)
      else return i
-- | 'catch' specialised to 'NotDivThree', so call sites need no type
-- annotation on the handler.
catchNotDivThree :: IO Int
                 -> (NotDivThree -> IO Int)
                 -> IO Int
catchNotDivThree action handler = action `catch` handler
-- | 'catch' specialised to 'NotEven', so call sites need no type
-- annotation on the handler.
catchNotEven :: IO Int
             -> (NotEven -> IO Int)
             -> IO Int
catchNotEven action handler = action `catch` handler
| mudphone/HaskellBook | src/Thirty.hs | mit | 598 | 0 | 9 | 169 | 208 | 102 | 106 | 23 | 1 |
{-# LANGUAGE OverloadedStrings #-}
module Moz.Linkscape.URLMetrics
( URLMetrics(..)
, URLMetricCol(..)
, sumUrlMetricCols
) where
import Data.Aeson (FromJSON(..), Value(..), (.:?))
import Control.Monad (mzero)
-- | The URL-metric columns the Moz Linkscape API can return.  Each
-- constructor corresponds to one bit flag in the API's column bitmask
-- (see 'toBitFlag' / 'sumUrlMetricCols').
data URLMetricCol = Title
                  | CanoncialURL  -- NOTE(review): misspelling of "Canonical", kept for compatibility.
                  | Subdomain
                  | RootDomain
                  | ExternalEquityLinks
                  | SubdomainExternalLinks
                  | RootDomainExternalLinks
                  | EquityLinks
                  | SubdomainsLinking
                  | RootDomainsLinking
                  | Links
                  | SubdomainSubdomainsLinking
                  | RootDomainRootDomainsLinking
                  | MozRankURL
                  | MozRankSubdomain
                  | MozRankRootDomain
                  | MozTrust
                  | MozTrustSubdomain
                  | MozTrustRootDomain
                  | MozRankExternalEquity
                  | MozRankSubdomainExternalEquity
                  | MozRankRootDomainExternalEquity
                  | MozRankSubdomainCombined
                  | MozRankRootDomainCombined
                  | SubdomainSpamScore
                  | Social
                  | HTTPStatusCode
                  | LinksToSubdomain
                  | LinksToRootDomain
                  | RootDomainsLinkingToSubdomain
                  | PageAuthority
                  | DomainAuthority
                  | ExternalLinks
                  | ExternalLinksToSubdomain
                  | ExternalLinksToRootDomain
                  | LinkingCBlocks
                  | TimeLastCrawled
                  deriving (Enum, Eq, Show)
-- | Convert a list of URL metrics into a bit mask representing their sum.
--
-- Callers should pass each column at most once: this is a plain sum, not
-- a bitwise OR, so a repeated column would be counted twice and corrupt
-- the mask.
sumUrlMetricCols :: [URLMetricCol] -> Int
sumUrlMetricCols = sum . map toBitFlag
-- | The bit flag for one metric column in the API's column bitmask.
--
-- This looks as though it could be derived by bit-shifting from the
-- constructor's position in the 'Enum' instance, but the flag values
-- have gaps (e.g. 2 and 268435456 are unused) and the first one
-- ('Title') does not fit the pattern, so the mapping is spelled out
-- explicitly.
toBitFlag :: URLMetricCol -> Int
toBitFlag col = case col of
  Title                           -> 1
  CanoncialURL                    -> 4
  Subdomain                       -> 8
  RootDomain                      -> 16
  ExternalEquityLinks             -> 32
  SubdomainExternalLinks          -> 64
  RootDomainExternalLinks         -> 128
  EquityLinks                     -> 256
  SubdomainsLinking               -> 512
  RootDomainsLinking              -> 1024
  Links                           -> 2048
  SubdomainSubdomainsLinking      -> 4096
  RootDomainRootDomainsLinking    -> 8192
  MozRankURL                      -> 16384
  MozRankSubdomain                -> 32768
  MozRankRootDomain               -> 65536
  MozTrust                        -> 131072
  MozTrustSubdomain               -> 262144
  MozTrustRootDomain              -> 524288
  MozRankExternalEquity           -> 1048576
  MozRankSubdomainExternalEquity  -> 2097152
  MozRankRootDomainExternalEquity -> 4194304
  MozRankSubdomainCombined        -> 8388608
  MozRankRootDomainCombined       -> 16777216
  SubdomainSpamScore              -> 67108864
  Social                          -> 134217728
  HTTPStatusCode                  -> 536870912
  LinksToSubdomain                -> 4294967296
  LinksToRootDomain               -> 8589934592
  RootDomainsLinkingToSubdomain   -> 17179869184
  PageAuthority                   -> 34359738368
  DomainAuthority                 -> 68719476736
  ExternalLinks                   -> 549755813888
  ExternalLinksToSubdomain        -> 140737488355328
  ExternalLinksToRootDomain       -> 2251799813685248
  LinkingCBlocks                  -> 36028797018963968
  TimeLastCrawled                 -> 144115188075855872
-- | One record of URL metrics from the Moz Linkscape API.  Every field
-- is 'Maybe' because the response contains only the columns requested
-- via the bitmask (see 'sumUrlMetricCols'); the comment on each field is
-- the abbreviated JSON key it is decoded from (cf. the 'FromJSON'
-- instance).
-- NOTE(review): 'externalLinksToRootDoamin' and 'timeLastCrawed' are
-- misspelled but kept as-is — renaming would break callers.  The
-- pjp/fjp key comments on the *Combined fields look swapped relative to
-- the f-/p- prefix convention used elsewhere — verify against the API.
data URLMetrics = URLMetrics { title :: Maybe String -- ut
                             , canonicalURL :: Maybe String -- uu
                             , subdomain :: Maybe String -- ufq
                             , rootDomain :: Maybe String -- upl
                             , externalEquityLinks :: Maybe Int -- ueid
                             , subdomainExternalLinks :: Maybe Int -- feid
                             , rootDomainExternalLinks :: Maybe Int -- peid
                             , equityLinks :: Maybe Int -- ujid
                             , subdomainsLinking :: Maybe Int -- uifq
                             , rootDomainsLinking :: Maybe Int -- uipl
                             , links :: Maybe Int -- uid
                             , subdomainSubdomainsLinking :: Maybe Int -- fid
                             , rootDomainRootDomainsLinking :: Maybe Int -- pid
                             , mozRankURLNormalized :: Maybe Double -- umrp
                             , mozRankURLRaw :: Maybe Double -- umrr
                             , mozRankSubdomainNormalized :: Maybe Double -- fmrp
                             , mozRankSubdomainRaw :: Maybe Double -- fmrr
                             , mozRankRootDomainNormalized :: Maybe Double -- pmrp
                             , mozRankRootDomainRaw :: Maybe Double -- pmrr
                             , mozTrustNormalized :: Maybe Double -- utrp
                             , mozTrustRaw :: Maybe Double -- utrr
                             , mozTrustSubdomainNormalized :: Maybe Double -- ftrp
                             , mozTrustRootDomainNormalized :: Maybe Double -- ptrp
                             , mozTrustRootDomainRaw :: Maybe Double -- ptrr
                             , mozRankExternalEquityNormalized :: Maybe Double -- uemrp
                             , mozRankExternalEquityRaw :: Maybe Double -- uemrr
                             , mozRankSubdomainExternalEquityNormalized :: Maybe Double -- fejp
                             , mozRankSubdomainExternalEquityRaw :: Maybe Double -- fejr
                             , mozRankRootDomainExternalEquityNormalized :: Maybe Double -- pejp
                             , mozRankRootDomainExternalEquityRaw :: Maybe Double -- pejr
                             , mozRankSubdomainCombinedNormalized :: Maybe Double -- pjp
                             , mozRankSubdomainCombinedRaw :: Maybe Double -- pjr
                             , mozRankRootDomainCombinedNormalized :: Maybe Double -- fjp
                             , mozRankRootDomainCombinedRaw :: Maybe Double -- fjr
                             , subdomainSpamScoreSubdomain :: Maybe Int -- fspsc
                             , subdomainSpamScoreFlags :: Maybe Int -- fspf
                             , subdomainSpamScoreLanguage :: Maybe String -- flan
                             , subdomainSpamScoreCrawlStatusCode :: Maybe Int -- fsps
                             , subdomainSpamScoreLastCrawled :: Maybe Int -- fsplc
                             , subdomainSpamScorePagesCrawled :: Maybe [String] -- fspp
                             , socialFacebookAccount :: Maybe String -- ffb
                             , socialTwitterHandle :: Maybe String -- ftw
                             , socialGooglePlusAccount :: Maybe String -- fg+
                             , socialEmailAddress :: Maybe String -- fem
                             , httpStatusCode :: Maybe String -- us
                             , linksToSubdomain :: Maybe Int -- fuid
                             , linksToRootDomain :: Maybe Int -- puid
                             , rootDomainsLinkingToSubdomain :: Maybe Int -- fipl
                             , pageAuthority :: Maybe Double -- upa
                             , domainAuthority :: Maybe Double -- pda
                             , externalLinks :: Maybe Int -- ued
                             , externalLinksToSubdomain :: Maybe Int -- fed
                             , externalLinksToRootDoamin :: Maybe Int -- ped
                             , linkingCBlocks :: Maybe Int -- pib
                             , timeLastCrawed :: Maybe Int -- ulc
                             } deriving (Show)
-- NOTE(review): the (.:?) lookups below are applied positionally, so
-- their order must match the field order of the 'URLMetrics' record
-- exactly; adding or reordering a record field requires the same change
-- here.  Each key is optional — missing keys decode to 'Nothing'.
instance FromJSON URLMetrics where
  parseJSON (Object v) =
    URLMetrics <$> v .:? "ut"
               <*> v .:? "uu"
               <*> v .:? "ufq"
               <*> v .:? "upl"
               <*> v .:? "ueid"
               <*> v .:? "feid"
               <*> v .:? "peid"
               <*> v .:? "ujid"
               <*> v .:? "uifq"
               <*> v .:? "uipl"
               <*> v .:? "uid"
               <*> v .:? "fid"
               <*> v .:? "pid"
               <*> v .:? "umrp"
               <*> v .:? "umrr"
               <*> v .:? "fmrp"
               <*> v .:? "fmrr"
               <*> v .:? "pmrp"
               <*> v .:? "pmrr"
               <*> v .:? "utrp"
               <*> v .:? "utrr"
               <*> v .:? "ftrp"
               <*> v .:? "ptrp"
               <*> v .:? "ptrr"
               <*> v .:? "uemrp"
               <*> v .:? "uemrr"
               <*> v .:? "fejp"
               <*> v .:? "fejr"
               <*> v .:? "pejp"
               <*> v .:? "pejr"
               <*> v .:? "pjp"
               <*> v .:? "pjr"
               <*> v .:? "fjp"
               <*> v .:? "fjr"
               <*> v .:? "fspsc"
               <*> v .:? "fspf"
               <*> v .:? "flan"
               <*> v .:? "fsps"
               <*> v .:? "fsplc"
               <*> v .:? "fspp"
               <*> v .:? "ffb"
               <*> v .:? "ftw"
               <*> v .:? "fg+"
               <*> v .:? "fem"
               <*> v .:? "us"
               <*> v .:? "fuid"
               <*> v .:? "puid"
               <*> v .:? "fipl"
               <*> v .:? "upa"
               <*> v .:? "pda"
               <*> v .:? "ued"
               <*> v .:? "fed"
               <*> v .:? "ped"
               <*> v .:? "pib"
               <*> v .:? "ulc"
  -- Any non-object JSON value fails the parse.
  parseJSON _ = mzero
| ags/hs-moz | src/Moz/Linkscape/URLMetrics.hs | mit | 10,120 | 0 | 115 | 4,554 | 1,573 | 878 | 695 | 199 | 1 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.