code (stringlengths 5..1.03M) | repo_name (stringlengths 5..90) | path (stringlengths 4..158) | license (stringclasses, 15 values) | size (int64, 5..1.03M) | n_ast_errors (int64, 0..53.9k) | ast_max_depth (int64, 2..4.17k) | n_whitespaces (int64, 0..365k) | n_ast_nodes (int64, 3..317k) | n_ast_terminals (int64, 1..171k) | n_ast_nonterminals (int64, 1..146k) | loc (int64, -1..37.3k) | cycloplexity (int64, -1..1.31k)
---|---|---|---|---|---|---|---|---|---|---|---|---|
{-# OPTIONS_GHC -Wall #-}
module File.Watcher where
import Control.Concurrent (forkIO, threadDelay)
import Control.Concurrent.Chan (Chan, newChan, readChan, writeChan)
import Control.Monad (void)
import qualified Data.Map as Map
import qualified Data.Set as Set
import Data.Time.Clock (UTCTime)
import qualified System.FSNotify as Notify
import qualified Elm.Project as Project
import Elm.Project (Project)
-- 'Module.Raw' and 'Interface' are assumed to come from the elm-compiler package
-- (e.g. Elm.Compiler.Module); adjust these imports to the actual module names.
import qualified Elm.Compiler.Module as Module
import Elm.Compiler.Module (Interface)
-- GRAPH
data Graph =
Graph
{ _elm :: Map.Map Module.Raw Node
, _js :: Map.Map Module.Raw FilePath
}
data Node =
Node
{ _path :: FilePath
, _time :: UTCTime
, _needs :: Set.Set Module.Raw
, _blocks :: Set.Set Module.Raw
, _iface :: Maybe (UTCTime, Interface)
}
-- ABC
action :: Notify.Event -> IO ()
action event =
case event of
Notify.Added path time ->
return ()
Notify.Modified path time ->
return ()
Notify.Removed path time ->
return ()
watcher :: Project -> IO ()
watcher project =
let
srcDir =
Project.toSourceDir project
action event =
case event of
Notify.Added _ _ ->
return ()
Notify.Modified _ _ ->
  return ()
Notify.Removed _ _ ->
  return ()
in
do killer <- newChan
mapM_ (watcherHelp killer action) [srcDir]
-- The kill channel is assumed to carry plain () signals.
watcherHelp :: Chan () -> Notify.Action -> FilePath -> IO ()
watcherHelp killer action dir =
void $ forkIO $ Notify.withManager $ \manager ->
do stop <- Notify.watchTree manager dir (const True) action
_ <- readChan killer
stop
| evancz/builder | src/File/Watcher.hs | bsd-3-clause | 1,394 | 7 | 15 | 379 | 450 | 235 | 215 | -1 | -1 |
{-# LANGUAGE LambdaCase, TupleSections #-}
module Transformations.Optimising.EvaluatedCaseElimination where
import Data.Functor.Foldable as Foldable
import Grin.Grin
evaluatedCaseElimination :: Exp -> Exp
evaluatedCaseElimination = ana builder where
builder :: Exp -> ExpF Exp
builder = \case
ECase val alts | all (altBodyEQ $ SReturn val) alts -> SReturnF val
exp -> project exp
altBodyEQ :: Exp -> Alt -> Bool
altBodyEQ exp (Alt _cpat body) = exp == body
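-- A tiny before/after sketch of the rewrite (GRIN-like pseudocode in a comment;
-- the tag and constructor spellings below are illustrative, not taken from Grin.Grin):
--
--   case v of                        ==>   return v
--     (CNil)       -> return v
--     (CCons x xs) -> return v
--
-- When every alternative's body is exactly 'SReturn val' for the scrutinised value,
-- the anamorphism replaces the whole ECase node with that single SReturnF.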
| andorp/grin | grin/src/Transformations/Optimising/EvaluatedCaseElimination.hs | bsd-3-clause | 476 | 0 | 16 | 85 | 138 | 72 | 66 | 12 | 2 |
-- | Unit test that seeds a 3D grid with a few points, computes the
-- Euclidean distance transform of that grid, then checks a few points
-- to see if the distance transformed grid agrees with an exhaustive
-- nearest-neighbor search.
module Main (main) where
import qualified Data.Vector.Unboxed as V
import qualified Data.Vector.Unboxed.Mutable as VM
import Test.Framework (defaultMain)
import Test.Framework.Providers.HUnit (testCase)
import Test.HUnit (assert)
import DistanceTransform.Euclidean
testRes :: Int
testRes = 64
-- A 3D point.
data Point = Point !Int !Int !Int deriving Show
-- Linearize a 3D point into a row-major index into the flattened grid vector.
pointToI :: Point -> Int
pointToI (Point x y z) = z * testRes * testRes + y * testRes + x
distance :: Point -> Point -> Float
distance (Point x1 y1 z1) (Point x2 y2 z2) = sqrt . fromIntegral $
dx*dx + dy*dy + dz*dz
where dx = x2 - x1
dy = y2 - y1
dz = z2 - z1
-- Build a flat testRes^3 occupancy grid: 1 everywhere, 0 at the seed points.
mkGrid :: [Point] -> V.Vector Int
mkGrid pts = V.create $ do v <- VM.replicate (testRes^(3::Int)) 1
mapM_ (flip (VM.write v) 0 . pointToI) pts
return v
main :: IO ()
main = defaultMain $ map (testPoint g1) probes ++ map (testPoint g2) probes
where probes = [ Point 48 32 32
, Point 32 54 35
, Point 0 62 54
, Point 35 35 35 ]
pts = Point mid mid mid :
[Point x y z | x <- [0,hi], y <- [0,hi], z <- [0,hi]]
mid = testRes `quot` 2
hi = testRes - 1
rawGrid = mkGrid pts
g1 = edt (replicate 3 testRes) rawGrid
g2 = edtPar (replicate 3 testRes) $ rawGrid
testPoint g probe = let x = minimum $ map (distance probe) pts
y = g V.! pointToI probe
in testCase ("Probing "++show probe)
(assert (abs (x - y) < 0.0001))
| acowley/DistanceTransform | src/tests/Main.hs | bsd-3-clause | 1,907 | 0 | 16 | 647 | 652 | 346 | 306 | 45 | 1 |
{-# LANGUAGE TypeFamilies, Rank2Types #-}
-----------------------------------------------------------------------------
-- |
-- Module : System.Mem.StableName.Dynamic
-- Copyright : (c) Edward Kmett 2010
-- License : BSD3
-- Maintainer : [email protected]
-- Stability : experimental
-- Portability : GHC only
--
-- Dynamic stable names are a way of performing fast (O(1)), not-quite-exact comparison between objects.
--
-- Dynamic stable names solve the following problem: suppose you want to build a hash table with Haskell objects as keys, but you want to use pointer equality for comparison; maybe because the keys are large and hashing would be slow, or perhaps because the keys are infinite in size. We can't build a hash table using the address of the object as the key, because objects get moved around by the garbage collector, meaning a re-hash would be necessary after every garbage collection.
-----------------------------------------------------------------------------
module System.Mem.StableName.Dynamic
( DynamicStableName(..)
, hashDynamicStableName
, makeDynamicStableName
, wrapStableName
) where
import GHC.Prim
import System.Mem.StableName (StableName, makeStableName, hashStableName)
import Unsafe.Coerce (unsafeCoerce)
{-|
An abstract name for an object, that supports equality and hashing.
Dynamic stable names have the following property:
* If @sn1 :: DynamicStableName@ and @sn2 :: DynamicStableName@ and @sn1 == sn2@
then @sn1@ and @sn2@ were created by calls to @makeStableName@ on
the same object.
The reverse is not necessarily true: if two dynamic stable names are not
equal, then the objects they name may still be equal. Note in particular
that `makeDynamicStableName` may return a different `DynamicStableName`
after an object is evaluated.
Dynamic Stable Names are similar to Stable Pointers ("Foreign.StablePtr"),
but differ in the following ways:
* There is no @freeDynamicStableName@ operation, unlike "Foreign.StablePtr"s.
Dynamic Stable Names are reclaimed by the runtime system when they are no
longer needed.
* There is no @deRefDynamicStableName@ operation. You can\'t get back from
a dynamic stable name to the original Haskell object. The reason for
this is that the existence of a stable name for an object does not
guarantee the existence of the object itself; it can still be garbage
collected.
-}
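{- A minimal usage sketch (illustrative only; 'x' stands for any value already in scope):

> do sn1 <- makeDynamicStableName x
>    sn2 <- makeDynamicStableName x
>    print (sn1 == sn2, hashDynamicStableName sn1)

The two names compare equal when both calls observed the same object; as noted above,
evaluating 'x' in between may cause 'makeDynamicStableName' to return a different name.
-}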
newtype DynamicStableName = DynamicStableName (StableName Any)
-- | Makes a 'DynamicStableName' for an arbitrary object. The object passed as
-- the first argument is not evaluated by 'makeDynamicStableName'.
makeDynamicStableName :: t -> IO DynamicStableName
makeDynamicStableName a = do
s <- makeStableName a
return (wrapStableName s)
-- | Convert a 'DynamicStableName' to an 'Int'. The 'Int' returned is not
-- necessarily unique; several 'DynamicStableName's may map to the same 'Int'
-- (in practice however, the chances of this are small, so the result
-- of 'hashDynamicStableName' makes a good hash key).
hashDynamicStableName :: DynamicStableName -> Int
hashDynamicStableName (DynamicStableName sn) = hashStableName sn
instance Eq DynamicStableName where
DynamicStableName sn1 == DynamicStableName sn2 = sn1 == sn2
wrapStableName :: StableName a -> DynamicStableName
wrapStableName s = DynamicStableName (unsafeCoerce s)
| FranklinChen/stable-maps | System/Mem/StableName/Dynamic.hs | bsd-3-clause | 3,405 | 0 | 9 | 587 | 219 | 126 | 93 | 20 | 1 |
module Database.Persist.SqlBackend.Internal.IsolationLevel where
import Data.String (IsString(..))
-- | Please refer to the documentation for the database in question for a full
-- overview of the semantics of the varying isolation levels.
data IsolationLevel = ReadUncommitted
| ReadCommitted
| RepeatableRead
| Serializable
deriving (Show, Eq, Enum, Ord, Bounded)
makeIsolationLevelStatement :: (Monoid s, IsString s) => IsolationLevel -> s
makeIsolationLevelStatement l = "SET TRANSACTION ISOLATION LEVEL " <> case l of
ReadUncommitted -> "READ UNCOMMITTED"
ReadCommitted -> "READ COMMITTED"
RepeatableRead -> "REPEATABLE READ"
Serializable -> "SERIALIZABLE"
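-- A usage sketch: any type with both Monoid and IsString instances works for the
-- result; plain String is shown here.
--
-- >>> makeIsolationLevelStatement Serializable :: String
-- "SET TRANSACTION ISOLATION LEVEL SERIALIZABLE"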
| paul-rouse/persistent | persistent/Database/Persist/SqlBackend/Internal/IsolationLevel.hs | mit | 760 | 0 | 8 | 183 | 128 | 73 | 55 | 13 | 4 |
{-# LANGUAGE DeriveDataTypeable #-}
{- |
Module : ./CommonLogic/Morphism.hs
Description : Morphism of Common Logic
Copyright : (c) Uni Bremen DFKI 2010
License : GPLv2 or higher, see LICENSE.txt
Maintainer : [email protected]
Stability : experimental
Portability : non-portable (via Logic.Logic)
Morphism of Common Logic
-}
module CommonLogic.Morphism
( Morphism (..)
, pretty -- pretty printing
, idMor -- identity morphism
, isLegalMorphism -- check if morphism is ok
, composeMor -- composition
, inclusionMap -- inclusion map
, mkMorphism -- create Morphism
, mapSentence -- map of sentences
, applyMap -- application function for maps
, applyMorphism -- application function for morphism
, morphismUnion
) where
import qualified Data.Map as Map
import qualified Data.Set as Set
import qualified Common.Result as Result
import Common.Id as Id
import Common.Result
import Common.Doc
import Common.DocUtils
import CommonLogic.AS_CommonLogic as AS
import CommonLogic.Sign as Sign
import Control.Monad (unless)
import Data.Data
-- maps of sets
data Morphism = Morphism
{ source :: Sign
, target :: Sign
, propMap :: Map.Map Id Id
} deriving (Eq, Ord, Show, Typeable)
instance Pretty Morphism where
pretty = printMorphism
-- | Constructs an id-morphism
idMor :: Sign -> Morphism
idMor a = inclusionMap a a
-- | Determines whether a morphism is valid
isLegalMorphism :: Morphism -> Result ()
isLegalMorphism pmor =
let psource = allItems $ source pmor
ptarget = allItems $ target pmor
pdom = Map.keysSet $ propMap pmor
pcodom = Set.map (applyMorphism pmor) psource
in unless (Set.isSubsetOf pcodom ptarget && Set.isSubsetOf pdom psource) $
fail "illegal CommonLogic morphism"
-- | Application function for morphisms
applyMorphism :: Morphism -> Id -> Id
applyMorphism mor idt = Map.findWithDefault idt idt $ propMap mor
-- | Application function for propMaps
applyMap :: Map.Map Id Id -> Id -> Id
applyMap pmap idt = Map.findWithDefault idt idt pmap
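-- A small illustration of the fallback behaviour (assuming a helper such as
-- 'stringToId' from Common.Id to build identifiers):
--
--   applyMap (Map.fromList [(stringToId "P", stringToId "Q")]) (stringToId "P")  ~> Q
--   applyMap Map.empty (stringToId "P")                                          ~> P
--
-- Identifiers without an entry in the map are returned unchanged.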
-- | Composition of morphisms in Common Logic
composeMor :: Morphism -> Morphism -> Result Morphism
composeMor f g =
let fSource = source f
gTarget = target g
fMap = propMap f
gMap = propMap g
in return Morphism
{ source = fSource
, target = gTarget
, propMap = if Map.null gMap then fMap else
Set.fold ( \ i -> let j = applyMap gMap (applyMap fMap i) in
if i == j then id else Map.insert i j)
Map.empty $ allItems fSource }
-- | Pretty printing for Morphisms
printMorphism :: Morphism -> Doc
printMorphism m = pretty (source m) <> text "-->" <> pretty (target m)
<> vcat (map ( \ (x, y) -> lparen <> pretty x <> text ","
<> pretty y <> rparen) $ Map.assocs $ propMap m)
-- | Inclusion map of a subsig into a supersig
inclusionMap :: Sign.Sign -> Sign.Sign -> Morphism
inclusionMap s1 s2 = Morphism
{ source = s1
, target = s2
, propMap = Map.empty }
-- | creates a Morphism
mkMorphism :: Sign.Sign -> Sign.Sign -> Map.Map Id Id -> Morphism
mkMorphism s t p =
Morphism { source = s
, target = t
, propMap = p }
{- | sentence (text) translation along signature morphism
here just the renaming of formulae -}
mapSentence :: Morphism -> AS.TEXT_META -> Result.Result AS.TEXT_META
mapSentence mor tm =
return $ tm { getText = mapSen_txt mor $ getText tm }
-- propagates the translation to sentences
mapSen_txt :: Morphism -> AS.TEXT -> AS.TEXT
mapSen_txt mor txt = case txt of
AS.Text phrs r -> AS.Text (map (mapSen_phr mor) phrs) r
AS.Named_text n t r -> AS.Named_text n (mapSen_txt mor t) r
-- propagates the translation to sentences
mapSen_phr :: Morphism -> AS.PHRASE -> AS.PHRASE
mapSen_phr mor phr = case phr of
AS.Module m -> AS.Module $ mapSen_mod mor m
AS.Sentence s -> AS.Sentence $ mapSen_sen mor s
AS.Comment_text c t r -> AS.Comment_text c (mapSen_txt mor t) r
x -> x
-- propagates the translation to sentences
mapSen_mod :: Morphism -> AS.MODULE -> AS.MODULE
mapSen_mod mor m = case m of
AS.Mod n t rn -> AS.Mod n (mapSen_txt mor t) rn
AS.Mod_ex n exs t rn -> AS.Mod_ex n exs (mapSen_txt mor t) rn
mapSen_sen :: Morphism -> AS.SENTENCE -> AS.SENTENCE
mapSen_sen mor frm = case frm of
AS.Quant_sent q vs is rn ->
AS.Quant_sent q (map (mapSen_nos mor) vs) (mapSen_sen mor is) rn
AS.Bool_sent bs rn -> AS.Bool_sent (case bs of
AS.Junction j sens -> AS.Junction j (map (mapSen_sen mor) sens)
AS.Negation sen -> AS.Negation (mapSen_sen mor sen)
AS.BinOp op s1 s2 ->
AS.BinOp op (mapSen_sen mor s1) (mapSen_sen mor s2)
) rn
AS.Atom_sent atom rn -> AS.Atom_sent (case atom of
AS.Equation t1 t2 -> AS.Equation (mapSen_trm mor t1) (mapSen_trm mor t2)
AS.Atom t tss -> AS.Atom (mapSen_trm mor t) (map (mapSen_trmSeq mor) tss)
) rn
AS.Comment_sent cm sen rn -> AS.Comment_sent cm (mapSen_sen mor sen) rn
AS.Irregular_sent sen rn -> AS.Irregular_sent (mapSen_sen mor sen) rn
mapSen_trm :: Morphism -> AS.TERM -> AS.TERM
mapSen_trm mor trm = case trm of
AS.Name_term n -> AS.Name_term (mapSen_tok mor n)
AS.Funct_term t ts rn ->
AS.Funct_term (mapSen_trm mor t) (map (mapSen_trmSeq mor) ts) rn
AS.Comment_term t c rn -> AS.Comment_term (mapSen_trm mor t) c rn
AS.That_term s rn -> AS.That_term (mapSen_sen mor s) rn
mapSen_nos :: Morphism -> AS.NAME_OR_SEQMARK -> AS.NAME_OR_SEQMARK
mapSen_nos mor nos = case nos of
AS.Name n -> AS.Name (mapSen_tok mor n)
AS.SeqMark s -> AS.SeqMark (mapSen_tok mor s)
mapSen_trmSeq :: Morphism -> AS.TERM_SEQ -> AS.TERM_SEQ
mapSen_trmSeq mor ts = case ts of
AS.Term_seq t -> AS.Term_seq (mapSen_trm mor t)
AS.Seq_marks s -> AS.Seq_marks (mapSen_tok mor s)
mapSen_tok :: Morphism -> Id.Token -> Id.Token
mapSen_tok mor tok = Id.idToSimpleId $ applyMorphism mor $ Id.simpleIdToId tok
-- | Union of two morphisms.
morphismUnion :: Morphism -> Morphism -> Result.Result Morphism
morphismUnion mor1 mor2 =
let pmap1 = propMap mor1
pmap2 = propMap mor2
p1 = source mor1
p2 = source mor2
up1 = Set.difference (allItems p1) $ Map.keysSet pmap1
up2 = Set.difference (allItems p2) $ Map.keysSet pmap2
(pds, pmap) = foldr ( \ (i, j) (ds, m) -> case Map.lookup i m of
Nothing -> (ds, Map.insert i j m)
Just k -> if j == k then (ds, m) else
(Diag Error
("incompatible mapping of prop " ++ showId i " to "
++ showId j " and " ++ showId k "")
nullRange : ds, m))
([], pmap1)
(Map.toList pmap2 ++ map (\ a -> (a, a))
(Set.toList $ Set.union up1 up2))
in if null pds then return Morphism
{ source = unite p1 p2
, target = unite (target mor1) $ target mor2
, propMap = pmap } else Result pds Nothing
| spechub/Hets | CommonLogic/Morphism.hs | gpl-2.0 | 7,064 | 0 | 23 | 1,770 | 2,287 | 1,169 | 1,118 | 144 | 8 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- Module : Network.AWS.CodeDeploy.ListDeploymentInstances
-- Copyright : (c) 2013-2014 Brendan Hay <[email protected]>
-- License : This Source Code Form is subject to the terms of
-- the Mozilla Public License, v. 2.0.
-- A copy of the MPL can be found in the LICENSE file or
-- you can obtain it at http://mozilla.org/MPL/2.0/.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : experimental
-- Portability : non-portable (GHC extensions)
--
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- | Lists the instances for a deployment associated with the applicable IAM user
-- or AWS account.
--
-- <http://docs.aws.amazon.com/codedeploy/latest/APIReference/API_ListDeploymentInstances.html>
module Network.AWS.CodeDeploy.ListDeploymentInstances
(
-- * Request
ListDeploymentInstances
-- ** Request constructor
, listDeploymentInstances
-- ** Request lenses
, ldiDeploymentId
, ldiInstanceStatusFilter
, ldiNextToken
-- * Response
, ListDeploymentInstancesResponse
-- ** Response constructor
, listDeploymentInstancesResponse
-- ** Response lenses
, ldirInstancesList
, ldirNextToken
) where
import Network.AWS.Data (Object)
import Network.AWS.Prelude
import Network.AWS.Request.JSON
import Network.AWS.CodeDeploy.Types
import qualified GHC.Exts
data ListDeploymentInstances = ListDeploymentInstances
{ _ldiDeploymentId :: Text
, _ldiInstanceStatusFilter :: List "instanceStatusFilter" InstanceStatus
, _ldiNextToken :: Maybe Text
} deriving (Eq, Read, Show)
-- | 'ListDeploymentInstances' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'ldiDeploymentId' @::@ 'Text'
--
-- * 'ldiInstanceStatusFilter' @::@ ['InstanceStatus']
--
-- * 'ldiNextToken' @::@ 'Maybe' 'Text'
--
listDeploymentInstances :: Text -- ^ 'ldiDeploymentId'
-> ListDeploymentInstances
listDeploymentInstances p1 = ListDeploymentInstances
{ _ldiDeploymentId = p1
, _ldiNextToken = Nothing
, _ldiInstanceStatusFilter = mempty
}
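-- A request-construction sketch (hypothetical deployment ID and token; '&' and '?~'
-- are the usual Control.Lens operators these generated lenses are designed for):
--
--   listDeploymentInstances "d-ABC123" & ldiNextToken ?~ "nextPageToken"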
-- | The unique ID of a deployment.
ldiDeploymentId :: Lens' ListDeploymentInstances Text
ldiDeploymentId = lens _ldiDeploymentId (\s a -> s { _ldiDeploymentId = a })
-- | A subset of instances to list, by status:
--
-- Pending: Include in the resulting list those instances with pending
-- deployments. InProgress: Include in the resulting list those instances with
-- in-progress deployments. Succeeded: Include in the resulting list those
-- instances with succeeded deployments. Failed: Include in the resulting list
-- those instances with failed deployments. Skipped: Include in the resulting
-- list those instances with skipped deployments. Unknown: Include in the
-- resulting list those instances with deployments in an unknown state.
ldiInstanceStatusFilter :: Lens' ListDeploymentInstances [InstanceStatus]
ldiInstanceStatusFilter =
lens _ldiInstanceStatusFilter (\s a -> s { _ldiInstanceStatusFilter = a })
. _List
-- | An identifier that was returned from the previous list deployment instances
-- call, which can be used to return the next set of deployment instances in the
-- list.
ldiNextToken :: Lens' ListDeploymentInstances (Maybe Text)
ldiNextToken = lens _ldiNextToken (\s a -> s { _ldiNextToken = a })
data ListDeploymentInstancesResponse = ListDeploymentInstancesResponse
{ _ldirInstancesList :: List "instancesList" Text
, _ldirNextToken :: Maybe Text
} deriving (Eq, Ord, Read, Show)
-- | 'ListDeploymentInstancesResponse' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'ldirInstancesList' @::@ ['Text']
--
-- * 'ldirNextToken' @::@ 'Maybe' 'Text'
--
listDeploymentInstancesResponse :: ListDeploymentInstancesResponse
listDeploymentInstancesResponse = ListDeploymentInstancesResponse
{ _ldirInstancesList = mempty
, _ldirNextToken = Nothing
}
-- | A list of instances IDs.
ldirInstancesList :: Lens' ListDeploymentInstancesResponse [Text]
ldirInstancesList =
lens _ldirInstancesList (\s a -> s { _ldirInstancesList = a })
. _List
-- | If the amount of information that is returned is significantly large, an
-- identifier will also be returned, which can be used in a subsequent list
-- deployment instances call to return the next set of deployment instances in
-- the list.
ldirNextToken :: Lens' ListDeploymentInstancesResponse (Maybe Text)
ldirNextToken = lens _ldirNextToken (\s a -> s { _ldirNextToken = a })
instance ToPath ListDeploymentInstances where
toPath = const "/"
instance ToQuery ListDeploymentInstances where
toQuery = const mempty
instance ToHeaders ListDeploymentInstances
instance ToJSON ListDeploymentInstances where
toJSON ListDeploymentInstances{..} = object
[ "deploymentId" .= _ldiDeploymentId
, "nextToken" .= _ldiNextToken
, "instanceStatusFilter" .= _ldiInstanceStatusFilter
]
instance AWSRequest ListDeploymentInstances where
type Sv ListDeploymentInstances = CodeDeploy
type Rs ListDeploymentInstances = ListDeploymentInstancesResponse
request = post "ListDeploymentInstances"
response = jsonResponse
instance FromJSON ListDeploymentInstancesResponse where
parseJSON = withObject "ListDeploymentInstancesResponse" $ \o -> ListDeploymentInstancesResponse
<$> o .:? "instancesList" .!= mempty
<*> o .:? "nextToken"
| romanb/amazonka | amazonka-codedeploy/gen/Network/AWS/CodeDeploy/ListDeploymentInstances.hs | mpl-2.0 | 6,030 | 0 | 12 | 1,202 | 686 | 415 | 271 | 78 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- Module : Network.AWS.CloudHSM.DeleteHsm
-- Copyright : (c) 2013-2014 Brendan Hay <[email protected]>
-- License : This Source Code Form is subject to the terms of
-- the Mozilla Public License, v. 2.0.
-- A copy of the MPL can be found in the LICENSE file or
-- you can obtain it at http://mozilla.org/MPL/2.0/.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : experimental
-- Portability : non-portable (GHC extensions)
--
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- | Deletes an HSM. Once complete, this operation cannot be undone and your key
-- material cannot be recovered.
--
-- <http://docs.aws.amazon.com/cloudhsm/latest/dg/API_DeleteHsm.html>
module Network.AWS.CloudHSM.DeleteHsm
(
-- * Request
DeleteHsm
-- ** Request constructor
, deleteHsm
-- ** Request lenses
, dhHsmArn
-- * Response
, DeleteHsmResponse
-- ** Response constructor
, deleteHsmResponse
-- ** Response lenses
, dhr1Status
) where
import Network.AWS.Data (Object)
import Network.AWS.Prelude
import Network.AWS.Request.JSON
import Network.AWS.CloudHSM.Types
import qualified GHC.Exts
newtype DeleteHsm = DeleteHsm
{ _dhHsmArn :: Text
} deriving (Eq, Ord, Read, Show, Monoid, IsString)
-- | 'DeleteHsm' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'dhHsmArn' @::@ 'Text'
--
deleteHsm :: Text -- ^ 'dhHsmArn'
-> DeleteHsm
deleteHsm p1 = DeleteHsm
{ _dhHsmArn = p1
}
-- | The ARN of the HSM to delete.
dhHsmArn :: Lens' DeleteHsm Text
dhHsmArn = lens _dhHsmArn (\s a -> s { _dhHsmArn = a })
newtype DeleteHsmResponse = DeleteHsmResponse
{ _dhr1Status :: Text
} deriving (Eq, Ord, Read, Show, Monoid, IsString)
-- | 'DeleteHsmResponse' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'dhr1Status' @::@ 'Text'
--
deleteHsmResponse :: Text -- ^ 'dhr1Status'
-> DeleteHsmResponse
deleteHsmResponse p1 = DeleteHsmResponse
{ _dhr1Status = p1
}
-- | The status of the action.
dhr1Status :: Lens' DeleteHsmResponse Text
dhr1Status = lens _dhr1Status (\s a -> s { _dhr1Status = a })
instance ToPath DeleteHsm where
toPath = const "/"
instance ToQuery DeleteHsm where
toQuery = const mempty
instance ToHeaders DeleteHsm
instance ToJSON DeleteHsm where
toJSON DeleteHsm{..} = object
[ "HsmArn" .= _dhHsmArn
]
instance AWSRequest DeleteHsm where
type Sv DeleteHsm = CloudHSM
type Rs DeleteHsm = DeleteHsmResponse
request = post "DeleteHsm"
response = jsonResponse
instance FromJSON DeleteHsmResponse where
parseJSON = withObject "DeleteHsmResponse" $ \o -> DeleteHsmResponse
<$> o .: "Status"
| kim/amazonka | amazonka-cloudhsm/gen/Network/AWS/CloudHSM/DeleteHsm.hs | mpl-2.0 | 3,285 | 0 | 9 | 777 | 461 | 280 | 181 | 57 | 1 |
module Main where
import HEP.Kinematics.TwoBody
import Control.Applicative (liftA2)
import Control.Monad (replicateM)
import System.Random.MWC
main :: IO ()
main = do
rs <- createSystemRandom >>= genRandoms 10
let twobodies = map (mkTwoBodyEvent 250 (0, 0, 125, 91)) rs
mapM_ print (twobodies :: [Maybe TwoBodyEvent])
where
genRandoms :: Int -> GenIO -> IO [(Double, Double)]
genRandoms nev gen =
replicateM nev (liftA2 (,) (uniform gen) (uniform gen))
| cbpark/hep-kinematics | examples/twobody.hs | bsd-3-clause | 498 | 0 | 13 | 110 | 186 | 100 | 86 | 13 | 1 |
{-
(c) The University of Glasgow 2006
(c) The GRASP/AQUA Project, Glasgow University, 1992-1998
TcMatches: Typecheck some @Matches@
-}
{-# LANGUAGE CPP #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE MultiWayIf #-}
{-# LANGUAGE TupleSections #-}
{-# LANGUAGE FlexibleContexts #-}
module TcMatches ( tcMatchesFun, tcGRHS, tcGRHSsPat, tcMatchesCase, tcMatchLambda,
TcMatchCtxt(..), TcStmtChecker, TcExprStmtChecker, TcCmdStmtChecker,
tcStmts, tcStmtsAndThen, tcDoStmts, tcBody,
tcDoStmt, tcGuardStmt
) where
import {-# SOURCE #-} TcExpr( tcSyntaxOp, tcInferSigmaNC, tcInferSigma
, tcCheckId, tcMonoExpr, tcMonoExprNC, tcPolyExpr )
import BasicTypes ( LexicalFixity(..) )
import HsSyn
import TcRnMonad
import TcEnv
import TcPat
import TcMType
import TcType
import TcBinds
import TcUnify
import Name
import TysWiredIn
import Id
import TyCon
import TysPrim
import TcEvidence
import Outputable
import Util
import SrcLoc
import DynFlags
import PrelNames (monadFailClassName)
import qualified GHC.LanguageExtensions as LangExt
-- Create chunkified tuple types for monad comprehensions
import MkCore
import Control.Monad
import Control.Arrow ( second )
#include "HsVersions.h"
{-
************************************************************************
* *
\subsection{tcMatchesFun, tcMatchesCase}
* *
************************************************************************
@tcMatchesFun@ typechecks a @[Match]@ list which occurs in a
@FunMonoBind@. The second argument is the name of the function, which
is used in error messages. It checks that all the equations have the
same number of arguments before using @tcMatches@ to do the work.
Note [Polymorphic expected type for tcMatchesFun]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
tcMatchesFun may be given a *sigma* (polymorphic) type
so it must be prepared to use tcSkolemise to skolemise it.
See Note [sig_tau may be polymorphic] in TcPat.
-}
tcMatchesFun :: Located Name
-> MatchGroup Name (LHsExpr Name)
-> ExpRhoType -- Expected type of function
-> TcM (HsWrapper, MatchGroup TcId (LHsExpr TcId))
-- Returns type of body
tcMatchesFun fn@(L _ fun_name) matches exp_ty
= do { -- Check that they all have the same no of arguments
-- Location is in the monad, set the caller so that
-- any inter-equation error messages get some vaguely
-- sensible location. Note: we have to do this odd
-- ann-grabbing, because we don't always have annotations in
-- hand when we call tcMatchesFun...
traceTc "tcMatchesFun" (ppr fun_name $$ ppr exp_ty)
; checkArgs fun_name matches
; (wrap_gen, (wrap_fun, group))
<- tcSkolemiseET (FunSigCtxt fun_name True) exp_ty $ \ exp_rho ->
-- Note [Polymorphic expected type for tcMatchesFun]
do { (matches', wrap_fun)
<- matchExpectedFunTys herald arity exp_rho $
\ pat_tys rhs_ty ->
tcMatches match_ctxt pat_tys rhs_ty matches
; return (wrap_fun, matches') }
; return (wrap_gen <.> wrap_fun, group) }
where
arity = matchGroupArity matches
herald = text "The equation(s) for"
<+> quotes (ppr fun_name) <+> text "have"
match_ctxt = MC { mc_what = FunRhs fn Prefix, mc_body = tcBody }
{-
@tcMatchesCase@ doesn't do the argument-count check because the
parser guarantees that each equation has exactly one argument.
-}
tcMatchesCase :: (Outputable (body Name)) =>
TcMatchCtxt body -- Case context
-> TcSigmaType -- Type of scrutinee
-> MatchGroup Name (Located (body Name)) -- The case alternatives
-> ExpRhoType -- Type of whole case expressions
-> TcM (MatchGroup TcId (Located (body TcId)))
-- Translated alternatives
-- wrapper goes from MatchGroup's ty to expected ty
tcMatchesCase ctxt scrut_ty matches res_ty
= tcMatches ctxt [mkCheckExpType scrut_ty] res_ty matches
tcMatchLambda :: SDoc -- see Note [Herald for matchExpectedFunTys] in TcUnify
-> TcMatchCtxt HsExpr
-> MatchGroup Name (LHsExpr Name)
-> ExpRhoType -- deeply skolemised
-> TcM (MatchGroup TcId (LHsExpr TcId), HsWrapper)
tcMatchLambda herald match_ctxt match res_ty
= matchExpectedFunTys herald n_pats res_ty $ \ pat_tys rhs_ty ->
tcMatches match_ctxt pat_tys rhs_ty match
where
n_pats | isEmptyMatchGroup match = 1 -- must be lambda-case
| otherwise = matchGroupArity match
-- @tcGRHSsPat@ typechecks @[GRHSs]@ that occur in a @PatMonoBind@.
tcGRHSsPat :: GRHSs Name (LHsExpr Name) -> TcRhoType
-> TcM (GRHSs TcId (LHsExpr TcId))
-- Used for pattern bindings
tcGRHSsPat grhss res_ty = tcGRHSs match_ctxt grhss (mkCheckExpType res_ty)
where
match_ctxt = MC { mc_what = PatBindRhs,
mc_body = tcBody }
{-
************************************************************************
* *
\subsection{tcMatch}
* *
************************************************************************
Note [Case branches must never infer a non-tau type]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider
case ... of
... -> \(x :: forall a. a -> a) -> x
... -> \y -> y
Should that type-check? The problem is that, if we check the second branch
first, then we'll get a type (b -> b) for the branches, which won't unify
with the polytype in the first branch. If we check the first branch first,
then everything is OK. This order-dependency is terrible. So we want only
proper tau-types in branches (unless a sigma-type is pushed down).
This is what expTypeToType ensures: it replaces an Infer with a fresh
tau-type.
An even trickier case looks like
f x True = x undefined
f x False = x ()
Here, we see that the arguments must also be non-Infer. Thus, we must
use expTypeToType on the output of matchExpectedFunTys, not the input.
But we make a special case for a one-branch case. This is so that
f = \(x :: forall a. a -> a) -> x
still gets assigned a polytype.
-}
-- | When the MatchGroup has multiple RHSs, convert an Infer ExpType in the
-- expected type into TauTvs.
-- See Note [Case branches must never infer a non-tau type]
tauifyMultipleMatches :: [LMatch id body]
-> [ExpType] -> TcM [ExpType]
tauifyMultipleMatches group exp_tys
| isSingletonMatchGroup group = return exp_tys
| otherwise = mapM tauifyExpType exp_tys
-- NB: In the empty-match case, this ensures we fill in the ExpType
-- | Type-check a MatchGroup.
tcMatches :: (Outputable (body Name)) => TcMatchCtxt body
-> [ExpSigmaType] -- Expected pattern types
-> ExpRhoType -- Expected result-type of the Match.
-> MatchGroup Name (Located (body Name))
-> TcM (MatchGroup TcId (Located (body TcId)))
data TcMatchCtxt body -- c.f. TcStmtCtxt, also in this module
= MC { mc_what :: HsMatchContext Name, -- What kind of thing this is
mc_body :: Located (body Name) -- Type checker for a body of
-- an alternative
-> ExpRhoType
-> TcM (Located (body TcId)) }
tcMatches ctxt pat_tys rhs_ty (MG { mg_alts = L l matches
, mg_origin = origin })
= do { rhs_ty:pat_tys <- tauifyMultipleMatches matches (rhs_ty:pat_tys)
-- See Note [Case branches must never infer a non-tau type]
; matches' <- mapM (tcMatch ctxt pat_tys rhs_ty) matches
; pat_tys <- mapM readExpType pat_tys
; rhs_ty <- readExpType rhs_ty
; return (MG { mg_alts = L l matches'
, mg_arg_tys = pat_tys
, mg_res_ty = rhs_ty
, mg_origin = origin }) }
-------------
tcMatch :: (Outputable (body Name)) => TcMatchCtxt body
-> [ExpSigmaType] -- Expected pattern types
-> ExpRhoType -- Expected result-type of the Match.
-> LMatch Name (Located (body Name))
-> TcM (LMatch TcId (Located (body TcId)))
tcMatch ctxt pat_tys rhs_ty match
= wrapLocM (tc_match ctxt pat_tys rhs_ty) match
where
tc_match ctxt pat_tys rhs_ty match@(Match _ pats maybe_rhs_sig grhss)
= add_match_ctxt match $
do { (pats', grhss') <- tcPats (mc_what ctxt) pats pat_tys $
tc_grhss ctxt maybe_rhs_sig grhss rhs_ty
; return (Match (mc_what ctxt) pats' Nothing grhss') }
tc_grhss ctxt Nothing grhss rhs_ty
= tcGRHSs ctxt grhss rhs_ty -- No result signature
-- Result type sigs are no longer supported
tc_grhss _ (Just {}) _ _
= panic "tc_ghrss" -- Rejected by renamer
-- For (\x -> e), tcExpr has already said "In the expression \x->e"
-- so we don't want to add "In the lambda abstraction \x->e"
add_match_ctxt match thing_inside
= case mc_what ctxt of
LambdaExpr -> thing_inside
_ -> addErrCtxt (pprMatchInCtxt match) thing_inside
-------------
tcGRHSs :: TcMatchCtxt body -> GRHSs Name (Located (body Name)) -> ExpRhoType
-> TcM (GRHSs TcId (Located (body TcId)))
-- Notice that we pass in the full res_ty, so that we get
-- good inference from simple things like
-- f = \(x::forall a.a->a) -> <stuff>
-- We used to force it to be a monotype when there was more than one guard
-- but we don't need to do that any more
tcGRHSs ctxt (GRHSs grhss (L l binds)) res_ty
= do { (binds', grhss')
<- tcLocalBinds binds $
mapM (wrapLocM (tcGRHS ctxt res_ty)) grhss
; return (GRHSs grhss' (L l binds')) }
-------------
tcGRHS :: TcMatchCtxt body -> ExpRhoType -> GRHS Name (Located (body Name))
-> TcM (GRHS TcId (Located (body TcId)))
tcGRHS ctxt res_ty (GRHS guards rhs)
= do { (guards', rhs')
<- tcStmtsAndThen stmt_ctxt tcGuardStmt guards res_ty $
mc_body ctxt rhs
; return (GRHS guards' rhs') }
where
stmt_ctxt = PatGuard (mc_what ctxt)
{-
************************************************************************
* *
\subsection{@tcDoStmts@ typechecks a {\em list} of do statements}
* *
************************************************************************
-}
tcDoStmts :: HsStmtContext Name
-> Located [LStmt Name (LHsExpr Name)]
-> ExpRhoType
-> TcM (HsExpr TcId) -- Returns a HsDo
tcDoStmts ListComp (L l stmts) res_ty
= do { res_ty <- expTypeToType res_ty
; (co, elt_ty) <- matchExpectedListTy res_ty
; let list_ty = mkListTy elt_ty
; stmts' <- tcStmts ListComp (tcLcStmt listTyCon) stmts
(mkCheckExpType elt_ty)
; return $ mkHsWrapCo co (HsDo ListComp (L l stmts') list_ty) }
tcDoStmts PArrComp (L l stmts) res_ty
= do { res_ty <- expTypeToType res_ty
; (co, elt_ty) <- matchExpectedPArrTy res_ty
; let parr_ty = mkPArrTy elt_ty
; stmts' <- tcStmts PArrComp (tcLcStmt parrTyCon) stmts
(mkCheckExpType elt_ty)
; return $ mkHsWrapCo co (HsDo PArrComp (L l stmts') parr_ty) }
tcDoStmts DoExpr (L l stmts) res_ty
= do { stmts' <- tcStmts DoExpr tcDoStmt stmts res_ty
; res_ty <- readExpType res_ty
; return (HsDo DoExpr (L l stmts') res_ty) }
tcDoStmts MDoExpr (L l stmts) res_ty
= do { stmts' <- tcStmts MDoExpr tcDoStmt stmts res_ty
; res_ty <- readExpType res_ty
; return (HsDo MDoExpr (L l stmts') res_ty) }
tcDoStmts MonadComp (L l stmts) res_ty
= do { stmts' <- tcStmts MonadComp tcMcStmt stmts res_ty
; res_ty <- readExpType res_ty
; return (HsDo MonadComp (L l stmts') res_ty) }
tcDoStmts ctxt _ _ = pprPanic "tcDoStmts" (pprStmtContext ctxt)
tcBody :: LHsExpr Name -> ExpRhoType -> TcM (LHsExpr TcId)
tcBody body res_ty
= do { traceTc "tcBody" (ppr res_ty)
; tcMonoExpr body res_ty
}
{-
************************************************************************
* *
\subsection{tcStmts}
* *
************************************************************************
-}
type TcExprStmtChecker = TcStmtChecker HsExpr ExpRhoType
type TcCmdStmtChecker = TcStmtChecker HsCmd TcRhoType
type TcStmtChecker body rho_type
= forall thing. HsStmtContext Name
-> Stmt Name (Located (body Name))
-> rho_type -- Result type for comprehension
-> (rho_type -> TcM thing) -- Checker for what follows the stmt
-> TcM (Stmt TcId (Located (body TcId)), thing)
tcStmts :: (Outputable (body Name)) => HsStmtContext Name
-> TcStmtChecker body rho_type -- NB: higher-rank type
-> [LStmt Name (Located (body Name))]
-> rho_type
-> TcM [LStmt TcId (Located (body TcId))]
tcStmts ctxt stmt_chk stmts res_ty
= do { (stmts', _) <- tcStmtsAndThen ctxt stmt_chk stmts res_ty $
const (return ())
; return stmts' }
tcStmtsAndThen :: (Outputable (body Name)) => HsStmtContext Name
-> TcStmtChecker body rho_type -- NB: higher-rank type
-> [LStmt Name (Located (body Name))]
-> rho_type
-> (rho_type -> TcM thing)
-> TcM ([LStmt TcId (Located (body TcId))], thing)
-- Note the higher-rank type. stmt_chk is applied at different
-- types in the equations for tcStmts
tcStmtsAndThen _ _ [] res_ty thing_inside
= do { thing <- thing_inside res_ty
; return ([], thing) }
-- LetStmts are handled uniformly, regardless of context
tcStmtsAndThen ctxt stmt_chk (L loc (LetStmt (L l binds)) : stmts)
res_ty thing_inside
= do { (binds', (stmts',thing)) <- tcLocalBinds binds $
tcStmtsAndThen ctxt stmt_chk stmts res_ty thing_inside
; return (L loc (LetStmt (L l binds')) : stmts', thing) }
-- Don't set the error context for an ApplicativeStmt. It ought to be
-- possible to do this with a popErrCtxt in the tcStmt case for
-- ApplicativeStmt, but it did something strange and broke a test (ado002).
tcStmtsAndThen ctxt stmt_chk (L loc stmt : stmts) res_ty thing_inside
| ApplicativeStmt{} <- stmt
= do { (stmt', (stmts', thing)) <-
stmt_chk ctxt stmt res_ty $ \ res_ty' ->
tcStmtsAndThen ctxt stmt_chk stmts res_ty' $
thing_inside
; return (L loc stmt' : stmts', thing) }
-- For the vanilla case, handle the location-setting part
| otherwise
= do { (stmt', (stmts', thing)) <-
setSrcSpan loc $
addErrCtxt (pprStmtInCtxt ctxt stmt) $
stmt_chk ctxt stmt res_ty $ \ res_ty' ->
popErrCtxt $
tcStmtsAndThen ctxt stmt_chk stmts res_ty' $
thing_inside
; return (L loc stmt' : stmts', thing) }
---------------------------------------------------
-- Pattern guards
---------------------------------------------------
tcGuardStmt :: TcExprStmtChecker
tcGuardStmt _ (BodyStmt guard _ _ _) res_ty thing_inside
= do { guard' <- tcMonoExpr guard (mkCheckExpType boolTy)
; thing <- thing_inside res_ty
; return (BodyStmt guard' noSyntaxExpr noSyntaxExpr boolTy, thing) }
tcGuardStmt ctxt (BindStmt pat rhs _ _ _) res_ty thing_inside
= do { (rhs', rhs_ty) <- tcInferSigmaNC rhs
-- Stmt has a context already
; (pat', thing) <- tcPat_O (StmtCtxt ctxt) (exprCtOrigin (unLoc rhs))
pat (mkCheckExpType rhs_ty) $
thing_inside res_ty
; return (mkTcBindStmt pat' rhs', thing) }
tcGuardStmt _ stmt _ _
= pprPanic "tcGuardStmt: unexpected Stmt" (ppr stmt)
---------------------------------------------------
-- List comprehensions and PArrays
-- (no rebindable syntax)
---------------------------------------------------
-- Dealt with separately, rather than by tcMcStmt, because
-- a) PArr isn't (yet) an instance of Monad, so the generality seems overkill
-- b) We have special desugaring rules for list comprehensions,
-- which avoid creating intermediate lists. They in turn
-- assume that the bind/return operations are the regular
-- polymorphic ones, and in particular don't have any
-- coercion matching stuff in them. It's hard to avoid the
-- potential for non-trivial coercions in tcMcStmt
tcLcStmt :: TyCon -- The list/Parray type constructor ([] or PArray)
-> TcExprStmtChecker
tcLcStmt _ _ (LastStmt body noret _) elt_ty thing_inside
= do { body' <- tcMonoExprNC body elt_ty
; thing <- thing_inside (panic "tcLcStmt: thing_inside")
; return (LastStmt body' noret noSyntaxExpr, thing) }
-- A generator, pat <- rhs
tcLcStmt m_tc ctxt (BindStmt pat rhs _ _ _) elt_ty thing_inside
= do { pat_ty <- newFlexiTyVarTy liftedTypeKind
; rhs' <- tcMonoExpr rhs (mkCheckExpType $ mkTyConApp m_tc [pat_ty])
; (pat', thing) <- tcPat (StmtCtxt ctxt) pat (mkCheckExpType pat_ty) $
thing_inside elt_ty
; return (mkTcBindStmt pat' rhs', thing) }
-- A boolean guard
tcLcStmt _ _ (BodyStmt rhs _ _ _) elt_ty thing_inside
= do { rhs' <- tcMonoExpr rhs (mkCheckExpType boolTy)
; thing <- thing_inside elt_ty
; return (BodyStmt rhs' noSyntaxExpr noSyntaxExpr boolTy, thing) }
-- ParStmt: See notes with tcMcStmt
tcLcStmt m_tc ctxt (ParStmt bndr_stmts_s _ _ _) elt_ty thing_inside
= do { (pairs', thing) <- loop bndr_stmts_s
; return (ParStmt pairs' noExpr noSyntaxExpr unitTy, thing) }
where
-- loop :: [([LStmt Name], [Name])] -> TcM ([([LStmt TcId], [TcId])], thing)
loop [] = do { thing <- thing_inside elt_ty
; return ([], thing) } -- matching in the branches
loop (ParStmtBlock stmts names _ : pairs)
= do { (stmts', (ids, pairs', thing))
<- tcStmtsAndThen ctxt (tcLcStmt m_tc) stmts elt_ty $ \ _elt_ty' ->
do { ids <- tcLookupLocalIds names
; (pairs', thing) <- loop pairs
; return (ids, pairs', thing) }
; return ( ParStmtBlock stmts' ids noSyntaxExpr : pairs', thing ) }
tcLcStmt m_tc ctxt (TransStmt { trS_form = form, trS_stmts = stmts
, trS_bndrs = bindersMap
, trS_by = by, trS_using = using }) elt_ty thing_inside
= do { let (bndr_names, n_bndr_names) = unzip bindersMap
unused_ty = pprPanic "tcLcStmt: inner ty" (ppr bindersMap)
-- The inner 'stmts' lack a LastStmt, so the element type
-- passed in to tcStmtsAndThen is never looked at
; (stmts', (bndr_ids, by'))
<- tcStmtsAndThen (TransStmtCtxt ctxt) (tcLcStmt m_tc) stmts unused_ty $ \_ -> do
{ by' <- traverse tcInferSigma by
; bndr_ids <- tcLookupLocalIds bndr_names
; return (bndr_ids, by') }
; let m_app ty = mkTyConApp m_tc [ty]
--------------- Typecheck the 'using' function -------------
-- using :: ((a,b,c)->t) -> m (a,b,c) -> m (a,b,c) (ThenForm)
-- :: ((a,b,c)->t) -> m (a,b,c) -> m (m (a,b,c)) (GroupForm)
-- n_app :: Type -> Type -- Wraps a 'ty' into '[ty]' for GroupForm
; let n_app = case form of
ThenForm -> (\ty -> ty)
_ -> m_app
by_arrow :: Type -> Type -- Wraps 'ty' to '(a->t) -> ty' if the By is present
by_arrow = case by' of
Nothing -> \ty -> ty
Just (_,e_ty) -> \ty -> (alphaTy `mkFunTy` e_ty) `mkFunTy` ty
tup_ty = mkBigCoreVarTupTy bndr_ids
poly_arg_ty = m_app alphaTy
poly_res_ty = m_app (n_app alphaTy)
using_poly_ty = mkInvForAllTy alphaTyVar $
by_arrow $
poly_arg_ty `mkFunTy` poly_res_ty
; using' <- tcPolyExpr using using_poly_ty
; let final_using = fmap (HsWrap (WpTyApp tup_ty)) using'
-- 'stmts' returns a result of type (m1_ty tuple_ty),
-- typically something like [(Int,Bool,Int)]
-- We don't know what tuple_ty is yet, so we use a variable
; let mk_n_bndr :: Name -> TcId -> TcId
mk_n_bndr n_bndr_name bndr_id = mkLocalIdOrCoVar n_bndr_name (n_app (idType bndr_id))
-- Ensure that every old binder of type `b` is linked up with its
-- new binder which should have type `n b`
-- See Note [GroupStmt binder map] in HsExpr
n_bndr_ids = zipWith mk_n_bndr n_bndr_names bndr_ids
bindersMap' = bndr_ids `zip` n_bndr_ids
-- Type check the thing in the environment with
-- these new binders and return the result
; thing <- tcExtendIdEnv n_bndr_ids (thing_inside elt_ty)
; return (TransStmt { trS_stmts = stmts', trS_bndrs = bindersMap'
, trS_by = fmap fst by', trS_using = final_using
, trS_ret = noSyntaxExpr
, trS_bind = noSyntaxExpr
, trS_fmap = noExpr
, trS_bind_arg_ty = unitTy
, trS_form = form }, thing) }
tcLcStmt _ _ stmt _ _
= pprPanic "tcLcStmt: unexpected Stmt" (ppr stmt)
---------------------------------------------------
-- Monad comprehensions
-- (supports rebindable syntax)
---------------------------------------------------
tcMcStmt :: TcExprStmtChecker
tcMcStmt _ (LastStmt body noret return_op) res_ty thing_inside
= do { (body', return_op')
<- tcSyntaxOp MCompOrigin return_op [SynRho] res_ty $
\ [a_ty] ->
tcMonoExprNC body (mkCheckExpType a_ty)
; thing <- thing_inside (panic "tcMcStmt: thing_inside")
; return (LastStmt body' noret return_op', thing) }
-- Generators for monad comprehensions ( pat <- rhs )
--
-- [ body | q <- gen ] -> gen :: m a
-- q :: a
--
tcMcStmt ctxt (BindStmt pat rhs bind_op fail_op _) res_ty thing_inside
-- (>>=) :: rhs_ty -> (pat_ty -> new_res_ty) -> res_ty
= do { ((rhs', pat', thing, new_res_ty), bind_op')
<- tcSyntaxOp MCompOrigin bind_op
[SynRho, SynFun SynAny SynRho] res_ty $
\ [rhs_ty, pat_ty, new_res_ty] ->
do { rhs' <- tcMonoExprNC rhs (mkCheckExpType rhs_ty)
; (pat', thing) <- tcPat (StmtCtxt ctxt) pat
(mkCheckExpType pat_ty) $
thing_inside (mkCheckExpType new_res_ty)
; return (rhs', pat', thing, new_res_ty) }
-- If (but only if) the pattern can fail, typecheck the 'fail' operator
; fail_op' <- tcMonadFailOp (MCompPatOrigin pat) pat' fail_op new_res_ty
; return (BindStmt pat' rhs' bind_op' fail_op' new_res_ty, thing) }
-- Boolean expressions.
--
-- [ body | stmts, expr ] -> expr :: m Bool
--
tcMcStmt _ (BodyStmt rhs then_op guard_op _) res_ty thing_inside
= do { -- Deal with rebindable syntax:
-- guard_op :: test_ty -> rhs_ty
-- then_op :: rhs_ty -> new_res_ty -> res_ty
-- Where test_ty is, for example, Bool
; ((thing, rhs', rhs_ty, guard_op'), then_op')
<- tcSyntaxOp MCompOrigin then_op [SynRho, SynRho] res_ty $
\ [rhs_ty, new_res_ty] ->
do { (rhs', guard_op')
<- tcSyntaxOp MCompOrigin guard_op [SynAny]
(mkCheckExpType rhs_ty) $
\ [test_ty] ->
tcMonoExpr rhs (mkCheckExpType test_ty)
; thing <- thing_inside (mkCheckExpType new_res_ty)
; return (thing, rhs', rhs_ty, guard_op') }
; return (BodyStmt rhs' then_op' guard_op' rhs_ty, thing) }
-- Grouping statements
--
-- [ body | stmts, then group by e using f ]
-- -> e :: t
-- f :: forall a. (a -> t) -> m a -> m (m a)
-- [ body | stmts, then group using f ]
-- -> f :: forall a. m a -> m (m a)
-- We type [ body | (stmts, group by e using f), ... ]
-- f <optional by> [ (a,b,c) | stmts ] >>= \(a,b,c) -> ...body....
--
-- We type the functions as follows:
-- f <optional by> :: m1 (a,b,c) -> m2 (a,b,c) (ThenForm)
-- :: m1 (a,b,c) -> m2 (n (a,b,c)) (GroupForm)
-- (>>=) :: m2 (a,b,c) -> ((a,b,c) -> res) -> res (ThenForm)
-- :: m2 (n (a,b,c)) -> (n (a,b,c) -> res) -> res (GroupForm)
--
tcMcStmt ctxt (TransStmt { trS_stmts = stmts, trS_bndrs = bindersMap
, trS_by = by, trS_using = using, trS_form = form
, trS_ret = return_op, trS_bind = bind_op
, trS_fmap = fmap_op }) res_ty thing_inside
= do { let star_star_kind = liftedTypeKind `mkFunTy` liftedTypeKind
; m1_ty <- newFlexiTyVarTy star_star_kind
; m2_ty <- newFlexiTyVarTy star_star_kind
; tup_ty <- newFlexiTyVarTy liftedTypeKind
; by_e_ty <- newFlexiTyVarTy liftedTypeKind -- The type of the 'by' expression (if any)
-- n_app :: Type -> Type -- Wraps a 'ty' into '(n ty)' for GroupForm
; n_app <- case form of
ThenForm -> return (\ty -> ty)
_ -> do { n_ty <- newFlexiTyVarTy star_star_kind
; return (n_ty `mkAppTy`) }
; let by_arrow :: Type -> Type
-- (by_arrow res) produces ((alpha->e_ty) -> res) ('by' present)
-- or res ('by' absent)
by_arrow = case by of
Nothing -> \res -> res
Just {} -> \res -> (alphaTy `mkFunTy` by_e_ty) `mkFunTy` res
poly_arg_ty = m1_ty `mkAppTy` alphaTy
using_arg_ty = m1_ty `mkAppTy` tup_ty
poly_res_ty = m2_ty `mkAppTy` n_app alphaTy
using_res_ty = m2_ty `mkAppTy` n_app tup_ty
using_poly_ty = mkInvForAllTy alphaTyVar $
by_arrow $
poly_arg_ty `mkFunTy` poly_res_ty
-- 'stmts' returns a result of type (m1_ty tuple_ty),
-- typically something like [(Int,Bool,Int)]
-- We don't know what tuple_ty is yet, so we use a variable
; let (bndr_names, n_bndr_names) = unzip bindersMap
; (stmts', (bndr_ids, by', return_op')) <-
tcStmtsAndThen (TransStmtCtxt ctxt) tcMcStmt stmts
(mkCheckExpType using_arg_ty) $ \res_ty' -> do
{ by' <- case by of
Nothing -> return Nothing
Just e -> do { e' <- tcMonoExpr e
(mkCheckExpType by_e_ty)
; return (Just e') }
-- Find the Ids (and hence types) of all old binders
; bndr_ids <- tcLookupLocalIds bndr_names
-- 'return' is only used for the binders, so we know its type.
-- return :: (a,b,c,..) -> m (a,b,c,..)
; (_, return_op') <- tcSyntaxOp MCompOrigin return_op
[synKnownType (mkBigCoreVarTupTy bndr_ids)]
res_ty' $ \ _ -> return ()
; return (bndr_ids, by', return_op') }
--------------- Typecheck the 'bind' function -------------
-- (>>=) :: m2 (n (a,b,c)) -> ( n (a,b,c) -> new_res_ty ) -> res_ty
; new_res_ty <- newFlexiTyVarTy liftedTypeKind
; (_, bind_op') <- tcSyntaxOp MCompOrigin bind_op
[ synKnownType using_res_ty
, synKnownType (n_app tup_ty `mkFunTy` new_res_ty) ]
res_ty $ \ _ -> return ()
--------------- Typecheck the 'fmap' function -------------
; fmap_op' <- case form of
ThenForm -> return noExpr
_ -> fmap unLoc . tcPolyExpr (noLoc fmap_op) $
mkInvForAllTy alphaTyVar $
mkInvForAllTy betaTyVar $
(alphaTy `mkFunTy` betaTy)
`mkFunTy` (n_app alphaTy)
`mkFunTy` (n_app betaTy)
--------------- Typecheck the 'using' function -------------
-- using :: ((a,b,c)->t) -> m1 (a,b,c) -> m2 (n (a,b,c))
; using' <- tcPolyExpr using using_poly_ty
; let final_using = fmap (HsWrap (WpTyApp tup_ty)) using'
--------------- Building the bindersMap ----------------
; let mk_n_bndr :: Name -> TcId -> TcId
mk_n_bndr n_bndr_name bndr_id = mkLocalIdOrCoVar n_bndr_name (n_app (idType bndr_id))
-- Ensure that every old binder of type `b` is linked up with its
-- new binder which should have type `n b`
-- See Note [GroupStmt binder map] in HsExpr
n_bndr_ids = zipWith mk_n_bndr n_bndr_names bndr_ids
bindersMap' = bndr_ids `zip` n_bndr_ids
-- Type check the thing in the environment with
-- these new binders and return the result
; thing <- tcExtendIdEnv n_bndr_ids $
thing_inside (mkCheckExpType new_res_ty)
; return (TransStmt { trS_stmts = stmts', trS_bndrs = bindersMap'
, trS_by = by', trS_using = final_using
, trS_ret = return_op', trS_bind = bind_op'
, trS_bind_arg_ty = n_app tup_ty
, trS_fmap = fmap_op', trS_form = form }, thing) }
-- A parallel set of comprehensions
-- [ (g x, h x) | ... ; let g v = ...
-- | ... ; let h v = ... ]
--
-- It's possible that g,h are overloaded, so we need to feed the LIE from the
-- (g x, h x) up through both lots of bindings (so we get the bindLocalMethods).
-- Similarly if we had an existential pattern match:
--
-- data T = forall a. Show a => C a
--
-- [ (show x, show y) | ... ; C x <- ...
-- | ... ; C y <- ... ]
--
-- Then we need the LIE from (show x, show y) to be simplified against
-- the bindings for x and y.
--
-- It's difficult to do this in parallel, so we rely on the renamer to
-- ensure that g,h and x,y don't duplicate, and simply grow the environment.
-- So the binders of the first parallel group will be in scope in the second
-- group. But that's fine; there's no shadowing to worry about.
--
-- Note: The `mzip` function will get typechecked via:
--
-- ParStmt [st1::t1, st2::t2, st3::t3]
--
-- mzip :: m st1
-- -> (m st2 -> m st3 -> m (st2, st3)) -- recursive call
-- -> m (st1, (st2, st3))
--
tcMcStmt ctxt (ParStmt bndr_stmts_s mzip_op bind_op _) res_ty thing_inside
= do { let star_star_kind = liftedTypeKind `mkFunTy` liftedTypeKind
; m_ty <- newFlexiTyVarTy star_star_kind
; let mzip_ty = mkInvForAllTys [alphaTyVar, betaTyVar] $
(m_ty `mkAppTy` alphaTy)
`mkFunTy`
(m_ty `mkAppTy` betaTy)
`mkFunTy`
(m_ty `mkAppTy` mkBoxedTupleTy [alphaTy, betaTy])
; mzip_op' <- unLoc `fmap` tcPolyExpr (noLoc mzip_op) mzip_ty
-- type dummies since we don't know all binder types yet
; id_tys_s <- (mapM . mapM) (const (newFlexiTyVarTy liftedTypeKind))
[ names | ParStmtBlock _ names _ <- bndr_stmts_s ]
-- Typecheck bind:
; let tup_tys = [ mkBigCoreTupTy id_tys | id_tys <- id_tys_s ]
tuple_ty = mk_tuple_ty tup_tys
; (((blocks', thing), inner_res_ty), bind_op')
<- tcSyntaxOp MCompOrigin bind_op
[ synKnownType (m_ty `mkAppTy` tuple_ty)
, SynFun (synKnownType tuple_ty) SynRho ] res_ty $
\ [inner_res_ty] ->
do { stuff <- loop m_ty (mkCheckExpType inner_res_ty)
tup_tys bndr_stmts_s
; return (stuff, inner_res_ty) }
; return (ParStmt blocks' mzip_op' bind_op' inner_res_ty, thing) }
where
mk_tuple_ty tys = foldr1 (\tn tm -> mkBoxedTupleTy [tn, tm]) tys
-- loop :: Type -- m_ty
-- -> ExpRhoType -- inner_res_ty
-- -> [TcType] -- tup_tys
-- -> [ParStmtBlock Name]
-- -> TcM ([([LStmt TcId], [TcId])], thing)
loop _ inner_res_ty [] [] = do { thing <- thing_inside inner_res_ty
; return ([], thing) }
-- matching in the branches
loop m_ty inner_res_ty (tup_ty_in : tup_tys_in)
(ParStmtBlock stmts names return_op : pairs)
= do { let m_tup_ty = m_ty `mkAppTy` tup_ty_in
; (stmts', (ids, return_op', pairs', thing))
<- tcStmtsAndThen ctxt tcMcStmt stmts (mkCheckExpType m_tup_ty) $
\m_tup_ty' ->
do { ids <- tcLookupLocalIds names
; let tup_ty = mkBigCoreVarTupTy ids
; (_, return_op') <-
tcSyntaxOp MCompOrigin return_op
[synKnownType tup_ty] m_tup_ty' $
\ _ -> return ()
; (pairs', thing) <- loop m_ty inner_res_ty tup_tys_in pairs
; return (ids, return_op', pairs', thing) }
; return (ParStmtBlock stmts' ids return_op' : pairs', thing) }
loop _ _ _ _ = panic "tcMcStmt.loop"
tcMcStmt _ stmt _ _
= pprPanic "tcMcStmt: unexpected Stmt" (ppr stmt)
---------------------------------------------------
-- Do-notation
-- (supports rebindable syntax)
---------------------------------------------------
tcDoStmt :: TcExprStmtChecker
tcDoStmt _ (LastStmt body noret _) res_ty thing_inside
= do { body' <- tcMonoExprNC body res_ty
; thing <- thing_inside (panic "tcDoStmt: thing_inside")
; return (LastStmt body' noret noSyntaxExpr, thing) }
tcDoStmt ctxt (BindStmt pat rhs bind_op fail_op _) res_ty thing_inside
= do { -- Deal with rebindable syntax:
-- (>>=) :: rhs_ty -> (pat_ty -> new_res_ty) -> res_ty
-- This level of generality is needed for using do-notation
-- in full generality; see Trac #1537
((rhs', pat', new_res_ty, thing), bind_op')
<- tcSyntaxOp DoOrigin bind_op [SynRho, SynFun SynAny SynRho] res_ty $
\ [rhs_ty, pat_ty, new_res_ty] ->
do { rhs' <- tcMonoExprNC rhs (mkCheckExpType rhs_ty)
; (pat', thing) <- tcPat (StmtCtxt ctxt) pat
(mkCheckExpType pat_ty) $
thing_inside (mkCheckExpType new_res_ty)
; return (rhs', pat', new_res_ty, thing) }
-- If (but only if) the pattern can fail, typecheck the 'fail' operator
; fail_op' <- tcMonadFailOp (DoPatOrigin pat) pat' fail_op new_res_ty
; return (BindStmt pat' rhs' bind_op' fail_op' new_res_ty, thing) }
tcDoStmt ctxt (ApplicativeStmt pairs mb_join _) res_ty thing_inside
= do { let tc_app_stmts ty = tcApplicativeStmts ctxt pairs ty $
thing_inside . mkCheckExpType
; ((pairs', body_ty, thing), mb_join') <- case mb_join of
Nothing -> (, Nothing) <$> tc_app_stmts res_ty
Just join_op ->
second Just <$>
(tcSyntaxOp DoOrigin join_op [SynRho] res_ty $
\ [rhs_ty] -> tc_app_stmts (mkCheckExpType rhs_ty))
; return (ApplicativeStmt pairs' mb_join' body_ty, thing) }
tcDoStmt _ (BodyStmt rhs then_op _ _) res_ty thing_inside
= do { -- Deal with rebindable syntax;
-- (>>) :: rhs_ty -> new_res_ty -> res_ty
; ((rhs', rhs_ty, thing), then_op')
<- tcSyntaxOp DoOrigin then_op [SynRho, SynRho] res_ty $
\ [rhs_ty, new_res_ty] ->
do { rhs' <- tcMonoExprNC rhs (mkCheckExpType rhs_ty)
; thing <- thing_inside (mkCheckExpType new_res_ty)
; return (rhs', rhs_ty, thing) }
; return (BodyStmt rhs' then_op' noSyntaxExpr rhs_ty, thing) }
tcDoStmt ctxt (RecStmt { recS_stmts = stmts, recS_later_ids = later_names
, recS_rec_ids = rec_names, recS_ret_fn = ret_op
, recS_mfix_fn = mfix_op, recS_bind_fn = bind_op })
res_ty thing_inside
= do { let tup_names = rec_names ++ filterOut (`elem` rec_names) later_names
; tup_elt_tys <- newFlexiTyVarTys (length tup_names) liftedTypeKind
; let tup_ids = zipWith mkLocalId tup_names tup_elt_tys
tup_ty = mkBigCoreTupTy tup_elt_tys
; tcExtendIdEnv tup_ids $ do
{ ((stmts', (ret_op', tup_rets)), stmts_ty)
<- tcInferInst $ \ exp_ty ->
tcStmtsAndThen ctxt tcDoStmt stmts exp_ty $ \ inner_res_ty ->
do { tup_rets <- zipWithM tcCheckId tup_names
(map mkCheckExpType tup_elt_tys)
-- Unify the types of the "final" Ids (which may
-- be polymorphic) with those of "knot-tied" Ids
; (_, ret_op')
<- tcSyntaxOp DoOrigin ret_op [synKnownType tup_ty]
inner_res_ty $ \_ -> return ()
; return (ret_op', tup_rets) }
; ((_, mfix_op'), mfix_res_ty)
<- tcInferInst $ \ exp_ty ->
tcSyntaxOp DoOrigin mfix_op
[synKnownType (mkFunTy tup_ty stmts_ty)] exp_ty $
\ _ -> return ()
; ((thing, new_res_ty), bind_op')
<- tcSyntaxOp DoOrigin bind_op
[ synKnownType mfix_res_ty
, synKnownType tup_ty `SynFun` SynRho ]
res_ty $
\ [new_res_ty] ->
do { thing <- thing_inside (mkCheckExpType new_res_ty)
; return (thing, new_res_ty) }
; let rec_ids = takeList rec_names tup_ids
; later_ids <- tcLookupLocalIds later_names
; traceTc "tcdo" $ vcat [ppr rec_ids <+> ppr (map idType rec_ids),
ppr later_ids <+> ppr (map idType later_ids)]
; return (RecStmt { recS_stmts = stmts', recS_later_ids = later_ids
, recS_rec_ids = rec_ids, recS_ret_fn = ret_op'
, recS_mfix_fn = mfix_op', recS_bind_fn = bind_op'
, recS_bind_ty = new_res_ty
, recS_later_rets = [], recS_rec_rets = tup_rets
, recS_ret_ty = stmts_ty }, thing)
}}
tcDoStmt _ stmt _ _
= pprPanic "tcDoStmt: unexpected Stmt" (ppr stmt)
---------------------------------------------------
-- MonadFail Proposal warnings
---------------------------------------------------
-- The idea behind issuing MonadFail warnings is that we add them whenever a
-- failable pattern is encountered. However, instead of throwing a type error
-- when the constraint cannot be satisfied, we only issue a warning in
-- TcErrors.hs.
tcMonadFailOp :: CtOrigin
-> LPat TcId
-> SyntaxExpr Name -- The fail op
-> TcType -- Type of the whole do-expression
-> TcRn (SyntaxExpr TcId) -- Typechecked fail op
-- Get a 'fail' operator expression, to use if the pattern
-- match fails. If the pattern is irrefutatable, just return
-- noSyntaxExpr; it won't be used
tcMonadFailOp orig pat fail_op res_ty
| isIrrefutableHsPat pat
= return noSyntaxExpr
| otherwise
= do { -- Issue MonadFail warnings
rebindableSyntax <- xoptM LangExt.RebindableSyntax
; desugarFlag <- xoptM LangExt.MonadFailDesugaring
; missingWarning <- woptM Opt_WarnMissingMonadFailInstances
; if | rebindableSyntax && (desugarFlag || missingWarning)
-> warnRebindableClash pat
| not desugarFlag && missingWarning
-> emitMonadFailConstraint pat res_ty
| otherwise
-> return ()
-- Get the fail op itself
; snd <$> (tcSyntaxOp orig fail_op [synKnownType stringTy]
(mkCheckExpType res_ty) $ \_ -> return ()) }
emitMonadFailConstraint :: LPat TcId -> TcType -> TcRn ()
emitMonadFailConstraint pat res_ty
= do { -- We expect res_ty to be of form (monad_ty arg_ty)
(_co, (monad_ty, _arg_ty)) <- matchExpectedAppTy res_ty
-- Emit (MonadFail m), but ignore the evidence; it's
-- just there to generate a warning
; monadFailClass <- tcLookupClass monadFailClassName
; _ <- emitWanted (FailablePattern pat)
(mkClassPred monadFailClass [monad_ty])
; return () }
warnRebindableClash :: LPat TcId -> TcRn ()
warnRebindableClash pattern = addWarnAt
(Reason Opt_WarnMissingMonadFailInstances)
(getLoc pattern)
(text "The failable pattern" <+> quotes (ppr pattern)
$$
nest 2 (text "is used together with -XRebindableSyntax."
<+> text "If this is intentional,"
$$
text "compile with -Wno-missing-monadfail-instances."))
{-
Note [Treat rebindable syntax first]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
When typechecking
do { bar; ... } :: IO ()
we want to typecheck 'bar' in the knowledge that it should be an IO thing,
pushing info from the context into the RHS. To do this, we check the
rebindable syntax first, and push that information into (tcMonoExprNC rhs).
-- Otherwise the error shows up when checking the rebindable syntax, and
the expected/inferred stuff is back to front (see Trac #3613).
Note [typechecking ApplicativeStmt]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
join ((\pat1 ... patn -> body) <$> e1 <*> ... <*> en)
fresh type variables:
pat_ty_1..pat_ty_n
exp_ty_1..exp_ty_n
t_1..t_(n-1)
body :: body_ty
(\pat1 ... patn -> body) :: pat_ty_1 -> ... -> pat_ty_n -> body_ty
pat_i :: pat_ty_i
e_i :: exp_ty_i
<$> :: (pat_ty_1 -> ... -> pat_ty_n -> body_ty) -> exp_ty_1 -> t_1
<*>_i :: t_(i-1) -> exp_ty_i -> t_i
join :: tn -> res_ty
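For a concrete (purely illustrative) instance at n = 2, a block such as
    do { x <- e1; y <- e2; body }
is rebuilt as
    join ((\x y -> body) <$> e1 <*> e2)
with pat_1 = x and pat_2 = y in the schema above.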
-}
tcApplicativeStmts
:: HsStmtContext Name
-> [(SyntaxExpr Name, ApplicativeArg Name Name)]
-> ExpRhoType -- rhs_ty
-> (TcRhoType -> TcM t) -- thing_inside
-> TcM ([(SyntaxExpr TcId, ApplicativeArg TcId TcId)], Type, t)
tcApplicativeStmts ctxt pairs rhs_ty thing_inside
= do { body_ty <- newFlexiTyVarTy liftedTypeKind
; let arity = length pairs
; ts <- replicateM (arity-1) $ newInferExpTypeInst
; exp_tys <- replicateM arity $ newFlexiTyVarTy liftedTypeKind
; pat_tys <- replicateM arity $ newFlexiTyVarTy liftedTypeKind
; let fun_ty = mkFunTys pat_tys body_ty
-- NB. do the <$>,<*> operators first, we don't want type errors here
-- i.e. goOps before goArgs
-- See Note [Treat rebindable syntax first]
; let (ops, args) = unzip pairs
; ops' <- goOps fun_ty (zip3 ops (ts ++ [rhs_ty]) exp_tys)
; (args', thing) <- goArgs (zip3 args pat_tys exp_tys) $
thing_inside body_ty
; return (zip ops' args', body_ty, thing) }
where
goOps _ [] = return []
goOps t_left ((op,t_i,exp_ty) : ops)
= do { (_, op')
<- tcSyntaxOp DoOrigin op
[synKnownType t_left, synKnownType exp_ty] t_i $
\ _ -> return ()
; t_i <- readExpType t_i
; ops' <- goOps t_i ops
; return (op' : ops') }
goArgs
:: [(ApplicativeArg Name Name, Type, Type)]
-> TcM t
-> TcM ([ApplicativeArg TcId TcId], t)
goArgs [] thing_inside
= do { thing <- thing_inside
; return ([],thing)
}
goArgs ((ApplicativeArgOne pat rhs, pat_ty, exp_ty) : rest) thing_inside
= do { let stmt :: ExprStmt Name
stmt = mkBindStmt pat rhs
; setSrcSpan (combineSrcSpans (getLoc pat) (getLoc rhs)) $
addErrCtxt (pprStmtInCtxt ctxt stmt) $
do { rhs' <- tcMonoExprNC rhs (mkCheckExpType exp_ty)
; (pat',(pairs, thing)) <-
tcPat (StmtCtxt ctxt) pat (mkCheckExpType pat_ty) $
popErrCtxt $
goArgs rest thing_inside
; return (ApplicativeArgOne pat' rhs' : pairs, thing) } }
goArgs ((ApplicativeArgMany stmts ret pat, pat_ty, exp_ty) : rest)
thing_inside
= do { (stmts', (ret',pat',rest',thing)) <-
tcStmtsAndThen ctxt tcDoStmt stmts (mkCheckExpType exp_ty) $
\res_ty -> do
{ L _ ret' <- tcMonoExprNC (noLoc ret) res_ty
; (pat',(rest', thing)) <-
tcPat (StmtCtxt ctxt) pat (mkCheckExpType pat_ty) $
goArgs rest thing_inside
; return (ret', pat', rest', thing)
}
; return (ApplicativeArgMany stmts' ret' pat' : rest', thing) }
{-
************************************************************************
* *
\subsection{Errors and contexts}
* *
************************************************************************
@sameNoOfArgs@ takes a @[RenamedMatch]@ and decides whether the same
number of args are used in each equation.
-}
checkArgs :: Name -> MatchGroup Name body -> TcM ()
checkArgs _ (MG { mg_alts = L _ [] })
= return ()
checkArgs fun (MG { mg_alts = L _ (match1:matches) })
| null bad_matches
= return ()
| otherwise
= failWithTc (vcat [ text "Equations for" <+> quotes (ppr fun) <+>
text "have different numbers of arguments"
, nest 2 (ppr (getLoc match1))
, nest 2 (ppr (getLoc (head bad_matches)))])
where
n_args1 = args_in_match match1
bad_matches = [m | m <- matches, args_in_match m /= n_args1]
args_in_match :: LMatch Name body -> Int
args_in_match (L _ (Match _ pats _ _)) = length pats
| olsner/ghc | compiler/typecheck/TcMatches.hs | bsd-3-clause | 48,118 | 4 | 21 | 15,999 | 9,668 | 5,122 | 4,546 | 631 | 7 |
{-
(c) The University of Glasgow 2006
(c) The GRASP/AQUA Project, Glasgow University, 1992-1998
\section[PatSyntax]{Abstract Haskell syntax---patterns}
-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE StandaloneDeriving #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE UndecidableInstances #-} -- Note [Pass sensitive types]
-- in module PlaceHolder
{-# LANGUAGE ConstraintKinds #-}
module HsPat (
Pat(..), InPat, OutPat, LPat,
HsConDetails(..),
HsConPatDetails, hsConPatArgs,
HsRecFields(..), HsRecField(..), LHsRecField, hsRecFields,
mkPrefixConPat, mkCharLitPat, mkNilPat,
isStrictHsBind, looksLazyPatBind,
isStrictLPat, hsPatNeedsParens,
isIrrefutableHsPat,
pprParendLPat, pprConArgs
) where
import {-# SOURCE #-} HsExpr (SyntaxExpr, LHsExpr, HsSplice, pprLExpr, pprUntypedSplice)
-- friends:
import HsBinds
import HsLit
import PlaceHolder ( PostTc,DataId )
import HsTypes
import TcEvidence
import BasicTypes
-- others:
import PprCore ( {- instance OutputableBndr TyVar -} )
import TysWiredIn
import Var
import ConLike
import DataCon
import TyCon
import Outputable
import Type
import SrcLoc
import FastString
-- libraries:
import Data.Data hiding (TyCon,Fixity)
import Data.Maybe
type InPat id = LPat id -- No 'Out' constructors
type OutPat id = LPat id -- No 'In' constructors
type LPat id = Located (Pat id)
-- | - 'ApiAnnotation.AnnKeywordId' : 'ApiAnnotation.AnnBang'
data Pat id
= ------------ Simple patterns ---------------
WildPat (PostTc id Type) -- Wild card
-- The sole reason for a type on a WildPat is to
-- support hsPatType :: Pat Id -> Type
| VarPat id -- Variable
| LazyPat (LPat id) -- Lazy pattern
| AsPat (Located id) (LPat id) -- As pattern
| ParPat (LPat id) -- Parenthesised pattern
-- See Note [Parens in HsSyn] in HsExpr
| BangPat (LPat id) -- Bang pattern
------------ Lists, tuples, arrays ---------------
| ListPat [LPat id] -- Syntactic list
(PostTc id Type) -- The type of the elements
(Maybe (PostTc id Type, SyntaxExpr id)) -- For rebindable syntax
-- For OverloadedLists a Just (ty,fn) gives
-- overall type of the pattern, and the toList
-- function to convert the scrutinee to a list value
| TuplePat [LPat id] -- Tuple sub-patterns
Boxity -- UnitPat is TuplePat []
[PostTc id Type] -- [] before typechecker, filled in afterwards
-- with the types of the tuple components
-- You might think that the PostTc id Type was redundant, because we can
-- get the pattern type by getting the types of the sub-patterns.
-- But it's essential
-- data T a where
-- T1 :: Int -> T Int
-- f :: (T a, a) -> Int
-- f (T1 x, z) = z
-- When desugaring, we must generate
-- f = /\a. \v::a. case v of (t::T a, w::a) ->
-- case t of (T1 (x::Int)) ->
-- Note the (w::a), NOT (w::Int), because we have not yet
-- refined 'a' to Int. So we must know that the second component
-- of the tuple is of type 'a' not Int. See selectMatchVar
-- (June 14: I'm not sure this comment is right; the sub-patterns
-- will be wrapped in CoPats, no?)
| PArrPat [LPat id] -- Syntactic parallel array
(PostTc id Type) -- The type of the elements
------------ Constructor patterns ---------------
| ConPatIn (Located id)
(HsConPatDetails id)
| ConPatOut {
pat_con :: Located ConLike,
        pat_arg_tys :: [Type],          -- The universal arg types, 1-1 with the universal
-- tyvars of the constructor/pattern synonym
-- Use (conLikeResTy pat_con pat_arg_tys) to get
-- the type of the pattern
pat_tvs :: [TyVar], -- Existentially bound type variables (tyvars only)
pat_dicts :: [EvVar], -- Ditto *coercion variables* and *dictionaries*
-- One reason for putting coercion variable here, I think,
-- is to ensure their kinds are zonked
pat_binds :: TcEvBinds, -- Bindings involving those dictionaries
pat_args :: HsConPatDetails id,
pat_wrap :: HsWrapper -- Extra wrapper to pass to the matcher
}
------------ View patterns ---------------
| ViewPat (LHsExpr id)
(LPat id)
(PostTc id Type) -- The overall type of the pattern
-- (= the argument type of the view function)
-- for hsPatType.
------------ Pattern splices ---------------
| SplicePat (HsSplice id)
------------ Quasiquoted patterns ---------------
-- See Note [Quasi-quote overview] in TcSplice
| QuasiQuotePat (HsQuasiQuote id)
------------ Literal and n+k patterns ---------------
| LitPat HsLit -- Used for *non-overloaded* literal patterns:
-- Int#, Char#, Int, Char, String, etc.
| NPat -- Used for all overloaded literals,
-- including overloaded strings with -XOverloadedStrings
(HsOverLit id) -- ALWAYS positive
(Maybe (SyntaxExpr id)) -- Just (Name of 'negate') for negative
-- patterns, Nothing otherwise
(SyntaxExpr id) -- Equality checker, of type t->t->Bool
| NPlusKPat (Located id) -- n+k pattern
(HsOverLit id) -- It'll always be an HsIntegral
(SyntaxExpr id) -- (>=) function, of type t->t->Bool
(SyntaxExpr id) -- Name of '-' (see RnEnv.lookupSyntaxName)
------------ Pattern type signatures ---------------
| SigPatIn (LPat id) -- Pattern with a type signature
(HsWithBndrs id (LHsType id)) -- Signature can bind both
-- kind and type vars
| SigPatOut (LPat id) -- Pattern with a type signature
Type
------------ Pattern coercions (translation only) ---------------
| CoPat HsWrapper -- If co :: t1 ~ t2, p :: t2,
-- then (CoPat co p) :: t1
(Pat id) -- Why not LPat? Ans: existing locn will do
Type -- Type of whole pattern, t1
-- During desugaring a (CoPat co pat) turns into a cast with 'co' on
-- the scrutinee, followed by a match on 'pat'
deriving (Typeable)
deriving instance (DataId id) => Data (Pat id)
-- HsConDetails is use for patterns/expressions *and* for data type declarations
data HsConDetails arg rec
= PrefixCon [arg] -- C p1 p2 p3
| RecCon rec -- C { x = p1, y = p2 }
| InfixCon arg arg -- p1 `C` p2
deriving (Data, Typeable)
type HsConPatDetails id = HsConDetails (LPat id) (HsRecFields id (LPat id))
hsConPatArgs :: HsConPatDetails id -> [LPat id]
hsConPatArgs (PrefixCon ps) = ps
hsConPatArgs (RecCon fs) = map (hsRecFieldArg . unLoc) (rec_flds fs)
hsConPatArgs (InfixCon p1 p2) = [p1,p2]
{-
However HsRecFields is used only for patterns and expressions
(not data type declarations)
-}
data HsRecFields id arg -- A bunch of record fields
-- { x = 3, y = True }
-- Used for both expressions and patterns
= HsRecFields { rec_flds :: [LHsRecField id arg],
rec_dotdot :: Maybe Int } -- Note [DotDot fields]
deriving (Data, Typeable)
-- Note [DotDot fields]
-- ~~~~~~~~~~~~~~~~~~~~
-- The rec_dotdot field means this:
-- Nothing => the normal case
-- Just n => the group uses ".." notation,
--
-- In the latter case:
--
-- *before* renamer: rec_flds are exactly the n user-written fields
--
-- *after* renamer: rec_flds includes *all* fields, with
-- the first 'n' being the user-written ones
-- and the remainder being 'filled in' implicitly
type LHsRecField id arg = Located (HsRecField id arg)
-- | - 'ApiAnnotation.AnnKeywordId' : 'ApiAnnotation.AnnEqual',
data HsRecField id arg = HsRecField {
hsRecFieldId :: Located id,
hsRecFieldArg :: arg, -- Filled in by renamer
hsRecPun :: Bool -- Note [Punning]
} deriving (Data, Typeable)
-- Note [Punning]
-- ~~~~~~~~~~~~~~
-- If you write T { x, y = v+1 }, the HsRecFields will be
-- HsRecField x x True ...
-- HsRecField y (v+1) False ...
-- That is, for "punned" field x is expanded (in the renamer)
-- to x=x; but with a punning flag so we can detect it later
-- (e.g. when pretty printing)
--
-- If the original field was qualified, we un-qualify it, thus
-- T { A.x } means T { A.x = x }
hsRecFields :: HsRecFields id arg -> [id]
hsRecFields rbinds = map (unLoc . hsRecFieldId . unLoc) (rec_flds rbinds)
{-
************************************************************************
* *
* Printing patterns
* *
************************************************************************
-}
instance (OutputableBndr name) => Outputable (Pat name) where
ppr = pprPat
pprPatBndr :: OutputableBndr name => name -> SDoc
pprPatBndr var -- Print with type info if -dppr-debug is on
= getPprStyle $ \ sty ->
if debugStyle sty then
parens (pprBndr LambdaBind var) -- Could pass the site to pprPat
-- but is it worth it?
else
pprPrefixOcc var
pprParendLPat :: (OutputableBndr name) => LPat name -> SDoc
pprParendLPat (L _ p) = pprParendPat p
pprParendPat :: (OutputableBndr name) => Pat name -> SDoc
pprParendPat p | hsPatNeedsParens p = parens (pprPat p)
| otherwise = pprPat p
pprPat :: (OutputableBndr name) => Pat name -> SDoc
pprPat (VarPat var) = pprPatBndr var
pprPat (WildPat _) = char '_'
pprPat (LazyPat pat) = char '~' <> pprParendLPat pat
pprPat (BangPat pat) = char '!' <> pprParendLPat pat
pprPat (AsPat name pat) = hcat [pprPrefixOcc (unLoc name), char '@', pprParendLPat pat]
pprPat (ViewPat expr pat _) = hcat [pprLExpr expr, text " -> ", ppr pat]
pprPat (ParPat pat) = parens (ppr pat)
pprPat (ListPat pats _ _) = brackets (interpp'SP pats)
pprPat (PArrPat pats _) = paBrackets (interpp'SP pats)
pprPat (TuplePat pats bx _) = tupleParens (boxityNormalTupleSort bx) (interpp'SP pats)
pprPat (ConPatIn con details) = pprUserCon (unLoc con) details
pprPat (ConPatOut { pat_con = con, pat_tvs = tvs, pat_dicts = dicts,
pat_binds = binds, pat_args = details })
= getPprStyle $ \ sty -> -- Tiresome; in TcBinds.tcRhs we print out a
if debugStyle sty then -- typechecked Pat in an error message,
-- and we want to make sure it prints nicely
ppr con
<> braces (sep [ hsep (map pprPatBndr (tvs ++ dicts))
, ppr binds])
<+> pprConArgs details
else pprUserCon (unLoc con) details
pprPat (LitPat s) = ppr s
pprPat (NPat l Nothing _) = ppr l
pprPat (NPat l (Just _) _) = char '-' <> ppr l
pprPat (NPlusKPat n k _ _) = hcat [ppr n, char '+', ppr k]
pprPat (SplicePat splice) = pprUntypedSplice splice
pprPat (QuasiQuotePat qq) = ppr qq
pprPat (CoPat co pat _) = pprHsWrapper (ppr pat) co
pprPat (SigPatIn pat ty) = ppr pat <+> dcolon <+> ppr ty
pprPat (SigPatOut pat ty) = ppr pat <+> dcolon <+> ppr ty
pprUserCon :: (OutputableBndr con, OutputableBndr id) => con -> HsConPatDetails id -> SDoc
pprUserCon c (InfixCon p1 p2) = ppr p1 <+> pprInfixOcc c <+> ppr p2
pprUserCon c details = pprPrefixOcc c <+> pprConArgs details
pprConArgs :: OutputableBndr id => HsConPatDetails id -> SDoc
pprConArgs (PrefixCon pats) = sep (map pprParendLPat pats)
pprConArgs (InfixCon p1 p2) = sep [pprParendLPat p1, pprParendLPat p2]
pprConArgs (RecCon rpats) = ppr rpats
instance (OutputableBndr id, Outputable arg)
=> Outputable (HsRecFields id arg) where
ppr (HsRecFields { rec_flds = flds, rec_dotdot = Nothing })
= braces (fsep (punctuate comma (map ppr flds)))
ppr (HsRecFields { rec_flds = flds, rec_dotdot = Just n })
= braces (fsep (punctuate comma (map ppr (take n flds) ++ [dotdot])))
where
dotdot = ptext (sLit "..") <+> ifPprDebug (ppr (drop n flds))
instance (OutputableBndr id, Outputable arg)
=> Outputable (HsRecField id arg) where
ppr (HsRecField { hsRecFieldId = f, hsRecFieldArg = arg,
hsRecPun = pun })
= ppr f <+> (ppUnless pun $ equals <+> ppr arg)
{-
************************************************************************
* *
* Building patterns
* *
************************************************************************
-}
mkPrefixConPat :: DataCon -> [OutPat id] -> [Type] -> OutPat id
-- Make a vanilla Prefix constructor pattern
mkPrefixConPat dc pats tys
= noLoc $ ConPatOut { pat_con = noLoc (RealDataCon dc), pat_tvs = [], pat_dicts = [],
pat_binds = emptyTcEvBinds, pat_args = PrefixCon pats,
pat_arg_tys = tys, pat_wrap = idHsWrapper }
mkNilPat :: Type -> OutPat id
mkNilPat ty = mkPrefixConPat nilDataCon [] [ty]
mkCharLitPat :: String -> Char -> OutPat id
mkCharLitPat src c = mkPrefixConPat charDataCon
[noLoc $ LitPat (HsCharPrim src c)] []
{-
************************************************************************
* *
* Predicates for checking things about pattern-lists in EquationInfo *
* *
************************************************************************
\subsection[Pat-list-predicates]{Look for interesting things in patterns}
Unlike in the Wadler chapter, where patterns are either ``variables''
or ``constructors,'' here we distinguish between:
\begin{description}
\item[unfailable:]
Patterns that cannot fail to match: variables, wildcards, and lazy
patterns.
These are the irrefutable patterns; the two other categories
are refutable patterns.
\item[constructor:]
A non-literal constructor pattern (see next category).
\item[literal patterns:]
At least the numeric ones may be overloaded.
\end{description}
A pattern is in {\em exactly one} of the above three categories; `as'
patterns are treated specially, of course.
The 1.3 report defines what ``irrefutable'' and ``failure-free'' patterns are.
-}
isStrictLPat :: LPat id -> Bool
isStrictLPat (L _ (ParPat p)) = isStrictLPat p
isStrictLPat (L _ (BangPat {})) = True
isStrictLPat (L _ (TuplePat _ Unboxed _)) = True
isStrictLPat _ = False
isStrictHsBind :: HsBind id -> Bool
-- A pattern binding with an outermost bang or unboxed tuple must be matched strictly
-- Defined in this module because HsPat is above HsBinds in the import graph
isStrictHsBind (PatBind { pat_lhs = p }) = isStrictLPat p
isStrictHsBind _ = False
looksLazyPatBind :: HsBind id -> Bool
-- Returns True of anything *except*
-- a StrictHsBind (as above) or
-- a VarPat
-- In particular, returns True of a pattern binding with a compound pattern, like (I# x)
looksLazyPatBind (PatBind { pat_lhs = p }) = looksLazyLPat p
looksLazyPatBind _ = False
looksLazyLPat :: LPat id -> Bool
looksLazyLPat (L _ (ParPat p)) = looksLazyLPat p
looksLazyLPat (L _ (AsPat _ p)) = looksLazyLPat p
looksLazyLPat (L _ (BangPat {})) = False
looksLazyLPat (L _ (TuplePat _ Unboxed _)) = False
looksLazyLPat (L _ (VarPat {})) = False
looksLazyLPat (L _ (WildPat {})) = False
looksLazyLPat _ = True
isIrrefutableHsPat :: OutputableBndr id => LPat id -> Bool
-- (isIrrefutableHsPat p) is true if matching against p cannot fail,
-- in the sense of falling through to the next pattern.
-- (NB: this is not quite the same as the (silly) defn
-- in 3.17.2 of the Haskell 98 report.)
--
-- isIrrefutableHsPat returns False if it's in doubt; specifically
-- on a ConPatIn it doesn't know the size of the constructor family
-- But if it returns True, the pattern is definitely irrefutable
isIrrefutableHsPat pat
= go pat
where
go (L _ pat) = go1 pat
go1 (WildPat {}) = True
go1 (VarPat {}) = True
go1 (LazyPat {}) = True
go1 (BangPat pat) = go pat
go1 (CoPat _ pat _) = go1 pat
go1 (ParPat pat) = go pat
go1 (AsPat _ pat) = go pat
go1 (ViewPat _ pat _) = go pat
go1 (SigPatIn pat _) = go pat
go1 (SigPatOut pat _) = go pat
go1 (TuplePat pats _ _) = all go pats
go1 (ListPat {}) = False
go1 (PArrPat {}) = False -- ?
go1 (ConPatIn {}) = False -- Conservative
go1 (ConPatOut{ pat_con = L _ (RealDataCon con), pat_args = details })
= isJust (tyConSingleDataCon_maybe (dataConTyCon con))
-- NB: tyConSingleDataCon_maybe, *not* isProductTyCon, because
-- the latter is false of existentials. See Trac #4439
&& all go (hsConPatArgs details)
go1 (ConPatOut{ pat_con = L _ (PatSynCon _pat) })
= False -- Conservative
go1 (LitPat {}) = False
go1 (NPat {}) = False
go1 (NPlusKPat {}) = False
-- Both should be gotten rid of by renamer before
-- isIrrefutablePat is called
go1 (SplicePat {}) = urk pat
go1 (QuasiQuotePat {}) = urk pat
urk pat = pprPanic "isIrrefutableHsPat:" (ppr pat)
hsPatNeedsParens :: Pat a -> Bool
hsPatNeedsParens (NPlusKPat {}) = True
hsPatNeedsParens (SplicePat {}) = False
hsPatNeedsParens (QuasiQuotePat {}) = True
hsPatNeedsParens (ConPatIn _ ds) = conPatNeedsParens ds
hsPatNeedsParens p@(ConPatOut {}) = conPatNeedsParens (pat_args p)
hsPatNeedsParens (SigPatIn {}) = True
hsPatNeedsParens (SigPatOut {}) = True
hsPatNeedsParens (ViewPat {}) = True
hsPatNeedsParens (CoPat {}) = True
hsPatNeedsParens (WildPat {}) = False
hsPatNeedsParens (VarPat {}) = False
hsPatNeedsParens (LazyPat {}) = False
hsPatNeedsParens (BangPat {}) = False
hsPatNeedsParens (ParPat {}) = False
hsPatNeedsParens (AsPat {}) = False
hsPatNeedsParens (TuplePat {}) = False
hsPatNeedsParens (ListPat {}) = False
hsPatNeedsParens (PArrPat {}) = False
hsPatNeedsParens (LitPat {}) = False
hsPatNeedsParens (NPat {}) = False
conPatNeedsParens :: HsConDetails a b -> Bool
conPatNeedsParens (PrefixCon args) = not (null args)
conPatNeedsParens (InfixCon {}) = True
conPatNeedsParens (RecCon {}) = True
| bitemyapp/ghc | compiler/hsSyn/HsPat.hs | bsd-3-clause | 20,048 | 0 | 18 | 6,375 | 3,893 | 2,106 | 1,787 | 253 | 21 |
{-# language OverloadedLists #-}
{-# language OverloadedStrings #-}
{-# language TypeFamilies #-}
module Planetary.Core.Syntax.Test (unitTests) where
import Network.IPLD
import EasyTest
import Planetary.Core
import Planetary.Support.Ids
unitTy :: ValTy Cid
unitTy = DataTy (UidTy unitId) []
unitTests :: Test ()
unitTests = scope "syntax" $ tests
[ scope "extendAbility 1" $
let uidMap = [(unitId, [TyArgVal unitTy])]
actual :: Ability Cid
actual = extendAbility emptyAbility (Adjustment uidMap)
expected = Ability OpenAbility uidMap
in expect $ expected == actual
, scope "extendAbility 2" $
let uidMap = [(unitId, [TyArgVal unitTy])]
actual :: Ability Cid
actual = extendAbility closedAbility (Adjustment uidMap)
expected = Ability ClosedAbility uidMap
in expect $ expected == actual
, scope "TODO: unify" $ tests []
]
| joelburget/interplanetary-computation | src/Planetary/Core/Syntax/Test.hs | bsd-3-clause | 897 | 0 | 16 | 192 | 257 | 136 | 121 | 25 | 1 |
-- fundamental problem noted by slava:
-- too complicated. too hard to optimize. too "programmy" instead of
-- "relational".
module RethinkDB where
-- A Symbol is essentially an identifier, a way to refer to other
-- entities (tables, attributes, etc.) in the AST.
type Symbol = String
-- These are Aliases to symbols and are meant to clarify which
-- entities various parts of the AST refer to.
type TableName = Symbol -- Reference to a table
type AttrName = Symbol -- Reference to a JSON attribute
type DBName = Symbol -- Reference to a database
type Var = Symbol -- Reference to a variable/argument
-- A way to refer to tables in the AST. A user can be explicit
-- (i.e. specify database name where table belongs) or implicit
-- (i.e. just specify the table name, in which case the table will be
-- selected from the ones in the scope, and an error will be thrown if
-- the name is ambigious).
data TableRef = TableRef (Maybe DBName) TableName
deriving Show
data Term
= Var Var
| Let [(Var, Term)] Term
-- NB. can have terms under the builtin. think carefully about how to handle
-- this in code that deals with ASTs.
| Call Builtin [Term]
| If Term Term Term
-- `try e1 (v. e2)`
-- runs and returns e1;
-- if it fails, binds v to the error message and runs & returns e2.
| Try Term (Var, Term)
| Error String -- raises an error with a given message
-- Literals
| Number JSONNumber
| String String
| Bool Bool
| Null
| Array [Term]
| Map [(AttrName, Term)]
-- no "And", "Or" needed; can be desugared to "If".
| ViewAsStream View -- turns a view into a stream
-- `GetByKey tref attr expr`
-- is porcelain for:
-- try (nth 0 (filter tref (\x. x.attr == expr))) null
| GetByKey TableRef AttrName Term
data Builtin
= Not -- logical negation
-- Map/record operations
| GetAttr AttrName
| HasAttr AttrName
| PickAttrs [AttrName] -- technically porcelain
-- merges two maps, preferring the key-value pairs from its second argument
-- in case of conflict. used eg. to do field update.
| MapMerge
-- Array operations
| ArrayCons | ArrayConcat | ArraySlice | ArrayNth | ArrayLength
-- Arithmetic & comparison operations
| Add | Subtract | Multiply | Divide | Modulo
| Compare Comparison
-- don't need unary negation "-x", desugars to "0 - x".
-- Stream operations
| Filter Predicate
| Map Mapping -- mapping function has type (json -> json)
| ConcatMap Mapping -- mapping function has type (json -> stream)
| OrderBy OrderDirection Mapping
| Distinct Mapping
| Limit Int
| Length -- counts # of elements in a stream
| Union -- merges streams in undefined order
| Nth -- nth element from stream
| StreamToArray
| ArrayToStream
| Reduce Reduction
| GroupedMapReduce Mapping Mapping Reduction -- TODO: document
-- Arbitrary javascript function.
-- js funcs need to be type-annotated
-- TODO: figure out precise semantics
| Javascript String
-- Tim wants this, ask him what it should do
| JavascriptReturningStream String
-- "Porcelain"
| MapReduce Mapping Reduction
data Comparison = EQ | NE | LT | LE | GT | GE
-- TODO: possibly reductions, mappings, predicates should be *either* lambdas,
-- as they are now, or Javascript functions. (We could just desugar the latter
-- to the former, though...)
data Reduction
= Reduction {
reductionBase :: Term,
reductionArg1 :: Var,
reductionArg2 :: Var,
reductionBody :: Term
}
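-- For instance (illustrative only), a running-sum reduction could be written
-- roughly as:
--
--   Reduction { reductionBase = Number 0
--             , reductionArg1 = "acc"
--             , reductionArg2 = "row"
--             , reductionBody = Call Add [Var "acc", Var "row"] }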
data Mapping
= Mapping {
mappingArg :: Var,
mappingBody :: Term
}
data Predicate
= Predicate {
predicateArg :: Var,
predicateBody :: Term
}
data View
= Table TableRef
| FilterView View Predicate
-- TODO: pick closed/open-ness convention
-- ie. does (range tref "a" 0 3) include or exclude rows where a = 0, 3?
| RangeView View AttrName (Maybe Term, Maybe Term)
data OrderDirection = Ascending | Descending
-- "magically" uses the query's type to determine whether to stream results?
data ReadQuery = ReadQuery Term
-- TODO: how are we doing deletes?
-- tim proposes returning "null" from a mapping to indicate "delete this row"
-- for an update. GetByKey can also return "null" to indicate "no row found".
data WriteQuery
= Update View Mapping
| Delete View
| Mutate View Mapping
-- TODO: how does insert work if we insert a row with a primary key that
-- already exists in the table?
| Insert TableRef [Term]
| InsertStream TableRef Stream
| ForEach Stream Var [WriteQuery]
-- `PointUpdate tref attr key updater`
-- updates THE row in tref with attr equal to key, using updater
-- NB. very restricted: attrname MUST be primary key
| PointUpdate TableRef AttrName Term Mapping
| PointDelete TableRef AttrName Term
| PointMutate TableRef AttrName Term Mapping
| jfriedly/rethinkdb | docs_internal/qlspec3.hs | agpl-3.0 | 5,183 | 0 | 8 | 1,415 | 609 | 392 | 217 | 79 | 0 |
{-# LANGUAGE CPP #-}
{-# OPTIONS_GHC -fno-warn-tabs #-}
-- The above warning supression flag is a temporary kludge.
-- While working on this module you are encouraged to remove it and
-- detab the module (please do the detabbing in a separate patch). See
-- http://ghc.haskell.org/trac/ghc/wiki/Commentary/CodingStyle#TabsvsSpaces
-- for details
module X86.RegInfo (
mkVirtualReg,
regDotColor
)
where
#include "nativeGen/NCG.h"
#include "HsVersions.h"
import Size
import Reg
import Outputable
import Platform
import Unique
import UniqFM
import X86.Regs
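-- | Choose the virtual register class for a value of the given size:
-- SSE registers for 32- and 64-bit floats, the double class for 80-bit
-- floats, and an integer register otherwise.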
mkVirtualReg :: Unique -> Size -> VirtualReg
mkVirtualReg u size
= case size of
FF32 -> VirtualRegSSE u
FF64 -> VirtualRegSSE u
FF80 -> VirtualRegD u
_other -> VirtualRegI u
regDotColor :: Platform -> RealReg -> SDoc
regDotColor platform reg
= let Just str = lookupUFM (regColors platform) reg
in text str
regColors :: Platform -> UniqFM [Char]
regColors platform = listToUFM (normalRegColors platform ++ fpRegColors)
normalRegColors :: Platform -> [(Reg,String)]
normalRegColors platform
| target32Bit platform = [ (eax, "#00ff00")
, (ebx, "#0000ff")
, (ecx, "#00ffff")
, (edx, "#0080ff") ]
| otherwise = [ (rax, "#00ff00"), (eax, "#00ff00")
, (rbx, "#0000ff"), (ebx, "#0000ff")
, (rcx, "#00ffff"), (ecx, "#00ffff")
, (rdx, "#0080ff"), (edx, "#00ffff")
, (r8, "#00ff80")
, (r9, "#008080")
, (r10, "#0040ff")
, (r11, "#00ff40")
, (r12, "#008040")
, (r13, "#004080")
, (r14, "#004040")
, (r15, "#002080") ]
fpRegColors :: [(Reg,String)]
fpRegColors =
[ (fake0, "#ff00ff")
, (fake1, "#ff00aa")
, (fake2, "#aa00ff")
, (fake3, "#aa00aa")
, (fake4, "#ff0055")
, (fake5, "#5500ff") ]
++ zip (map regSingle [24..39]) (repeat "red")
| frantisekfarka/ghc-dsi | compiler/nativeGen/X86/RegInfo.hs | bsd-3-clause | 2,135 | 10 | 11 | 700 | 532 | 314 | 218 | 52 | 4 |
{-# LANGUAGE TypeFamilies, MonoLocalBinds #-}
module T12526 where
import Data.Kind (Type)
type family P (s :: Type -> Type) :: Type -> Type -> Type
type instance P Signal = Causal
type family S (p :: Type -> Type -> Type) :: Type -> Type
type instance S Causal = Signal
class (P (S p) ~ p) => CP p
instance CP Causal
data Signal a = Signal
data Causal a b = Causal
shapeModOsci :: CP p => p Float Float
shapeModOsci = undefined
f :: Causal Float Float -> Bool
f = undefined
-- This fails
ping :: Bool
ping = let osci = shapeModOsci in f osci
-- This works
-- ping :: Bool
-- ping = f shapeModOsci
{-
osci :: p Float Float
[W] CP p, [D] P (S p) ~ p
-->
[W] CP p, [D] P fuv1 ~ fuv2, S p ~ fuv1, fuv2 ~ p
-->
p := fuv2
[W] CP fuv2, [D] P fuv1 ~ fuv2, S fuv2 ~ fuv1
-}
-- P (S p) ~ p
-- p Float Float ~ Causal Float Float
{-
P (S p) ~ p
p Float Float ~ Causal Float Float
--->
S p ~ fuv1 (FunEq)
P fuv1 ~ fuv2 (FunEq)
fuv2 ~ p
p F F ~ Causal F F
--->
p := fuv2
fuv2 ~ Causal
S fuv2 ~ fuv1 (FunEq)
P fuv1 ~ fuv2 (FunEq)
---> unflatten
fuv1 := S fuv2
fuv2 := Causal
-}
| sdiehl/ghc | testsuite/tests/indexed-types/should_compile/T12526.hs | bsd-3-clause | 1,133 | 0 | 10 | 315 | 213 | 122 | 91 | -1 | -1 |
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE Trustworthy #-} -- can't use Safe due to IsList instance
{-# LANGUAGE TypeFamilies #-}
-----------------------------------------------------------------------------
-- |
-- Module : Data.List.NonEmpty
-- Copyright : (C) 2011-2015 Edward Kmett,
-- (C) 2010 Tony Morris, Oliver Taylor, Eelis van der Weegen
-- License : BSD-style (see the file LICENSE)
--
-- Maintainer : [email protected]
-- Stability : provisional
-- Portability : portable
--
-- A 'NonEmpty' list is one which always has at least one element, but
-- is otherwise identical to the traditional list type in complexity
-- and in terms of API. You will almost certainly want to import this
-- module @qualified@.
--
-- @since 4.9.0.0
----------------------------------------------------------------------------
module Data.List.NonEmpty (
-- * The type of non-empty streams
NonEmpty(..)
-- * Non-empty stream transformations
, map -- :: (a -> b) -> NonEmpty a -> NonEmpty b
, intersperse -- :: a -> NonEmpty a -> NonEmpty a
, scanl -- :: Foldable f => (b -> a -> b) -> b -> f a -> NonEmpty b
, scanr -- :: Foldable f => (a -> b -> b) -> b -> f a -> NonEmpty b
, scanl1 -- :: (a -> a -> a) -> NonEmpty a -> NonEmpty a
, scanr1 -- :: (a -> a -> a) -> NonEmpty a -> NonEmpty a
, transpose -- :: NonEmpty (NonEmpty a) -> NonEmpty (NonEmpty a)
, sortBy -- :: (a -> a -> Ordering) -> NonEmpty a -> NonEmpty a
, sortWith -- :: Ord o => (a -> o) -> NonEmpty a -> NonEmpty a
-- * Basic functions
, length -- :: NonEmpty a -> Int
, head -- :: NonEmpty a -> a
, tail -- :: NonEmpty a -> [a]
, last -- :: NonEmpty a -> a
, init -- :: NonEmpty a -> [a]
, (<|), cons -- :: a -> NonEmpty a -> NonEmpty a
, uncons -- :: NonEmpty a -> (a, Maybe (NonEmpty a))
, unfoldr -- :: (a -> (b, Maybe a)) -> a -> NonEmpty b
, sort -- :: NonEmpty a -> NonEmpty a
, reverse -- :: NonEmpty a -> NonEmpty a
, inits -- :: Foldable f => f a -> NonEmpty a
, tails -- :: Foldable f => f a -> NonEmpty a
-- * Building streams
, iterate -- :: (a -> a) -> a -> NonEmpty a
, repeat -- :: a -> NonEmpty a
, cycle -- :: NonEmpty a -> NonEmpty a
, unfold -- :: (a -> (b, Maybe a) -> a -> NonEmpty b
, insert -- :: (Foldable f, Ord a) => a -> f a -> NonEmpty a
, some1 -- :: Alternative f => f a -> f (NonEmpty a)
-- * Extracting sublists
, take -- :: Int -> NonEmpty a -> [a]
, drop -- :: Int -> NonEmpty a -> [a]
, splitAt -- :: Int -> NonEmpty a -> ([a], [a])
, takeWhile -- :: Int -> NonEmpty a -> [a]
, dropWhile -- :: Int -> NonEmpty a -> [a]
, span -- :: Int -> NonEmpty a -> ([a],[a])
, break -- :: Int -> NonEmpty a -> ([a],[a])
, filter -- :: (a -> Bool) -> NonEmpty a -> [a]
, partition -- :: (a -> Bool) -> NonEmpty a -> ([a],[a])
, group -- :: Foldable f => Eq a => f a -> [NonEmpty a]
, groupBy -- :: Foldable f => (a -> a -> Bool) -> f a -> [NonEmpty a]
, groupWith -- :: (Foldable f, Eq b) => (a -> b) -> f a -> [NonEmpty a]
, groupAllWith -- :: (Foldable f, Ord b) => (a -> b) -> f a -> [NonEmpty a]
, group1 -- :: Eq a => NonEmpty a -> NonEmpty (NonEmpty a)
, groupBy1 -- :: (a -> a -> Bool) -> NonEmpty a -> NonEmpty (NonEmpty a)
, groupWith1 -- :: (Foldable f, Eq b) => (a -> b) -> f a -> NonEmpty (NonEmpty a)
, groupAllWith1 -- :: (Foldable f, Ord b) => (a -> b) -> f a -> NonEmpty (NonEmpty a)
-- * Sublist predicates
, isPrefixOf -- :: Foldable f => f a -> NonEmpty a -> Bool
-- * \"Set\" operations
, nub -- :: Eq a => NonEmpty a -> NonEmpty a
, nubBy -- :: (a -> a -> Bool) -> NonEmpty a -> NonEmpty a
-- * Indexing streams
, (!!) -- :: NonEmpty a -> Int -> a
-- * Zipping and unzipping streams
, zip -- :: NonEmpty a -> NonEmpty b -> NonEmpty (a,b)
, zipWith -- :: (a -> b -> c) -> NonEmpty a -> NonEmpty b -> NonEmpty c
, unzip -- :: NonEmpty (a, b) -> (NonEmpty a, NonEmpty b)
-- * Converting to and from a list
, fromList -- :: [a] -> NonEmpty a
, toList -- :: NonEmpty a -> [a]
, nonEmpty -- :: [a] -> Maybe (NonEmpty a)
, xor -- :: NonEmpty a -> Bool
) where
import Prelude hiding (break, cycle, drop, dropWhile,
filter, foldl, foldr, head, init, iterate,
last, length, map, repeat, reverse,
scanl, scanl1, scanr, scanr1, span,
splitAt, tail, take, takeWhile,
unzip, zip, zipWith, (!!))
import qualified Prelude
import Control.Applicative (Alternative, many)
import Control.Monad (ap)
import Control.Monad.Fix
import Control.Monad.Zip (MonadZip(..))
import Data.Data (Data)
import Data.Foldable hiding (length, toList)
import qualified Data.Foldable as Foldable
import Data.Function (on)
import qualified Data.List as List
import Data.Ord (comparing)
import qualified GHC.Exts as Exts (IsList(..))
import GHC.Generics (Generic, Generic1)
infixr 5 :|, <|
-- | Non-empty (and non-strict) list type.
--
-- @since 4.9.0.0
data NonEmpty a = a :| [a]
deriving ( Eq, Ord, Show, Read, Data, Generic, Generic1 )
instance Exts.IsList (NonEmpty a) where
type Item (NonEmpty a) = a
fromList = fromList
toList = toList
instance MonadFix NonEmpty where
mfix f = case fix (f . head) of
~(x :| _) -> x :| mfix (tail . f)
instance MonadZip NonEmpty where
mzip = zip
mzipWith = zipWith
munzip = unzip
-- | Number of elements in 'NonEmpty' list.
length :: NonEmpty a -> Int
length (_ :| xs) = 1 + Prelude.length xs
-- | Compute n-ary logic exclusive OR operation on 'NonEmpty' list.
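--
-- For example:
--
-- > xor (True :| [False,True]) == False
-- > xor (True :| [False,False]) == True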
xor :: NonEmpty Bool -> Bool
xor (x :| xs) = foldr xor' x xs
where xor' True y = not y
xor' False y = y
-- | 'unfold' produces a new stream by repeatedly applying the unfolding
-- function to the seed value to produce an element of type @b@ and a new
-- seed value. When the unfolding function returns 'Nothing' instead of
-- a new seed value, the stream ends.
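--
-- For example:
--
-- > unfold (\n -> (n, if n > 1 then Just (n - 1) else Nothing)) 3 == 3 :| [2,1]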
unfold :: (a -> (b, Maybe a)) -> a -> NonEmpty b
unfold f a = case f a of
(b, Nothing) -> b :| []
(b, Just c) -> b <| unfold f c
-- | 'nonEmpty' efficiently turns a normal list into a 'NonEmpty' stream,
-- producing 'Nothing' if the input is empty.
nonEmpty :: [a] -> Maybe (NonEmpty a)
nonEmpty [] = Nothing
nonEmpty (a:as) = Just (a :| as)
-- | 'uncons' produces the first element of the stream, and a stream of the
-- remaining elements, if any.
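--
-- For example:
--
-- > uncons (1 :| [2,3]) == (1, Just (2 :| [3]))
-- > uncons (1 :| [])    == (1, Nothing)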
uncons :: NonEmpty a -> (a, Maybe (NonEmpty a))
uncons ~(a :| as) = (a, nonEmpty as)
-- | The 'unfoldr' function is analogous to "Data.List"'s
-- 'Data.List.unfoldr' operation.
unfoldr :: (a -> (b, Maybe a)) -> a -> NonEmpty b
unfoldr f a = case f a of
(b, mc) -> b :| maybe [] go mc
where
go c = case f c of
(d, me) -> d : maybe [] go me
instance Functor NonEmpty where
fmap f ~(a :| as) = f a :| fmap f as
b <$ ~(_ :| as) = b :| (b <$ as)
instance Applicative NonEmpty where
pure a = a :| []
(<*>) = ap
instance Monad NonEmpty where
~(a :| as) >>= f = b :| (bs ++ bs')
where b :| bs = f a
bs' = as >>= toList . f
instance Traversable NonEmpty where
traverse f ~(a :| as) = (:|) <$> f a <*> traverse f as
instance Foldable NonEmpty where
foldr f z ~(a :| as) = f a (foldr f z as)
foldl f z ~(a :| as) = foldl f (f z a) as
foldl1 f ~(a :| as) = foldl f a as
foldMap f ~(a :| as) = f a `mappend` foldMap f as
fold ~(m :| ms) = m `mappend` fold ms
-- | Extract the first element of the stream.
head :: NonEmpty a -> a
head ~(a :| _) = a
-- | Extract the possibly-empty tail of the stream.
tail :: NonEmpty a -> [a]
tail ~(_ :| as) = as
-- | Extract the last element of the stream.
last :: NonEmpty a -> a
last ~(a :| as) = List.last (a : as)
-- | Extract everything except the last element of the stream.
init :: NonEmpty a -> [a]
init ~(a :| as) = List.init (a : as)
-- | Prepend an element to the stream.
(<|) :: a -> NonEmpty a -> NonEmpty a
a <| ~(b :| bs) = a :| b : bs
-- | Synonym for '<|'.
cons :: a -> NonEmpty a -> NonEmpty a
cons = (<|)
-- | Sort a stream.
sort :: Ord a => NonEmpty a -> NonEmpty a
sort = lift List.sort
-- | Converts a normal list to a 'NonEmpty' stream.
--
-- Raises an error if given an empty list.
fromList :: [a] -> NonEmpty a
fromList (a:as) = a :| as
fromList [] = errorWithoutStackTrace "NonEmpty.fromList: empty list"
-- | Convert a stream to a normal list efficiently.
toList :: NonEmpty a -> [a]
toList ~(a :| as) = a : as
-- | Lift list operations to work on a 'NonEmpty' stream.
--
-- /Beware/: If the provided function returns an empty list,
-- this will raise an error.
lift :: Foldable f => ([a] -> [b]) -> f a -> NonEmpty b
lift f = fromList . f . Foldable.toList
-- | Map a function over a 'NonEmpty' stream.
map :: (a -> b) -> NonEmpty a -> NonEmpty b
map f ~(a :| as) = f a :| fmap f as
-- | The 'inits' function takes a stream @xs@ and returns all the
-- finite prefixes of @xs@.
inits :: Foldable f => f a -> NonEmpty [a]
inits = fromList . List.inits . Foldable.toList
-- | The 'tails' function takes a stream @xs@ and returns all the
-- suffixes of @xs@.
tails :: Foldable f => f a -> NonEmpty [a]
tails = fromList . List.tails . Foldable.toList
-- | @'insert' x xs@ inserts @x@ into the last position in @xs@ where it
-- is still less than or equal to the next element. In particular, if the
-- list is sorted beforehand, the result will also be sorted.
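--
-- For example:
--
-- > insert 4 (1 :| [3,5]) == 1 :| [3,4,5]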
insert :: (Foldable f, Ord a) => a -> f a -> NonEmpty a
insert a = fromList . List.insert a . Foldable.toList
-- | @'some1' x@ sequences @x@ one or more times.
some1 :: Alternative f => f a -> f (NonEmpty a)
some1 x = (:|) <$> x <*> many x
-- | 'scanl' is similar to 'foldl', but returns a stream of successive
-- reduced values from the left:
--
-- > scanl f z [x1, x2, ...] == z :| [z `f` x1, (z `f` x1) `f` x2, ...]
--
-- Note that
--
-- > last (scanl f z xs) == foldl f z xs.
scanl :: Foldable f => (b -> a -> b) -> b -> f a -> NonEmpty b
scanl f z = fromList . List.scanl f z . Foldable.toList
-- | 'scanr' is the right-to-left dual of 'scanl'.
-- Note that
--
-- > head (scanr f z xs) == foldr f z xs.
scanr :: Foldable f => (a -> b -> b) -> b -> f a -> NonEmpty b
scanr f z = fromList . List.scanr f z . Foldable.toList
-- | 'scanl1' is a variant of 'scanl' that has no starting value argument:
--
-- > scanl1 f [x1, x2, ...] == x1 :| [x1 `f` x2, x1 `f` (x2 `f` x3), ...]
scanl1 :: (a -> a -> a) -> NonEmpty a -> NonEmpty a
scanl1 f ~(a :| as) = fromList (List.scanl f a as)
-- | 'scanr1' is a variant of 'scanr' that has no starting value argument.
scanr1 :: (a -> a -> a) -> NonEmpty a -> NonEmpty a
scanr1 f ~(a :| as) = fromList (List.scanr1 f (a:as))
-- | 'intersperse x xs' alternates elements of the list with copies of @x@.
--
-- > intersperse 0 (1 :| [2,3]) == 1 :| [0,2,0,3]
intersperse :: a -> NonEmpty a -> NonEmpty a
intersperse a ~(b :| bs) = b :| case bs of
[] -> []
_ -> a : List.intersperse a bs
-- | @'iterate' f x@ produces the infinite sequence
-- of repeated applications of @f@ to @x@.
--
-- > iterate f x = x :| [f x, f (f x), ..]
iterate :: (a -> a) -> a -> NonEmpty a
iterate f a = a :| List.iterate f (f a)
-- | @'cycle' xs@ returns the infinite repetition of @xs@:
--
-- > cycle (1 :| [2,3]) = 1 :| [2,3,1,2,3,...]
cycle :: NonEmpty a -> NonEmpty a
cycle = fromList . List.cycle . toList
-- | 'reverse' a finite NonEmpty stream.
reverse :: NonEmpty a -> NonEmpty a
reverse = lift List.reverse
-- | @'repeat' x@ returns a constant stream, where all elements are
-- equal to @x@.
repeat :: a -> NonEmpty a
repeat a = a :| List.repeat a
-- | @'take' n xs@ returns the first @n@ elements of @xs@.
take :: Int -> NonEmpty a -> [a]
take n = List.take n . toList
-- | @'drop' n xs@ drops the first @n@ elements off the front of
-- the sequence @xs@.
drop :: Int -> NonEmpty a -> [a]
drop n = List.drop n . toList
-- | @'splitAt' n xs@ returns a pair consisting of the prefix of @xs@
-- of length @n@ and the remaining stream immediately following this prefix.
--
-- > 'splitAt' n xs == ('take' n xs, 'drop' n xs)
-- > xs == ys ++ zs where (ys, zs) = 'splitAt' n xs
splitAt :: Int -> NonEmpty a -> ([a],[a])
splitAt n = List.splitAt n . toList
-- | @'takeWhile' p xs@ returns the longest prefix of the stream
-- @xs@ for which the predicate @p@ holds.
takeWhile :: (a -> Bool) -> NonEmpty a -> [a]
takeWhile p = List.takeWhile p . toList
-- | @'dropWhile' p xs@ returns the suffix remaining after
-- @'takeWhile' p xs@.
dropWhile :: (a -> Bool) -> NonEmpty a -> [a]
dropWhile p = List.dropWhile p . toList
-- | @'span' p xs@ returns the longest prefix of @xs@ that satisfies
-- @p@, together with the remainder of the stream.
--
-- > 'span' p xs == ('takeWhile' p xs, 'dropWhile' p xs)
-- > xs == ys ++ zs where (ys, zs) = 'span' p xs
span :: (a -> Bool) -> NonEmpty a -> ([a], [a])
span p = List.span p . toList
-- | The @'break' p@ function is equivalent to @'span' (not . p)@.
break :: (a -> Bool) -> NonEmpty a -> ([a], [a])
break p = span (not . p)
-- | @'filter' p xs@ removes any elements from @xs@ that do not satisfy @p@.
filter :: (a -> Bool) -> NonEmpty a -> [a]
filter p = List.filter p . toList
-- | The 'partition' function takes a predicate @p@ and a stream
-- @xs@, and returns a pair of lists. The first list corresponds to the
-- elements of @xs@ for which @p@ holds; the second corresponds to the
-- elements of @xs@ for which @p@ does not hold.
--
-- > 'partition' p xs = ('filter' p xs, 'filter' (not . p) xs)
partition :: (a -> Bool) -> NonEmpty a -> ([a], [a])
partition p = List.partition p . toList
-- | The 'group' function takes a stream and returns a list of
-- streams such that flattening the resulting list is equal to the
-- argument. Moreover, each stream in the resulting list
-- contains only equal elements. For example, in list notation:
--
-- > 'group' $ 'cycle' "Mississippi"
-- > = "M" : "i" : "ss" : "i" : "ss" : "i" : "pp" : "i" : "M" : "i" : ...
group :: (Foldable f, Eq a) => f a -> [NonEmpty a]
group = groupBy (==)
-- | 'groupBy' operates like 'group', but uses the provided equality
-- predicate instead of `==`.
groupBy :: Foldable f => (a -> a -> Bool) -> f a -> [NonEmpty a]
groupBy eq0 = go eq0 . Foldable.toList
where
go _ [] = []
go eq (x : xs) = (x :| ys) : groupBy eq zs
where (ys, zs) = List.span (eq x) xs
-- | 'groupWith' operates like 'group', but uses the provided projection when
-- comparing for equality
groupWith :: (Foldable f, Eq b) => (a -> b) -> f a -> [NonEmpty a]
groupWith f = groupBy ((==) `on` f)
-- | 'groupAllWith' operates like 'groupWith', but sorts the list
-- first so that each equivalence class has, at most, one list in the
-- output
groupAllWith :: (Ord b) => (a -> b) -> [a] -> [NonEmpty a]
groupAllWith f = groupWith f . List.sortBy (compare `on` f)
-- | 'group1' operates like 'group', but uses the knowledge that its
-- input is non-empty to produce guaranteed non-empty output.
group1 :: Eq a => NonEmpty a -> NonEmpty (NonEmpty a)
group1 = groupBy1 (==)
-- | 'groupBy1' is to 'group1' as 'groupBy' is to 'group'.
groupBy1 :: (a -> a -> Bool) -> NonEmpty a -> NonEmpty (NonEmpty a)
groupBy1 eq (x :| xs) = (x :| ys) :| groupBy eq zs
where (ys, zs) = List.span (eq x) xs
-- | 'groupWith1' is to 'group1' as 'groupWith' is to 'group'
groupWith1 :: (Eq b) => (a -> b) -> NonEmpty a -> NonEmpty (NonEmpty a)
groupWith1 f = groupBy1 ((==) `on` f)
-- | 'groupAllWith1' is to 'groupWith1' as 'groupAllWith' is to 'groupWith'
groupAllWith1 :: (Ord b) => (a -> b) -> NonEmpty a -> NonEmpty (NonEmpty a)
groupAllWith1 f = groupWith1 f . sortWith f
-- | The 'isPrefixOf' function returns @True@ if the first argument is
-- a prefix of the second.
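--
-- For example:
--
-- > isPrefixOf [1,2] (1 :| [2,3]) == True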
isPrefixOf :: Eq a => [a] -> NonEmpty a -> Bool
isPrefixOf [] _ = True
isPrefixOf (y:ys) (x :| xs) = (y == x) && List.isPrefixOf ys xs
-- | @xs !! n@ returns the element of the stream @xs@ at index
-- @n@. Note that the head of the stream has index 0.
--
-- /Beware/: a negative or out-of-bounds index will cause an error.
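--
-- For example:
--
-- > (1 :| [2,3]) !! 2 == 3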
(!!) :: NonEmpty a -> Int -> a
(!!) ~(x :| xs) n
| n == 0 = x
| n > 0 = xs List.!! (n - 1)
| otherwise = errorWithoutStackTrace "NonEmpty.!! negative argument"
-- | The 'zip' function takes two streams and returns a stream of
-- corresponding pairs.
zip :: NonEmpty a -> NonEmpty b -> NonEmpty (a,b)
zip ~(x :| xs) ~(y :| ys) = (x, y) :| List.zip xs ys
-- | The 'zipWith' function generalizes 'zip'. Rather than tupling
-- the elements, the elements are combined using the function
-- passed as the first argument.
zipWith :: (a -> b -> c) -> NonEmpty a -> NonEmpty b -> NonEmpty c
zipWith f ~(x :| xs) ~(y :| ys) = f x y :| List.zipWith f xs ys
-- | The 'unzip' function is the inverse of the 'zip' function.
unzip :: Functor f => f (a,b) -> (f a, f b)
unzip xs = (fst <$> xs, snd <$> xs)
-- | The 'nub' function removes duplicate elements from a list. In
-- particular, it keeps only the first occurrence of each element.
-- (The name 'nub' means \'essence\'.)
-- It is a special case of 'nubBy', which allows the programmer to
-- supply their own inequality test.
nub :: Eq a => NonEmpty a -> NonEmpty a
nub = nubBy (==)
-- | The 'nubBy' function behaves just like 'nub', except it uses a
-- user-supplied equality predicate instead of the overloaded '=='
-- function.
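--
-- For example:
--
-- > nubBy (\x y -> mod x 3 == mod y 3) (1 :| [2,4,5,6]) == 1 :| [2,6]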
nubBy :: (a -> a -> Bool) -> NonEmpty a -> NonEmpty a
nubBy eq (a :| as) = a :| List.nubBy eq (List.filter (\b -> not (eq a b)) as)
-- | 'transpose' for 'NonEmpty', behaves the same as 'Data.List.transpose'
-- The rows/columns need not be the same length, in which case
-- > transpose . transpose /= id
transpose :: NonEmpty (NonEmpty a) -> NonEmpty (NonEmpty a)
transpose = fmap fromList
. fromList . List.transpose . Foldable.toList
. fmap Foldable.toList
-- | 'sortBy' for 'NonEmpty', behaves the same as 'Data.List.sortBy'
sortBy :: (a -> a -> Ordering) -> NonEmpty a -> NonEmpty a
sortBy f = lift (List.sortBy f)
-- | 'sortWith' for 'NonEmpty', behaves the same as:
--
-- > sortBy . comparing
sortWith :: Ord o => (a -> o) -> NonEmpty a -> NonEmpty a
sortWith = sortBy . comparing
| tolysz/prepare-ghcjs | spec-lts8/base/Data/List/NonEmpty.hs | bsd-3-clause | 18,806 | 0 | 13 | 4,784 | 4,320 | 2,370 | 1,950 | 247 | 2 |
{-
- Id Example Program
- Ensnaffled by SLPJ from MIT via
- RPaul <[email protected]> 93/08/26.
- Original author: Steve Heller
-}
module Main (main) where
import Data.Array
import System.Environment
-- Generation of radicals
data Radical = H | C Radical Radical Radical
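-- All ways of writing m as an ordered sum of three parts i <= j <= k,
-- e.g. three_partitions 3 == [(0,0,3),(0,1,2),(1,1,1)].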
three_partitions :: Int -> [(Int,Int,Int)]
three_partitions m =
[ (i,j,k) | i <- [0..(div m 3)], j <- [i..(div (m-i) 2)], k <- [m - (i+j)]]
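-- All non-empty suffixes of a list, longest first.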
remainders [] = []
remainders (r:rs) = (r:rs) : (remainders rs)
radical_generator :: Int -> Array Int [Radical]
radical_generator n =
radicals
where
radicals =
array (0,n) ((0,[H]) : [(j,rads_of_size_n radicals j) | j <- [1..n]])
rads_of_size_n :: Array Int [Radical] -> Int -> [Radical]
rads_of_size_n radicals n =
[ (C ri rj rk)
| (i,j,k) <- (three_partitions (n-1)),
(ri:ris) <- (remainders (radicals!i)),
(rj:rjs) <- (remainders (if (i==j) then (ri:ris) else radicals!j)),
rk <- (if (j==k) then (rj:rjs) else radicals!k)]
-- Generation of paraffins.
data Paraffin = BCP Radical Radical | CCP Radical Radical Radical Radical
bcp_generator :: Array Int [Radical] -> Int -> [Paraffin]
bcp_generator radicals n =
if (odd n) then []
else
[ (BCP r1 r2) | (r1:r1s) <- (remainders (radicals!(div n 2))),
r2 <- (r1:r1s) ]
four_partitions :: Int -> [(Int,Int,Int,Int)]
four_partitions m =
[ (i,j,k,l)
| i <- [0..(div m 4)],
j <- [i..(div (m-i) 3)],
k <- [(max j (ceiling ((fromIntegral m)/(fromInteger 2)) - i - j))..(div (m-i-j) 2)],
l <- [(m - (i+j+k))]]
ccp_generator :: Array Int [Radical] -> Int -> [Paraffin]
ccp_generator radicals n =
[ (CCP ri rj rk rl)
| (i,j,k,l) <- (four_partitions (n-1)),
(ri:ris) <- (remainders (radicals!i)),
(rj:rjs) <- (remainders (if (i==j) then (ri:ris) else radicals!j)),
(rk:rks) <- (remainders (if (j==k) then (rj:rjs) else radicals!k)),
rl <- (if (k==l) then (rk:rks) else radicals!l)]
bcp_until :: Int -> [Int]
bcp_until n =
[length(bcp_generator radicals j) | j <- [1..n]]
where
radicals = radical_generator (div n 2)
ccp_until :: Int -> [Int]
ccp_until n =
[length(ccp_generator radicals j) | j <- [1..n]]
where
radicals = radical_generator (div n 2)
paraffins_until :: Int -> [Int]
paraffins_until n =
[length (bcp_generator radicals j) + length (ccp_generator radicals j)
| j <- [1..n]]
where
radicals = radical_generator (div n 2)
main = do
[arg] <- getArgs
let num = read arg
print [length (rads!i) | rads <- [(radical_generator num)], i <- [0..num]]
print (bcp_until num)
print (ccp_until num)
print (paraffins_until num)
| seereason/ghcjs | test/nofib/imaginary/paraffins/Main.hs | mit | 2,657 | 0 | 18 | 566 | 1,373 | 744 | 629 | 62 | 4 |
{-# LANGUAGE ImplicitParams #-}
-- !!! Implicit parameter test
module Main where
main = do { let ?x = 13 in putStrLn $ show $ foo
; let ?x = 14 in putStrLn $ show $ baz () }
foo :: (?x :: Int) => Int
foo = ?x
-- Check that defaulting works too
baz () = ?x
| ezyang/ghc | testsuite/tests/typecheck/should_run/tcrun012.hs | bsd-3-clause | 272 | 0 | 11 | 75 | 93 | 51 | 42 | 7 | 1 |
module Q where
q = "DO NOT SEE ME"
| mydaum/cabal | cabal-testsuite/PackageTests/InternalLibraries/q/Q.hs | bsd-3-clause | 35 | 0 | 4 | 9 | 9 | 6 | 3 | 2 | 1 |
----------------------------------------------------------------
--
-- | aartifact
-- http://www.aartifact.org/
--
-- Contributors to this module:
-- Andrei Lapets
-- David House
--
-- @src\/ContextHypergraph.hs@
--
-- Data structure for a hypergraph with a defined closure
-- function.
--
----------------------------------------------------------------
--
module ContextHypergraph where
import MapUsingRBTree
----------------------------------------------------------------
-- | Marks (representing processed/unprocessed state).
type Mark = Bool
unmarked :: Mark
unmarked = False
marked :: Mark
marked = True
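-- | True when at least one entry carries a mark.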
oneMarked :: [(Mark,a)] -> Bool
oneMarked = or.(map fst)
----------------------------------------------------------------
-- | Hypergraph interface.
-- Edges are labelled by a value of type "a", and nodes in the
-- graph are labelled by a value of type "b".
-- All edges having the same label are all stored under a
-- single entry designated by the label.
type Edge a b = (a, [b])
type EdgeList a b = ([a], [[b]])
type Law a b = (EdgeList a b, [Edge a b])
type Hypergraph a b = ([(Mark,Law a b)], Map a (Mark, [(Mark,[b])]))
empHG :: (Eq a, Ord a, Eq b) => Hypergraph a b
empHG = ([], emp)
hgSize :: Ord a => Hypergraph a b -> Integer
hgSize (ls,g) = toInteger $ length $ concat (map snd $ ran g)
addLawHG :: (Eq a, Ord a) => Hypergraph a b -> ([Edge a b], [Edge a b]) -> Hypergraph a b
addLawHG (ls,g) (r,o) = ((marked,((map fst r, map snd r),o)):ls,g')
where g' = foldr (\x g -> def x (unmarked,[]) (\_ y ->y) g) g (map fst r)
edgeHG :: (Eq a, Ord a, Eq b) => Edge a b -> Hypergraph a b -> Bool
edgeHG (e,ns) (_,g) = ns `elem` [ns | (_,ns) <- maybe [] snd $ app e g]
reportHG :: (Eq a, Ord a, Show a, Eq b) => ([b] -> Bool) -> Hypergraph a b -> [(a,[b])]
reportHG f (_,g) =
let l = list g
l' = concat [map (\y -> (x,y)) ys | (x,ys) <- [ (x, map snd ys) | (x,(_,ys)) <- l]]
in [(x,y) | (x,y) <- l', f y]
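-- | Replace every occurrence of one node label by another in all stored
-- edges; marks are left untouched.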
relabelHG :: (Eq a, Ord a, Eq b) => Hypergraph a b -> b -> b -> Hypergraph a b
relabelHG (ls,g) j j' = (ls, mapRan (\_ (mrk,es) -> (mrk, map relbl es)) g)
where relbl (m,is) = (m, [if i==j then j' else i | i<-is])
isEMarked :: (Eq a, Ord a, Eq b) => Hypergraph a b -> a -> Bool
isEMarked (_,g) c = maybe False fst $ app c g
hasMarkedHG :: (Eq a, Ord a, Eq b) => Hypergraph a b -> Bool
hasMarkedHG (_,g) = oneMarked $ ran g
resetMarksHG :: (Eq a, Ord a, Eq b) => Hypergraph a b -> Hypergraph a b
resetMarksHG (ls,g) =
([(unmarked,l) | (_,l)<-ls],
mapRan (\_ (_,es) -> (unmarked, map (\(_,is) -> (unmarked, is)) es)) g)
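-- | The marked edges currently stored under the given label.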
getHG :: (Eq a, Ord a, Eq b) => Hypergraph a b -> a -> [Edge a b]
getHG (_,g) e = [(e, snd ens) | ens <- maybe [] snd (app e g), fst ens]
----------------------------------------------------------------
-- | We optimize for the common cases by handling them
-- explicitly. This optimization improves performance by a
-- significant constant factor (between 2 and 20).
getsHG :: (Eq a, Ord a, Eq b) => Hypergraph a b -> [a] -> [[[b]]]
getsHG _ [] = []
getsHG (hg@(_,g)) es = if or $ map (isEMarked hg) es then (case es of
[e1] -> [[y1] | (m,y1)<-p e1, m]
[e1,e2] -> [[y1,y2] | (m1,y1)<-p e1, y2<-if m1 then map snd (p e2) else [y2|(m2,y2)<-p e2,m2]]
[e1,e2,e3] ->
[y1:ys |
(m1,y1)<-p e1,
ys<-if m1 then
[[y2,y3] | (_,y2)<-p e2,(_,y3)<-p e3]
else
[[y2,y3] | (m2,y2)<-p e2,
y3<-if m2 then map snd (p e3) else [y3 | (m3,y3)<-p e3,m3]]]
[e1,e2,e3,e4] ->
[y1:ys |
(m1,y1)<-p e1,
ys <- if m1 then
[[y2,y3,y4] | (_,y2)<-p e2,(_,y3)<-p e3,(_,y4)<-p e4]
else [y2:ys |
(m2,y2)<-p e2,
ys<-if m2 then
[[y3,y4] | (_,y3)<-p e3,(_,y4)<-p e4]
else
[[y3,y4] | (m3,y3)<-p e3,
              y4 <- if m3 then map snd $ p e4 else [y4 | (m4,y4)<-p e4, m4]]]]
[e1,e2,e3,e4,e5] ->
[y1:ys |
(m1,y1)<-p e1,
ys <- if m1 then
[[y2,y3,y4,y5] | (_,y2)<-p e2,(_,y3)<-p e3,(_,y4)<-p e4,(_,y5)<-p e5]
else [y2:ys |
(m2,y2)<-p e2,
ys<-if m2 then
[[y3,y4,y5] | (_,y3)<-p e3,(_,y4)<-p e4,(_,y5)<-p e5]
else
[y3:ys |
(m3,y3)<-p e3,
            ys <- if m3 then [[y4,y5] | (_,y4)<-p e4,(_,y5)<-p e5]
else [[y4,y5] | (m4,y4)<-p e4,
y5<-if m4 then map snd (p e5) else [y5 | (m5,y5)<-p e5,m5]]]]]
es -> get es
) else [] where
p c = snd $ appInDom c g
get0 [c] = [[y] | (_,y)<-p c]
get0 (c:cs) = [y:ys | (_,y)<-p c, ys<-get0 cs]
get [c] = [[y] | (m,y)<-p c,m]
get (c:cs) = [y:ys | (m,y)<-p c, ys<-if m then get0 cs else get cs]
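-- | Add an edge under its label, marking the label entry and the new edge as
-- fresh (an edge that is already present is not duplicated), and mark every
-- law whose premises mention the label.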
putHG :: (Eq a, Ord a, Eq b) => Hypergraph a b -> Edge a b -> Hypergraph a b
putHG (ls,g) (e,ns) = (ls', def e (marked,[(marked,ns)]) add g)
where add _ (mrk,l) = if (ns `elem` map snd l) then (mrk,l) else (marked,(marked,ns):l)
ls' = [(mrk || (e `elem` fst r), (r,o)) | (mrk,(r,o)) <- ls]
putsHG :: (Eq a, Ord a, Eq b) => Hypergraph a b -> [Edge a b] -> Hypergraph a b
putsHG hg es = foldl putHG hg es
--eof | aartifact/aartifact-verifier | src/ContextHypergraph.hs | mit | 5,263 | 14 | 24 | 1,440 | 2,911 | 1,621 | 1,290 | 90 | 20 |
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE UndecidableInstances #-}
module Data.Matrix.Hermitian.Banded
(BandedHMatrix(..)
,vecChangeRep
,TriangularRepr
,upperRep
,lowerRep
,fullEigensystem)
where
import Data.Matrix
import Data.Matrix.Utils
import Data.Matrix.Dense (DenseMatrix,fromColMajorVector)
import LAPACK
import LAPACK.Utils
import Data.Complex
import Data.Complex.Utils
import Operator
import qualified Data.List as LST
import Data.Maybe (fromMaybe)
import Control.Monad (forM_)
import System.IO.Unsafe (unsafePerformIO)
import Foreign.C.Types (CDouble)
import qualified Data.Vector.Generic as GV
import qualified Data.Vector.Generic.Mutable as GVM
import qualified Data.Vector.Storable as SV
import qualified Data.Vector.Storable.Mutable as SVM
import qualified Data.Vector.Unboxed as UV
-- | Type for representations of triangular matrices, either upper or lower.
newtype TriangularRepr = TriRepr Bool deriving (Eq,Show)
-- | Upper triangular representation indicator
upperRep :: TriangularRepr
upperRep = TriRepr True
-- | Lower triangular representation indicator
lowerRep :: TriangularRepr
lowerRep = TriRepr False
-- | Basic FORTRAN-compatible (i.e. column-major storage) type for banded
-- Hermitian matrices.
data BandedHMatrix v a
= BHMatrix
{ bhmOrder :: !Int -- ^ order of the (square) matrix
, bhmSDs :: !Int -- ^ super/sub-diagonals of the triangular matrix
, bhmRep :: !TriangularRepr -- ^ array storage convention
, bhmData :: v a -- ^ (column-major) data
}
deriving (Eq, Show)
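-- Storage sketch (inferred from 'ij' below, not from the original project
-- documentation): with 'upperRep', order 3 and one superdiagonal
-- (bhmSDs = 1), each column stores (bhmSDs + 1) entries and bhmData is laid
-- out column by column as
--
-- > [ pad, a00, a01, a11, a12, a22 ]
--
-- i.e. an upper-triangle element (r,c) sits at index sd*(c+1) + r, with the
-- leading columns padded at the top (LAPACK-style band storage).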
instance (Conjugable a, GV.Vector v a) => Matrix BandedHMatrix v a where
rows = bhmOrder
cols = rows
row m r = GV.generate (bhmOrder m) (ij m r)
col m c = GV.generate (bhmOrder m) (flip (ij m) c)
ij (BHMatrix _ s rep d) r c | abs (r - c) > s = 0
| inRep = d `GV.unsafeIndex` vi r c
| otherwise = cconj $ d `GV.unsafeIndex` vi c r
where inRep = if rep == upperRep
then c >= r
else c <= r
vi = if rep == upperRep
then \i j -> s * (j + 1) + i
else \i j -> s * j + i
transpose m@(BHMatrix _ _ _ d) = m { bhmData = GV.map cconj d }
fromList bs = BHMatrix { bhmOrder = length . head $ bs
, bhmSDs = length bs - 1
, bhmRep = upperRep
, bhmData = GV.fromList . LST.concat . LST.transpose $ bs }
generate mo sd f =
BHMatrix
{ bhmOrder = mo
, bhmSDs = sd
, bhmRep = upperRep
, bhmData = GV.generate ((sd + 1) * mo) (uncurry f . rc) }
where rc i = let (c,r) = quotRem i (sd + 1)
in (c - sd + r, c)
generateM mo sd f = do
d <- GV.generateM ((sd + 1) * mo) (uncurry f . rc)
return BHMatrix { bhmOrder = mo
, bhmSDs = sd
, bhmRep = upperRep
, bhmData = d }
where rc i = let (c,r) = quotRem i (sd + 1)
in (c - sd + r, c)
-- | Change the in-memory vector representation for a 'BandedHMatrix'.
vecChangeRep :: (Conjugable a, GV.Vector v a) => BandedHMatrix v a -> BandedHMatrix v a
vecChangeRep m@(BHMatrix mo sd rep d)
= m { bhmRep = newRep, bhmData = d' }
where newRep = if rep == upperRep
then lowerRep
else upperRep
swapsU i = let i' = i + sd + (sd - 1) * rem i (sd + 1)
l = GV.length d
in if i' >= l
then (i,i)
else (i,i')
swapsL i = let (j,j') = swapsU i
l' = GV.length d - 1
in (l' - j, l' - j')
swaps = if rep == upperRep
then swapsU
else swapsL
d' = GV.modify (\v ->
forM_ [0 .. ((sd + 1) * mo) - 1 - sd] $ \n ->
uncurry (GVM.swap v) $ swaps n)
(GV.map cconj d)
instance (Conjugable a, GV.Vector v a, GV.Vector v Int) =>
MatrixRing BandedHMatrix v a BandedHMatrix v a where
type MSumMatrix BandedHMatrix v a BandedHMatrix v a = BandedHMatrix
type MSumElement BandedHMatrix v a BandedHMatrix v a = a
type MSumStore BandedHMatrix v a BandedHMatrix v a = v
mp m@(BHMatrix mo sd rep d) m'@(BHMatrix _ sd' rep' d')
    | (rep == rep') && (sd == sd') = m { bhmData = GV.zipWith (+) d d'}
| rep == rep' = let si = if rep == upperRep
then (sd - sd') * mo
else (sd' + 1) * mo
d'' = switchMajorAxis d' mo (sd' + 1)
(ds, de) = GV.splitAt si (switchMajorAxis d mo (sd + 1))
df = if rep == upperRep
then ds GV.++ GV.zipWith (+) de d''
else GV.zipWith (+) ds d' GV.++ de
in if sd >= sd'
then m { bhmData = switchMajorAxis df (sd + 1) mo }
else mp m' m
| otherwise = mp m (vecChangeRep m')
-- | Wrapper to "raw" Haskell function 'hszhbevx' for the eigensystem of a
-- banded Hermitian matrix.
fullEigensystem :: (FComplexable a CDouble
, GV.Vector v a
, GV.Vector v (FComplex CDouble)
, GV.Vector v' Double
, GV.Vector v' CDouble
, GV.Vector v'' (FComplex CDouble)
, GV.Vector v'' (Complex Double)) =>
BandedHMatrix v a -- ^ the input matrix
-> Bool -- ^ calculate eigenvectors?
-> RANGE -- ^ eigenvalue calculation range type
-> Double -- ^ used as lower bound for eigenvalue interval
-> Double -- ^ used as upper bound for eigenvalue interval
-> Int -- ^ used as lower eigenvalue number
-> Int -- ^ used as upper eigenvalue number
-> (v' Double, Maybe (v'' (Complex Double)))
fullEigensystem bhm vecs rng vl vu il iu =
unsafePerformIO $ do
mdat <- SV.unsafeThaw . GV.convert . GV.map toFComplex . bhmData $ bhm
SVM.unsafeWith mdat $ \pab ->
hszhbevx
jz rng ul
mo sd pab (sd + 1)
vl vu (il + 1) (iu + 1) globalFAbstol >>= freezeHermitianES
where jz = if vecs then jzEigvalsEigvecs else jzEigvals
ul = if bhmRep bhm == upperRep
then uploUpper
else uploLower
mo = bhmOrder bhm
sd = bhmSDs bhm
instance (FComplexable a CDouble
,GV.Vector v a
,GV.Vector v (FComplex CDouble)) =>
EndoOperator (BandedHMatrix v a) where
type Eigenvalue (BandedHMatrix v a) = Double
type EigenvalueStorage (BandedHMatrix v a) = UV.Vector
type EigenvectorStorage (BandedHMatrix v a) = DenseMatrix UV.Vector (Complex Double)
eigvals m (Just (lo,hi)) = GV.unsafeTake (hi - lo + 1) . fst $
(fullEigensystem m False rngEigNums 0 0 lo hi
:: (UV.Vector Double, Maybe (UV.Vector (Complex Double))))
eigvals m Nothing = fst
(fullEigensystem m False rngAll 0 0 0 0
:: (UV.Vector Double, Maybe (UV.Vector (Complex Double))))
eigvecs m (Just (lo,hi)) = fromColMajorVector rs cs .
maybe GV.empty (GV.unsafeTake (rs * cs)) $
snd (fullEigensystem m True rngEigNums 0 0 lo hi
:: (UV.Vector Double, Maybe (UV.Vector (Complex Double))))
where rs = bhmOrder m
cs = hi - lo + 1
eigvecs m Nothing = fromColMajorVector (bhmOrder m) (bhmOrder m) .
fromMaybe GV.empty . snd $
(fullEigensystem m True rngAll 0 0 0 0
:: (UV.Vector Double, Maybe (UV.Vector (Complex Double))))
adjoint = id
instance (Functor s) => Functor (BandedHMatrix s) where
fmap f m@(BHMatrix _ _ _ d) = m { bhmData = fmap f d }
| lensky/hs-matrix | lib/Data/Matrix/Hermitian/Banded.hs | mit | 8,204 | 0 | 18 | 2,936 | 2,585 | 1,391 | 1,194 | 178 | 4 |
{-# LANGUAGE ScopedTypeVariables #-}
module Real
( real
) where
import Test.QuickCheck.Checkers (EqProp, inverseL)
import Test.Tasty (testGroup, TestTree)
import Test.Tasty.QuickCheck (testProperty, Arbitrary)
real :: forall a. (Arbitrary a, EqProp a, Show a, Fractional a, Real a) =>
(a -> Rational) -> TestTree
real maxError = testGroup "Test Real instance" ts
where ts = [ testProperty "fromRational toRational left inverse"
(inverseL fromRational (toRational :: a -> Rational))
, testProperty "toRational fromRational left inverse (within error)"
(\r -> let x :: a
x = fromRational r
r' = toRational x
in r - r' <= maxError x)
]
| expipiplus1/exact-real | test/Real.hs | mit | 845 | 0 | 15 | 308 | 205 | 113 | 92 | 16 | 1 |
{-# LANGUAGE TemplateHaskell #-}
module Main where
import Criterion.Main
import Language.Haskell.TH
import Language.Haskell.TH.Syntax
import Examples
import Database.HLINQ.Deconstructor
import Database.HLINQ.Utilities
import System.IO.Unsafe
import qualified Query as HDB
normalisedCompose fun = do
exp <- runQ $ (normalise $ unTypeQ [||$$composeT "Edna" "Drew"||])
fun exp
normalisedDifferences fun = do
exp <- runQ $ (normalise $ unTypeQ [||$$differencesT||])
fun exp
printNormalised query = do
exp <- runQ $ (normalise $ unTypeQ query)
print exp
main = defaultMain [
bgroup "Range LINQ" [ bench "Range 30 40" $ whnfIO $ fromTestUntyped [|$(range) 30 40|],
bench "Range 0 100" $ whnfIO $ fromTestUntyped [|$(range) 0 100|],
bench "Range 0 10" $ whnfIO $ fromTestUntyped [|$(range) 0 10|]
],
bgroup "Range TLINQ" [ bench "Range 30 40" $ whnfIO $ fromTest [||$$(rangeT) 30 40||],
bench "Range 0 100" $ whnfIO $ fromTest [||$$(rangeT) 0 100||],
bench "Range 0 10" $ whnfIO $ fromTest [||$$(rangeT) 0 10||]
],
bgroup "RangeHDB" [ bench "RangeHDB 30 40" $ whnfIO $ (HDB.range 30 40),
bench "RangeHDB 0 100" $ whnfIO $ (HDB.range 0 100),
bench "RangeHDB 0 10" $ whnfIO $ (HDB.range 0 10)
],
bgroup "Range" [ bench "Range 30 40" $ whnfIO $ fromTest [||$$(rangeT) 30 40||],
bench "Range 0 100" $ whnfIO $ fromTest [||$$(rangeT) 0 100||],
bench "Range 0 10" $ whnfIO $ fromTest [||$$(rangeT) 0 10||],
bench "HLINQ 30 40" $ nfIO $ ((toList $ fromTest [||$$(rangeT) 30 40||]):: IO [(String)]),
bench "HLINQ 0 100" $ nfIO $ ((toList $ fromTest [||$$(rangeT) 0 100||]):: IO [(String)]),
bench "HLINQ 0 10" $ nfIO $ ((toList $ fromTest [||$$(rangeT) 0 10||]):: IO [(String)]),
bench "RangeHDB 30 40" $ whnfIO $ (HDB.range 30 40),
bench "RangeHDB 0 100" $ whnfIO $ (HDB.range 0 100),
bench "RangeHDB 0 10" $ whnfIO $ (HDB.range 0 10)
],
bgroup "Differences" [ bench "HLINQ" $ whnfIO $ fromTest [||$$(differencesT)||],
bench "HLINQ to Tuples" $ nfIO $ ((toTuple toTup2 $ fromTest [||$$differencesT||]):: IO [(String, Int)]),
bench "HaskellDB" $ nfIO $ HDB.differences
],
bgroup "GetAge" [ bench "HLINQ Drew" $ whnfIO $ fromTest [||$$(getAgeT) "Drew"||],
bench "HLINQ Drew to list" $ nfIO $ ((toList $ fromTest [||$$(getAgeT) "Drew"||]):: IO [(Int)]),
bench "HLINQ unknown name " $ whnfIO $ fromTest [||$$(getAgeT) "Ethan"||],
bench "HLINQ unknown name " $ nfIO $ ((toList $ fromTest [||$$(getAgeT) "Ethan"||]):: IO [(Int)]),
bench "HaskellDB Drew" $ whnfIO $ (HDB.getAge "Drew"),
bench "HaskellDB unknown name " $ whnfIO $ (HDB.getAge "Ethan")
],
bgroup "Compose" [ bench "HLINQ Edna Drew" $ whnfIO $ fromTest [||$$(composeT) "Edna" "Drew"||],
bench "HLINQ Edna Drew to tuples" $ nfIO $ ((toList $ fromTest [||$$(composeT) "Edna" "Drew"||]):: IO [(String)]),
bench "HLINQ Edna Drew to tuples fmap" $ nfIO $ ((toList' $ fromTest [||$$(composeT) "Edna" "Drew"||]):: IO [(String)]),
bench "HLINQ Edna Drew to tuples fmap'" $ nfIO $ ((toList'' $ fromTest [||$$(composeT) "Edna" "Drew"||]):: IO [(String)]),
bench "HLINQ Bob Tim" $ whnfIO $ fromTest [||$$(composeT) "Bob" "Tim"||],
bench "HLINQ Bob Tim to tuples" $ nfIO $ ((toList $ fromTest [||$$(composeT) "Bob" "Tim"||]):: IO [(String)]),
bench "HLINQ Bob Tim to tuples fmap" $ nfIO $ ((toList' $ fromTest [||$$(composeT) "Bob" "Tim"||]):: IO [(String)]),
bench "HLINQ Bob Tim to tuples fmap'" $ nfIO $ ((toList'' $ fromTest [||$$(composeT) "Bob" "Tim"||]):: IO [(String)]),
bench "HaskellDB Edna Drew" $ nfIO $ HDB.compose "Edna" "Drew",
bench "HaskellDB Bob Tim" $ whnfIO $ HDB.compose "Bob" "Tim"
],
bgroup "Satisfies" [ bench "LINQ" $ whnfIO $ fromTestUntyped [|$satisfies $pre|],
bench "TLINQ" $ whnfIO $ fromTest [||$$satisfiesT $$preT||]
],
bgroup "Satisfies Dynamic" [ bench "TLINQ" $ whnfIO $ fromTest [||$$satisfiesT $$(p t0)||]
],
bgroup "Normalisation" [ bench "compose normalise" $ whnfIO $ normalisedCompose print,
bench "differences normalise" $ whnfIO $ normalisedDifferences print,
bench "compose normalisation" $ whnfIO $ printNormalised [||$$(composeT) "Edna" "Drew"||],
bench "differences normalisation" $ whnfIO $ printNormalised [||$$differencesT||]
]
]
| juventietis/HLINQ | benchmark/Main.hs | mit | 4,599 | 35 | 11 | 1,113 | 341 | 197 | 144 | -1 | -1 |
-- This module manages the movement through Hyperspace
-- and the transition from a ship being in Hyperspace
-- to being landed on a destination planet
module IX.Universe.HyperSpace
(manageTravel
,evalHyp
,evalSetSpeed
,evalHypComm
,evalMove
,commTransitions
,changeShip
,getName
,getPlanet
,setRepairField
) where
import DataStructures.Atomic
import DataStructures.Composite
import IX.Universe.Utils (intToPInt)
import Data.List (foldl')
import Safe (fromJustNote)
import Data.List.Utils (delFromAL)
import Control.Applicative ((<$>))
import Control.Monad (join)
import Data.Maybe (catMaybes)
import qualified Data.Map.Strict as M
import Debug.Trace
manageTravel :: PlanetMap ->
AgentMap ->
Maybe (M.Map AID ToPlanetName) ->
LocationMap ->
((),LocationMap)
manageTravel (pMap)
_
(Just transit_map)
(LocationMap l_map) =
let up_lmap = LocationMap $ M.foldrWithKey upLMap l_map transit_map
in ((),up_lmap)
where
upLMap :: AID -> ToPlanetName -> M.Map AID Location -> M.Map AID Location
upLMap aid tpn l_map =
let ((Location (Left (pn, _)))) =
fromJustNote aidNotFound (M.lookup aid l_map)
newLoc = Location $ Right $ (hSpace, Launched)
hSpace = HyperSpace {
destination = tpn
,origin = fpn
,totalDistance = distanceTo
,distanceTraversed = 0 :: PInt
}
distanceTo = getDistanceTo tpn $ FromPlanet (fpn,fp)
fp = getPlanet pn pMap
fpn = FromPlanetName pn
aidNotFound = "manageTravel failed to find " ++
(show aid) ++
"in LocationMap\n"
in M.insert aid newLoc l_map
manageTravel _ -- No need for a PlanetMap
(AgentMap aMap)
Nothing -- a tick must have happened if this matches
(LocationMap lMap) =
(,) ()
(LocationMap $
M.mapWithKey (updateLocationMap aMap) $
M.foldlWithKey removeDead lMap aMap)
where
removeDead lmap' aid (Dead _) = M.delete aid lmap'
removeDead lmap' _ _ = lmap'
updateLocationMap _ _ (Location (Left ( pName,Landed))) =
(Location (Left (pName,PlanetSide)))
updateLocationMap _ _ loc@(Location (Left (_,PlanetSide))) =
loc
updateLocationMap a_map aid (Location (Right (hSpace,tState))) =
let (ToPlanetName dest) = destination hSpace
tDist = totalDistance hSpace
traversed = distanceTraversed hSpace
in case tState of
Launched -> updatedHSpace
InHyperSpace -> if (tDist == traversed)
then landed
else updatedHSpace
where
landed = (Location $ Left $ (dest,Landed))
where
ship_speed =
case (findSpeed) of
Just speed' -> speed'
Nothing -> Turtle -- how did this happen?
updatedHSpace =
Location $ Right (incDist hSpace ship_speed,InHyperSpace)
findSpeed = join $
warp_speed <$>
ship_stats <$>
ship <$>
M.lookup aid a_map
evalHyp :: M.Map AID Location -> M.Map AID Agent -> HCommand -> (AID,Result)
evalHyp l_map a_map (HCommand (VAC (PlayerCommand comm aid))) =
let hyp_data = fromJustNote locFail (M.lookup aid l_map)
ship' = ship (fromJustNote agtFail (M.lookup aid a_map))
res = case comm of
Move pName -> CError (CantMoveTo $ pName)
Zap aid' -> CError (CantZap aid')
Look -> Looked (Right hyp_data) ship'
Repair -> ChangeShip Repairing
SetSpeed _ -> CError SpeedIsSet
Buy _ _ -> CError NoBusinessInHyperSpace
Sell _ _ -> CError NoBusinessInHyperSpace
Market -> CError NoBusinessInHyperSpace
in (aid,res)
where
locFail = "evalHyp did not find " ++
show aid ++
"in LocationMap"
agtFail = "evalHyp did not find " ++
show aid ++
"in AgentMap"
evalSetSpeed :: WarpSpeed -> Ship -> Result
evalSetSpeed warp_speed (Ship (ShipParts {engine = engine'}) _)
| w_speed <= e_power = ChangeShip $ WSpeed warp_speed
| otherwise = CError EngineTooWimpy
where
w_speed = fromEnum warp_speed
e_power = fromEnum engine'
evalHypComm :: LocationMap -> AgentMap -> HSpaceComm -> M.Map AID Result
evalHypComm (LocationMap l_map) (AgentMap a_map) (HSpaceComm hCommands) =
M.fromList (map (evalHyp l_map a_map) hCommands)
evalMove aid agt ((ToPlanet (tpn@(ToPlanetName pn),_)), fpn) (PlanetMap pMap') =
case (warp_speed $ ship_stats $ ship agt) of
Just _ -> attemptDest
Nothing -> Left $ (aid,resultErr)
where
resultErr = CError $ SetSpeedFirst
attemptDest =
let mDest =
join $
lookup pn <$>
neighbors <$>
M.lookup fpn pMap'
in case mDest of
Just _ -> Right $ (aid,tpn)
Nothing -> Left $ (aid,resultErr')
where
resultErr' = CError $ CantMoveTo tpn
commTransitions :: LocationMap -> [Maybe DAgentMap]
commTransitions (LocationMap lMap) =
eIsN $ LocationUpdate $ M.mapMaybe commTransitions' lMap
where
commTransitions' :: Location -> Maybe Message
commTransitions' (Location (Left (tpn, Landed))) =
Just (JustLandedOn tpn)
commTransitions' (Location (Right (hSpace,Launched))) =
let fpn = origin hSpace
tpn = destination hSpace
onward = Onward tpn fpn
in Just onward
commTransitions' _ = Nothing
eIsN :: DAgentMap -> [Maybe DAgentMap]
-- eIsN (LocationUpdate []) = Nothing
eIsN lu@(LocationUpdate _) = [Just lu]
eIsN _ = [Nothing]
changeShip :: AgentMap ->
M.Map AID Result ->
[Maybe DAgentMap]
changeShip (AgentMap a_map) resList =
snd `fmap` M.toList (M.mapWithKey changeShip' resList)
where
changeShip' aid (ChangeShip change) =
let agt = fromJustNote aAgentFail (M.lookup aid a_map)
res = case change of
(WSpeed w_speed) ->
setWarpSpeed w_speed agt
Repairing ->
setRepairField True agt
in Just $ DAgentMap $ SubAgentMap $ M.singleton aid res
where
aAgentFail = "changeShip failed to match aid " ++ (show aid)
changeShip' _ _ = Nothing
--removeDead :: [(AID,Location)] -> (AID,Agent) -> [(AID,Location)]
--removeDead lmap' (aid,(Dead _)) = delFromAL lmap' aid
--removeDead lmap' _ = lmap'
----------------------- Getters and Setters --------------------
getName :: Agent -> ClientName
getName (Player {aName = name}) = name
getName (Dead name) = name
getDistanceTo :: ToPlanetName ->
FromPlanet ->
Distance
getDistanceTo (ToPlanetName tpn) (FromPlanet ((FromPlanetName fpn),fp)) =
fromJustNote neighborFail (lookup tpn (neighbors fp))
where
neighborFail = show tpn ++ " should have been a neighbor of " ++ show fpn
getPlanet :: PlanetName -> PlanetMap -> Planet
getPlanet p_name (PlanetMap p_map) =
fromJustNote noPlanet (M.lookup p_name p_map)
where
noPlanet = "getPlanet failed to find " ++
"the following planet in PlanetMap " ++
show p_name
setRepairField :: Bool -> Agent -> Agent
setRepairField bool agt@(Player {ship = Ship shipParts shipStats}) =
agt {ship = set_repair_field}
where
set_repair_field = Ship shipParts shipStats {repairing = bool}
setWarpSpeed :: WarpSpeed -> Agent -> Agent
setWarpSpeed w_speed agt@(Player {ship = Ship shipParts shipStats}) =
agt {ship = set_warp_speed}
where
set_warp_speed = Ship shipParts shipStats {warp_speed = Just w_speed}
setWarpSpeed _ agt = agt
incDist :: HyperSpace -> WarpSpeed -> HyperSpace
incDist hSpace w_speed = hSpace {distanceTraversed = incremented}
where incremented = (distanceTraversed hSpace) + increment
increment = (intToPInt $ fromEnum w_speed) + 1
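-- Note: each tick therefore advances a ship by (fromEnum of its warp speed) + 1
-- distance units, so even the slowest speed makes progress every tick.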
| mlitchard/IX | src/IX/Universe/HyperSpace.hs | mit | 9,033 | 0 | 17 | 3,292 | 2,306 | 1,201 | 1,105 | 187 | 8 |
-- Imports
import qualified Data.Map.Strict as M
import Data.Map.Strict((!))
import Utils
import System.Environment
--import Debug.Trace
import qualified FeatherweightJava as FJ
-- Data types
type KlassName = String
type MethodName = String
type FieldName = String
type ObjectName = String
data KlassType = SimpleKlass KlassName | ComplexKlass KlassGeneric deriving (Eq, Show)
data KlassGeneric = KlassGeneric KlassName [KlassType] deriving (Eq, Show)
data Generic = Generic KlassName KlassGeneric [(FieldName, KlassType)] [(KlassName, KlassGeneric)] (M.Map MethodName Method) deriving (Eq, Show)
data Method = Method MethodName [(ObjectName, KlassType)] Expr KlassType [(KlassName, KlassGeneric)] deriving (Eq, Show)
data Expr = Arg ObjectName
| FieldCall Expr FieldName
| MethodCall Expr MethodName [Expr] [KlassType]
| NewObject KlassGeneric [Expr]
| TypeCast Expr KlassGeneric deriving (Eq, Show)
type KlassTable = M.Map KlassName Generic
type Program = (KlassTable, Expr)
type Context = M.Map KlassName KlassGeneric
type Environment = M.Map ObjectName KlassType
-- Small utils
mk_SimpleClass :: KlassName -> KlassType
mk_SimpleClass name = SimpleKlass name
mk_ComplexClass :: KlassGeneric -> KlassType
mk_ComplexClass gen = ComplexKlass gen
mk_context :: [(KlassName, KlassGeneric)] -> M.Map KlassName KlassGeneric
mk_context = M.fromList
join_context :: M.Map KlassName KlassGeneric -> M.Map KlassName KlassGeneric -> M.Map KlassName KlassGeneric
join_context c1 c2 = mk_context $ (M.toList c1) ++ (M.toList c2)
get_generic :: KlassTable -> KlassName -> Generic
get_generic kt kName = kt ! kName
mk_type_mapper :: [KlassName] -> [KlassType] -> M.Map KlassName KlassType
mk_type_mapper type_variables kts = M.fromList (zip type_variables kts)
isSimpleKlass :: KlassType -> Bool
isSimpleKlass kType = case kType of
SimpleKlass _ -> True
_ -> False
free_variables :: KlassType -> [KlassName]
free_variables (SimpleKlass kName) = [kName]
free_variables (ComplexKlass (KlassGeneric _ kts)) = concatMap free_variables kts
is_defined :: KlassTable -> MethodName -> KlassName -> Bool
is_defined kt mName "Object" = False
is_defined kt mName kName = mName `elem` (M.keys gMethods)
where (Generic _ _ _ _ gMethods) = get_generic kt kName
get_method :: KlassTable -> KlassName -> MethodName -> Method
get_method kt kName mName = gMethods ! mName
where (Generic _ _ _ _ gMethods) = get_generic kt kName
-- Auxiliary functions, mostly specific functions from the official publication about FJ
klass_type_substitute :: M.Map KlassName KlassType -> KlassType -> KlassType
klass_type_substitute mapper kType = case kType of
ComplexKlass (KlassGeneric kName kts) -> ComplexKlass (KlassGeneric kName (map (klass_type_substitute mapper) kts))
SimpleKlass kName -> (case (M.lookup kName mapper) of
Just kType' -> kType'
Nothing -> kType)
klass_generic_substitute :: M.Map KlassName KlassType -> KlassGeneric -> KlassGeneric
klass_generic_substitute mapper kGeneric = kGeneric'
where ComplexKlass kGeneric' = klass_type_substitute mapper (ComplexKlass kGeneric)
expr_substitute :: M.Map KlassName KlassType -> Expr -> Expr
expr_substitute mapper expr = case expr of
Arg x -> expr
FieldCall expr0 fName -> FieldCall (expr_substitute mapper expr0) fName
MethodCall expr0 mName es kts -> MethodCall (expr_substitute mapper expr0) mName (map (expr_substitute mapper) es) (map (klass_type_substitute mapper) kts)
NewObject kGeneric es -> NewObject (klass_generic_substitute mapper kGeneric) (map (expr_substitute mapper) es)
  TypeCast es kGeneric -> TypeCast (expr_substitute mapper es) (klass_generic_substitute mapper kGeneric)
fields :: KlassTable -> KlassGeneric -> [(FieldName, KlassType)]
fields kt (KlassGeneric "Object" _) = []
fields kt (KlassGeneric gName kts) = parent_fields ++ generic_fields
where (Generic _ gParent gFields gParams _) = get_generic kt gName
type_mapper = mk_type_mapper (map fst gParams) kts
parent_fields = fields kt (klass_generic_substitute type_mapper gParent)
kts' = map (klass_type_substitute type_mapper) kts
generic_fields = zip (map fst gFields) kts'
mtype :: KlassTable -> KlassGeneric -> MethodName -> ([(KlassName, KlassGeneric, KlassType)], KlassType)
mtype kt kGeneric mName = case (M.lookup mName gMethods) of
Just m -> (mArgs'', mRetType')
where (Method _ mArgs _ mRetType mCast) = m
mRetType' = klass_type_substitute type_mapper mRetType
mArgs' = uncurry zip3 (unzip mCast) (map snd mArgs)
mArgs'' = map (\(x,y,z) -> (x, klass_generic_substitute type_mapper y, klass_type_substitute type_mapper z)) mArgs'
Nothing -> mtype kt kGeneric' mName
where (KlassGeneric gName kts) = kGeneric
(Generic _ gParent _ gParams gMethods) = get_generic kt gName
type_mapper = mk_type_mapper (map fst gParams) kts
kGeneric' = klass_generic_substitute type_mapper gParent
mbody :: KlassTable -> KlassGeneric -> MethodName -> [KlassType] -> ([KlassName], Expr)
mbody kt kGeneric mName mValues = case (M.lookup mName gMethods) of
Just m -> (mVars, mExpr')
where (Method _ mArgs mExpr mRetType mCast) = m
mVars = map fst mArgs
type_mapper' = mk_type_mapper (map fst mCast) mValues
new_type_mapper = M.union type_mapper type_mapper'
mExpr' = expr_substitute new_type_mapper mExpr
Nothing -> mbody kt kGeneric' mName mValues
where (KlassGeneric gName kts) = kGeneric
(Generic _ gParent _ gParams gMethods) = get_generic kt gName
type_mapper = mk_type_mapper (map fst gParams) kts
kGeneric' = klass_generic_substitute type_mapper gParent
bound :: Context -> KlassType -> KlassGeneric
bound ctx kType = case kType of
SimpleKlass name -> ctx ! name
ComplexKlass kGeneric -> kGeneric
isSubclassOf :: KlassTable -> KlassName -> KlassName -> Bool
isSubclassOf kt k1 k2
| k1 == k2 = True
| k2 == pName = True
| k1 == "Object" = False
| otherwise = isSubclassOf kt pName k2
where (Generic _ (KlassGeneric pName _) _ _ _) = get_generic kt k1
isSubtypeOf :: KlassTable -> Context -> KlassType -> KlassType -> Bool
isSubtypeOf kt ctx k1 k2
| k1 == k2 = True
| k1 == ComplexKlass (KlassGeneric "Object" []) = False
| otherwise = case k1 of
SimpleKlass k1Name -> (kc == k2) || (isSubtypeOf kt ctx kc k2)
where kc = ComplexKlass (ctx ! k1Name)
ComplexKlass (KlassGeneric k1Name kts) -> (kp == k2) || (isSubtypeOf kt ctx kp k2)
where (Generic _ kGenericParent _ gParams _) = get_generic kt k1Name
type_mapper = mk_type_mapper (map fst gParams) kts
kp = ComplexKlass (klass_generic_substitute type_mapper kGenericParent)
isWellFormed :: KlassTable -> Context -> KlassType -> Bool
isWellFormed kt ctx (ComplexKlass (KlassGeneric "Object" _)) = True
isWellFormed kt ctx (SimpleKlass kName) = kName `elem` (M.keys ctx)
isWellFormed kt ctx (ComplexKlass (KlassGeneric gName kts)) = params_ok && subtypes_ok
where (Generic _ _ _ gParams _) = get_generic kt gName
params_ok = and$ map (isWellFormed kt ctx) kts
bounds = map snd gParams
type_mapper = mk_type_mapper (map fst gParams) kts
bounds' = map (ComplexKlass . (klass_generic_substitute type_mapper)) bounds
subtypes_ok = and$ map (uncurry$ isSubtypeOf kt ctx) $ zip kts bounds'
downcast :: KlassTable -> KlassName -> KlassName -> Bool
downcast kt k1 k2
| k1 == "Object" = False
| k2 == pName = set_equal type_vars free_vars
| otherwise = downcast kt pName k2
where (Generic _ gParent _ gParams _) = get_generic kt k1
(KlassGeneric pName kts) = gParent
type_vars = map fst gParams
free_vars = concatMap free_variables kts
-- Determining type
typeof :: KlassTable -> Context -> Environment -> Expr -> KlassType
typeof kt ctx env (Arg x) = env ! x
typeof kt ctx env (FieldCall expr0 fName) = lookupe fName expr0_fields
where expr0_type = typeof kt ctx env expr0
expr0_fields = fields kt (bound ctx expr0_type)
typeof kt ctx env expr@(NewObject kGeneric es) = if and (klass_ok:subtypes)
then ComplexKlass kGeneric else error ("Error when typechecking `"++(show expr)++"`.\n")
where klass_ok = isWellFormed kt ctx (ComplexKlass kGeneric)
field_types = map snd $ fields kt kGeneric
arg_types = map (typeof kt ctx env) es
subtypes = map (uncurry $ isSubtypeOf kt ctx) $ zip arg_types field_types
typeof kt ctx env expr@(MethodCall expr0 mName es kts) = if everything_ok
then ret_type' else error ("Error when typechecking `"++(show expr)++"`.\n")
where expr0_type = typeof kt ctx env expr0
es_types = map (typeof kt ctx env) es
(arg_types, ret_type) = mtype kt (bound ctx expr0_type) mName
type_mapper = mk_type_mapper (map fst3 arg_types) kts
type_vars_ok = and$ map (isWellFormed kt ctx) kts
vars_subtypes_ok = and$ map (uncurry $ isSubtypeOf kt ctx) (zip kts (map (ComplexKlass . (klass_generic_substitute type_mapper) . snd3) arg_types))
val_subtypes_ok = and$ map (uncurry $ isSubtypeOf kt ctx) (zip es_types (map (klass_type_substitute type_mapper . trd3) arg_types))
everything_ok = type_vars_ok && vars_subtypes_ok && val_subtypes_ok
ret_type' = klass_type_substitute type_mapper ret_type
typeof kt ctx env expr@(TypeCast expr0 kGeneric)
| isSubtypeOf kt ctx bounded_expr0_type ret_type = ret_type
| isSubtypeOf kt ctx ret_type bounded_expr0_type && dcast_conds = ret_type
| scast_conds = ret_type
| otherwise = error "Impossible!"
where expr0_type = typeof kt ctx env expr0
bounded_expr0_kgen = bound ctx expr0_type
bounded_expr0_type = (ComplexKlass bounded_expr0_kgen)
ret_type = (ComplexKlass kGeneric)
(KlassGeneric kGenericName _) = kGeneric
(KlassGeneric bounded_name bounded_vars) = bounded_expr0_kgen
ret_type_ok = isWellFormed kt ctx ret_type
dcast_conds = (downcast kt kGenericName bounded_name) && ret_type_ok
scast_conds = ret_type_ok && (not (isSubclassOf kt kGenericName bounded_name)) && (not (isSubclassOf kt bounded_name kGenericName))
-- Erasure
type_erasure :: Context -> KlassType -> KlassName
type_erasure ctx kType = name
where (KlassGeneric name _) = bound ctx kType
fieldsmax :: KlassTable -> KlassName -> M.Map FieldName KlassName
fieldsmax kt' kName' = res
where res = M.fromList (fieldsmax' kt' kName')
fieldsmax' kt "Object" = []
fieldsmax' kt kName = parent_fields ++ klass_fields
where (Generic _ (KlassGeneric pName _) gFields gParams _) = get_generic kt kName
parent_fields = fieldsmax' kt pName
ctx = mk_context gParams
klass_fields = zip (map fst gFields) (map (type_erasure ctx . snd) gFields)
mtypemax :: KlassTable -> MethodName -> KlassName -> ([FJ.KlassName], FJ.KlassName)
mtypemax kt mName kName = if is_defined kt mName pName then mtypemax kt mName pName else (mArgs', mRetType')
where (Generic gName gParent _ gParams _) = get_generic kt kName
(Method _ mArgs _ mRetType mCast) = get_method kt kName mName
(KlassGeneric pName _) = gParent
ctx = mk_context (gParams ++ mCast)
mRetType' = type_erasure ctx mRetType
mArgs' = map (type_erasure ctx) (map snd mArgs)
klass_expr_substitute :: M.Map ObjectName FJ.Expr -> FJ.Expr -> FJ.Expr
klass_expr_substitute mapper expr@(FJ.Arg x) = if x `elem` (M.keys mapper) then mapper ! x else expr
klass_expr_substitute mapper (FJ.FieldCall expr0 fName) = FJ.FieldCall (klass_expr_substitute mapper expr0) fName
klass_expr_substitute mapper (FJ.MethodCall expr0 mName es) = FJ.MethodCall expr0' mName es'
where expr0' = klass_expr_substitute mapper expr0
es' = map (klass_expr_substitute mapper) es
klass_expr_substitute mapper (FJ.NewObject kName es) = FJ.NewObject kName (map (klass_expr_substitute mapper) es)
klass_expr_substitute mapper (FJ.TypeCast expr0 kType) = FJ.TypeCast (klass_expr_substitute mapper expr0) kType
erasure :: KlassTable -> Context -> Environment -> Expr -> FJ.Expr
erasure kt ctx env expr@(Arg x) = FJ.Arg x
erasure kt ctx env expr@(FieldCall expr0 fName)
| field_type == expr_type = expr'
| otherwise = FJ.TypeCast expr' expr_type
where expr_type = type_erasure ctx (typeof kt ctx env expr)
expr0_type = type_erasure ctx (typeof kt ctx env expr0)
field_type = (fieldsmax kt expr0_type) ! fName
expr0' = erasure kt ctx env expr0
expr' = FJ.FieldCall expr0' fName
erasure kt ctx env expr@(MethodCall expr0 mName es kts)
| return_type == expr_type = expr'
| otherwise = FJ.TypeCast expr' expr_type
where expr_type = type_erasure ctx (typeof kt ctx env expr)
expr0_type = type_erasure ctx (typeof kt ctx env expr0)
expr0' = erasure kt ctx env expr0
es' = map (erasure kt ctx env) es
(_,return_type) = mtypemax kt mName expr0_type
expr' = FJ.MethodCall expr0' mName es'
erasure kt ctx env expr@(NewObject kGeneric es) = FJ.NewObject kType' es'
where es' = map (erasure kt ctx env) es
kType' = type_erasure ctx (ComplexKlass kGeneric)
erasure kt ctx env expr@(TypeCast expr0 kGeneric) = FJ.TypeCast expr0' kType'
where expr0' = erasure kt ctx env expr0
kType' = type_erasure ctx (ComplexKlass kGeneric)
compile_method :: KlassTable -> Generic -> Context -> Method -> FJ.Method
compile_method kt gen ctx m@(Method mName mArgs mExpr mRetType mCast) = m'
where (Generic gName _ _ _ _) = gen
(arg_types, mRetType') = mtypemax kt mName gName
new_ctx = join_context ctx (mk_context mCast)
env = M.fromList $ ("this", ComplexKlass (KlassGeneric gName (map mk_SimpleClass (M.keys ctx)))):mArgs
var_map_fun (d,(x,t)) = if d == t then (x, FJ.Arg x) else (x, FJ.TypeCast (FJ.Arg x) t)
var_mapper = M.fromList $ map var_map_fun (zip arg_types (map (\(x,t) -> (x, type_erasure new_ctx t)) mArgs))
mExpr' = klass_expr_substitute var_mapper (erasure kt ctx env mExpr)
mArgs' = zip (map fst mArgs) arg_types
m' = FJ.Method mName mArgs' mExpr' mRetType'
compile_class :: KlassTable -> Generic -> FJ.Klass
compile_class kt gen@(Generic gName (KlassGeneric pName kts) gFields gParams gMethods) = klass
where ctx = mk_context gParams
kFields = map (\(n,k) -> (n, type_erasure ctx k)) gFields
kMethods = M.map (compile_method kt gen ctx) gMethods
klass = FJ.Klass gName pName kFields kMethods
-- Examples
object_generic = KlassGeneric "Object" []
example_kt_pair = M.fromList [a_generic, b_generic, pair_generic]
where a_generic = ("A", Generic "A" object_generic [] [] M.empty)
b_generic = ("B", Generic "B" object_generic [] [] M.empty)
pair_fields = [("fst", SimpleKlass "X"), ("snd", SimpleKlass "Y")]
pair_params = [("X", object_generic), ("Y", object_generic)]
setfst_args = [("newfst", SimpleKlass "Z")]
setfst_expr = NewObject (KlassGeneric "Pair" [SimpleKlass "Z", SimpleKlass "Y"]) [Arg "newfst", FieldCall (Arg "this") "snd"]
setfst_ret_type = ComplexKlass (KlassGeneric "Pair" [SimpleKlass "Z", SimpleKlass "Y"])
setfst_cast = [("Z", object_generic)]
setfst_method = Method "setfst" setfst_args setfst_expr setfst_ret_type setfst_cast
pair_methods = M.fromList [("setfst", setfst_method)]
pair_generic = ("Pair", Generic "Pair" object_generic pair_fields pair_params pair_methods)
example_kt_list = M.fromList [a_generic, b_generic, c_generic, list_generic, empty_generic, node_generic]
where a_generic = ("A", Generic "A" object_generic [] [] M.empty)
b_generic = ("B", Generic "B" object_generic [] [] M.empty)
c_generic = ("C", Generic "C" object_generic [] [] M.empty)
prepend_method = Method "prepend" [("new_val", ComplexKlass object_generic)] (NewObject (KlassGeneric "Node" []) [Arg "new_val", Arg "this"]) (ComplexKlass (KlassGeneric "Node" [])) []
node_generic = ("Node", Generic "Node" (KlassGeneric "List" []) [("value", ComplexKlass (KlassGeneric "Object" [])), ("tail", ComplexKlass (KlassGeneric "List" []))] [] M.empty)
empty_generic = ("EmptyList", Generic "EmptyList" (KlassGeneric "List" []) [] [] M.empty)
list_generic = ("List", Generic "List" object_generic [] [] (M.fromList [("prepend", prepend_method)]))
example1 = (example_kt_pair, expr)
where expr = MethodCall (NewObject (KlassGeneric "Pair" [ComplexKlass (KlassGeneric "A" []), ComplexKlass (KlassGeneric "B" [])]) [NewObject (KlassGeneric "A" []) [],NewObject (KlassGeneric "B" []) []]) "setfst" [NewObject (KlassGeneric "B" []) []] [ComplexKlass (KlassGeneric "B" [])]
example2 = (example_kt_list, expr4)
where expr1 = NewObject (KlassGeneric "EmptyList" []) []
expr2 = MethodCall expr1 "prepend" [NewObject (KlassGeneric "A" []) []] []
expr3 = MethodCall expr2 "prepend" [NewObject (KlassGeneric "B" []) []] []
expr4 = MethodCall expr3 "prepend" [NewObject (KlassGeneric "A" []) []] []
compile :: Program -> FJ.Program
compile prog@(kt, expr) = (kt', expr')
where kt' = M.map (compile_class kt) kt
expr' = erasure kt M.empty M.empty expr
main = do
args <- getArgs
let example = if (length args >= 1) && (args !! 0 == "pair") then example1 else example2
let ast@(kt, expr) = compile example
putStrLn$ "=== AST: ===\n" ++ (concatMap (\(n,k) -> "Class `"++n++"`: "++(show k)++"\n") (M.toList kt)) ++ "===========\n"
FJ.run ast
| swistak35/fgj_interpreter | main_fgj.hs | mit | 19,542 | 0 | 17 | 5,418 | 6,138 | 3,154 | 2,984 | 282 | 5 |
{-# LANGUAGE CPP #-}
{-# LANGUAGE BangPatterns #-}
module Control.Applicative.Extra
( dispatchA
, dispatchByA
, dispatchWhenA
) where
import Prelude (error)
import Data.List
import Data.String
import Data.Bool
import Data.Eq
import Data.Function
import Data.Int
-- import Data.Ord
import Data.Functor
import Safe.Extra (fromRightPrefix')
-- import Control.Monad
import Control.Applicative
import Data.Traversable (traverse)
import Data.List.Split (chunksOf)
import Data.List.Split.Extra (splitPlaces)
import Data.List.Extra (collateBy)
-- import Data.Monoid
import Data.Bifunctor (bimap)
import Text.Show (show)
assertLengthEq :: String -> [a] -> [b] -> [b]
assertLengthEq !_note _is os =
if length _is /= length os
then error (_note ++ ": output length " ++ (show . length) os ++ " does not match input length " ++ (show . length) _is)
else os
{-# INLINE assertLengthEq #-}
-- | Dispatch elements according to the splits (odd lists are sent to the first function, even lists to the second)
-- and gather the results maintaining order
--
-- (TODO: create a parallel version of this function)
--
-- >>> dispatchA [0,2,0,3,4,0,0,1,1,0,2,2]
-- (\xs -> print xs >> return (map (+ 0.1) xs))
-- (\xs -> print xs >> return (map (+ 0.9) xs))
-- [ 19,29 , 39,49,59 , 11,21,31,41 , 69 , 51 , 61,71 , 79,89 ]
-- OUT: [11.0,21.0,31.0,41.0,51.0,61.0,71.0]
-- OUT: [19.0,29.0,39.0,49.0,59.0,69.0,79.0,89.0]
-- [19.9,29.9,39.9,49.9,59.9,11.1,21.1,31.1,41.1,69.9,51.1,61.1,71.1,79.9,89.9]
--
-- >>> splitPlaces [0,2,0,3,4,0,0,1,1,0,2,2] [ 19,29 , 39,49,59 , 11,21,31,41 , 69 , 51 , 61,71 , 79,89 ]
-- [ [],[19,29] , [],[39,49,59] , [11,21,31,41],[] , [],[69] , [51],[] , [61,71],[79,89] ]
--
-- >>> foldr zipChunks ([],[]) $ chunksOf 2 $ [[],[19,29],[],[39,49,59],[11,21,31,41],[],[],[69],[51],[],[61,71],[79,89]]
-- ( [[],[],[11,21,31,41],[],[51],[61,71]] , [[19,29],[39,49,59],[],[69],[],[79,89]] )
--
dispatchA :: Applicative m => [Int] -> ([a] -> m [b]) -> ([a] -> m [b]) -> [a] -> m [b]
dispatchA seqs f g xs =
let seqss = fromRightPrefix' "dispatchA" $ splitPlaces seqs xs
(lxs,rxs) = (concat `bimap` concat) (foldr zipChunks ([],[]) $ chunksOf 2 seqss)
lxs' = f lxs
rxs' = g rxs
in gather seqs <$> (assertLengthEq "dispatchA/f" lxs <$> lxs') <*> (assertLengthEq "dispatchA/g" rxs <$> rxs')
where
zipChunks :: [a] -> ([a], [a]) -> ([a], [a])
zipChunks (x:[] ) (ls,rs) = (x:ls, rs)
zipChunks (x:y:[]) (ls,rs) = (x:ls, y:rs)
zipChunks _ _ = error "dispatchA/zipChunks"
gather :: [Int] -> [a] -> [a] -> [a]
gather [] _ _ = []
gather (lseqs:[] ) ls _ = take lseqs ls
gather (lseqs:rseqs:seqs') ls rs =
let (ls',ls'') = splitAt lseqs ls
(rs',rs'') = splitAt rseqs rs
in ls' ++ rs' ++ gather seqs' ls'' rs''
-- | Dispatch elements by a bucketing function and gather the results maintaining order
-- In line with the standard ordering of Bool (False < True), elements that evaluate to
-- False are sent to the first function and those that evaluate to True to the second.
dispatchByA :: Applicative m => (a -> Bool) -> ([a] -> m [b]) -> ([a] -> m [b]) -> [a] -> m [b]
dispatchByA bucket f g xs = dispatchA (length `map` collateBy bucket xs) f g xs
-- | Dispatch elements to a batch function when they meet some condition (return the rest as given)
--
-- >>> dispatchWhenA (> 0) (const Nothing) (\xs -> print xs >> return (map Just xs)) [-1,-4,0,2,3,-4,1,0 :: Int]
-- OUT: [2,3,1]
-- [Nothing,Nothing,Nothing,Just 2,Just 3,Nothing,Just 1,Nothing]
--
dispatchWhenA :: Applicative m => (a -> Bool) -> (a -> b) -> ([a] -> m [b]) -> [a] -> m [b]
dispatchWhenA cond def = dispatchByA cond (traverse $ pure . def)
-- | Dispatch elements by lumping elements into several buckets and gather the results maintaining order
--
-- dispatchGroupsA :: (Enum e, Bounded e, Applicative m) => [(e, Int)] -> [[a] -> m [b]] -> [a] -> m [b]
--
| circuithub/circuithub-prelude | Control/Applicative/Extra.hs | mit | 4,005 | 0 | 14 | 813 | 976 | 547 | 429 | 50 | 5 |
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE OverloadedStrings #-}
module Text.Html.PigLet.Th
( makeTemplate
, setContent
, embedContent
, addAttr
, pass
, Selector (..))
where
import Util.BlazeFromHtml hiding (main)
import Util.GenerateHtmlCombinators hiding (main)
import Text.HTML.TagSoup
import qualified Text.Blaze.Html5 as H
import qualified Text.Blaze.Html5.Attributes as HA
import Text.Blaze.Html.Renderer.Pretty (renderHtml)
import Language.Haskell.TH
import Data.Monoid
import Data.Maybe (fromJust)
import GHC.Exts (IsString (..))
import Text.Html.PigLet.Html5Defs
-- TODO: R
-- 1. All html5 tags
-- 2. Better selector
data Modify = SetContent ExpQ
| EmbedContent ExpQ
| AddAttr (Attribute String)
| NotTouched
data HtmlMod = HtmlParent String Attributes HtmlMod Modify
| HtmlBlock [HtmlMod]
| HtmlText String
| HtmlComment String
| HtmlDoctype
data Selector = Dom String
| Attr (Attribute String)
deriving (Show)
makeTemplate :: FilePath -> [(HtmlMod -> HtmlMod)] -> ExpQ
makeTemplate file trans = runIO (readFile file) >>= transformHtml trans
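-- Hypothetical usage sketch (the file name, selector targets and spliced
-- values are invented for illustration; 'Attribute' is assumed to be a
-- name/value pair, as pattern-matched in 'mergeAttr' below):
--
-- > page = $(makeTemplate "templates/page.html"
-- >            [ setContent (Dom "title") [| H.toHtml ("Hello" :: String) |]
-- >            , addAttr (Dom "body") ("class", "dark") ])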
transformHtml :: [(HtmlMod -> HtmlMod)] -> String -> ExpQ
transformHtml trans htmlStr = genCode $ foldr ($) hm trans
where hm = html2HtmlMod $ htmlTree html5 htmlStr
pass :: HtmlMod -> HtmlMod
pass = id
setContent :: Selector -> ExpQ -> HtmlMod -> HtmlMod
setContent selector expr = attachModify selector (SetContent expr)
embedContent :: Selector -> ExpQ -> HtmlMod -> HtmlMod
embedContent selector expr = attachModify selector (EmbedContent expr)
addAttr :: Selector -> Attribute String -> HtmlMod -> HtmlMod
addAttr selector attr = attachModify selector (AddAttr attr)
(##) :: Selector -> Modify -> HtmlMod -> HtmlMod
selector ## modi = attachModify selector modi
selected :: Selector -> String -> Attributes -> Bool
selected (Dom tag') tag _ = tag == tag'
selected (Attr attr) _ attrs = elem attr attrs
attachModify :: Selector -> Modify -> HtmlMod -> HtmlMod
attachModify selector modi (HtmlParent tag attrs child modi') =
if selected selector tag attrs
then HtmlParent tag attrs (attachModify selector modi child) modi
else HtmlParent tag attrs (attachModify selector modi child) modi'
attachModify _ _ (HtmlText t) = HtmlText t
attachModify selector modi (HtmlBlock htmls) =
HtmlBlock $ map (attachModify selector modi) htmls
attachModify _ _ _ = error "blk Undefined"
html2HtmlMod :: Html -> HtmlMod
html2HtmlMod (Parent tag attrs child) =
HtmlParent tag attrs (html2HtmlMod child) NotTouched
html2HtmlMod (Text t) = HtmlText t
html2HtmlMod (Block htmls) = HtmlBlock $ map html2HtmlMod htmls
html2HtmlMod _ = error "Cannot support doctype and comment"
htmlTree :: HtmlVariant -> String -> Html
htmlTree variant = removeEmptyText . fst . makeTree variant False [] .
parseTagsOptions parseOptions { optTagPosition = True }
genCode :: HtmlMod -> ExpQ
genCode (HtmlText str) = [| H.toHtml (str :: String) |]
genCode (HtmlParent tag attrs children NotTouched)
| isParent tag = genParent tag attrs children Nothing
| otherwise = genLeaf tag attrs Nothing
genCode (HtmlParent tag attrs children (AddAttr attr))
| isParent tag = genParent tag attrs children (Just attr)
| otherwise = genLeaf tag attrs (Just attr)
genCode (HtmlParent _ _ _ (SetContent expr)) = expr
genCode (HtmlParent tag attrs _ (EmbedContent expr)) =
[| $(getHtmlParent tag) H.! $(genAttrs attrs) $ $expr |]
genCode (HtmlBlock htmls) = [| $(foldr genHtmls [| mempty |] htmls) |]
genCode _ = error $ "Undefined nodes"
genHtmls :: HtmlMod -> ExpQ -> ExpQ
genHtmls html code = [| $(genCode html) <> $code |]
genParent :: String -> Attributes -> HtmlMod -> Maybe (Attribute String) -> ExpQ
genParent tag attrs (HtmlBlock []) Nothing = [| $(getHtmlParent tag) H.!
$(genAttrs attrs)
$ mempty |]
genParent tag attrs child Nothing = [| $(getHtmlParent tag) H.! $(genAttrs attrs)
$ $(genCode child) |]
genParent tag attrs (HtmlBlock []) (Just attr) =
[| $(getHtmlParent tag) H.! makeAttrs (mergeAttr attr attrs) $ mempty |]
genParent tag attrs child (Just attr) =
[| $(getHtmlParent tag) H.! makeAttrs (mergeAttr attr attrs) $
$(genCode child) |]
genLeaf :: String -> Attributes -> Maybe (Attribute String) -> ExpQ
genLeaf tag attrs Nothing = [| $(getHtmlLeaf tag) H.! $(genAttrs attrs) |]
genLeaf tag attrs (Just attr) =
[| $(getHtmlLeaf tag) H.! makeAttrs (mergeAttr attr attrs) |]
makeAttrs :: Attributes -> H.Attribute
makeAttrs = mconcat .
map (\(n, v) -> fromJust (lookup n html5Attr1) $ fromString v)
mergeAttr :: Attribute String -> Attributes -> Attributes
mergeAttr (name, value) attrs =
case lookup name attrs of
Just _ -> map (\(n, v) -> if n == name
then (n, value ++ " " ++ v)
else (n, v)) attrs
Nothing -> (name, value) : attrs
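-- For example, mergeAttr ("class","a") [("class","b")] gives
-- [("class","a b")]; an attribute that is not present yet is simply prepended.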
genAttrs :: Attributes -> ExpQ
genAttrs = foldr genAttr [| mempty |]
where genAttr (attr, val) code = [| $(getHtmlAttr attr) val <> $code |]
| kkspeed/PigLet | src/Text/Html/PigLet/Th.hs | mit | 5,284 | 1 | 14 | 1,222 | 1,551 | 831 | 720 | 111 | 3 |
module Parser where
import Text.ParserCombinators.Parsec
import Control.Applicative hiding ((<|>))
import Stack
number :: Parser String
number = many1 digit
minus :: Parser String
minus = (:) <$> char '-' <*> number
integer :: Parser String
integer = {- plus <|> -} minus <|> number
float :: Parser Float
float = fmap rd $ (++) <$> integer <*> decimal
where rd = read :: String -> Float
decimal = option "" $ (:) <$> char '.' <*> number
atomNumber :: Parser Atom
atomNumber = Number <$> float
operator :: Parser Atom
operator = do
c <- oneOf "+-*^/"
return $ Operator $ case c of
'+' -> Plus
'-' -> Minus
'*' -> Mult
'/' -> Div
'^' -> Pow
    _   -> error $ "unrecognized operator: " ++ [c]
mathFun :: Parser Atom
mathFun = try sinP <|> try cosP <|> sqrtP
piP :: Parser Atom
piP = do
_ <- string "pi"
  return $ Number 3.1415926
eP :: Parser Atom
eP = do
_ <- string "e"
  return $ Number 2.7182818
constants :: Parser Atom
constants = piP <|> eP
cosP :: Parser Atom
cosP = do
_ <- string "cos"
return $ Operator Cos
sqrtP :: Parser Atom
sqrtP = do
_ <- string "sqrt"
return $ Operator Sqrt
sinP :: Parser Atom
sinP = do
_ <- string "sin"
return $ Operator Sin
atom :: Parser Atom
atom = try atomNumber <|> try operator <|> try mathFun <|> constants
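-- Rough examples (not checked against a test suite; the printed forms assume
-- derived Show instances for Atom and its operators in the Stack module):
--
-- > parse atom "" "42"   -- Right (Number 42.0)
-- > parse atom "" "-3.5" -- Right (Number (-3.5))
-- > parse atom "" "sin"  -- Right (Operator Sin)
-- > parse atom "" "pi"   -- Right (Number 3.1415926)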
| JonHarder/RPNCalc | src/Parser.hs | mit | 1,330 | 0 | 12 | 331 | 500 | 251 | 249 | 52 | 6 |
{-# OPTIONS_GHC -fno-warn-orphans #-}
module Integration.Foreign.Lua.APISpec (spec) where
import qualified Data.Text as T
import TestImport hiding (assertEqual)
import Test.HUnit (assertEqual, assertFailure)
import qualified Foreign.Lua as Lua
import Foreign.Lua.Types (LuaExtra(LuaExtra))
-------------------------------------------------------------------------------
spec :: Spec
spec = withApp $ do
describe "alven.output" $ do
it "single output" $
checkTheme "api/output_single" "hello"
it "multiple output" $
checkTheme "api/output_multiple" "hellohello2"
describe "alven.get_theme_url" $ do
it "fake setup" $
checkTheme "api/get_theme_url" "nop"
describe "alven.get_current_page" $ do
it "public page is retrieved" $ do
let expOutp = T.unpack (textPageName tmpPage1)
runDB $ do
void $ insert tmpPage1
checkTheme "api/get_current_page" expOutp
it "private page is not retrieved" $ do
runDB $ do
void $ insert tmpPage1{ textPagePublic = False }
checkTheme "api/get_current_page" ""
describe "alven.get_pages" $ do
it "1 public and 1 private returns 1" $ do
let expOutp = "1"
runDB $ do
void $ insert tmpPage1
void $ insert tmpPage2{ textPagePublic = False }
checkTheme "api/get_pages" expOutp
describe "alven.read_theme_file" $ do
it "read_theme_file successfully reads a css file" $ do
let expOutp = "body{color: red;}" :: Text
liftIO $
writeFile "test/static/lua/api/read_theme_file/main.css" expOutp
checkTheme "api/read_theme_file" (T.unpack expOutp)
describe "basic page list combination" $ do
it "unordered HTML list is generated" $ do
let expOutp = T.unpack $ pagesToHTMLList tmpPages
runDB $ do
void $ insert tmpPage1
void $ insert tmpPage2
checkTheme "examples/page_list" expOutp
where
tmpPage1 = TextPage "Test page 1" "test-page1" "" True Nothing
tmpPage2 = TextPage "Test page 2" "test-page2" "" True Nothing
tmpPages = [tmpPage1, tmpPage2]
-- | Helper for checking the output running the given theme in `themeDir`
checkTheme themeDir expOutp = do
yesod <- getTestYesod
outputBuffer <- liftIO $ newIORef ""
let themeDir' = "test/static/lua/" </> themeDir
urlRenderer _ = "nop"
currPlink = (textPageSlug tmpPage1)
lextra = LuaExtra themeDir' currPlink (runDBIO yesod)
outputBuffer urlRenderer
liftIO $ do
result <- Lua.runThemeScript lextra
case result of
Left errm -> assertFailure errm
Right outp -> assertEqual "theme expected result" expOutp outp
-------------------------------------------------------------------------------
-- * Utils
{-|
Generates a HTML unordered list of pages without any spaces or line breaks.
>>> pagesToHTMLList [TextPage "Page 1" "page1" "" True Nothing]
"<ul><li>Page 1</li></ul>\n"
-}
pagesToHTMLList :: [TextPage] -> Text
pagesToHTMLList ps = T.intercalate "" $ "<ul>" : li ++ ["</ul>", "\n"]
where
li = map toLi ps
toLi p = T.intercalate "" ["<li>", textPageName p, "</li>"]
| rzetterberg/alven | src/test/Integration/Foreign/Lua/APISpec.hs | mit | 3,498 | 0 | 20 | 1,053 | 730 | 349 | 381 | 67 | 2 |
riffle :: [a] -> [a] -> [a]
riffle xs ys = concat [[x,y] | (x,y) <- xs `zip` ys]
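-- For example, riffle [1,2,3] [4,5,6] == [1,4,2,5,3,6]; lists of unequal
-- length are truncated to the shorter one by 'zip'.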
| AkronCodeClub/edX-FP101x-Oct-2014 | ryan_p/HW1029/Riffle.hs | mit | 81 | 0 | 9 | 19 | 69 | 38 | 31 | 2 | 1 |
module Auth0.API.Management where
| kelecorix/api-auth0 | src/Auth0/API/Management.hs | mit | 35 | 0 | 3 | 4 | 7 | 5 | 2 | 1 | 0 |
{-# OPTIONS_GHC -Wall #-}
module LogAnalysis ( module Log
, parseMessage
, parse
, insert
, build
, inOrder
, whatWentWrong ) where
import Log
parseMessage :: String -> LogMessage
parseMessage line = case words line of
("I":ts:msg) -> correct Info ts msg
("W":ts:msg) -> correct Warning ts msg
("E":code:ts:msg) -> correct (Error $ read code) ts msg
_ -> Unknown line
where correct mtype ts msg = LogMessage mtype (read ts) (unwords msg)
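-- For example:
-- parseMessage "E 2 562 help help"       == LogMessage (Error 2) 562 "help help"
-- parseMessage "I 29 la la la"           == LogMessage Info 29 "la la la"
-- parseMessage "Not in the right format" == Unknown "Not in the right format"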
parse :: String -> [LogMessage]
parse = map parseMessage . lines
insert :: LogMessage -> MessageTree -> MessageTree
insert (Unknown _) tree = tree
insert entry@(LogMessage _ ts _) tree
| Leaf <- tree
= Node Leaf entry Leaf
| Node left root@(LogMessage _ ts' _) right <- tree
= if ts > ts' then (Node left root (insert entry right))
else (Node (insert entry left) root right)
build :: [LogMessage] -> MessageTree
build [] = Leaf
build (m:ms) = insert m (build ms)
inOrder :: MessageTree -> [LogMessage]
inOrder Leaf = []
inOrder (Node left entry right) = inOrder left ++ [entry] ++ inOrder right
whatWentWrong :: [LogMessage] -> [String]
whatWentWrong = map message . inOrder . build . filter relevant
where relevant (LogMessage (Error severity) _ _) = severity >= 50
relevant _ = False
message (LogMessage _ _ msg) = msg
| mgrabovsky/upenn-cis194 | hw02/LogAnalysis.hs | cc0-1.0 | 1,556 | 0 | 12 | 515 | 557 | 287 | 270 | 37 | 4 |
module System.DevUtils.MySQL.Helpers.Databases.Default (
default',
default'List
) where
import System.DevUtils.MySQL.Helpers.Databases.Include (Databases(..))
default' :: Databases
default' = Databases {
_database = Nothing
}
default'List :: [Databases]
default'List = []
| adarqui/DevUtils-MySQL | src/System/DevUtils/MySQL/Helpers/Databases/Default.hs | gpl-3.0 | 278 | 0 | 6 | 33 | 69 | 46 | 23 | 9 | 1 |
--------------------------------------------------------------------------------
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
--------------------------------------------------------------------------------
-- |
-- Module : Melkor.BuildMap
-- Description : 'BuildMap' type definition
-- Copyright : (c) Boris Buliga, 2020
-- License : MIT
-- Maintainer : [email protected]
-- Stability : experimental
-- Portability : POSIX
module Melkor.BuildMap
( BuildMap,
mkBuildMap,
buildMap,
buildMapDeep,
rules,
ambiguous,
orphans,
providers,
member,
)
where
--------------------------------------------------------------------------------
import Melkor.Extra.Display ()
import Melkor.Extra.Tuple
import Melkor.Types.Eff
import Melkor.Types.Internal.SingleRelMap
( SingleRelMap,
mkSingleRelMap,
)
import qualified Melkor.Types.Internal.SingleRelMap as SRM
import Melkor.Types.Provider
import Melkor.Types.Resource
import RIO
import qualified RIO.HashMap as HM
import qualified RIO.HashSet as HS
--------------------------------------------------------------------------------
newtype BuildMap
= BuildMap (SingleRelMap Resource Provider)
deriving (Eq, Show, Display, Semigroup, Monoid)
mkBuildMap :: HashMap Resource (HashSet Provider) -> BuildMap
mkBuildMap = BuildMap . mkSingleRelMap
{-# INLINE mkBuildMap #-}
buildMapRaw ::
(HasContext env) =>
HashSet Resource ->
HashSet Provider ->
RIO env (HashMap Resource (HashSet Provider))
buildMapRaw rset pset =
HM.fromList
<$> traverse (traverseToSnd (`providersOf` toList pset)) (toList rset)
where
providersOf r =
fmap HS.fromList
. filterM (\p -> runEff $ providerSatisfies p r)
unBuildMap :: BuildMap -> SingleRelMap Resource Provider
unBuildMap (BuildMap srm) = srm
{-# INLINE unBuildMap #-}
--------------------------------------------------------------------------------
buildMap ::
(HasContext env) =>
HashSet Resource ->
HashSet Provider ->
RIO env BuildMap
buildMap rset pset = mkBuildMap <$> buildMapRaw rset pset
buildMapDeep ::
(HasContext env) =>
HashSet Resource ->
HashSet Provider ->
RIO env BuildMap
buildMapDeep = go mempty
where
go bm0 rset pset = do
bm <- mkBuildMap <$> buildMapRaw rset pset
let s = bm <> bm0
rsetExtra =
mconcat
. fmap providerDependencies
. HM.elems
. rules
$ bm
isNew r = not . member r $ s
rsetNew = HS.filter isNew rsetExtra
-- traceDisplayM (HS.toList rsetExtra)
if HS.null rsetNew
then pure s
else go s rsetNew pset
--------------------------------------------------------------------------------
rules :: BuildMap -> HashMap Resource Provider
rules (BuildMap m) = SRM.rel m
{-# INLINE rules #-}
ambiguous :: BuildMap -> HashMap Resource (HashSet Provider)
ambiguous (BuildMap m) = SRM.ambiguous m
{-# INLINE ambiguous #-}
orphans :: BuildMap -> HashSet Resource
orphans (BuildMap m) = SRM.orphans m
{-# INLINE orphans #-}
providers :: BuildMap -> HashSet Provider
providers = HS.fromList . HM.elems . rules
{-# INLINE providers #-}
--------------------------------------------------------------------------------
member :: Resource -> BuildMap -> Bool
member r = SRM.member r . unBuildMap
{-# INLINE member #-}
--------------------------------------------------------------------------------
| d12frosted/environment | melkor/src/Melkor/BuildMap.hs | gpl-3.0 | 3,452 | 0 | 16 | 650 | 744 | 403 | 341 | 84 | 2 |
{-# LANGUAGE PackageImports #-}
import "audidoza" Application (getApplicationDev)
import Network.Wai.Handler.Warp
(runSettings, defaultSettings, setPort)
import Control.Concurrent (forkIO)
import System.Directory (doesFileExist, removeFile)
import System.Exit (exitSuccess)
import Control.Concurrent (threadDelay)
main :: IO ()
main = do
putStrLn "Starting devel application"
(port, app) <- getApplicationDev
forkIO $ runSettings (setPort port defaultSettings) app
loop
loop :: IO ()
loop = do
threadDelay 100000
e <- doesFileExist "yesod-devel/devel-terminate"
if e then terminateDevel else loop
terminateDevel :: IO ()
terminateDevel = exitSuccess
| c0c0n3/audidoza | devel.hs | gpl-3.0 | 679 | 2 | 10 | 104 | 193 | 100 | 93 | 21 | 2 |
-- Print out the nth prime, where n is the 1st argument
module Main where
import GalePrimes2 (primes)
import System (getArgs)
printNthPrime :: Int -> IO ()
printNthPrime n = print (n, primes !! (n - 1))
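-- Example (assuming GalePrimes2.primes is the ordered list of all primes):
-- printNthPrime 10 prints (10,29), since the tenth prime is 29.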
main = do
args <- getArgs
printNthPrime $ read $ head args
| dkensinger/haskell | haskell-primes/GalePrimes2Test.hs | gpl-3.0 | 275 | 0 | 9 | 62 | 89 | 48 | 41 | 8 | 1 |
-- Copyright 2015 Oleg Plakhotniuk
--
-- This file is part of Hinance.
--
-- Hinance is free software: you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation, either version 3 of the License, or
-- (at your option) any later version.
--
-- Hinance is distributed in the hope that it will be useful,
-- but WITHOUT ANY WARRANTY; without even the implied warranty of
-- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
--
-- You should have received a copy of the GNU General Public License
-- along with Hinance. If not, see <http://www.gnu.org/licenses/>.
module Hinance.User.Type where
import Hinance.User.Tag
import Hinance.Currency
class Taggable a where
tagged :: a -> Tag -> Bool
class Patchable a where
patched :: [a] -> [a]
data Change = Change {camount::Integer, ctime::Integer, clabel::String,
ccur::Currency, curl::String, cgroup::String, ctags::[Tag]}
deriving (Read, Show, Ord, Eq)
data Slice = Slice {sname::String, scategs::[SliceCateg], stags::[Tag]}
deriving (Read, Show, Ord, Eq)
data SliceCateg = SliceCateg {scbg::String, scfg::String,
scname::String, sctags::[Tag]}
deriving (Read, Show, Ord, Eq)
| hinance/hinance | src/hs/user_type.hs | gpl-3.0 | 1,314 | 0 | 9 | 240 | 260 | 161 | 99 | 15 | 0 |
-- | All possible runtime errors in Mintette
module RSCoin.Mintette.Error
( MintetteError (..)
, isMEInactive
, logMintetteError
) where
import Control.Exception (Exception (..), SomeException)
import Control.Monad.Trans (MonadIO)
import Data.MessagePack (MessagePack (fromObject, toObject), Object)
import Data.Monoid ((<>))
import Data.Text (Text)
import qualified Data.Text.Buildable as B (Buildable (build))
import Data.Typeable (Typeable)
import Formatting (bprint, build, int, sformat, stext, (%))
import RSCoin.Core.Error (rscExceptionFromException,
rscExceptionToException)
import RSCoin.Core.Logging (WithNamedLogger, logInfo, logWarning)
import RSCoin.Core.MessagePack ()
import RSCoin.Core.Primitives (AddrId)
import RSCoin.Core.Types (PeriodId)
data MintetteError
= MEInternal Text -- ^ Should not happen.
| MEInactive -- ^ Mintette is not active right now.
| MEPeriodMismatch PeriodId PeriodId -- ^ PeriodId expected by mintette is
-- different from the one expected by somebody.
| MEInvalidTxSums -- ^ Mintette received transaction with invalid sums.
| MEInconsistentRequest Text -- ^ Inconsistency detected.
| MENotUnspent AddrId -- ^ Given addrId is not an unspent output.
| MEInvalidSignature -- ^ Signature check failed.
| MENotConfirmed -- ^ Can't deduce that transaction was confirmed.
| MEAlreadyActive -- ^ Can't start new period because mintette
-- is already active.
| MEInvalidBankSignature -- ^ Bank's signature can't be verified.
| MENotAllowed -- ^ Tried to send transaction that spends money
-- from blacklisted address
deriving (Show, Typeable, Eq)
instance Exception MintetteError where
toException = rscExceptionToException
fromException = rscExceptionFromException
instance B.Buildable MintetteError where
build (MEInternal m) = "internal error: " <> B.build m
build MEInactive = "mintette is not active right now"
build (MEPeriodMismatch expected received) =
bprint
("received strange PeriodId: " % int % " (expected " % int % ")")
received expected
build MEInvalidTxSums =
"sum of transaction outputs is greater than sum of inputs"
build (MEInconsistentRequest msg) = B.build msg
build (MENotUnspent a) =
bprint ("can't deduce that " % build % " is unspent transaction output") a
build MEInvalidSignature = "failed to verify signature"
build MENotConfirmed = "transaction doesn't have enough confirmations"
build MEAlreadyActive = "can't start new period when period is active"
build MEInvalidBankSignature = "bank's signature can't be verified"
build MENotAllowed =
"tried to send transaction that spends money from blacklisted address"
toObj :: MessagePack a => (Int, a) -> Object
toObj = toObject
instance MessagePack MintetteError where
toObject (MEInternal t) = toObj (0, t)
toObject MEInactive = toObj (1, ())
toObject (MEPeriodMismatch p1 p2) = toObj (2, (p1, p2))
toObject MEInvalidTxSums = toObj (3, ())
toObject (MEInconsistentRequest t) = toObj (4, t)
toObject (MENotUnspent a) = toObj (5, a)
toObject MEInvalidSignature = toObj (6, ())
toObject MENotConfirmed = toObj (7, ())
toObject MEAlreadyActive = toObj (8, ())
toObject MEInvalidBankSignature = toObj (9, ())
toObject MENotAllowed = toObj (10, ())
fromObject obj = do
(i,payload) <- fromObject obj
case (i :: Int) of
0 -> MEInternal <$> fromObject payload
1 -> pure MEInactive
2 -> uncurry MEPeriodMismatch <$> fromObject payload
3 -> pure MEInvalidTxSums
4 -> MEInconsistentRequest <$> fromObject payload
5 -> MENotUnspent <$> fromObject payload
6 -> pure MEInvalidSignature
7 -> pure MENotConfirmed
8 -> pure MEAlreadyActive
9 -> pure MEInvalidBankSignature
10 -> pure MENotAllowed
_ -> Nothing
isMEInactive :: SomeException -> Bool
isMEInactive = maybe False (== MEInactive) . fromException
logMintetteError
:: (MonadIO m, WithNamedLogger m)
=> MintetteError -> Text -> m ()
logMintetteError e msg =
case e of
MEInactive -> logInfo toPrint
_ -> logWarning toPrint
where
toPrint = sformat (stext % ", error: " % Formatting.build) msg e
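-- Example (illustrative only, not part of this module): a caller can combine
-- 'isMEInactive' with 'logMintetteError' to downgrade "mintette inactive"
-- failures to an informational message while reporting everything else.
-- 'reportFailure' is a hypothetical helper.
--
-- > reportFailure :: (MonadIO m, WithNamedLogger m) => SomeException -> m ()
-- > reportFailure e = case fromException e of
-- >     Just me | isMEInactive e -> logMintetteError me "mintette call failed"
-- >     Just me                  -> logMintetteError me "mintette error"
-- >     Nothing                  -> logWarning "unexpected exception"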
| input-output-hk/rscoin-haskell | src/RSCoin/Mintette/Error.hs | gpl-3.0 | 5,047 | 0 | 12 | 1,658 | 1,029 | 565 | 464 | 91 | 2 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.Compute.HealthChecks.Delete
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Deletes the specified HealthCheck resource.
--
-- /See:/ <https://developers.google.com/compute/docs/reference/latest/ Compute Engine API Reference> for @compute.healthChecks.delete@.
module Network.Google.Resource.Compute.HealthChecks.Delete
(
-- * REST Resource
HealthChecksDeleteResource
-- * Creating a Request
, healthChecksDelete
, HealthChecksDelete
-- * Request Lenses
, hcdHealthCheck
, hcdProject
) where
import Network.Google.Compute.Types
import Network.Google.Prelude
-- | A resource alias for @compute.healthChecks.delete@ method which the
-- 'HealthChecksDelete' request conforms to.
type HealthChecksDeleteResource =
"compute" :>
"v1" :>
"projects" :>
Capture "project" Text :>
"global" :>
"healthChecks" :>
Capture "healthCheck" Text :>
QueryParam "alt" AltJSON :> Delete '[JSON] Operation
-- | Deletes the specified HealthCheck resource.
--
-- /See:/ 'healthChecksDelete' smart constructor.
data HealthChecksDelete = HealthChecksDelete'
{ _hcdHealthCheck :: !Text
, _hcdProject :: !Text
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'HealthChecksDelete' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'hcdHealthCheck'
--
-- * 'hcdProject'
healthChecksDelete
:: Text -- ^ 'hcdHealthCheck'
-> Text -- ^ 'hcdProject'
-> HealthChecksDelete
healthChecksDelete pHcdHealthCheck_ pHcdProject_ =
HealthChecksDelete'
{ _hcdHealthCheck = pHcdHealthCheck_
, _hcdProject = pHcdProject_
}
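-- A typical call site (illustrative sketch; 'env' is an assumed
-- 'Network.Google.Env' value with suitable scopes, created elsewhere):
--
-- > runResourceT . runGoogle env . send $
-- >   healthChecksDelete "my-health-check" "my-project"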
-- | Name of the HealthCheck resource to delete.
hcdHealthCheck :: Lens' HealthChecksDelete Text
hcdHealthCheck
= lens _hcdHealthCheck
(\ s a -> s{_hcdHealthCheck = a})
-- | Project ID for this request.
hcdProject :: Lens' HealthChecksDelete Text
hcdProject
= lens _hcdProject (\ s a -> s{_hcdProject = a})
instance GoogleRequest HealthChecksDelete where
type Rs HealthChecksDelete = Operation
type Scopes HealthChecksDelete =
'["https://www.googleapis.com/auth/cloud-platform",
"https://www.googleapis.com/auth/compute"]
requestClient HealthChecksDelete'{..}
= go _hcdProject _hcdHealthCheck (Just AltJSON)
computeService
where go
= buildClient
(Proxy :: Proxy HealthChecksDeleteResource)
mempty
| rueshyna/gogol | gogol-compute/gen/Network/Google/Resource/Compute/HealthChecks/Delete.hs | mpl-2.0 | 3,342 | 0 | 15 | 774 | 388 | 233 | 155 | 65 | 1 |
module Game.Toliman.Graphical.SDL.Events where
import Data.Functor ((<$>))
import Foreign.Storable (peekElemOff)
import Control.Monad.Lift.IO (liftIO)
import Graphics.UI.SDL as SDL
(Event,
pumpEvents, peepEvents,
pattern SDL_GETEVENT,
pattern SDL_FIRSTEVENT, pattern SDL_LASTEVENT)
import Game.Toliman.Internal.Lens
import Game.Toliman.Graphical.SDL.Types
import Game.Toliman.Graphical.Types (MonadGraphical, sdl)
import Game.Toliman.Graphical.SDL.Core
getEvents :: MonadGraphical [SDL.Event]
getEvents = do
p <- access (sdl.init_events)
sdlCheckPred "init_events" p
liftIO $ pumpEvents
getEvents'
where
getEvents' :: MonadGraphical [SDL.Event]
getEvents' = do
b <- access (sdl.ev_buf)
n <- sdlCheckRet' "peep events" $
peepEvents b sdlEvBufLen SDL_GETEVENT SDL_FIRSTEVENT SDL_LASTEVENT
case n of
0 -> return []
        _ -> (++) <$> getEvents' <*> (liftIO $ sequence [peekElemOff b i | i <- [0 .. fromIntegral n - 1]])
| duncanburke/toliman-graphical | src-lib/Game/Toliman/Graphical/SDL/Events.hs | mpl-2.0 | 983 | 0 | 21 | 176 | 298 | 168 | 130 | -1 | -1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.Analytics.Management.AccountUserLinks.Update
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Updates permissions for an existing user on the given account.
--
-- /See:/ <https://developers.google.com/analytics/ Google Analytics API Reference> for @analytics.management.accountUserLinks.update@.
module Network.Google.Resource.Analytics.Management.AccountUserLinks.Update
(
-- * REST Resource
ManagementAccountUserLinksUpdateResource
-- * Creating a Request
, managementAccountUserLinksUpdate
, ManagementAccountUserLinksUpdate
-- * Request Lenses
, mauluPayload
, mauluAccountId
, mauluLinkId
) where
import Network.Google.Analytics.Types
import Network.Google.Prelude
-- | A resource alias for @analytics.management.accountUserLinks.update@ method which the
-- 'ManagementAccountUserLinksUpdate' request conforms to.
type ManagementAccountUserLinksUpdateResource =
"analytics" :>
"v3" :>
"management" :>
"accounts" :>
Capture "accountId" Text :>
"entityUserLinks" :>
Capture "linkId" Text :>
QueryParam "alt" AltJSON :>
ReqBody '[JSON] EntityUserLink :>
Put '[JSON] EntityUserLink
-- | Updates permissions for an existing user on the given account.
--
-- /See:/ 'managementAccountUserLinksUpdate' smart constructor.
data ManagementAccountUserLinksUpdate =
ManagementAccountUserLinksUpdate'
{ _mauluPayload :: !EntityUserLink
, _mauluAccountId :: !Text
, _mauluLinkId :: !Text
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'ManagementAccountUserLinksUpdate' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'mauluPayload'
--
-- * 'mauluAccountId'
--
-- * 'mauluLinkId'
managementAccountUserLinksUpdate
:: EntityUserLink -- ^ 'mauluPayload'
-> Text -- ^ 'mauluAccountId'
-> Text -- ^ 'mauluLinkId'
-> ManagementAccountUserLinksUpdate
managementAccountUserLinksUpdate pMauluPayload_ pMauluAccountId_ pMauluLinkId_ =
ManagementAccountUserLinksUpdate'
{ _mauluPayload = pMauluPayload_
, _mauluAccountId = pMauluAccountId_
, _mauluLinkId = pMauluLinkId_
}
-- | Multipart request metadata.
mauluPayload :: Lens' ManagementAccountUserLinksUpdate EntityUserLink
mauluPayload
= lens _mauluPayload (\ s a -> s{_mauluPayload = a})
-- | Account ID to update the account-user link for.
mauluAccountId :: Lens' ManagementAccountUserLinksUpdate Text
mauluAccountId
= lens _mauluAccountId
(\ s a -> s{_mauluAccountId = a})
-- | Link ID to update the account-user link for.
mauluLinkId :: Lens' ManagementAccountUserLinksUpdate Text
mauluLinkId
= lens _mauluLinkId (\ s a -> s{_mauluLinkId = a})
instance GoogleRequest
ManagementAccountUserLinksUpdate
where
type Rs ManagementAccountUserLinksUpdate =
EntityUserLink
type Scopes ManagementAccountUserLinksUpdate =
'["https://www.googleapis.com/auth/analytics.manage.users"]
requestClient ManagementAccountUserLinksUpdate'{..}
= go _mauluAccountId _mauluLinkId (Just AltJSON)
_mauluPayload
analyticsService
where go
= buildClient
(Proxy ::
Proxy ManagementAccountUserLinksUpdateResource)
mempty
| brendanhay/gogol | gogol-analytics/gen/Network/Google/Resource/Analytics/Management/AccountUserLinks/Update.hs | mpl-2.0 | 4,204 | 0 | 16 | 937 | 466 | 278 | 188 | 80 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.IAM.Projects.Locations.WorkLoadIdentityPools.List
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Lists all non-deleted WorkloadIdentityPools in a project. If
-- \`show_deleted\` is set to \`true\`, then deleted pools are also listed.
--
-- /See:/ <https://cloud.google.com/iam/ Identity and Access Management (IAM) API Reference> for @iam.projects.locations.workloadIdentityPools.list@.
module Network.Google.Resource.IAM.Projects.Locations.WorkLoadIdentityPools.List
(
-- * REST Resource
ProjectsLocationsWorkLoadIdentityPoolsListResource
-- * Creating a Request
, projectsLocationsWorkLoadIdentityPoolsList
, ProjectsLocationsWorkLoadIdentityPoolsList
-- * Request Lenses
, plwliplParent
, plwliplXgafv
, plwliplUploadProtocol
, plwliplAccessToken
, plwliplUploadType
, plwliplShowDeleted
, plwliplPageToken
, plwliplPageSize
, plwliplCallback
) where
import Network.Google.IAM.Types
import Network.Google.Prelude
-- | A resource alias for @iam.projects.locations.workloadIdentityPools.list@ method which the
-- 'ProjectsLocationsWorkLoadIdentityPoolsList' request conforms to.
type ProjectsLocationsWorkLoadIdentityPoolsListResource
=
"v1" :>
Capture "parent" Text :>
"workloadIdentityPools" :>
QueryParam "$.xgafv" Xgafv :>
QueryParam "upload_protocol" Text :>
QueryParam "access_token" Text :>
QueryParam "uploadType" Text :>
QueryParam "showDeleted" Bool :>
QueryParam "pageToken" Text :>
QueryParam "pageSize" (Textual Int32) :>
QueryParam "callback" Text :>
QueryParam "alt" AltJSON :>
Get '[JSON] ListWorkLoadIdentityPoolsResponse
-- | Lists all non-deleted WorkloadIdentityPools in a project. If
-- \`show_deleted\` is set to \`true\`, then deleted pools are also listed.
--
-- /See:/ 'projectsLocationsWorkLoadIdentityPoolsList' smart constructor.
data ProjectsLocationsWorkLoadIdentityPoolsList =
ProjectsLocationsWorkLoadIdentityPoolsList'
{ _plwliplParent :: !Text
, _plwliplXgafv :: !(Maybe Xgafv)
, _plwliplUploadProtocol :: !(Maybe Text)
, _plwliplAccessToken :: !(Maybe Text)
, _plwliplUploadType :: !(Maybe Text)
, _plwliplShowDeleted :: !(Maybe Bool)
, _plwliplPageToken :: !(Maybe Text)
, _plwliplPageSize :: !(Maybe (Textual Int32))
, _plwliplCallback :: !(Maybe Text)
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'ProjectsLocationsWorkLoadIdentityPoolsList' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'plwliplParent'
--
-- * 'plwliplXgafv'
--
-- * 'plwliplUploadProtocol'
--
-- * 'plwliplAccessToken'
--
-- * 'plwliplUploadType'
--
-- * 'plwliplShowDeleted'
--
-- * 'plwliplPageToken'
--
-- * 'plwliplPageSize'
--
-- * 'plwliplCallback'
projectsLocationsWorkLoadIdentityPoolsList
:: Text -- ^ 'plwliplParent'
-> ProjectsLocationsWorkLoadIdentityPoolsList
projectsLocationsWorkLoadIdentityPoolsList pPlwliplParent_ =
ProjectsLocationsWorkLoadIdentityPoolsList'
{ _plwliplParent = pPlwliplParent_
, _plwliplXgafv = Nothing
, _plwliplUploadProtocol = Nothing
, _plwliplAccessToken = Nothing
, _plwliplUploadType = Nothing
, _plwliplShowDeleted = Nothing
, _plwliplPageToken = Nothing
, _plwliplPageSize = Nothing
, _plwliplCallback = Nothing
}
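-- Example (illustrative sketch; the '&' and '?~' operators are assumed to be
-- in scope, e.g. from Control.Lens, and the parent string is a placeholder):
--
-- > projectsLocationsWorkLoadIdentityPoolsList
-- >     "projects/my-project/locations/global"
-- >   & plwliplShowDeleted ?~ True
-- >   & plwliplPageSize ?~ 20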
-- | Required. The parent resource to list pools for.
plwliplParent :: Lens' ProjectsLocationsWorkLoadIdentityPoolsList Text
plwliplParent
= lens _plwliplParent
(\ s a -> s{_plwliplParent = a})
-- | V1 error format.
plwliplXgafv :: Lens' ProjectsLocationsWorkLoadIdentityPoolsList (Maybe Xgafv)
plwliplXgafv
= lens _plwliplXgafv (\ s a -> s{_plwliplXgafv = a})
-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
plwliplUploadProtocol :: Lens' ProjectsLocationsWorkLoadIdentityPoolsList (Maybe Text)
plwliplUploadProtocol
= lens _plwliplUploadProtocol
(\ s a -> s{_plwliplUploadProtocol = a})
-- | OAuth access token.
plwliplAccessToken :: Lens' ProjectsLocationsWorkLoadIdentityPoolsList (Maybe Text)
plwliplAccessToken
= lens _plwliplAccessToken
(\ s a -> s{_plwliplAccessToken = a})
-- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
plwliplUploadType :: Lens' ProjectsLocationsWorkLoadIdentityPoolsList (Maybe Text)
plwliplUploadType
= lens _plwliplUploadType
(\ s a -> s{_plwliplUploadType = a})
-- | Whether to return soft-deleted pools.
plwliplShowDeleted :: Lens' ProjectsLocationsWorkLoadIdentityPoolsList (Maybe Bool)
plwliplShowDeleted
= lens _plwliplShowDeleted
(\ s a -> s{_plwliplShowDeleted = a})
-- | A page token, received from a previous \`ListWorkloadIdentityPools\`
-- call. Provide this to retrieve the subsequent page.
plwliplPageToken :: Lens' ProjectsLocationsWorkLoadIdentityPoolsList (Maybe Text)
plwliplPageToken
= lens _plwliplPageToken
(\ s a -> s{_plwliplPageToken = a})
-- | The maximum number of pools to return. If unspecified, at most 50 pools
-- are returned. The maximum value is 1000; values above 1000 are truncated
-- to 1000.
plwliplPageSize :: Lens' ProjectsLocationsWorkLoadIdentityPoolsList (Maybe Int32)
plwliplPageSize
= lens _plwliplPageSize
(\ s a -> s{_plwliplPageSize = a})
. mapping _Coerce
-- | JSONP
plwliplCallback :: Lens' ProjectsLocationsWorkLoadIdentityPoolsList (Maybe Text)
plwliplCallback
= lens _plwliplCallback
(\ s a -> s{_plwliplCallback = a})
instance GoogleRequest
ProjectsLocationsWorkLoadIdentityPoolsList
where
type Rs ProjectsLocationsWorkLoadIdentityPoolsList =
ListWorkLoadIdentityPoolsResponse
type Scopes
ProjectsLocationsWorkLoadIdentityPoolsList
= '["https://www.googleapis.com/auth/cloud-platform"]
requestClient
ProjectsLocationsWorkLoadIdentityPoolsList'{..}
= go _plwliplParent _plwliplXgafv
_plwliplUploadProtocol
_plwliplAccessToken
_plwliplUploadType
_plwliplShowDeleted
_plwliplPageToken
_plwliplPageSize
_plwliplCallback
(Just AltJSON)
iAMService
where go
= buildClient
(Proxy ::
Proxy
ProjectsLocationsWorkLoadIdentityPoolsListResource)
mempty
| brendanhay/gogol | gogol-iam/gen/Network/Google/Resource/IAM/Projects/Locations/WorkLoadIdentityPools/List.hs | mpl-2.0 | 7,376 | 0 | 19 | 1,613 | 964 | 558 | 406 | 147 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.Healthcare.Projects.Locations.DataSets.Hl7V2Stores.TestIAMPermissions
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Returns permissions that a caller has on the specified resource. If the
-- resource does not exist, this will return an empty set of permissions,
-- not a \`NOT_FOUND\` error. Note: This operation is designed to be used
-- for building permission-aware UIs and command-line tools, not for
-- authorization checking. This operation may \"fail open\" without
-- warning.
--
-- /See:/ <https://cloud.google.com/healthcare Cloud Healthcare API Reference> for @healthcare.projects.locations.datasets.hl7V2Stores.testIamPermissions@.
module Network.Google.Resource.Healthcare.Projects.Locations.DataSets.Hl7V2Stores.TestIAMPermissions
(
-- * REST Resource
ProjectsLocationsDataSetsHl7V2StoresTestIAMPermissionsResource
-- * Creating a Request
, projectsLocationsDataSetsHl7V2StoresTestIAMPermissions
, ProjectsLocationsDataSetsHl7V2StoresTestIAMPermissions
-- * Request Lenses
, pldshvstipXgafv
, pldshvstipUploadProtocol
, pldshvstipAccessToken
, pldshvstipUploadType
, pldshvstipPayload
, pldshvstipResource
, pldshvstipCallback
) where
import Network.Google.Healthcare.Types
import Network.Google.Prelude
-- | A resource alias for @healthcare.projects.locations.datasets.hl7V2Stores.testIamPermissions@ method which the
-- 'ProjectsLocationsDataSetsHl7V2StoresTestIAMPermissions' request conforms to.
type ProjectsLocationsDataSetsHl7V2StoresTestIAMPermissionsResource
=
"v1" :>
CaptureMode "resource" "testIamPermissions" Text :>
QueryParam "$.xgafv" Xgafv :>
QueryParam "upload_protocol" Text :>
QueryParam "access_token" Text :>
QueryParam "uploadType" Text :>
QueryParam "callback" Text :>
QueryParam "alt" AltJSON :>
ReqBody '[JSON] TestIAMPermissionsRequest :>
Post '[JSON] TestIAMPermissionsResponse
-- | Returns permissions that a caller has on the specified resource. If the
-- resource does not exist, this will return an empty set of permissions,
-- not a \`NOT_FOUND\` error. Note: This operation is designed to be used
-- for building permission-aware UIs and command-line tools, not for
-- authorization checking. This operation may \"fail open\" without
-- warning.
--
-- /See:/ 'projectsLocationsDataSetsHl7V2StoresTestIAMPermissions' smart constructor.
data ProjectsLocationsDataSetsHl7V2StoresTestIAMPermissions =
ProjectsLocationsDataSetsHl7V2StoresTestIAMPermissions'
{ _pldshvstipXgafv :: !(Maybe Xgafv)
, _pldshvstipUploadProtocol :: !(Maybe Text)
, _pldshvstipAccessToken :: !(Maybe Text)
, _pldshvstipUploadType :: !(Maybe Text)
, _pldshvstipPayload :: !TestIAMPermissionsRequest
, _pldshvstipResource :: !Text
, _pldshvstipCallback :: !(Maybe Text)
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'ProjectsLocationsDataSetsHl7V2StoresTestIAMPermissions' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'pldshvstipXgafv'
--
-- * 'pldshvstipUploadProtocol'
--
-- * 'pldshvstipAccessToken'
--
-- * 'pldshvstipUploadType'
--
-- * 'pldshvstipPayload'
--
-- * 'pldshvstipResource'
--
-- * 'pldshvstipCallback'
projectsLocationsDataSetsHl7V2StoresTestIAMPermissions
:: TestIAMPermissionsRequest -- ^ 'pldshvstipPayload'
-> Text -- ^ 'pldshvstipResource'
-> ProjectsLocationsDataSetsHl7V2StoresTestIAMPermissions
projectsLocationsDataSetsHl7V2StoresTestIAMPermissions pPldshvstipPayload_ pPldshvstipResource_ =
ProjectsLocationsDataSetsHl7V2StoresTestIAMPermissions'
{ _pldshvstipXgafv = Nothing
, _pldshvstipUploadProtocol = Nothing
, _pldshvstipAccessToken = Nothing
, _pldshvstipUploadType = Nothing
, _pldshvstipPayload = pPldshvstipPayload_
, _pldshvstipResource = pPldshvstipResource_
, _pldshvstipCallback = Nothing
}
-- | V1 error format.
pldshvstipXgafv :: Lens' ProjectsLocationsDataSetsHl7V2StoresTestIAMPermissions (Maybe Xgafv)
pldshvstipXgafv
= lens _pldshvstipXgafv
(\ s a -> s{_pldshvstipXgafv = a})
-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
pldshvstipUploadProtocol :: Lens' ProjectsLocationsDataSetsHl7V2StoresTestIAMPermissions (Maybe Text)
pldshvstipUploadProtocol
= lens _pldshvstipUploadProtocol
(\ s a -> s{_pldshvstipUploadProtocol = a})
-- | OAuth access token.
pldshvstipAccessToken :: Lens' ProjectsLocationsDataSetsHl7V2StoresTestIAMPermissions (Maybe Text)
pldshvstipAccessToken
= lens _pldshvstipAccessToken
(\ s a -> s{_pldshvstipAccessToken = a})
-- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
pldshvstipUploadType :: Lens' ProjectsLocationsDataSetsHl7V2StoresTestIAMPermissions (Maybe Text)
pldshvstipUploadType
= lens _pldshvstipUploadType
(\ s a -> s{_pldshvstipUploadType = a})
-- | Multipart request metadata.
pldshvstipPayload :: Lens' ProjectsLocationsDataSetsHl7V2StoresTestIAMPermissions TestIAMPermissionsRequest
pldshvstipPayload
= lens _pldshvstipPayload
(\ s a -> s{_pldshvstipPayload = a})
-- | REQUIRED: The resource for which the policy detail is being requested.
-- See the operation documentation for the appropriate value for this
-- field.
pldshvstipResource :: Lens' ProjectsLocationsDataSetsHl7V2StoresTestIAMPermissions Text
pldshvstipResource
= lens _pldshvstipResource
(\ s a -> s{_pldshvstipResource = a})
-- | JSONP
pldshvstipCallback :: Lens' ProjectsLocationsDataSetsHl7V2StoresTestIAMPermissions (Maybe Text)
pldshvstipCallback
= lens _pldshvstipCallback
(\ s a -> s{_pldshvstipCallback = a})
instance GoogleRequest
ProjectsLocationsDataSetsHl7V2StoresTestIAMPermissions
where
type Rs
ProjectsLocationsDataSetsHl7V2StoresTestIAMPermissions
= TestIAMPermissionsResponse
type Scopes
ProjectsLocationsDataSetsHl7V2StoresTestIAMPermissions
= '["https://www.googleapis.com/auth/cloud-platform"]
requestClient
ProjectsLocationsDataSetsHl7V2StoresTestIAMPermissions'{..}
= go _pldshvstipResource _pldshvstipXgafv
_pldshvstipUploadProtocol
_pldshvstipAccessToken
_pldshvstipUploadType
_pldshvstipCallback
(Just AltJSON)
_pldshvstipPayload
healthcareService
where go
= buildClient
(Proxy ::
Proxy
ProjectsLocationsDataSetsHl7V2StoresTestIAMPermissionsResource)
mempty
| brendanhay/gogol | gogol-healthcare/gen/Network/Google/Resource/Healthcare/Projects/Locations/DataSets/Hl7V2Stores/TestIAMPermissions.hs | mpl-2.0 | 7,508 | 0 | 16 | 1,444 | 791 | 467 | 324 | 126 | 1 |
----------------------------------------------------------------------
-- |
-- Module : Graphics.UI.Awesomium.Javascript.JSArray
-- Copyright : (c) 2012 Maksymilian Owsianny
-- License : LGPL-3 (see the file LICENSE)
--
-- Maintainer : [email protected]
-- Stability : Experimental
-- Portability : Portable? (needs FFI)
--
----------------------------------------------------------------------
module Graphics.UI.Awesomium.Javascript.JSArray
( JSArray
, create
, destroy
, with
, getSize
, getElement
) where
import Graphics.UI.Awesomium.Raw
import Control.Exception (bracket)
import Foreign (withArray)
import Foreign.Ptr (Ptr)
create :: Ptr JSValue -> Int -> IO JSArray
create = awe_jsarray_create
destroy :: JSArray -> IO ()
destroy = awe_jsarray_destroy
with :: [JSValue] -> (JSArray -> IO b) -> IO b
with vs f = withArray vs $ \ptr ->
bracket (create ptr (length vs)) destroy f
getSize :: JSArray -> IO Int
getSize = awe_jsarray_get_size
getElement :: JSArray -> Int -> IO JSValue
getElement = awe_jsarray_get_element
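-- Example (illustrative sketch): marshal a list of values into a temporary
-- JSArray with 'with', then read every element back out.
--
-- > roundTrip :: [JSValue] -> IO [JSValue]
-- > roundTrip vals = with vals $ \arr -> do
-- >   n <- getSize arr
-- >   mapM (getElement arr) [0 .. n - 1]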
| MaxOw/awesomium | src/Graphics/UI/Awesomium/Javascript/JSArray.hs | lgpl-3.0 | 1,113 | 0 | 11 | 200 | 228 | 131 | 97 | 22 | 1 |
-- |
-- Module : Numeric.Optimization.Hopty
-- Maintainer : Yakov Shklarov <[email protected]>
module Numeric.Optimization.Hopty
(newton, graddes, patternsearch, coorddes)
where
import Data.List
import Data.Ord
-- | Larger values will cause less precise results.
epsilon :: RealFloat a => a
epsilon = 1e-14
-- | Find a root of a function using the Newton-Raphson method.
newton :: RealFloat a =>
(a -> a) -- ^ function to find the root of
-> (a -> a) -- ^ derivative of given function
-> a -- ^ starting point for the method; should be near the root
-> a -- ^ the root of f
newton f deriv1 x
| abs fx < epsilon = x -- Success, root found.
| isInfinite x = x -- Failure: asymptotic.
| isNaN x = x
| x == nextx = x -- Failure to converge to required precision.
| otherwise = newton f deriv1 nextx
where
fx = f x
nextx = x - fx / (deriv1 x)
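-- Example (illustrative, not part of the original API): approximating sqrt 2
-- as the positive root of x^2 - 2, starting near 1.
exampleSqrt2 :: Double
exampleSqrt2 = newton (\x -> x*x - 2) (\x -> 2*x) 1.0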
-- | Find a local minimum point of a given function by the method of gradient descent.
graddes :: RealFloat a =>
a -- ^ Step size
-> ([a] -> [a]) -- ^ Gradient of function
-> [a] -- ^ Starting point
-> [a] -- ^ Local minimum point
graddes gamma gradf x
| magnitude (gradf x) < epsilon = x -- Success, minimum point found.
  | or (map isInfinite x) = x -- Failure: diverged to infinity.
| or (map isNaN x) = x
| x == nextx = x -- Failure to converge to required precision.
| otherwise = graddes nextgamma gradf nextx
where
nextgamma = gamma * 0.99999 -- To reduce thrashing
nextx = mapOver2 (-) x $ map (*gamma) (gradf x)
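-- Example (illustrative, not part of the original API): minimising
-- f(x,y) = x^2 + y^2 from (3,4); the gradient is (2x, 2y), so the
-- result should approach [0,0].
exampleOrigin :: [Double]
exampleOrigin = graddes 0.1 (\[x, y] -> [2*x, 2*y]) [3, 4]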
-- | Find a local minimum point of a given function by the method of coordinate descent.
coorddes :: RealFloat a =>
a -- ^ Initial step size
-> ([a] -> a) -- ^ Function to minimize
-> [a] -- ^ Starting point
-> [a] -- ^ Local minimum point
coorddes stepsize f x
| x == nextx = x -- After one cycle, we're still at the same point
| magnitude (mapOver2 subtract x nextx)
< epsilon = x -- We're moving too slowly.
| otherwise = coorddes stepsize f nextx
where
nextx = foldl' minimizeByKth x [0..length x - 1]
minimizeByKth vec k =
replaceKth k
(head (patternsearch stepsize
(\ [kthval] -> f (replaceKth k kthval vec))
[(vec !! k)]))
vec
-- | Find a local minimum point of a function f : R^n -> R directly,
-- using a pattern search.
patternsearch :: RealFloat a =>
a -- ^ Initial step size
-> ([a] -> a) -- ^ Function to minimize
-> [a] -- ^ Starting point
-> [a] -- ^ Local minimum point
patternsearch stepsize f x
| stepsize < epsilon = x -- Success.
-- FIXME: the following takes too long, is there a better way to detect it?
| or (map isInfinite x) = x -- Failure: diverged to infinity.
| or (map isNaN x) = x
| x == nextx = patternsearch (stepsize/2) f nextx
| otherwise = patternsearch stepsize f nextx
where
nextx = minimumBy (comparing f) $
x : (concatMap adjPoints [0..length x - 1])
adjPoints k = map (\g -> applyToKth k g x)
[(+ stepsize), (subtract stepsize)]
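-- Example (illustrative, not part of the original API): a direct search for
-- the minimum of (x-1)^2 + (y+2)^2, which should approach [1,-2].
exampleShifted :: [Double]
exampleShifted = patternsearch 1 (\[x, y] -> (x-1)^2 + (y+2)^2) [0, 0]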
-- | Find the Euclidean norm of a given vector.
magnitude :: RealFloat a => [a] -> a
magnitude = sqrt . sum . map (^2)
-- | Like map, but for a function of two arguments, the first of which is taken from
-- the first list, and the second from the second list.
mapOver2 :: (t1 -> t2 -> a) -> [t1] -> [t2] -> [a]
mapOver2 _ [] _ = []
mapOver2 _ _ [] = []
mapOver2 f (x:xs) (y:ys) = f x y : mapOver2 f xs ys
-- | Apply a function to a single element of an array. Indexed from zero.
applyToKth :: Int -> (a -> a) -> [a] -> [a]
applyToKth k f xs = take k xs
++ [f (xs !! k)]
++ drop (k+1) xs
-- | Replace the kth element of xs by newkth.
replaceKth :: Int -> a -> [a] -> [a]
replaceKth k newkth xs = applyToKth k (const newkth) xs
| yshklarov/hopty | Numeric/Optimization/Hopty.hs | unlicense | 4,254 | 0 | 16 | 1,419 | 1,128 | 593 | 535 | 76 | 1 |
{-
Copyright 2016, Dominic Orchard, Andrew Rice, Mistral Contrastin, Matthew Danish
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GADTs #-}
module Camfort.Specification.Stencils.Syntax
(
-- * Datatypes and Aliases
Linearity(..)
, Region(..)
, RegionDecl
, RegionEnv
, RegionProd(..)
, RegionSum(..)
, Spatial(..)
, SpecDecl
, SpecDecls
, Specification(..)
, IsStencil
, Variable
-- * Functions
, absoluteRep
, fromBool
, groupKeyBy
, hasDuplicates
, isEmpty
, isUnit
, pprintSpecDecls
, setLinearity
) where
import Camfort.Specification.Stencils.Model ( Multiplicity(..)
, peel
, Approximation(..)
)
import Prelude hiding (sum)
import Data.Data
import Data.List hiding (sum)
import Control.Applicative
type Variable = String
{- Contains the syntax representation for stencil specifications -}
{- *** 0. Representations -}
-- 'absoluteRep' is an integer to use to represent absolute indexing expressions
-- (which may be constants, non-affine indexing expressions, or expressions
-- involving non-induction variables). This is set to maxBound :: Int usually,
-- but can be made smaller for debugging purposes,
-- e.g., 100, but it needs to be high enough not to clash with reasonable
-- relative indices.
absoluteRep = maxBound :: Int
{- *** 1 . Specification syntax -}
type RegionDecl = (Variable, RegionSum)
type SpecDecl = ([Variable], Specification)
-- List of region sums associated to region variables
type RegionEnv = [(Variable, RegionSum)]
-- List of specifications associated to variables
-- This is not a map so there might be multiple entries for each variable
-- use `lookupAggregate` to access it
type SpecDecls = [SpecDecl]
pprintSpecDecls :: SpecDecls -> String
pprintSpecDecls =
concatMap (\(names, spec) ->
show spec ++ " :: " ++ intercalate "," names ++ "\n")
-- Top-level of specifications: may be either spatial or temporal
-- | `isStencil` is used to mark whether a specification is associated
-- | with a stencil computation, or a general array computation
type IsStencil = Bool
data Specification =
Specification (Multiplicity (Approximation Spatial)) IsStencil
deriving (Eq, Data, Typeable)
isEmpty :: Specification -> Bool
isEmpty (Specification mult _) = isUnit . peel $ mult
-- **********************
-- Spatial specifications:
-- is a regionSum
--
-- Regions are in disjunctive normal form (with respect to
-- products on dimensions and sums):
-- i.e., (A * B) U (C * D)...
data Spatial = Spatial RegionSum
deriving (Eq, Data, Typeable)
-- Helpers for dealing with linearity information
-- A boolean is used to represent multiplicity in the backend
-- with False = multiplicity=1 and True = multiplicity > 1
fromBool :: Bool -> Linearity
fromBool True = NonLinear
fromBool False = Linear
hasDuplicates :: Eq a => [a] -> ([a], Bool)
hasDuplicates xs = (nub xs, nub xs /= xs)
setLinearity :: Linearity -> Specification -> Specification
setLinearity l (Specification mult isStencil)
| l == Linear = Specification (Once $ peel mult) isStencil
| l == NonLinear = Specification (Mult $ peel mult) isStencil
data Linearity = Linear | NonLinear deriving (Eq, Data, Typeable)
type Dimension = Int -- spatial dimensions are 1 indexed
type Depth = Int
type IsRefl = Bool
-- Individual regions
data Region where
Forward :: Depth -> Dimension -> IsRefl -> Region
Backward :: Depth -> Dimension -> IsRefl -> Region
Centered :: Depth -> Dimension -> IsRefl -> Region
deriving (Eq, Data, Typeable)
-- An (arbitrary) ordering on regions for the sake of normalisation
instance Ord Region where
(Forward dep dim _) <= (Forward dep' dim' _)
| dep == dep' = dim <= dim'
| otherwise = dep <= dep'
(Backward dep dim _) <= (Backward dep' dim' _)
| dep == dep' = dim <= dim'
| otherwise = dep <= dep'
(Centered dep dim _) <= (Centered dep' dim' _)
| dep == dep' = dim <= dim'
| otherwise = dep <= dep'
-- Order in the way defined above: Forward <: Backward <: Centered
Forward{} <= _ = True
Backward{} <= Centered{} = True
_ <= _ = False
-- Product of specifications
newtype RegionProd = Product {unProd :: [Region]}
deriving (Eq, Data, Typeable)
-- Sum of product specifications
newtype RegionSum = Sum {unSum :: [RegionProd]}
deriving (Eq, Data, Typeable)
instance Ord RegionProd where
(Product xs) <= (Product xs') = xs <= xs'
-- Operations on specifications
-- Operations on region specifications form a semiring
-- where `sum` is the additive, and `prod` is the multiplicative
-- [without the annihilation property for `zero` with multiplication]
class RegionRig t where
sum :: t -> t -> t
prod :: t -> t -> t
one :: t
zero :: t
isUnit :: t -> Bool
-- Lifting to the `Maybe` constructor
instance RegionRig a => RegionRig (Maybe a) where
sum (Just x) (Just y) = Just $ sum x y
sum x Nothing = x
sum Nothing x = x
prod (Just x) (Just y) = Just $ prod x y
prod x Nothing = x
prod Nothing x = x
one = Just one
zero = Just zero
isUnit Nothing = True
isUnit (Just x) = isUnit x
instance RegionRig Spatial where
sum (Spatial s) (Spatial s') = Spatial (sum s s')
prod (Spatial s) (Spatial s') = Spatial (prod s s')
one = Spatial one
zero = Spatial zero
isUnit (Spatial ss) = isUnit ss
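-- For illustration (not part of the module): combining two spatial
-- specifications with the semiring operations, e.g. a forward region in
-- dimension 1 multiplied by a backward region in dimension 2:
--
-- > combined :: Spatial
-- > combined = prod (Spatial (Sum [Product [Forward 1 1 True]]))
-- >                 (Spatial (Sum [Product [Backward 1 2 True]]))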
instance RegionRig (Approximation Spatial) where
sum (Exact s) (Exact s') = Exact (sum s s')
sum (Exact s) (Bound l u) = Bound (sum (Just s) l) (sum (Just s) u)
sum (Bound l u) (Bound l' u') = Bound (sum l l') (sum u u')
sum s s' = sum s' s
prod (Exact s) (Exact s') = Exact (prod s s')
prod (Exact s) (Bound l u) = Bound (prod (Just s) l) (prod (Just s) u)
prod (Bound l u) (Bound l' u') = Bound (prod l l') (prod u u') -- (prod l u') (prod l' u))
prod s s' = prod s' s
one = Exact one
zero = Exact zero
isUnit (Exact s) = isUnit s
isUnit (Bound x y) = isUnit x && isUnit y
instance RegionRig RegionSum where
prod (Sum ss) (Sum ss') =
Sum $ nub $ -- Take the cross product of list of summed specifications
do (Product spec) <- ss
(Product spec') <- ss'
return $ Product $ nub $ sort $ spec ++ spec'
sum (Sum ss) (Sum ss') = Sum $ ss ++ ss'
zero = Sum []
one = Sum [Product []]
isUnit s@(Sum ss) = s == zero || s == one || all (== Product []) ss
-- Pretty print top-level specifications
instance Show Specification where
show (Specification sp True) = "stencil " ++ show sp
show (Specification sp False) = "access " ++ show sp
instance {-# OVERLAPS #-} Show (Multiplicity (Approximation Spatial)) where
show mult
| Mult appr <- mult = apprStr empty empty appr
| Once appr <- mult = apprStr "readOnce" ", " appr
where
apprStr linearity sep appr =
case appr of
Exact s -> linearity ++ optionalSeparator sep (show s)
Bound Nothing Nothing -> "empty"
Bound Nothing (Just s) -> linearity ++ optionalSeparator sep "atMost, " ++ show s
Bound (Just s) Nothing -> linearity ++ optionalSeparator sep "atLeast, " ++ show s
Bound (Just sL) (Just sU) ->
concat [ linearity, optionalSeparator sep (show sL), ";"
, if linearity == empty then "" else " " ++ linearity ++ ", "
, "atMost, ", show sU ]
optionalSeparator _ "" = ""
optionalSeparator sep s = sep ++ s
instance {-# OVERLAPS #-} Show (Approximation Spatial) where
show (Exact s) = show s
show (Bound Nothing Nothing) = "empty"
show (Bound Nothing (Just s)) = "atMost, " ++ show s
show (Bound (Just s) Nothing) = "atLeast, " ++ show s
show (Bound (Just sL) (Just sU)) =
"atLeast, " ++ show sL ++ "; atMost, " ++ show sU
-- Pretty print spatial specs
instance Show Spatial where
show (Spatial region) =
-- Map "empty" spec to Nothing here
case show region of
"empty" -> ""
xs -> xs
-- Pretty print region sums
instance Show RegionSum where
showsPrec _ (Sum []) = showString "empty"
showsPrec p (Sum specs) =
showParen (p > 6) $ inter specs
where
inter [ ] = id
inter [ x ] = showsPrec 6 x
inter (x:xs) = showsPrec 6 x . (" + " ++) . inter xs
instance Show RegionProd where
showsPrec _ (Product []) = showString "empty"
showsPrec p (Product ss) =
showParen (p > 7) $ inter ss
where
inter [ ] = id
inter [ x ] = showsPrec 7 x
inter (x:xs) = showsPrec 7 x . ('*' :) . inter xs
instance Show Region where
show (Forward dep dim reflx) = showRegion "forward" dep dim reflx
show (Backward dep dim reflx) = showRegion "backward" dep dim reflx
show (Centered dep dim reflx)
| dep == 0 = "pointed(dim=" ++ show dim ++ ")"
| otherwise = showRegion "centered" dep dim reflx
-- Helper for showing regions
showRegion typ depS dimS reflx = typ ++ "(depth=" ++ show depS
++ ", dim=" ++ show dimS
++ (if reflx then "" else ", nonpointed")
++ ")"
-- Helper for reassociating an association list, grouping the keys together that
-- have matching values
groupKeyBy :: Eq b => [(a, b)] -> [([a], b)]
groupKeyBy = groupKeyBy' . map (\ (k, v) -> ([k], v))
where
groupKeyBy' [] = []
groupKeyBy' [(ks, v)] = [(ks, v)]
groupKeyBy' ((ks1, v1):((ks2, v2):xs))
| v1 == v2 = groupKeyBy' ((ks1 ++ ks2, v1) : xs)
| otherwise = (ks1, v1) : groupKeyBy' ((ks2, v2) : xs)
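-- For example (illustrative): adjacent entries with equal values are merged,
--
-- > groupKeyBy [("a", 1), ("b", 1), ("c", 2)]
-- >   == [(["a", "b"], 1), (["c"], 2)]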
| dorchard/camfort | src/Camfort/Specification/Stencils/Syntax.hs | apache-2.0 | 10,412 | 1 | 15 | 2,708 | 3,084 | 1,612 | 1,472 | 197 | 3 |
{-
Copyrights (c) 2016. Samsung Electronics Ltd. All right reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-}
{-# LANGUAGE QuasiQuotes #-}
module P4.Header ( p4HeaderDecls
, p4DefaultCommands
, p4DefaultDecls
, p4InitHeader
, p4CleanupHeader) where
import Text.Heredoc
p4HeaderDecls :: String
p4HeaderDecls = [str|
|header_type intrinsic_metadata_t {
| fields {
| bit<4> mgid;
| }
|}
|
|header_type eth_t {
| fields {
| bit<48> dstAddr;
| bit<48> srcAddr;
| bit<16> etherType;
| }
|}
|
|header_type vlan_tag_t {
| fields {
| bit<16> vid;
| bit<16> etherType;
| }
|}
|
|header_type mtag_t {
| fields {
| bit<8> up1;
| bit<8> up2;
| bit<8> down1;
| bit<8> down2;
| bit<16> etherType;
| }
|}
|
|header_type stag_t {
| fields {
| bit<8> srcColor;
| bit<16> etherType;
| }
|}
|
|header_type ipv4_t {
| fields {
| bit<4> version;
| bit<4> ihl;
| bit<8> diffserv;
| bit<16> totalLen;
| bit<16> identification;
| bit<3> flags;
| bit<13> fragOffset;
| bit<8> ttl;
| bit<8> protocol;
| bit<16> hdrChecksum;
| bit<8> src_ip3;
| bit<8> src_ip2;
| bit<8> src_ip1;
| bit<8> src_ip0;
| bit<8> dst_ip3;
| bit<8> dst_ip2;
| bit<8> dst_ip1;
| bit<8> dst_ip0;
| }
|}
|
|header_type arp_t {
| fields {
| bit<16> htype;
| bit<16> ptype;
| bit<8> hlen;
| bit<8> plen;
| bit<16> oper;
| bit<48> sha;
| bit<32> spa;
| bit<48> tha;
| bit<32> tpa;
| }
|}
|
|metadata intrinsic_metadata_t intrinsic_metadata;
|header eth_t eth;
|metadata eth_t _tmp_eth_t;
|header vlan_tag_t vlan;
|metadata vlan_tag_t _tmp_vlan_tag_t;
|header ipv4_t ip4;
|metadata ipv4_t _tmp_ipv4_t;
|header arp_t arp;
|metadata arp_t _tmp_arp_t;
|header mtag_t mtag;
|metadata mtag_t _tmp_mtag_t;
|header stag_t stag;
|metadata stag_t _tmp_stag_t;
|
|parser start {
| return parse_ethernet;
|}
|
|#define ETHERTYPE_VLAN 0x8100, 0x9100, 0x9200, 0x9300
|#define ETHERTYPE_MTAG 0xaaaa
|#define ETHERTYPE_STAG 0xaaab
|#define ETHERTYPE_IPV4 0x0800
|#define ETHERTYPE_ARP 0x0806
|
|
|parser parse_vlan {
| extract(vlan);
| return select(latest.etherType) {
| ETHERTYPE_IPV4 : parse_ipv4;
| ETHERTYPE_ARP : parse_arp;
| ETHERTYPE_MTAG : parse_mtag;
| }
|}
|
|parser parse_mtag {
| extract(mtag);
| return select(latest.etherType) {
| ETHERTYPE_IPV4 : parse_ipv4;
| ETHERTYPE_ARP : parse_arp;
| ETHERTYPE_STAG : parse_stag;
| }
|}
|
|parser parse_stag {
| extract(stag);
| return select(latest.etherType) {
| ETHERTYPE_IPV4 : parse_ipv4;
| ETHERTYPE_ARP : parse_arp;
| }
|}
|
|parser parse_ethernet {
| extract(eth);
| return select(latest.etherType) {
| ETHERTYPE_VLAN : parse_vlan;
| ETHERTYPE_IPV4 : parse_ipv4;
| ETHERTYPE_ARP : parse_arp;
| }
|}
|parser parse_arp {
| return select(current(16, 32)) {
| 0x08000604 : parse_arp_ip4;
| }
|}
|
|parser parse_arp_ip4 {
| extract(arp);
| return ingress;
|}
|
|parser parse_ipv4 {
| extract(ip4);
| return ingress;
|}
|action yes(){}
|action no(){}
|
|action broadcast() {
| modify_field(intrinsic_metadata.mgid, 1);
|}
|
|action adrop() {
| drop();
|}
|
|table drop {
| actions {adrop;}
|}
|
|]
p4DefaultDecls::String
p4DefaultDecls = [str|
|]
p4DefaultCommands::String
p4DefaultCommands = [str|
|table_set_default drop adrop
|]
p4InitHeader :: String -> String
p4InitHeader h = case h of
"vlan" -> "modify_field(eth.etherType, 0x8100);\n" ++
"modify_field(vlan.etherType, ETHERTYPE_IPV4);"
"arp" -> "modify_field(eth.etherType, ETHERTYPE_ARP);\n" ++
"modify_field(arp.htype, 0x1);\n" ++
"modify_field(arp.ptype, 0x0800);\n" ++
"modify_field(arp.hlen, 0x6);\n" ++
"modify_field(arp.plen, 0x4);"
"mtag" -> "modify_field(mtag.etherType, vlan.etherType);\n" ++
"modify_field(vlan.etherType, ETHERTYPE_MTAG);\n"
"stag" -> "modify_field(stag.etherType, mtag.etherType);\n" ++
"modify_field(mtag.etherType, ETHERTYPE_STAG);\n"
_ -> error $ "P4.Header.p4InitHeader: unknown header " ++ h
p4CleanupHeader :: String -> String
p4CleanupHeader h = case h of
"vlan" -> "modify_field(eth.etherType, ETHERTYPE_IPV4);"
"arp" -> ""
"mtag" -> "modify_field(vlan.etherType, mtag.etherType);"
"stag" -> "modify_field(mtag.etherType, stag.etherType);"
_ -> error $ "P4.Header.p4InitHeader: unknown header " ++ h
| ryzhyk/cocoon | cocoon/P4/Header.hs | apache-2.0 | 5,613 | 0 | 11 | 1,694 | 218 | 124 | 94 | 34 | 5 |
--Modules.
--import Data.List
-- :m + Data.List Data.Map Data.Set --In GHCi
-- import Data.List (nub, sort) --Only specific functions
-- import Data.List hiding (nub) --Hides a specific function
-- import qualified Data.Map as M -- With a (custom) namespace
import Data.List
import Data.Char
import qualified Data.Map as Map
import Data.Set
import Data.Function
--import Geometry.Sphere as Sphere
--import Geometry.Cuboid as Cuboid
--import Geometry.Cube as Cube
--DATA.LIST
-- intersperse - Takes a value and a list. Inserts the value between every pair of items in the list.
-- intercalate - Takes a list and a list of lists. Flattens the result, inserting the given list between the lists that made up the outer list.
-- transpose - No surprises: takes a list of lists and turns the "columns" into the "rows".
equation1 = [0,3,5,9]
equation2 = [10,0,0,9]
equation3 = [8,5,1,-1]
equations = [equation1, equation2, equation3]
equationsSum = Data.List.map sum $ transpose equations
-- foldl' and foldl1' - the strict versions
-- concat - despite the name, it only flattens a list of lists...
-- concatMap - the same as doing a map and then concat
-- and - takes a list of booleans and returns True if all of them are True
-- or - takes a list of booleans and returns True if any of them is True
asd = and $ Data.List.map (>3) [1,2,3,4,5]
asdqwe = or $ Data.List.map (==4) [1,2,3,4,5]
-- any and all - take a predicate and check whether any or all elements satisfy it, respectively. Normally used instead of the MAP + (OR/AND) combination
afaqwe = all (>3) [1,2,3,4,5]
ert = any (==4) [1,2,3,4,5]
--iterate - Takes a function and a starting value and keeps applying the function forever... Then use take, for example, to grab the values.
rty = take 10 $ iterate (*2) 1
-- splitAt - takes a number and splits a list into a tuple with each half.
poi = let (a,b) = splitAt 4 "WILLGLUK" in b ++ a --swapping the two halves of the string
-- takeWhile - already familiar
-- dropWhile - Similar to takeWhile, but instead of "taking" while true, it drops from the list while true
-- span - also takes a function and a list, but returns two lists. One is the result of a takeWhile and the other holds the elements that fell outside the takeWhile check
-- sort - takes a list of Ord values and sorts it
-- group - takes a list and splits it into a list of lists of equal items. Basically groups adjacent equal elements (in order)
-- inits and tails - build a list by recursively applying init and tail until nothing is left. They return a list of the partial results.
search :: (Eq a) => [a]-> [a] -> Bool
search needle haystack =
let nlen = length needle
in Data.List.foldl (\acc x -> if take nlen x == needle then True else acc) False (tails haystack)
-- isInfixOf - basically the implementation above. Searches for a list inside a list. There are also isPrefixOf and isSuffixOf, which check the beginning and the end, respectively.
-- elem and notElem check whether an item is or is not in a list, respectively.
-- partition - Takes a list and a predicate. Returns first the list that satisfies the predicate and then the one that does not.
-- find - Takes a predicate and returns a Maybe of the first match. Maybe can be Just something or Nothing.
-- elemIndex - Like elem, but instead of returning a boolean it returns a Maybe of the index of the first match.
-- elemIndices - Like elemIndex but returns a list of indices. Useful for finding several.
-- findIndex - Like find, but returns a Maybe of the index; findIndices returns a list of indices, useful for finding several.
-- zip3, zip4, zipWith3, zipWith4... they go up to 7.
-- lines - for dealing with text. Puts each line of text into a separate list. So it returns a list of lists.
-- unlines - The inverse of lines.
-- words and unwords - split the text into words and the reverse, respectively.
-- delete - deletes the first occurrence of the given value from the given list.
-- \\ - Used as LIST \\ LIST - creates a new list by removing from the left-hand list each element of the right-hand list (removal is one for one)
-- union - Joins two lists but removes duplicates!
-- intersect - creates a new list with only the values present in both lists.
-- insert - adds a value to a sortable list at the first "correct" position found (based on the ordering).
-- length, take, drop, splitAt, !! and replicate use INTs for historical reasons
-- For other cases use genericMETHOD_NAME (the exception being genericIndex, which stands for the !! function)
-- nub, delete, union, intersect and group use == for their comparisons
-- For custom comparisons use nubBy, deleteBy, unionBy, intersectBy and groupBy. Using group is the same as using groupBy (==)
separatePositivesAndNegatives = Data.List.groupBy (\x y -> (x > 0) == (y > 0))
-- on
separatePositivesAndNegatives' = groupBy ((==) `on` (> 0))
-- sort, insert, maximum and minimum also have custom-ordering versions: sortBy, insertBy, maximumBy and minimumBy.
-- How do we know where a list goes inside a list of lists when plain Ord isn't what we want?
xs = [[5,4,5,4,4], [1,2,3], [3,5,4,3], [], [2], [2,2]]
gfh = Data.List.sortBy (compare `on` length) xs
-- Result: [[],[2],[2,2],[1,2,3],[3,5,4,3],[5,4,5,4,4]]
--DATA.CHAR
-- isControl (checks for a control character), isSpace, isLower, isUpper, isAlpha (letter), isAlphaNum (letter or digit), isPrint,
-- isDigit, isOctDigit (octal digit), isHexDigit, isLetter, isMark (checks for a Unicode combining mark, e.g. in French),
-- isNumber, isPunctuation, isSymbol, isSeparator (spaces and separators), isAscii, isAsciiUpper, isAsciiLower...
tyu = all isAlphaNum "bobby283"
--True
--Simulating words with isSpace
wefv = Data.List.filter (not. any Data.Char.isSpace) . Data.List.groupBy ((==) `on` Data.Char.isSpace) $ "hey guys its me"
-- ["hey", "guys", "its", "me"]
-- generalCategory - identifies the GeneralCategory of a character (it derives Eq). It is an enumeration
sdfsa = generalCategory 'A'
-- UppercaseLetter
-- For manipulation: toUpper, toLower, toTitle (sometimes the same as upper case), digitToInt (0..9) (a..f), intToDigit.
-- ord returns the numeric value that represents a character. chr returns the char for a numeric value
encode :: Int -> String -> String
encode shift text =
let ords = Data.List.map ord text
shifted = Data.List.map (+shift) ords
in Data.List.map chr shifted
decode :: Int -> String -> String
decode shift text = encode (negate shift) text
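-- Round-trip example (added for illustration): decode undoes encode.
uiop = decode 3 (encode 3 "hey mark") -- "hey mark"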
--DATA.MAP
-- One way to represent maps? An association list [(a,b)]
findKey :: (Eq k) => k -> [(k, v)] -> Maybe v
--findKey key [] = Nothing
--findKey key ((k,v):xs) = if key == k then Just v else findKey key xs
findKey key = Data.List.foldr (\(k,v) acc -> if key == k then Just v else acc) Nothing
-- Maps are more optimized. Use fromList to turn an association list into a Map. It removes duplicate keys.
-- Map.fromList ::(Ord k) => [(k,v)] -> Map.Map k v
--empty - returns an empty map
-- insert - takes a key, a value and a map and does the job
-- null - checks whether the map is empty
-- size - Returns the size of the map
-- singleton - takes a key and a value and creates a map with just that one entry
-- lookup - like the Data.List one. Returns Just value or Nothing.
-- member - takes a key and a map and returns a boolean saying whether the key is present
-- map and filter - like the list versions, but they operate only on the values.
-- toList - the inverse of fromList
-- keys and elems return a list of the keys and the elements, respectively.
-- fromListWith - takes a function to decide what to do with duplicate keys.
--phoneBookToMap :: (Ord k) => [(k,String)] -> Map.Map k String
--phoneBookToMap xs = Map.fromListWith (\number1 number2 -> number1 ++ ", " ++ number2) xs
phoneBookToMap :: (Ord k) => [(k, a)] -> Map.Map k [a]
phoneBookToMap xs = Map.fromListWith (++) $ Data.List.map (\(k,v) -> (k, [v])) xs -- wraps each value in a list and concatenates the lists of duplicate keys
--insertWith - what fromListWith is to fromList, insertWith is to insert. For the case of duplicate keys.
| WillGluck/HaskellPlayground | modulesPlayground.hs | apache-2.0 | 8,070 | 10 | 12 | 1,381 | 1,011 | 596 | 415 | 38 | 2 |
module Auth0.Management.Rules where
--------------------------------------------------------------------------------
import Data.Aeson
import Data.Proxy
import Data.Text
import GHC.Generics
import Servant.API
import Servant.Client
--------------------------------------------------------------------------------
import Auth0.Types
--------------------------------------------------------------------------------
data RuleResponse
= RuleResponse
{ name :: Maybe Text
, id :: Maybe Text
, enabled :: Maybe Bool
, script :: Maybe Text
, number :: Maybe Double
, stage :: Maybe Text
} deriving (Generic, Show)
instance FromJSON RuleResponse where
parseJSON =
genericParseJSON defaultOptions { omitNothingFields = True, fieldLabelModifier = camelTo2 '_' }
--------------------------------------------------------------------------------
-- GET /api/v2/rules
type RulesGetApi
= Header' '[Required] "Authorization" AccessToken
:> QueryParam "enabled" Bool
:> QueryParam "fields" Text
:> QueryParam "include_fields" Bool
:> Get '[JSON] [RuleResponse]
rulesGet ::
AccessToken
-> Maybe Bool
-> Maybe Text
-> Maybe Bool
-> ClientM [RuleResponse]
--------------------------------------------------------------------------------
-- POST /api/v2/rules
data RuleCreate
= RuleCreate
{ name :: Maybe Text
, script :: Maybe Text
, order :: Maybe Double
, enabled :: Maybe Bool
} deriving (Generic, Show)
instance ToJSON RuleCreate where
toJSON =
genericToJSON defaultOptions { omitNothingFields = True, fieldLabelModifier = camelTo2 '_' }
type RuleCreateApi
= Header' '[Required] "Authorization" AccessToken
:> ReqBody '[JSON] RuleCreate
:> Post '[JSON] RuleResponse
ruleCreate ::
AccessToken
-> RuleCreate
-> ClientM RuleResponse
--------------------------------------------------------------------------------
-- GET /api/v2/rules/{id}
type RuleGetApi
= Header' '[Required] "Authorization" AccessToken
:> Capture "id" Text
:> QueryParam "fields" Text
:> QueryParam "include_fields" Bool
:> Get '[JSON] [RuleResponse]
ruleGet ::
AccessToken
-> Text
-> Maybe Text
-> Maybe Bool
-> ClientM [RuleResponse]
--------------------------------------------------------------------------------
-- DELETE /api/v2/rules/{id}
type RuleDeleteApi
= Header' '[Required] "Authorization" AccessToken
:> Capture "id" Text
:> Delete '[JSON] NoContent
ruleDelete ::
AccessToken
-> Text
-> ClientM NoContent
--------------------------------------------------------------------------------
-- PATCH /api/v2/rules/{id}
type RuleUpdate = RuleCreate
type RuleUpdateApi
= Header' '[Required] "Authorization" AccessToken
:> Capture "id" Text
:> ReqBody '[JSON] RuleUpdate
:> Patch '[JSON] RuleResponse
ruleUpdate ::
AccessToken
-> Text
-> RuleUpdate
-> ClientM RuleResponse
--------------------------------------------------------------------------------
type RulesApi
= "api"
:> "v2"
:> "rules"
:> ( RulesGetApi
:<|> RuleCreateApi
:<|> RuleGetApi
:<|> RuleDeleteApi
:<|> RuleUpdateApi
)
rulesApi :: Proxy RulesApi
rulesApi = Proxy
rulesGet
:<|> ruleCreate
:<|> ruleGet
:<|> ruleDelete
:<|> ruleUpdate
= client rulesApi
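-- Example (illustrative sketch; 'env' is an assumed 'ClientEnv' pointing at
-- the tenant's management API and 'tok' a management-scoped 'AccessToken'):
--
-- > listEnabledRules :: ClientEnv -> AccessToken -> IO (Either ClientError [RuleResponse])
-- > listEnabledRules env tok =
-- >   runClientM (rulesGet tok (Just True) Nothing Nothing) env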
| alasconnect/auth0 | src/Auth0/Management/Rules.hs | apache-2.0 | 3,305 | 0 | 11 | 564 | 726 | 385 | 341 | -1 | -1 |
{-# OPTIONS -fglasgow-exts #-}
-----------------------------------------------------------------------------
{-| Module : QTextFrame.hs
Copyright : (c) David Harley 2010
Project : qtHaskell
Version : 1.1.4
Modified : 2010-09-02 17:02:21
Warning : this file is machine generated - do not modify.
--}
-----------------------------------------------------------------------------
module Qtc.Gui.QTextFrame (
qTextFrame
,childFrames
,frameFormat
,layoutData
,parentFrame
,setFrameFormat
,setLayoutData
,qTextFrame_delete
,qTextFrame_deleteLater
)
where
import Qth.ClassTypes.Core
import Qtc.Enums.Base
import Qtc.Classes.Base
import Qtc.Classes.Qccs
import Qtc.Classes.Core
import Qtc.ClassTypes.Core
import Qth.ClassTypes.Core
import Qtc.Classes.Gui
import Qtc.ClassTypes.Gui
instance QuserMethod (QTextFrame ()) (()) (IO ()) where
userMethod qobj evid ()
= withObjectPtr qobj $ \cobj_qobj ->
qtc_QTextFrame_userMethod cobj_qobj (toCInt evid)
foreign import ccall "qtc_QTextFrame_userMethod" qtc_QTextFrame_userMethod :: Ptr (TQTextFrame a) -> CInt -> IO ()
instance QuserMethod (QTextFrameSc a) (()) (IO ()) where
userMethod qobj evid ()
= withObjectPtr qobj $ \cobj_qobj ->
qtc_QTextFrame_userMethod cobj_qobj (toCInt evid)
instance QuserMethod (QTextFrame ()) (QVariant ()) (IO (QVariant ())) where
userMethod qobj evid qvoj
= withObjectRefResult $
withObjectPtr qobj $ \cobj_qobj ->
withObjectPtr qvoj $ \cobj_qvoj ->
qtc_QTextFrame_userMethodVariant cobj_qobj (toCInt evid) cobj_qvoj
foreign import ccall "qtc_QTextFrame_userMethodVariant" qtc_QTextFrame_userMethodVariant :: Ptr (TQTextFrame a) -> CInt -> Ptr (TQVariant ()) -> IO (Ptr (TQVariant ()))
instance QuserMethod (QTextFrameSc a) (QVariant ()) (IO (QVariant ())) where
userMethod qobj evid qvoj
= withObjectRefResult $
withObjectPtr qobj $ \cobj_qobj ->
withObjectPtr qvoj $ \cobj_qvoj ->
qtc_QTextFrame_userMethodVariant cobj_qobj (toCInt evid) cobj_qvoj
qTextFrame :: (QTextDocument t1) -> IO (QTextFrame ())
qTextFrame (x1)
= withQTextFrameResult $
withObjectPtr x1 $ \cobj_x1 ->
qtc_QTextFrame cobj_x1
foreign import ccall "qtc_QTextFrame" qtc_QTextFrame :: Ptr (TQTextDocument t1) -> IO (Ptr (TQTextFrame ()))
childFrames :: QTextFrame a -> (()) -> IO ([QTextFrame ()])
childFrames x0 ()
= withQListQTextFrameResult $ \arr ->
withObjectPtr x0 $ \cobj_x0 ->
qtc_QTextFrame_childFrames cobj_x0 arr
foreign import ccall "qtc_QTextFrame_childFrames" qtc_QTextFrame_childFrames :: Ptr (TQTextFrame a) -> Ptr (Ptr (TQTextFrame ())) -> IO CInt
instance QfirstCursorPosition (QTextFrame a) (()) where
firstCursorPosition x0 ()
= withQTextCursorResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QTextFrame_firstCursorPosition cobj_x0
foreign import ccall "qtc_QTextFrame_firstCursorPosition" qtc_QTextFrame_firstCursorPosition :: Ptr (TQTextFrame a) -> IO (Ptr (TQTextCursor ()))
instance QfirstPosition (QTextFrame a) (()) where
firstPosition x0 ()
= withIntResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QTextFrame_firstPosition cobj_x0
foreign import ccall "qtc_QTextFrame_firstPosition" qtc_QTextFrame_firstPosition :: Ptr (TQTextFrame a) -> IO CInt
frameFormat :: QTextFrame a -> (()) -> IO (QTextFrameFormat ())
frameFormat x0 ()
= withQTextFrameFormatResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QTextFrame_frameFormat cobj_x0
foreign import ccall "qtc_QTextFrame_frameFormat" qtc_QTextFrame_frameFormat :: Ptr (TQTextFrame a) -> IO (Ptr (TQTextFrameFormat ()))
instance QlastCursorPosition (QTextFrame a) (()) where
lastCursorPosition x0 ()
= withQTextCursorResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QTextFrame_lastCursorPosition cobj_x0
foreign import ccall "qtc_QTextFrame_lastCursorPosition" qtc_QTextFrame_lastCursorPosition :: Ptr (TQTextFrame a) -> IO (Ptr (TQTextCursor ()))
instance QlastPosition (QTextFrame a) (()) where
lastPosition x0 ()
= withIntResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QTextFrame_lastPosition cobj_x0
foreign import ccall "qtc_QTextFrame_lastPosition" qtc_QTextFrame_lastPosition :: Ptr (TQTextFrame a) -> IO CInt
layoutData :: QTextFrame a -> (()) -> IO (QTextFrameLayoutData ())
layoutData x0 ()
= withObjectRefResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QTextFrame_layoutData cobj_x0
foreign import ccall "qtc_QTextFrame_layoutData" qtc_QTextFrame_layoutData :: Ptr (TQTextFrame a) -> IO (Ptr (TQTextFrameLayoutData ()))
parentFrame :: QTextFrame a -> (()) -> IO (QTextFrame ())
parentFrame x0 ()
= withQTextFrameResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QTextFrame_parentFrame cobj_x0
foreign import ccall "qtc_QTextFrame_parentFrame" qtc_QTextFrame_parentFrame :: Ptr (TQTextFrame a) -> IO (Ptr (TQTextFrame ()))
setFrameFormat :: QTextFrame a -> ((QTextFrameFormat t1)) -> IO ()
setFrameFormat x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QTextFrame_setFrameFormat cobj_x0 cobj_x1
foreign import ccall "qtc_QTextFrame_setFrameFormat" qtc_QTextFrame_setFrameFormat :: Ptr (TQTextFrame a) -> Ptr (TQTextFrameFormat t1) -> IO ()
setLayoutData :: QTextFrame a -> ((QTextFrameLayoutData t1)) -> IO ()
setLayoutData x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QTextFrame_setLayoutData cobj_x0 cobj_x1
foreign import ccall "qtc_QTextFrame_setLayoutData" qtc_QTextFrame_setLayoutData :: Ptr (TQTextFrame a) -> Ptr (TQTextFrameLayoutData t1) -> IO ()
qTextFrame_delete :: QTextFrame a -> IO ()
qTextFrame_delete x0
= withObjectPtr x0 $ \cobj_x0 ->
qtc_QTextFrame_delete cobj_x0
foreign import ccall "qtc_QTextFrame_delete" qtc_QTextFrame_delete :: Ptr (TQTextFrame a) -> IO ()
qTextFrame_deleteLater :: QTextFrame a -> IO ()
qTextFrame_deleteLater x0
= withObjectPtr x0 $ \cobj_x0 ->
qtc_QTextFrame_deleteLater cobj_x0
foreign import ccall "qtc_QTextFrame_deleteLater" qtc_QTextFrame_deleteLater :: Ptr (TQTextFrame a) -> IO ()
instance QsetFormat (QTextFrame ()) ((QTextFormat t1)) where
setFormat x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QTextFrame_setFormat cobj_x0 cobj_x1
foreign import ccall "qtc_QTextFrame_setFormat" qtc_QTextFrame_setFormat :: Ptr (TQTextFrame a) -> Ptr (TQTextFormat t1) -> IO ()
instance QsetFormat (QTextFrameSc a) ((QTextFormat t1)) where
setFormat x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QTextFrame_setFormat cobj_x0 cobj_x1
instance QchildEvent (QTextFrame ()) ((QChildEvent t1)) where
childEvent x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QTextFrame_childEvent cobj_x0 cobj_x1
foreign import ccall "qtc_QTextFrame_childEvent" qtc_QTextFrame_childEvent :: Ptr (TQTextFrame a) -> Ptr (TQChildEvent t1) -> IO ()
instance QchildEvent (QTextFrameSc a) ((QChildEvent t1)) where
childEvent x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QTextFrame_childEvent cobj_x0 cobj_x1
instance QconnectNotify (QTextFrame ()) ((String)) where
connectNotify x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withCWString x1 $ \cstr_x1 ->
qtc_QTextFrame_connectNotify cobj_x0 cstr_x1
foreign import ccall "qtc_QTextFrame_connectNotify" qtc_QTextFrame_connectNotify :: Ptr (TQTextFrame a) -> CWString -> IO ()
instance QconnectNotify (QTextFrameSc a) ((String)) where
connectNotify x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withCWString x1 $ \cstr_x1 ->
qtc_QTextFrame_connectNotify cobj_x0 cstr_x1
instance QcustomEvent (QTextFrame ()) ((QEvent t1)) where
customEvent x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QTextFrame_customEvent cobj_x0 cobj_x1
foreign import ccall "qtc_QTextFrame_customEvent" qtc_QTextFrame_customEvent :: Ptr (TQTextFrame a) -> Ptr (TQEvent t1) -> IO ()
instance QcustomEvent (QTextFrameSc a) ((QEvent t1)) where
customEvent x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QTextFrame_customEvent cobj_x0 cobj_x1
instance QdisconnectNotify (QTextFrame ()) ((String)) where
disconnectNotify x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withCWString x1 $ \cstr_x1 ->
qtc_QTextFrame_disconnectNotify cobj_x0 cstr_x1
foreign import ccall "qtc_QTextFrame_disconnectNotify" qtc_QTextFrame_disconnectNotify :: Ptr (TQTextFrame a) -> CWString -> IO ()
instance QdisconnectNotify (QTextFrameSc a) ((String)) where
disconnectNotify x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withCWString x1 $ \cstr_x1 ->
qtc_QTextFrame_disconnectNotify cobj_x0 cstr_x1
instance Qevent (QTextFrame ()) ((QEvent t1)) where
event x0 (x1)
= withBoolResult $
withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QTextFrame_event_h cobj_x0 cobj_x1
foreign import ccall "qtc_QTextFrame_event_h" qtc_QTextFrame_event_h :: Ptr (TQTextFrame a) -> Ptr (TQEvent t1) -> IO CBool
instance Qevent (QTextFrameSc a) ((QEvent t1)) where
event x0 (x1)
= withBoolResult $
withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QTextFrame_event_h cobj_x0 cobj_x1
instance QeventFilter (QTextFrame ()) ((QObject t1, QEvent t2)) where
eventFilter x0 (x1, x2)
= withBoolResult $
withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
withObjectPtr x2 $ \cobj_x2 ->
qtc_QTextFrame_eventFilter_h cobj_x0 cobj_x1 cobj_x2
foreign import ccall "qtc_QTextFrame_eventFilter_h" qtc_QTextFrame_eventFilter_h :: Ptr (TQTextFrame a) -> Ptr (TQObject t1) -> Ptr (TQEvent t2) -> IO CBool
instance QeventFilter (QTextFrameSc a) ((QObject t1, QEvent t2)) where
eventFilter x0 (x1, x2)
= withBoolResult $
withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
withObjectPtr x2 $ \cobj_x2 ->
qtc_QTextFrame_eventFilter_h cobj_x0 cobj_x1 cobj_x2
instance Qreceivers (QTextFrame ()) ((String)) where
receivers x0 (x1)
= withIntResult $
withObjectPtr x0 $ \cobj_x0 ->
withCWString x1 $ \cstr_x1 ->
qtc_QTextFrame_receivers cobj_x0 cstr_x1
foreign import ccall "qtc_QTextFrame_receivers" qtc_QTextFrame_receivers :: Ptr (TQTextFrame a) -> CWString -> IO CInt
instance Qreceivers (QTextFrameSc a) ((String)) where
receivers x0 (x1)
= withIntResult $
withObjectPtr x0 $ \cobj_x0 ->
withCWString x1 $ \cstr_x1 ->
qtc_QTextFrame_receivers cobj_x0 cstr_x1
instance Qsender (QTextFrame ()) (()) where
sender x0 ()
= withQObjectResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QTextFrame_sender cobj_x0
foreign import ccall "qtc_QTextFrame_sender" qtc_QTextFrame_sender :: Ptr (TQTextFrame a) -> IO (Ptr (TQObject ()))
instance Qsender (QTextFrameSc a) (()) where
sender x0 ()
= withQObjectResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QTextFrame_sender cobj_x0
instance QtimerEvent (QTextFrame ()) ((QTimerEvent t1)) where
timerEvent x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QTextFrame_timerEvent cobj_x0 cobj_x1
foreign import ccall "qtc_QTextFrame_timerEvent" qtc_QTextFrame_timerEvent :: Ptr (TQTextFrame a) -> Ptr (TQTimerEvent t1) -> IO ()
instance QtimerEvent (QTextFrameSc a) ((QTimerEvent t1)) where
timerEvent x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QTextFrame_timerEvent cobj_x0 cobj_x1
| uduki/hsQt | Qtc/Gui/QTextFrame.hs | bsd-2-clause | 11,437 | 0 | 14 | 1,867 | 3,559 | 1,800 | 1,759 | -1 | -1 |
-- {-# INCLUDE <FTGL/ftgl.h> #-}
{-# LANGUAGE ForeignFunctionInterface #-}
{-# OPTIONS_GHC -O2 -fglasgow-exts #-}
-- | * Author: Jefferson Heard (jefferson.r.heard at gmail.com)
--
-- * Copyright 2008 Renaissance Computing Institute < http://www.renci.org >
--
-- * License: GNU LGPL
--
-- * Compatibility GHC (I could change the data declarations to not be empty and that would make it more generally compatible, I believe)
--
-- * Description:
--
-- Use FreeType 2 Fonts in OpenGL. Requires the FTGL library and FreeType libraries.
-- available at < http://ftgl.wiki.sourceforge.net/ > . The most important functions for
-- everyday use are renderFont and the create*Font family of functions. To render a
-- simple string inside OpenGL, assuming you have OpenGL initialized and a current
-- pen color, all you need is:
--
-- > do font <- createTextureFont "Font.ttf"
-- > setFontFaceSize font 24 72
-- >    renderFont font "Hello world!" All
--
-- Fonts are rendered so that a single point is an OpenGL unit, and a point is 1:72 of
-- an inch.
module Graphics.Rendering.FTGL
where
-- import Foreign (unsafePerformIO)
import System.IO.Unsafe(unsafePerformIO)
import Foreign.C
import Foreign.Ptr
import Foreign.Marshal.Alloc
import Foreign.Marshal.Array
import Data.Bits
import Data.Char (ord)
import qualified Graphics.Rendering.OpenGL.GL as GL
import Control.Applicative ((<$>))
foreign import ccall unsafe "ftglCreateBitmapFont" fcreateBitmapFont :: CString -> IO Font
-- | Create a bitmapped version of a TrueType font. Bitmapped versions will not
-- | respond to matrix transformations, but rather must be transformed using the
-- | raster positioning functions in OpenGL
createBitmapFont :: String -> IO Font
createBitmapFont file = withCString file $ \p -> fcreateBitmapFont p
foreign import ccall unsafe "ftglCreateBufferFont" fcreateBufferFont :: CString -> IO Font
-- | Create a buffered version of a TrueType font. This stores the entirety of
-- | a string in a texture, "buffering" it before rendering. Very fast if you
-- | will be repeatedly rendering the same strings over and over.
createBufferFont :: String -> IO Font
createBufferFont file = withCString file $ \p -> fcreateBufferFont p
foreign import ccall unsafe "ftglCreateOutlineFont" fcreateOutlineFont :: CString -> IO Font
-- | Create an outline version of a TrueType font. This uses actual geometry
-- | and will scale independently without loss of quality. Faster than polygons
-- | but slower than texture or buffer fonts.
createOutlineFont :: String -> IO Font
createOutlineFont file = withCString file $ \p -> fcreateOutlineFont p
foreign import ccall unsafe "ftglCreatePixmapFont" fcreatePixmapFont :: CString -> IO Font
-- | Create a pixmap version of a TrueType font. Higher quality than the bitmap
-- | font without losing any performance. Use this if you don't mind using
-- | set and get RasterPosition.
createPixmapFont :: String -> IO Font
createPixmapFont file = withCString file $ \p -> fcreatePixmapFont p
foreign import ccall unsafe "ftglCreatePolygonFont" fcreatePolygonFont :: CString -> IO Font
-- | Create polygonal display list fonts. These scale independently without
-- | losing quality, unlike texture or buffer fonts, but can be impractical
-- | for large amounts of text because of the high number of polygons needed.
-- | Additionally, they do not, unlike the textured fonts, create artifacts
-- | within the square formed at the edge of each character.
createPolygonFont :: String -> IO Font
createPolygonFont file = withCString file $ \p -> fcreatePolygonFont p
foreign import ccall unsafe "ftglCreateTextureFont" fcreateTextureFont :: CString -> IO Font
-- | Create textured display list fonts. These can scale somewhat well,
-- | but lose quality quickly. They are much faster than polygonal fonts,
-- | though, so are suitable for large quantities of text. Especially suited
-- | well to text that changes with most frames, because it doesn't incur the
-- | (normally helpful) overhead of buffering.
createTextureFont :: String -> IO Font
createTextureFont file = withCString file $ \p -> fcreateTextureFont p
foreign import ccall unsafe "ftglCreateExtrudeFont" fcreateExtrudeFont :: CString -> IO Font
-- | Create a 3D extruded font. This is the only way of creating 3D fonts
-- | within FTGL. Could be fun to use a geometry shader to get different
-- | effects by warping the otherwise square nature of the font. Polygonal.
-- | Scales without losing quality. Slower than all other fonts.
createExtrudeFont :: String -> IO Font
createExtrudeFont file = withCString file $ \p -> fcreateExtrudeFont p
-- | Create a simple layout
foreign import ccall unsafe "ftglCreateSimpleLayout" createSimpleLayout :: IO Layout
-- | Set the layout's font.
foreign import ccall unsafe "ftglSetLayoutFont" setLayoutFont :: Layout -> Font -> IO ()
foreign import ccall unsafe "ftglGetLayoutFont" fgetLayoutFont :: Layout -> IO Font
-- | Get the embedded font from the Layout
getLayoutFont :: Layout -> IO Font
getLayoutFont f = fgetLayoutFont f
-- | Set the line length, I believe in OpenGL units, although I'm not sure.
foreign import ccall unsafe "ftglSetLayoutLineLength" setLayoutLineLength :: Layout -> CFloat -> IO ()
foreign import ccall unsafe "ftglGetLayoutLineLength" fgetLayoutLineLength :: Layout -> IO CFloat
-- | Get the line length in points (1:72in) of lines in the layout
getLayoutLineLength :: Layout -> IO Float
getLayoutLineLength f = realToFrac <$> fgetLayoutLineLength f
foreign import ccall unsafe "ftglSetLayoutAlignment" fsetLayoutAlignment :: Layout -> CInt -> IO ()
-- | Set the layout alignment
setLayoutAlignment :: Layout -> TextAlignment -> IO ()
setLayoutAlignment layout alignment = fsetLayoutAlignment layout (marshalTextAlignment alignment)
foreign import ccall unsafe "ftglGetLayoutAlignement" fgetLayoutAlignment :: Layout -> IO CInt
-- | Get the alignment of text in this layout.
getLayoutAlignment :: Layout -> IO TextAlignment
getLayoutAlignment f = readTextAlignment <$> fgetLayoutAlignment f
foreign import ccall unsafe "ftglSetLayoutLineSpacing" fsetLayoutLineSpacing :: Layout -> CFloat -> IO ()
-- | Set layout line spacing in OpenGL units.
setLayoutLineSpacing :: Layout -> Float -> IO ()
setLayoutLineSpacing layout spacing = fsetLayoutLineSpacing layout (realToFrac spacing)
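-- A rough usage sketch for the layout API above (illustrative only; it assumes
-- a valid "Font.ttf" on disk and a current OpenGL context, and the line length
-- of 300 is an arbitrary choice):
--
-- > do font <- createTextureFont "Font.ttf"
-- >    setFontFaceSize font 24 72
-- >    layout <- createSimpleLayout
-- >    setLayoutFont layout font
-- >    setLayoutLineLength layout 300
-- >    setLayoutAlignment layout AlignCenter
-- >    renderLayout layout "Hello, layout!"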
-- | Destroy a font
foreign import ccall unsafe "ftglDestroyFont" destroyFont :: Font -> IO ()
foreign import ccall unsafe "ftglAttachFile" fattachFile :: Font -> CString -> IO ()
-- | Attach a metadata file to a font.
attachFile :: Font -> String -> IO ()
attachFile font str = withCString str $ \p -> fattachFile font p
-- | Attach some external data (often kerning) to the font
foreign import ccall unsafe "ftglAttachData" attachData :: Font -> Ptr () -> IO ()
-- | Set the font's character map
foreign import ccall unsafe "ftglSetFontCharMap" fsetFontCharMap :: Font -> CInt -> IO ()
setCharMap :: Font -> CharMap -> IO ()
setCharMap font charmap = fsetFontCharMap font (marshalCharMap charmap)
foreign import ccall unsafe "ftglGetFontCharMapCount" fgetFontCharMapCount :: Font -> IO CInt
-- | Get the number of characters loaded into the current charmap for the font.
getFontCharMapCount :: Font -> Int
getFontCharMapCount f = fromIntegral . unsafePerformIO $ fgetFontCharMapCount f
foreign import ccall unsafe "ftglGetFontCharMapList" fgetFontCharMapList :: Font -> IO (Ptr CInt)
-- | Get the different character mappings available in this font.
getFontCharMapList :: Font -> Ptr CInt
getFontCharMapList f = unsafePerformIO $ fgetFontCharMapList f
foreign import ccall unsafe "ftglSetFontFaceSize" fsetFontFaceSize :: Font -> CInt -> CInt -> IO CInt
setFontFaceSize :: Font -> Int -> Int -> IO CInt
setFontFaceSize f s x = fsetFontFaceSize f (fromIntegral s) (fromIntegral x)
foreign import ccall unsafe "ftglGetFontFaceSize" fgetFontFaceSize :: Font -> IO CInt
-- | Get the current font face size in points.
getFontFaceSize :: Font -> IO Int
getFontFaceSize f = fromIntegral <$> fgetFontFaceSize f
foreign import ccall unsafe "ftglSetFontDepth" fsetFontDepth :: Font -> CFloat -> IO ()
setFontDepth :: Font -> Float -> IO ()
setFontDepth font depth = fsetFontDepth font (realToFrac depth)
foreign import ccall unsafe "ftglSetFontOutset" fsetFontOutset :: Font -> CFloat -> CFloat -> IO ()
setFontOutset :: Font -> Float -> Float -> IO ()
setFontOutset font d o = fsetFontOutset font (realToFrac d) (realToFrac o)
foreign import ccall unsafe "ftglGetFontBBox" fgetFontBBox :: Font -> CString -> Int -> Ptr CFloat -> IO ()
-- | Get the text extents of a string as a list of (llx,lly,llz,urx,ury,urz)
getFontBBox :: Font -> String -> IO [Float]
getFontBBox f s = allocaBytes 24 $ \pf ->
withCString s $ \ps -> do
fgetFontBBox f ps (-1) pf
map realToFrac <$> peekArray 6 pf
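-- For example, the rendered width of a string can be estimated from the two
-- box corners (a sketch, not part of the original binding; it assumes the
-- six-element list documented above):
--
-- > stringWidth :: Font -> String -> IO Float
-- > stringWidth font s = do
-- >   [llx, _lly, _llz, urx, _ury, _urz] <- getFontBBox font s
-- >   return (urx - llx)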
foreign import ccall unsafe "ftglGetFontAscender" fgetFontAscender :: Font -> CFloat
-- | Get the global ascender height for the face.
getFontAscender :: Font -> Float
getFontAscender = realToFrac . fgetFontAscender
foreign import ccall unsafe "ftglGetFontDescender" fgetFontDescender :: Font -> CFloat
-- | Gets the global descender height for the face.
getFontDescender :: Font -> Float
getFontDescender = realToFrac . fgetFontDescender
foreign import ccall unsafe "ftglGetFontLineHeight" fgetFontLineHeight :: Font -> CFloat
-- | Gets the global line spacing for the face.
getFontLineHeight :: Font -> Float
getFontLineHeight = realToFrac . fgetFontLineHeight
foreign import ccall unsafe "ftglGetFontAdvance" fgetFontAdvance :: Font -> CString -> IO CFloat
-- | Get the horizontal span of a string of text using the current font. Useful
-- | as the x coordinate of a translate operation when advancing the pen past the string.
getFontAdvance :: Font -> String -> IO Float
getFontAdvance font str = realToFrac <$> (withCString str $ \p -> fgetFontAdvance font p )
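-- A hedged sketch of the usual pattern: measure a string, translate the pen by
-- its advance, then draw the next string. It assumes an active OpenGL context
-- and a @font@ already obtained from one of the create*Font functions:
--
-- > do dx <- getFontAdvance font "Hello, "
-- >    renderFont font "Hello, " All
-- >    GL.translate (GL.Vector3 (realToFrac dx) 0 (0 :: GL.GLfloat))
-- >    renderFont font "world!" All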
foreign import ccall unsafe "ftglRenderFont" frenderFont :: Font -> CString -> CInt -> IO ()
-- | Render a string of text in the current font.
renderFont :: Font -> String -> RenderMode -> IO ()
renderFont font str mode = withCString str $ \p -> do
frenderFont font p (marshalRenderMode mode)
foreign import ccall unsafe "ftglGetFontError" fgetFontError :: Font -> IO CInt
-- | Get any errors associated with loading a font. FIXME return should be a type, not an Int.
getFontError :: Font -> IO Int
getFontError f = fromIntegral <$> fgetFontError f
foreign import ccall unsafe "ftglDestroyLayout" destroyLayout :: Layout -> IO ()
foreign import ccall unsafe "ftglRenderLayout" frenderLayout :: Layout -> CString -> IO ()
-- | Render a string of text within a layout.
renderLayout :: Layout -> String -> IO ()
renderLayout layout str = withCString str $ \strPtr -> frenderLayout layout strPtr
foreign import ccall unsafe "ftglGetLayoutError" fgetLayoutError :: Layout -> IO CInt
-- | Get any errors associated with a layout.
getLayoutError :: Layout -> IO CInt
getLayoutError f = fgetLayoutError f
-- | Whether or not in polygonal or extrusion mode, the font will render equally front and back
data RenderMode = Front | Back | Side | All
-- | In a Layout directed render, the layout mode of the text
data TextAlignment = AlignLeft | AlignCenter | AlignRight | Justify
marshalRenderMode :: RenderMode -> CInt
marshalRenderMode Front = 0x0001
marshalRenderMode Back = 0x0002
marshalRenderMode Side  = 0x0004
marshalRenderMode All = 0xffff
marshalTextAlignment :: TextAlignment -> CInt
marshalTextAlignment AlignLeft = 0
marshalTextAlignment AlignCenter = 1
marshalTextAlignment AlignRight = 2
marshalTextAlignment Justify = 3
readTextAlignment :: CInt -> TextAlignment
readTextAlignment 0 = AlignLeft
readTextAlignment 1 = AlignCenter
readTextAlignment 2 = AlignRight
readTextAlignment 3 = Justify
readTextAlignment n = error $ "readTextAlignment: unexpected alignment value " ++ show n
-- | An opaque type encapsulating a glyph in C. Currently the glyph functions are unimplemented in Haskell.
data Glyph_Opaque
-- | An opaque type encapsulating a font in C.
data Font_Opaque
-- | An opaque type encapsulating a layout in C
data Layout_Opaque
type Glyph = Ptr Glyph_Opaque
type Font = Ptr Font_Opaque
type Layout = Ptr Layout_Opaque
data CharMap =
EncodingNone
| EncodingMSSymbol
| EncodingUnicode
| EncodingSJIS
| EncodingGB2312
| EncodingBig5
| EncodingWanSung
| EncodingJohab
| EncodingAdobeStandard
| EncodingAdobeExpert
| EncodingAdobeCustom
| EncodingAdobeLatin1
| EncodingOldLatin2
| EncodingAppleRoman
encodeTag :: Char -> Char -> Char -> Char -> CInt
encodeTag a b c d =
(fromIntegral (ord a) `shift` 24)
.|. (fromIntegral (ord b) `shift` 16)
.|. (fromIntegral (ord c) `shift` 8)
.|. (fromIntegral (ord d))
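-- As a worked example, the Unicode charmap tag below packs the bytes of
-- "unic" into a single value (ord 'u' = 117, 'n' = 110, 'i' = 105, 'c' = 99):
--
-- > encodeTag 'u' 'n' 'i' 'c'
-- > == 117 * 2^24 + 110 * 2^16 + 105 * 2^8 + 99
-- > == 1970170211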
marshalCharMap :: CharMap -> CInt
marshalCharMap EncodingNone = 0
marshalCharMap EncodingMSSymbol = encodeTag 's' 'y' 'm' 'b'
marshalCharMap EncodingUnicode = encodeTag 'u' 'n' 'i' 'c'
marshalCharMap EncodingSJIS = encodeTag 's' 'j' 'i' 's'
marshalCharMap EncodingGB2312 = encodeTag 'g' 'b' ' ' ' '
marshalCharMap EncodingBig5 = encodeTag 'b' 'i' 'g' '5'
marshalCharMap EncodingWanSung = encodeTag 'w' 'a' 'n' 's'
marshalCharMap EncodingJohab = encodeTag 'j' 'o' 'h' 'a'
marshalCharMap EncodingAdobeStandard = encodeTag 'A' 'D' 'O' 'B'
marshalCharMap EncodingAdobeExpert = encodeTag 'A' 'D' 'B' 'E'
marshalCharMap EncodingAdobeCustom = encodeTag 'A' 'D' 'B' 'C'
marshalCharMap EncodingAdobeLatin1 = encodeTag 'l' 'a' 't' '1'
marshalCharMap EncodingOldLatin2 = encodeTag 'l' 'a' 't' '2'
marshalCharMap EncodingAppleRoman = encodeTag 'a' 'r' 'm' 'n'
| rvion/ftgl-haskell | Graphics/Rendering/FTGL.hs | bsd-2-clause | 13,377 | 0 | 13 | 2,283 | 2,552 | 1,349 | 1,203 | -1 | -1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveTraversable #-}
{-# LANGUAGE DeriveFunctor #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE FunctionalDependencies #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE KindSignatures #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE MagicHash #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE PolyKinds #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE StandaloneDeriving #-}
{-# LANGUAGE TypeApplications #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeFamilyDependencies #-}
{-# LANGUAGE TypeInType #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE UndecidableInstances #-}
{-# LANGUAGE EmptyCase #-}
{-# LANGUAGE PatternSynonyms #-}
{-# LANGUAGE ViewPatterns #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# OPTIONS_GHC -fno-warn-unticked-promoted-constructors #-}
{-# OPTIONS_GHC -fno-warn-missing-pattern-synonym-signatures #-}
module Numeric.Expr where
import Data.Functor.Foldable
import Data.Proxy
import GHC.TypeLits
import Data.Functor.Classes
import Control.Lens hiding (Contains(..))
import Data.Monoid (Endo(..), (<>))
import Control.Arrow
data Elem (x :: k) (xs :: [k]) where
Here :: Elem x (x : ys)
There :: Elem x ys -> Elem x (y : ys)
type family EqHead (x :: k) (xs :: [k]) where
EqHead x (x : xs) = 'True
EqHead x (y : xs) = 'False
class ContainsD (b :: Bool) (x :: k) (xs :: [k]) | xs b -> x where
contains' :: Proxy b -> Elem x xs
instance (EqHead x (y : xs) ~ True, x ~ y) => ContainsD True x (y : xs) where
contains' _ = Here
instance (EqHead x (y : xs) ~ False, Contains x xs) => ContainsD False x (y : xs) where
contains' _ = There contains
class Contains (x :: k) (xs :: [k]) where
contains :: Elem x xs
instance (b ~ EqHead x (y : xs), ContainsD b x (y : xs)) => Contains x (y : xs) where
contains = contains' (Proxy :: Proxy (EqHead x (y : xs)))
class Remove x xs ys | x xs -> ys where
replace :: Proxy x -> Elem y xs -> Maybe (Elem y ys)
instance Remove x (x : xs) xs where
replace _ Here = Nothing
replace _ (There ys) = Just ys
instance Remove x xs ys => Remove x (y : xs) (y : ys) where
replace _ Here = Just Here
replace p (There xs) = fmap There (replace p xs)
-- | Unfixed expression type
data ExprF n vs r where
VarF :: (KnownSymbol var) => Elem var vs -> ExprF n vs r
-- Num
(:+) :: Num n => r -> r -> ExprF n vs r
(:-) :: Num n => r -> r -> ExprF n vs r
(:*) :: Num n => r -> r -> ExprF n vs r
AbsF :: Num n => r -> ExprF n vs r
SigF :: Num n => r -> ExprF n vs r
NegF :: Num n => r -> ExprF n vs r
LitF :: Num n => Integer -> ExprF n vs r
-- Integral
(:÷) :: Integral n => r -> r -> ExprF n vs r
(:%) :: Integral n => r -> r -> ExprF n vs r
-- Fractional
(:/) :: Fractional n => r -> r -> ExprF n v r
RatF :: Fractional n => Rational -> ExprF n v r
-- Floating
(:$) :: Floating n => Func -> r -> ExprF n v r
(:^) :: Floating n => r -> r -> ExprF n v r
PiF :: Floating n => ExprF n v r
deriving instance Functor (ExprF n vs)
deriving instance Foldable (ExprF n vs)
deriving instance Traversable (ExprF n vs)
zipExpr :: (Integer -> Integer -> res)
-> (Rational -> Rational -> res)
-> (Func -> Func -> res)
-> (forall x y. (KnownSymbol x, KnownSymbol y) => Elem x v -> Elem y v -> res)
-> (ra -> rb -> res)
-> (res -> res -> res)
-> res
-> res
-> ExprF lit v ra
-> ExprF lit v rb
-> res
zipExpr l i f v r c t d = (~=) where
PiF ~= PiF = t
RatF a ~= RatF b = i a b
LitF a ~= LitF b = l a b
(w :+ x) ~= (y :+ z) = r w y `c` r x z
(w :- x) ~= (y :- z) = r w y `c` r x z
(w :^ x) ~= (y :^ z) = r w y `c` r x z
(w :* x) ~= (y :* z) = r w y `c` r x z
AbsF x ~= AbsF y = r x y
SigF x ~= SigF y = r x y
NegF x ~= NegF y = r x y
(w :÷ x) ~= (y :÷ z) = r w y `c` r x z
(w :% x) ~= (y :% z) = r w y `c` r x z
(w :/ x) ~= (y :/ z) = r w y `c` r x z
(w :$ x) ~= (y :$ z) = f w y `c` r x z
VarF x ~= VarF y = v x y
_ ~= _ = d
prec :: ExprF l v r -> Int
prec = \case
PiF -> 14; RatF _ -> 13
LitF _ -> 12; VarF _ -> 11; _ :+ _ -> 10; _ :- _ -> 9; _ :* _ -> 8
AbsF _ -> 7; SigF _ -> 6; NegF _ -> 5; _ :÷ _ -> 4; _ :% _ -> 3
_ :/ _ -> 2; _ :$ _ -> 1; _ :^ _ -> 0
instance Eq1 (ExprF n vs) where
liftEq eq = zipExpr (==) (==) (==) cmp eq (&&) True False where
cmp :: forall x y v. Elem x v -> Elem y v -> Bool
cmp Here Here = True
cmp (There _) Here = False
cmp Here (There _) = False
cmp (There x) (There y) = cmp x y
instance Ord1 (ExprF n vs) where
liftCompare cmp xs ys = zipExpr compare compare compare vs cmp mappend EQ (compare (prec xs) (prec ys)) xs ys where
vs :: forall x y v. Elem x v -> Elem y v -> Ordering
vs Here Here = EQ
vs (There _) Here = LT
vs Here (There _) = GT
vs (There x) (There y) = vs x y
type instance Base (Expr n vs) = ExprF n vs
newtype Expr n vs = Expr { getExpr :: ExprF n vs (Expr n vs) }
instance Recursive (Expr n vs) where
project = getExpr
instance Corecursive (Expr n vs) where
embed = Expr
instance Eq (Expr n vs) where
Expr xs == Expr ys = eq1 xs ys
instance Ord (Expr n vs) where
compare (Expr xs) (Expr ys) = compare1 xs ys
evalAlg :: ExprF n '[] n -> n
evalAlg = \case
PiF -> pi
RatF x -> fromRational x
LitF a -> fromInteger a
x :+ y -> x + y
x :- y -> x - y
x :* y -> x * y
AbsF x -> abs x
SigF x -> signum x
NegF x -> negate x
x :÷ y -> quot x y
x :% y -> rem x y
x :/ y -> x / y
f :$ x -> appF f x
x :^ y -> x ** y
VarF e -> case e of {}
appF :: Floating a => Func -> a -> a
appF = \case
Exp -> exp; Sin -> sin; Cos -> cos; Tan -> tan; Log -> log
Atn -> atan; Snh -> sinh; Csh -> cosh; Tnh -> tanh; Asn -> asin
Acs -> acos; Ach -> acosh; Ash -> asinh; Ath -> atanh
data Func =
Sin | Cos | Exp | Log | Tan | Atn | Asn
| Acs | Snh | Csh | Tnh | Ach | Ash | Ath
deriving (Eq, Ord, Enum, Bounded)
instance Show Func where
show = \case
Exp -> "exp"; Sin -> "sin"; Cos -> "cos"; Tan -> "tan";
Log -> "log"; Atn -> "atan"; Snh -> "sinh"; Csh -> "cosh"
Tnh -> "tanh"; Asn -> "asin"; Acs -> "acos"; Ach -> "acosh"
Ash -> "asinh"; Ath -> "atanh"
instance Num n => Num (Expr n vs) where
(+) = (:+:)
(*) = (:*:)
abs = Abs
signum = Sig
negate = Neg
fromInteger = Lit
(-) = (:-:)
instance Real a => Real (Expr a '[]) where
toRational = toRational . cata evalAlg
instance (Num a, Enum a) => Enum (Expr a '[]) where
toEnum = Lit . toEnum
fromEnum = fromEnum . cata evalAlg
instance Integral a => Integral (Expr a '[]) where
toInteger = toInteger . cata evalAlg
quotRem x y = (x :÷: y, x :%: y)
quot = (:÷:)
rem = (:%:)
instance Fractional a => Fractional (Expr a v) where
(/) = (:/:)
fromRational = Rat
instance Floating a => Floating (Expr a v) where
pi = Pi
exp = (:$:) Exp
log = (:$:) Log
sin = (:$:) Sin
cos = (:$:) Cos
asin = (:$:) Asn
acos = (:$:) Acs
atan = (:$:) Atn
sinh = (:$:) Snh
cosh = (:$:) Csh
asinh = (:$:) Ash
acosh = (:$:) Ach
atanh = (:$:) Ath
(**) = (:^:)
var :: (Contains var vs, KnownSymbol var) => Proxy var -> Expr n vs
var (_ :: Proxy var) = Expr (VarF (contains :: Contains var vs => Elem var vs)) :: Contains var vs => Expr n vs
matchVar :: (Contains var vs, KnownSymbol var) => Expr n vs -> Maybe (Proxy var)
matchVar (Expr (VarF (vs :: Elem v vs))) = run vs (contains :: Contains var vs => Elem var vs) where
run :: forall x y vars. Elem x vars -> Elem y vars -> Maybe (Proxy y)
run Here Here = Just Proxy
run Here (There _) = Nothing
run (There _) Here = Nothing
run (There x) (There y) = run x y
matchVar _ = Nothing
pattern x :*: y = Expr (x :* y)
pattern x :+: y = Expr (x :+ y)
pattern x :-: y = Expr (x :- y)
pattern x :/: y = Expr (x :/ y)
pattern x :%: y = Expr (x :% y)
pattern x :÷: y = Expr (x :÷ y)
pattern x :^: y = Expr (x :^ y)
pattern x :$: y = Expr (x :$ y)
pattern Pi = Expr PiF
pattern Abs x = Expr (AbsF x)
pattern Sig x = Expr (SigF x)
pattern Lit x = Expr (LitF x)
pattern Rat x = Expr (RatF x)
pattern Neg x = Expr (NegF x)
pattern Var :: (Contains var vs, KnownSymbol var) => Proxy var -> Expr n vs
pattern Var x <- (matchVar -> Just x) where
Var x = var x
_RemVar
:: (Remove x xs ys)
=> Proxy x -> Setter (Expr n xs) (Expr n ys) (Proxy x) (Expr n ys)
_RemVar _ (f :: Proxy x -> f (Expr n ys)) =
let v = f Proxy
in cata $
\case
PiF -> pure Pi
RatF a -> pure (Rat a)
x :+ y -> (:+:) <$> x <*> y
x :* y -> (:*:) <$> x <*> y
x :- y -> (:-:) <$> x <*> y
AbsF x -> fmap Abs x
SigF x -> fmap Sig x
NegF x -> fmap Neg x
LitF x -> pure (Lit x)
x :÷ y -> (:÷:) <$> x <*> y
x :% y -> (:%:) <$> x <*> y
x :/ y -> (:/:) <$> x <*> y
fn :$ x -> fmap (fn :$:) x
x :^ y -> (:^:) <$> x <*> y
VarF (q :: Elem y xs) ->
case (replace (Proxy :: Proxy x) q :: Maybe (Elem y ys)) of
Nothing -> v
Just vs -> pure (Expr (VarF vs))
_Var
:: (Contains x xs, Contains y xs, KnownSymbol y)
=> Proxy x -> Traversal (Expr n xs) (Expr n xs) (Proxy x) (Proxy y)
_Var p (f :: Proxy x -> f (Proxy y)) =
let v = fmap (Expr . VarF . toElem) (f Proxy)
in cata $
\case
PiF -> pure Pi
RatF a -> pure (Rat a)
x :+ y -> (:+:) <$> x <*> y
x :* y -> (:*:) <$> x <*> y
x :- y -> (:-:) <$> x <*> y
AbsF x -> fmap Abs x
SigF x -> fmap Sig x
NegF x -> fmap Neg x
LitF x -> pure (Lit x)
x :÷ y -> (:÷:) <$> x <*> y
x :% y -> (:%:) <$> x <*> y
x :/ y -> (:/:) <$> x <*> y
fn :$ x -> fmap (fn :$:) x
x :^ y -> (:^:) <$> x <*> y
VarF q -> rep q (toElem p) v (pure (Expr (VarF q)))
where rep
:: forall xv yv xs a.
Elem xv xs -> Elem yv xs -> a -> a -> a
rep Here Here t _ = t
rep (There x) (There y) t n = rep x y t n
rep _ _ _ n = n
toElem :: Contains x xs => Proxy x -> Elem x xs
toElem _ = contains
example :: Expr Integer '["a","b"]
example = 1 + 2 * Var (Proxy @ "a")
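-- Two illustrative companions to 'example' (not part of the original API; the
-- substituted literal 5 is an arbitrary choice):
exampleShown :: String
exampleShown = show example -- expected to render as "1 + 2 * a"

-- Substitute both variables away with '_RemVar', leaving a closed expression
-- that 'evalAlg' can fold to a number: 1 + 2 * 5 == 11.
exampleValue :: Integer
exampleValue =
  cata evalAlg $
    set (_RemVar (Proxy :: Proxy "b")) 5 $
      set (_RemVar (Proxy :: Proxy "a")) 5 example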
instance Show (Expr n vs) where
showsPrec n =
showParen (n >= 1) .
appEndo . showExpr (Endo . showParen True . appEndo) (proj . getExpr)
where
proj =
\case
RatF x -> PrintLit (ff x)
PiF -> PrintLit (Endo (showChar 'ᴨ'))
LitF x -> PrintLit (ff x)
VarF (_ :: Elem var vs) ->
PrintLit (fs (symbolVal (Proxy :: Proxy var)))
AbsF x -> Prefix (Operator L 10 (fs "abs ")) x
SigF x -> Prefix (Operator L 10 (fs "signum ")) x
NegF x -> Prefix (Operator L 10 (fs "-")) x
f :$ x -> Prefix (Operator L 10 (ff f <> fs " ")) x
x :^ y -> Binary (Operator R 8 (fs " ^ ")) x y
x :÷ y -> Binary (Operator L 7 (fs " ÷ ")) x y
x :% y -> Binary (Operator L 7 (fs " % ")) x y
x :/ y -> Binary (Operator L 7 (fs " / ")) x y
x :* y -> Binary (Operator L 7 (fs " * ")) x y
x :+ y -> Binary (Operator L 6 (fs " + ")) x y
x :- y -> Binary (Operator L 6 (fs " - ")) x y
fs = Endo . showString
ff
:: forall a.
Show a
=> a -> Endo String
ff = Endo . shows
data Side = L | R deriving Eq
-- | This datatype represents a level of an expression tree, and it
-- contains all the information needed to properly print that given
-- expression.
data ShowExpr t e
-- | An expression with no children. The argument is the expression's textual representation.
= PrintLit t
-- | A prefix expression with one child.
| Prefix (Operator t) e
-- | A postfix expression with one child.
| Postfix (Operator t) e
-- | An expression with two children.
| Binary (Operator t) e e
deriving Functor
-- | This datatype represents the necessary information for pretty-printing an operator
data Operator t = Operator
{ -- | The associativity of an operator. Most are left-associative. Exponentiation is one of the exceptions.
_associativity :: Side
-- | Precedence is assumed to be unique.
, _precedence :: Int
-- | The textual representation of the operator.
, _representation :: t }
showExpr :: Monoid t
=> (t -> t) -- ^ This argument should be a function which parenthesizes its input.
-> (e -> ShowExpr t e)
-> e -> t
showExpr prns projc = rec . projc where
rec = showAlg . fmap ((prec' &&& rec) . projc)
showAlg = \case
PrintLit t -> t
Prefix (Operator s r t) (q,y) -> t <> ifPrns R s r q y
Postfix (Operator s r t) (p,x) -> ifPrns L s r p x <> t
Binary (Operator s r t) (p,x) (q,y) -> ifPrns L s r p x <> t <> ifPrns R s r q y
ifPrns sid oa op' (Just (ia,ip))
| ip < op' || ip == op' && (ia /= oa || oa /= sid) = prns
ifPrns _ _ _ _ = id
prec' = \case
PrintLit _ -> Nothing
Prefix (Operator s r _) _ -> Just (s,r)
Postfix (Operator s r _) _ -> Just (s,r)
Binary (Operator s r _) _ _ -> Just (s,r)
{-# INLINABLE showExpr #-}
instance Plated (Expr n vs) where
plate f (Expr xs) = fmap Expr (traverse f xs)
assoc :: Expr n vs -> Expr n vs
assoc = rewrite $ \case
x :*: (y :*: z) -> Just $ (x :*: y) :*: z
x :*: (y :/: z) -> Just $ (x :*: y) :/: z
x :+: (y :-: z) -> Just $ (x :+: y) :-: z
x :+: (y :+: z) -> Just $ (x :+: y) :+: z
_ -> Nothing
simplify :: Expr n vs -> Expr n vs
simplify = rewrite $ \case
x :+: 0 -> Just x
0 :+: x -> Just x
x :/: 1 -> Just x
1 :*: x -> Just x
x :*: 1 -> Just x
x :^: 1 -> Just x
1 :^: _ -> Just 1
_ :^: 0 -> Just 1
0 :*: _ -> Just 0
_ :*: 0 -> Just 0
_ :%: 1 -> Just 0
Neg 0 -> Just 0
x :-: y | x == y -> Just 0
x :/: y | x == y -> Just 1
x :%: y | x == y -> Just 0
x :÷: y | x == y -> Just 1
_ -> Nothing
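-- A quick, illustrative sanity check for the rewrite rules above (not part of
-- the original module): multiplying 'example' by 1 should normalise away.
simplifyExample :: Bool
simplifyExample = simplify (example * 1) == example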
safeEvalAlg :: Eq n => ExprF n '[] n -> Maybe n
safeEvalAlg = \case
_ :/ 0 -> Nothing
_ :÷ 0 -> Nothing
_ :% 0 -> Nothing
x -> Just $ evalAlg x
class KnownSymbols (xs :: [Symbol]) where
getSymbols :: Proxy xs -> [VarFunc xs]
instance KnownSymbols '[] where
getSymbols _ = []
instance (KnownSymbol x, KnownSymbols xs) =>
KnownSymbols (x : xs) where
getSymbols (Proxy :: Proxy (x : xs)) =
fromSymbol (Proxy :: Proxy x) : map mapVarFunc (getSymbols (Proxy :: Proxy xs))
newtype VarFunc vs = VarFunc
{ runVarFunc :: forall b. (forall v. KnownSymbol v => Elem v vs -> String -> b) -> b }
mapVarFunc :: VarFunc vs -> VarFunc (v : vs)
mapVarFunc (VarFunc f) = VarFunc (\c -> f (\y -> c (There y)))
fromSymbol :: KnownSymbol v => Proxy v -> VarFunc (v : vs)
fromSymbol (p :: Proxy v) = VarFunc (\c -> c (Here :: Elem v (v : vs)) (symbolVal p))
| oisdk/Expr | expr-playgrounds/src/Numeric/Expr.hs | bsd-3-clause | 15,483 | 0 | 18 | 5,124 | 7,232 | 3,712 | 3,520 | -1 | -1 |
module ParsecExpr(module Text.ParserCombinators.Parsec.Expr) where
import Text.ParserCombinators.Parsec.Expr
| OS2World/DEV-UTIL-HUGS | oldlib/ParsecExpr.hs | bsd-3-clause | 109 | 0 | 5 | 6 | 21 | 15 | 6 | 2 | 0 |
-- Copyright (c) 2016-present, Facebook, Inc.
-- All rights reserved.
--
-- This source code is licensed under the BSD-style license found in the
-- LICENSE file in the root directory of this source tree. An additional grant
-- of patent rights can be found in the PATENTS file in the same directory.
module Duckling.Distance.KO.Tests
( tests ) where
import Prelude
import Data.String
import Test.Tasty
import Duckling.Dimensions.Types
import Duckling.Testing.Asserts
import Duckling.Distance.KO.Corpus
tests :: TestTree
tests = testGroup "KO Tests"
[ makeCorpusTest [This Distance] corpus
]
| rfranek/duckling | tests/Duckling/Distance/KO/Tests.hs | bsd-3-clause | 603 | 0 | 9 | 96 | 80 | 51 | 29 | 11 | 1 |
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE OverloadedStrings #-}
module Path.CheckInstall where
import Control.Monad.Extra (anyM, (&&^))
import qualified Data.Text as T
import Stack.Prelude
import qualified System.Directory as D
import qualified System.FilePath as FP
-- | Checks if the installed executable will be available on the user's
-- PATH. This doesn't use @envSearchPath menv@ because it includes paths
-- only visible when running in the stack environment.
warnInstallSearchPathIssues :: (MonadIO m, MonadLogger m) => FilePath -> [Text] -> m ()
warnInstallSearchPathIssues destDir installed = do
searchPath <- liftIO FP.getSearchPath
destDirIsInPATH <- liftIO $
anyM (\dir -> D.doesDirectoryExist dir &&^ fmap (FP.equalFilePath destDir) (D.canonicalizePath dir)) searchPath
if destDirIsInPATH
then forM_ installed $ \exe -> do
mexePath <- (liftIO . D.findExecutable . T.unpack) exe
case mexePath of
Just exePath -> do
exeDir <- (liftIO . fmap FP.takeDirectory . D.canonicalizePath) exePath
unless (exeDir `FP.equalFilePath` destDir) $ do
logWarn ""
logWarn $ T.concat
[ "WARNING: The \""
, exe
, "\" executable found on the PATH environment variable is "
, T.pack exePath
, ", and not the version that was just installed."
]
logWarn $ T.concat
[ "This means that \""
, exe
, "\" calls on the command line will not use this version."
]
Nothing -> do
logWarn ""
logWarn $ T.concat
[ "WARNING: Installation path "
, T.pack destDir
, " is on the PATH but the \""
, exe
, "\" executable that was just installed could not be found on the PATH."
]
else do
logWarn ""
logWarn $ T.concat
[ "WARNING: Installation path "
, T.pack destDir
, " not found on the PATH environment variable"
]
| MichielDerhaeg/stack | src/Path/CheckInstall.hs | bsd-3-clause | 2,507 | 0 | 24 | 1,051 | 417 | 219 | 198 | 46 | 3 |
{-# LANGUAGE RankNTypes #-}
module FreeM where
import Control.Monad
import Data.Profunctor
newtype FreeM a = FreeM { foldM :: forall m. Monoid m => (a -> m) -> m }
instance Monoid (FreeM a) where
mempty = FreeM $ const mempty
mappend (FreeM a) (FreeM b) = FreeM $ liftM2 mappend a b
lift :: a -> FreeM a
lift a = FreeM ($ a)
instance Functor FreeM where
fmap f (FreeM fm) = FreeM $ lmap (lmap f) fm
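-- A tiny illustration (not part of the original module): interpret the free
-- monoid into an ordinary list monoid.
foldToList :: FreeM a -> [a]
foldToList (FreeM g) = g (\x -> [x])
-- e.g. foldToList (lift 1 `mappend` lift 2) == [1, 2]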
| isovector/category-theory | src/FreeM.hs | bsd-3-clause | 412 | 0 | 11 | 93 | 177 | 94 | 83 | 12 | 1 |
{-# LANGUAGE CPP #-}
#if __GLASGOW_HASKELL__ >= 800
{-# LANGUAGE TemplateHaskellQuotes #-}
#else
{-# LANGUAGE TemplateHaskell #-}
#endif
------------------------------------------------------------------------------
-- |
-- Module: Database.PostgreSQL.Simple.TypeInfo.Macro
-- Copyright: (c) 2013 Leon P Smith
-- License: BSD3
-- Maintainer: Leon P Smith <[email protected]>
-- Stability: experimental
--
-- A Template Haskell macro for efficiently checking membership in
-- a set of type oids.
--
------------------------------------------------------------------------------
module Database.PostgreSQL.Simple.TypeInfo.Macro
( mkCompats
, inlineTypoid
) where
import Database.PostgreSQL.Simple.TypeInfo.Static
import Database.PostgreSQL.Simple.Types (Oid(..))
import Language.Haskell.TH
-- | Returns an expression that has type @'Oid' -> 'Bool'@, true if the
-- oid is equal to any one of the 'typoid's of the given 'TypeInfo's.
mkCompats :: [TypeInfo] -> ExpQ
mkCompats tys = do
x <- newName "x"
lamE [conP 'Oid [varP x]] $ caseE (varE x) (map alt tys ++ [catchAll])
where
alt :: TypeInfo -> MatchQ
alt ty = match (inlineTypoidP ty) (normalB [| True |]) []
catchAll :: MatchQ
catchAll = match wildP (normalB [| False |]) []
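-- A typical use site lives in another module (Template Haskell cannot splice a
-- function defined in the same module). A sketch, assuming the integer type
-- oids exported by "Database.PostgreSQL.Simple.TypeInfo.Static":
--
-- > isAnyInt :: Oid -> Bool
-- > isAnyInt = $(mkCompats [int2, int4, int8])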
-- | Literally substitute the 'typoid' of a 'TypeInfo' expression.
-- Returns an expression of type 'Oid'. Useful because GHC tends
-- not to fold constants.
inlineTypoid :: TypeInfo -> ExpQ
inlineTypoid ty = conE 'Oid `appE` litE (getTypoid ty)
inlineTypoidP :: TypeInfo -> PatQ
inlineTypoidP ty = litP (getTypoid ty)
getTypoid :: TypeInfo -> Lit
getTypoid ty = let (Oid x) = typoid ty in integerL (fromIntegral x)
| tomjaguarpaw/postgresql-simple | src/Database/PostgreSQL/Simple/TypeInfo/Macro.hs | bsd-3-clause | 1,721 | 0 | 12 | 303 | 330 | 188 | 142 | 22 | 1 |
module System.Timeout.Resettable ( module System.Timeout.Resettable.ADT ) where
import System.Timeout.Resettable.ADT
| basvandijk/resettable-timeouts | System/Timeout/Resettable.hs | bsd-3-clause | 117 | 0 | 5 | 9 | 24 | 17 | 7 | 2 | 0 |
{-# LANGUAGE NoImplicitPrelude #-}
--
-- Photon map generator
--
module Main where
import Control.Monad
import NumericPrelude
import Ray.Algebra
import Ray.Physics
import Ray.Light
import Tracer
import Scene
nphoton = 10000 :: Int
main :: IO ()
main = do
let tflux = sum $ map flux lgts
putStrLn $ show (tflux / (fromIntegral nphoton))
let l = head lgts
ps <- mapM generatePhoton (replicate nphoton l)
prs <- mapM (tracePhoton objs) ps
  forM_ (concat prs) $ \i ->
    putStrLn $ show i
putStrLn "end"
| eijian/raytracer | app/old/Main-0.hs | bsd-3-clause | 529 | 0 | 13 | 113 | 192 | 97 | 95 | 20 | 1 |
{-# Language DeriveDataTypeable #-}
{-# Language LambdaCase #-}
{-# Language OverloadedStrings #-}
{-# Language TemplateHaskell #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
module Test.River.Arbitrary (
X(..)
) where
import Data.Char (ord)
import qualified Data.HashSet as HashSet
import qualified Data.Text as T
import River.Source.Parser
import River.Source.Syntax
import Test.Feat
import Test.Feat.Class
import Test.Feat.Modifiers
import Test.Feat.Enumerate
import Test.QuickCheck
------------------------------------------------------------------------
-- Annotation which has a quiet show instance to reduce clutter.
data X =
X
deriving (Eq, Ord, Typeable)
instance Arbitrary X where
arbitrary =
return X
shrink _ =
[]
instance Show X where
showsPrec _ X =
("△" ++)
------------------------------------------------------------------------
deriveEnumerable ''X
deriveEnumerable ''Type
deriveEnumerable ''UnaryOp
deriveEnumerable ''BinaryOp
deriveEnumerable ''Expression
deriveEnumerable ''Statement
deriveEnumerable ''Block
-- Literals cannot be negative
deriveEnumerable' .
dExcept 'LiteralInt [| unary $ LiteralInt . nat |] $
dAll ''Literal
-- Identifiers must not be reserved words or empty
instance Enumerable Identifier where
enumerate =
let
mkIdent xs =
let
ident =
fmap identChar $ nonEmpty xs
in
if legalIdent ident then
Identifier (T.pack ident)
else
Identifier "xxx"
in
fmap mkIdent enumerate
------------------------------------------------------------------------
instance Arbitrary UnaryOp where
arbitrary =
sized uniform
shrink =
genericShrink
instance Arbitrary BinaryOp where
arbitrary =
sized uniform
shrink =
genericShrink
instance Arbitrary Type where
arbitrary =
sized uniform
shrink =
genericShrink
instance Arbitrary Identifier where
arbitrary =
sized uniform `suchThat` \(Identifier ns) ->
legalIdent (T.unpack ns)
shrink = \case
Identifier n ->
fmap (Identifier . T.pack) .
filter legalIdent $
shrink (T.unpack n)
instance Arbitrary Literal where
arbitrary =
sized uniform
shrink =
genericShrink
instance (Enumerable a, Arbitrary a) => Arbitrary (Expression a) where
arbitrary =
sized uniform
shrink =
genericShrink
instance (Enumerable a, Arbitrary a) => Arbitrary (Statement a) where
arbitrary =
sized uniform
shrink =
genericShrink
instance (Enumerable a, Arbitrary a) => Arbitrary (Block a) where
arbitrary =
fmap fixBlock $
Block <$> arbitrary <*> arbitrary
shrink =
genericShrink
instance (Enumerable a, Arbitrary a) => Arbitrary (Program a) where
arbitrary =
fmap fixProgram $
Program <$> arbitrary <*> arbitrary
shrink =
genericShrink
------------------------------------------------------------------------
fixProgram :: Program a -> Program a
fixProgram = \case
Program a b ->
Program a (fixBlock b)
fixBlock :: Block a -> Block a
fixBlock = \case
Block a ss ->
Block a (fixStatements ss)
fixStatements :: [Statement a] -> [Statement a]
fixStatements = \case
[] ->
[]
Declare a t n (Block ab ss1) : ss2 ->
[Declare a t n . Block ab . fixStatements $ ss1 ++ ss2]
s : ss ->
s : fixStatements ss
------------------------------------------------------------------------
legalIdent :: String -> Bool
legalIdent ident =
not (null ident) &&
not (HashSet.member ident reservedNames)
newtype IdentChar =
IdentChar {
identChar :: Char
} deriving (Typeable, Show)
instance Enumerable IdentChar where
enumerate =
fmap IdentChar $ enumerateBounded (ord 'a') (ord 'z')
-- I have no idea what this does (stolen from Test.Feat.Modifiers)
enumerateBounded :: Enum a => Int -> Int -> Enumerate a
enumerateBounded from to =
let
nats =
[0..] :: [Integer]
prts =
toRev $ fmap (\p -> Finite (crd p) (sel p)) nats
crd p =
if p <= 0 then
0
else if p == 1 then
1
else if 2 ^ (p - 1) > num then
max 0 (num - 2 ^ (p-2))
else
2 ^ (p - 2)
sel 1 0 =
toEnum from
sel p i =
toEnum $ 2^(p-2) + fromInteger i + from
num =
toInteger $ to - from
enum =
Enumerate prts (return enum)
in
enum
| jystic/river | test/Test/River/Arbitrary.hs | bsd-3-clause | 4,479 | 0 | 16 | 1,157 | 1,257 | 653 | 604 | 151 | 5 |
{-# LANGUAGE Rank2Types, TemplateHaskell, BangPatterns, MultiParamTypeClasses, FunctionalDependencies, FlexibleInstances, UndecidableInstances, ScopedTypeVariables #-}
-----------------------------------------------------------------------------
-- |
-- Module : Numeric.AD.Mode.Chain
-- Copyright : (c) Edward Kmett 2010
-- License : BSD3
-- Maintainer : [email protected]
-- Stability : experimental
-- Portability : GHC only
--
-- Reverse Automatic Differentiation using Data.Reflection
--
-----------------------------------------------------------------------------
module Numeric.AD.Mode.Chain
(
-- * Gradient
grad
, grad'
, gradWith
, gradWith'
-- * Jacobian
, jacobian
, jacobian'
, jacobianWith
, jacobianWith'
-- * Hessian
, hessian
, hessianF
-- * Derivatives
, diff
, diff'
, diffF
, diffF'
) where
import Control.Applicative ((<$>))
import Data.Traversable (Traversable)
import Numeric.AD.Types
import Numeric.AD.Internal.Classes
import Numeric.AD.Internal.Composition
import Numeric.AD.Internal.Chain
import Numeric.AD.Internal.Var
-- | The 'grad' function calculates the gradient of a non-scalar-to-scalar function with reverse-mode AD in a single pass.
--
--
-- >>> grad (\[x,y,z] -> x*y+z) [1,2,3]
-- [2,1,1]
grad :: (Traversable f, Num a) => (forall s. Mode s => f (AD s a) -> AD s a) -> f a -> f a
grad f as = reifyTape (snd bds) $ \p -> unbind vs $! partialArrayOf p bds $! f $ vary <$> vs
where (vs, bds) = bind as
{-# INLINE grad #-}
-- | The 'grad'' function calculates the result and gradient of a non-scalar-to-scalar function with reverse-mode AD in a single pass.
--
-- >>> grad' (\[x,y,z] -> x*y+z) [1,2,3]
-- (5,[2,1,1])
grad' :: (Traversable f, Num a) => (forall s. Mode s => f (AD s a) -> AD s a) -> f a -> (a, f a)
grad' f as = reifyTape (snd bds) $ \p ->
let r = f (fmap vary vs) in (primal r, unbind vs $! partialArrayOf p bds $! r)
where (vs, bds) = bind as
{-# INLINE grad' #-}
-- | @'grad' g f@ function calculates the gradient of a non-scalar-to-scalar function @f@ with reverse-mode AD in a single pass.
-- The gradient is combined element-wise with the argument using the function @g@.
--
-- @
-- 'grad' == 'gradWith' (\_ dx -> dx)
-- 'id' == 'gradWith' 'const'
-- @
gradWith :: (Traversable f, Num a) => (a -> a -> b) -> (forall s. Mode s => f (AD s a) -> AD s a) -> f a -> f b
gradWith g f as = reifyTape (snd bds) $ \p -> unbindWith g vs $! partialArrayOf p bds $! f $ vary <$> vs
where (vs,bds) = bind as
{-# INLINE gradWith #-}
-- | @'grad'' g f@ calculates the result and gradient of a non-scalar-to-scalar function @f@ with reverse-mode AD in a single pass
-- the gradient is combined element-wise with the argument using the function @g@.
--
-- @
-- 'grad'' == 'gradWith'' (\_ dx -> dx)
-- @
gradWith' :: (Traversable f, Num a) => (a -> a -> b) -> (forall s. Mode s => f (AD s a) -> AD s a) -> f a -> (a, f b)
gradWith' g f as = reifyTape (snd bds) $ \p ->
let r = f (fmap vary vs) in (primal r, unbindWith g vs $! partialArrayOf p bds $! r)
where (vs, bds) = bind as
{-# INLINE gradWith' #-}
-- | The 'jacobian' function calculates the jacobian of a non-scalar-to-non-scalar function with reverse AD lazily in @m@ passes for @m@ outputs.
--
-- >>> jacobian (\[x,y] -> [y,x,x*y]) [2,1]
-- [[0,1],[1,0],[1,2]]
jacobian :: (Traversable f, Functor g, Num a) => (forall s. Mode s => f (AD s a) -> g (AD s a)) -> f a -> g (f a)
jacobian f as = reifyTape (snd bds) $ \p -> unbind vs . partialArrayOf p bds <$> f (fmap vary vs)
where (vs, bds) = bind as
{-# INLINE jacobian #-}
-- | The 'jacobian'' function calculates both the result and the Jacobian of a nonscalar-to-nonscalar function, using @m@ invocations of reverse AD,
-- where @m@ is the output dimensionality. Applying @fmap snd@ to the result will recover the result of 'jacobian'
--
-- (This is also an alias for 'gradF''.)
--
-- >>> jacobian' (\[x,y] -> [y,x,x*y]) [2,1]
-- [(1,[0,1]),(2,[1,0]),(2,[1,2])]
jacobian' :: (Traversable f, Functor g, Num a) => (forall s. Mode s => f (AD s a) -> g (AD s a)) -> f a -> g (a, f a)
jacobian' f as = reifyTape (snd bds) $ \p ->
let row a = (primal a, unbind vs $! partialArrayOf p bds $! a)
in row <$> f (vary <$> vs)
where (vs, bds) = bind as
{-# INLINE jacobian' #-}
-- | 'jacobianWith g f' calculates the Jacobian of a non-scalar-to-non-scalar function @f@ with reverse AD lazily in @m@ passes for @m@ outputs.
--
-- Instead of returning the Jacobian matrix, the elements of the matrix are combined with the input using the @g@.
--
-- @
-- 'jacobian' == 'jacobianWith' (\_ dx -> dx)
-- 'jacobianWith' 'const' == (\f x -> 'const' x '<$>' f x)
-- @
jacobianWith :: (Traversable f, Functor g, Num a) => (a -> a -> b) -> (forall s. Mode s => f (AD s a) -> g (AD s a)) -> f a -> g (f b)
jacobianWith g f as = reifyTape (snd bds) $ \p -> unbindWith g vs . partialArrayOf p bds <$> f (fmap vary vs) where
(vs, bds) = bind as
{-# INLINE jacobianWith #-}
-- | 'jacobianWith' g f' calculates both the result and the Jacobian of a nonscalar-to-nonscalar function @f@, using @m@ invocations of reverse AD,
-- where @m@ is the output dimensionality. Applying @fmap snd@ to the result will recover the result of 'jacobianWith'
--
-- Instead of returning the Jacobian matrix, the elements of the matrix are combined with the input using the @g@.
--
-- @'jacobian'' == 'jacobianWith'' (\_ dx -> dx)@
--
jacobianWith' :: (Traversable f, Functor g, Num a) => (a -> a -> b) -> (forall s. Mode s => f (AD s a) -> g (AD s a)) -> f a -> g (a, f b)
jacobianWith' g f as = reifyTape (snd bds) $ \p ->
let row a = (primal a, unbindWith g vs $! partialArrayOf p bds $! a)
in row <$> f (vary <$> vs)
where (vs, bds) = bind as
{-# INLINE jacobianWith' #-}
-- | Compute the derivative of a function.
--
-- >>> diff sin 0
-- 1.0
diff :: Num a => (forall s. Mode s => AD s a -> AD s a) -> a -> a
diff f a = reifyTape 1 $ \p -> derivativeOf p $! f (var a 0)
{-# INLINE diff #-}
-- | The 'diff'' function calculates the result and derivative, as a pair, of a scalar-to-scalar function.
--
-- >>> diff' sin 0
-- (0.0,1.0)
--
-- >>> diff' exp 0
-- (1.0,1.0)
diff' :: Num a => (forall s. Mode s => AD s a -> AD s a) -> a -> (a, a)
diff' f a = reifyTape 1 $ \p -> derivativeOf' p $! f (var a 0)
{-# INLINE diff' #-}
-- | Compute the derivatives of each result of a scalar-to-vector function with regards to its input.
--
-- >>> diffF (\a -> [sin a, cos a]) 0
-- [1.0,0.0]
--
diffF :: (Functor f, Num a) => (forall s. Mode s => AD s a -> f (AD s a)) -> a -> f a
diffF f a = reifyTape 1 $ \p -> derivativeOf p <$> f (var a 0)
{-# INLINE diffF #-}
-- | Compute the derivatives of each result of a scalar-to-vector function with regards to its input along with the answer.
--
-- >>> diffF' (\a -> [sin a, cos a]) 0
-- [(0.0,1.0),(1.0,0.0)]
diffF' :: (Functor f, Num a) => (forall s. Mode s => AD s a -> f (AD s a)) -> a -> f (a, a)
diffF' f a = reifyTape 1 $ \p -> derivativeOf' p <$> f (var a 0)
{-# INLINE diffF' #-}
-- | Compute the hessian via the jacobian of the gradient. gradient is computed in reverse mode and then the jacobian is computed in reverse mode.
--
-- However, since the @'grad' f :: f a -> f a@ is square this is not as fast as using the forward-mode Jacobian of a reverse mode gradient provided by 'Numeric.AD.hessian'.
--
-- >>> hessian (\[x,y] -> x*y) [1,2]
-- [[0,1],[1,0]]
hessian :: (Traversable f, Num a) => (forall s. Mode s => f (AD s a) -> AD s a) -> f a -> f (f a)
hessian f = jacobian (grad (decomposeMode . f . fmap composeMode))
-- | Compute the order 3 Hessian tensor on a non-scalar-to-non-scalar function via the reverse-mode Jacobian of the reverse-mode Jacobian of the function.
--
-- Less efficient than 'Numeric.AD.Mode.Mixed.hessianF'.
--
-- >>> hessianF (\[x,y] -> [x*y,x+y,exp x*cos y]) [1,2]
-- [[[0.0,1.0],[1.0,0.0]],[[0.0,0.0],[0.0,0.0]],[[-1.1312043837568135,-2.4717266720048188],[-2.4717266720048188,1.1312043837568135]]]
hessianF :: (Traversable f, Functor g, Num a) => (forall s. Mode s => f (AD s a) -> g (AD s a)) -> f a -> g (f (f a))
hessianF f = decomposeFunctor . jacobian (ComposeFunctor . jacobian (fmap decomposeMode . f . fmap composeMode))
| yairchu/ad | src/Numeric/AD/Mode/Chain.hs | bsd-3-clause | 8,247 | 0 | 14 | 1,684 | 2,239 | 1,181 | 1,058 | 78 | 1 |
{-# LANGUAGE CPP, NamedFieldPuns, NondecreasingIndentation #-}
{-# OPTIONS_GHC -fno-cse #-}
-- -fno-cse is needed for GLOBAL_VAR's to behave properly
-----------------------------------------------------------------------------
--
-- GHC Driver
--
-- (c) The University of Glasgow 2005
--
-----------------------------------------------------------------------------
module DriverPipeline (
-- Run a series of compilation steps in a pipeline, for a
-- collection of source files.
oneShot, compileFile,
-- Interfaces for the batch-mode driver
linkBinary,
-- Interfaces for the compilation manager (interpreted/batch-mode)
preprocess,
compileOne, compileOne',
link,
-- Exports for hooks to override runPhase and link
PhasePlus(..), CompPipeline(..), PipeEnv(..), PipeState(..),
phaseOutputFilename, getPipeState, getPipeEnv,
hscPostBackendPhase, getLocation, setModLocation, setDynFlags,
runPhase, exeFileName,
mkExtraObjToLinkIntoBinary, mkNoteObjsToLinkIntoBinary,
maybeCreateManifest, runPhase_MoveBinary,
linkingNeeded, checkLinkInfo
) where
#include "HsVersions.h"
import PipelineMonad
import Packages
import HeaderInfo
import DriverPhases
import SysTools
import HscMain
import Finder
import HscTypes hiding ( Hsc )
import Outputable
import Module
import UniqFM ( eltsUFM )
import ErrUtils
import DynFlags
import Config
import Panic
import Util
import StringBuffer ( hGetStringBuffer )
import BasicTypes ( SuccessFlag(..) )
import Maybes ( expectJust )
import SrcLoc
import FastString
import LlvmCodeGen ( llvmFixupAsm )
import MonadUtils
import Platform
import TcRnTypes
import Hooks
import Exception
import Data.IORef ( readIORef )
import System.Directory
import System.FilePath
import System.IO
import Control.Monad
import Data.List ( isSuffixOf )
import Data.Maybe
import System.Environment
import Data.Char
-- ---------------------------------------------------------------------------
-- Pre-process
-- | Just preprocess a file, put the result in a temp. file (used by the
-- compilation manager during the summary phase).
--
-- We return the augmented DynFlags, because they contain the result
-- of slurping in the OPTIONS pragmas
preprocess :: HscEnv
-> (FilePath, Maybe Phase) -- ^ filename and starting phase
-> IO (DynFlags, FilePath)
preprocess hsc_env (filename, mb_phase) =
ASSERT2(isJust mb_phase || isHaskellSrcFilename filename, text filename)
runPipeline anyHsc hsc_env (filename, fmap RealPhase mb_phase)
Nothing Temporary Nothing{-no ModLocation-} Nothing{-no stub-}
-- ---------------------------------------------------------------------------
-- | Compile
--
-- Compile a single module, under the control of the compilation manager.
--
-- This is the interface between the compilation manager and the
-- compiler proper (hsc), where we deal with tedious details like
-- reading the OPTIONS pragma from the source file, converting the
-- C or assembly that GHC produces into an object file, and compiling
-- FFI stub files.
--
-- NB. No old interface can also mean that the source has changed.
compileOne :: HscEnv
-> ModSummary -- ^ summary for module being compiled
-> Int -- ^ module N ...
-> Int -- ^ ... of M
-> Maybe ModIface -- ^ old interface, if we have one
-> Maybe Linkable -- ^ old linkable, if we have one
-> SourceModified
-> IO HomeModInfo -- ^ the complete HomeModInfo, if successful
compileOne = compileOne' Nothing (Just batchMsg)
compileOne' :: Maybe TcGblEnv
-> Maybe Messager
-> HscEnv
-> ModSummary -- ^ summary for module being compiled
-> Int -- ^ module N ...
-> Int -- ^ ... of M
-> Maybe ModIface -- ^ old interface, if we have one
-> Maybe Linkable -- ^ old linkable, if we have one
-> SourceModified
-> IO HomeModInfo -- ^ the complete HomeModInfo, if successful
compileOne' m_tc_result mHscMessage
hsc_env0 summary mod_index nmods mb_old_iface maybe_old_linkable
source_modified0
= do
let dflags0 = ms_hspp_opts summary
this_mod = ms_mod summary
src_flavour = ms_hsc_src summary
location = ms_location summary
input_fn = expectJust "compile:hs" (ml_hs_file location)
input_fnpp = ms_hspp_file summary
mod_graph = hsc_mod_graph hsc_env0
needsTH = any (xopt Opt_TemplateHaskell . ms_hspp_opts) mod_graph
needsQQ = any (xopt Opt_QuasiQuotes . ms_hspp_opts) mod_graph
needsLinker = needsTH || needsQQ
isDynWay = any (== WayDyn) (ways dflags0)
isProfWay = any (== WayProf) (ways dflags0)
-- #8180 - when using TemplateHaskell, switch on -dynamic-too so
-- the linker can correctly load the object files.
let dflags1 = if needsLinker && dynamicGhc && not isDynWay && not isProfWay
then gopt_set dflags0 Opt_BuildDynamicToo
else dflags0
debugTraceMsg dflags1 2 (text "compile: input file" <+> text input_fnpp)
let basename = dropExtension input_fn
    -- We add the directory in which the .hs file resides to the import path.
-- This is needed when we try to compile the .hc file later, if it
-- imports a _stub.h file that we created here.
let current_dir = takeDirectory basename
old_paths = includePaths dflags1
dflags = dflags1 { includePaths = current_dir : old_paths }
hsc_env = hsc_env0 {hsc_dflags = dflags}
-- Figure out what lang we're generating
let hsc_lang = hscTarget dflags
-- ... and what the next phase should be
let next_phase = hscPostBackendPhase dflags src_flavour hsc_lang
-- ... and what file to generate the output into
output_fn <- getOutputFilename next_phase
Temporary basename dflags next_phase (Just location)
-- -fforce-recomp should also work with --make
let force_recomp = gopt Opt_ForceRecomp dflags
source_modified
| force_recomp || isNothing maybe_old_linkable = SourceModified
| otherwise = source_modified0
object_filename = ml_obj_file location
let always_do_basic_recompilation_check = case hsc_lang of
HscInterpreted -> True
_ -> False
e <- genericHscCompileGetFrontendResult
always_do_basic_recompilation_check
m_tc_result mHscMessage
hsc_env summary source_modified mb_old_iface (mod_index, nmods)
case e of
Left iface ->
do details <- genModDetails hsc_env iface
MASSERT(isJust maybe_old_linkable)
return (HomeModInfo{ hm_details = details,
hm_iface = iface,
hm_linkable = maybe_old_linkable })
Right (tc_result, mb_old_hash) ->
-- run the compiler
case hsc_lang of
HscInterpreted ->
case ms_hsc_src summary of
HsBootFile ->
do (iface, _changed, details) <- hscSimpleIface hsc_env tc_result mb_old_hash
return (HomeModInfo{ hm_details = details,
hm_iface = iface,
hm_linkable = maybe_old_linkable })
_ -> do guts0 <- hscDesugar hsc_env summary tc_result
guts <- hscSimplify hsc_env guts0
(iface, _changed, details, cgguts) <- hscNormalIface hsc_env guts mb_old_hash
(hasStub, comp_bc, modBreaks) <- hscInteractive hsc_env cgguts summary
stub_o <- case hasStub of
Nothing -> return []
Just stub_c -> do
stub_o <- compileStub hsc_env stub_c
return [DotO stub_o]
let hs_unlinked = [BCOs comp_bc modBreaks]
unlinked_time = ms_hs_date summary
-- Why do we use the timestamp of the source file here,
-- rather than the current time? This works better in
-- the case where the local clock is out of sync
-- with the filesystem's clock. It's just as accurate:
-- if the source is modified, then the linkable will
-- be out of date.
let linkable = LM unlinked_time this_mod
(hs_unlinked ++ stub_o)
return (HomeModInfo{ hm_details = details,
hm_iface = iface,
hm_linkable = Just linkable })
HscNothing ->
do (iface, changed, details) <- hscSimpleIface hsc_env tc_result mb_old_hash
when (gopt Opt_WriteInterface dflags) $
hscWriteIface dflags iface changed summary
let linkable = if isHsBoot src_flavour
then maybe_old_linkable
else Just (LM (ms_hs_date summary) this_mod [])
return (HomeModInfo{ hm_details = details,
hm_iface = iface,
hm_linkable = linkable })
_ ->
case ms_hsc_src summary of
HsBootFile ->
do (iface, changed, details) <- hscSimpleIface hsc_env tc_result mb_old_hash
hscWriteIface dflags iface changed summary
touchObjectFile dflags object_filename
return (HomeModInfo{ hm_details = details,
hm_iface = iface,
hm_linkable = maybe_old_linkable })
_ -> do guts0 <- hscDesugar hsc_env summary tc_result
guts <- hscSimplify hsc_env guts0
(iface, changed, details, cgguts) <- hscNormalIface hsc_env guts mb_old_hash
hscWriteIface dflags iface changed summary
-- We're in --make mode: finish the compilation pipeline.
let mod_name = ms_mod_name summary
_ <- runPipeline StopLn hsc_env
(output_fn,
Just (HscOut src_flavour mod_name (HscRecomp cgguts summary)))
(Just basename)
Persistent
(Just location)
Nothing
-- The object filename comes from the ModLocation
o_time <- getModificationUTCTime object_filename
let linkable = LM o_time this_mod [DotO object_filename]
return (HomeModInfo{ hm_details = details,
hm_iface = iface,
hm_linkable = Just linkable })
-----------------------------------------------------------------------------
-- stub .h and .c files (for foreign export support)
-- The _stub.c file is derived from the Haskell source file, possibly taking
-- into account the -stubdir option.
--
-- The object file created by compiling the _stub.c file is put into a
-- temporary file, which will be later combined with the main .o file
-- (see the MergeStubs phase).
compileStub :: HscEnv -> FilePath -> IO FilePath
compileStub hsc_env stub_c = do
(_, stub_o) <- runPipeline StopLn hsc_env (stub_c,Nothing) Nothing
Temporary Nothing{-no ModLocation-} Nothing
return stub_o
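-- An illustrative instance (module name hypothetical): foreign-exporting a
-- function from Foo.hs produces a Foo_stub.c; compileStub pushes that file
-- through the Cc and As phases and hands back the resulting temporary
-- object, which the MergeStub phase later combines with Foo.o.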
-- ---------------------------------------------------------------------------
-- Link
link :: GhcLink -- interactive or batch
-> DynFlags -- dynamic flags
-> Bool -- attempt linking in batch mode?
-> HomePackageTable -- what to link
-> IO SuccessFlag
-- For the moment, in the batch linker, we don't bother to tell doLink
-- which packages to link -- it just tries all that are available.
-- batch_attempt_linking should only be *looked at* in batch mode. It
-- should only be True if the upsweep was successful and someone
-- exports main, i.e., we have good reason to believe that linking
-- will succeed.
link ghcLink dflags
= lookupHook linkHook l dflags ghcLink dflags
where
l LinkInMemory _ _ _
= if cGhcWithInterpreter == "YES"
then -- Not Linking...(demand linker will do the job)
return Succeeded
else panicBadLink LinkInMemory
l NoLink _ _ _
= return Succeeded
l LinkBinary dflags batch_attempt_linking hpt
= link' dflags batch_attempt_linking hpt
l LinkStaticLib dflags batch_attempt_linking hpt
= link' dflags batch_attempt_linking hpt
l LinkDynLib dflags batch_attempt_linking hpt
= link' dflags batch_attempt_linking hpt
panicBadLink :: GhcLink -> a
panicBadLink other = panic ("link: GHC not built to link this way: " ++
show other)
link' :: DynFlags -- dynamic flags
-> Bool -- attempt linking in batch mode?
-> HomePackageTable -- what to link
-> IO SuccessFlag
link' dflags batch_attempt_linking hpt
| batch_attempt_linking
= do
let
staticLink = case ghcLink dflags of
LinkStaticLib -> True
_ -> platformBinariesAreStaticLibs (targetPlatform dflags)
home_mod_infos = eltsUFM hpt
-- the packages we depend on
pkg_deps = concatMap (map fst . dep_pkgs . mi_deps . hm_iface) home_mod_infos
-- the linkables to link
linkables = map (expectJust "link".hm_linkable) home_mod_infos
debugTraceMsg dflags 3 (text "link: linkables are ..." $$ vcat (map ppr linkables))
-- check for the -no-link flag
if isNoLink (ghcLink dflags)
then do debugTraceMsg dflags 3 (text "link(batch): linking omitted (-c flag given).")
return Succeeded
else do
let getOfiles (LM _ _ us) = map nameOfObject (filter isObject us)
obj_files = concatMap getOfiles linkables
exe_file = exeFileName staticLink dflags
linking_needed <- linkingNeeded dflags staticLink linkables pkg_deps
if not (gopt Opt_ForceRecomp dflags) && not linking_needed
then do debugTraceMsg dflags 2 (text exe_file <+> ptext (sLit "is up to date, linking not required."))
return Succeeded
else do
compilationProgressMsg dflags ("Linking " ++ exe_file ++ " ...")
-- Don't showPass in Batch mode; doLink will do that for us.
let link = case ghcLink dflags of
LinkBinary -> linkBinary
LinkStaticLib -> linkStaticLibCheck
LinkDynLib -> linkDynLibCheck
other -> panicBadLink other
link dflags obj_files pkg_deps
debugTraceMsg dflags 3 (text "link: done")
-- linkBinary only returns if it succeeds
return Succeeded
| otherwise
= do debugTraceMsg dflags 3 (text "link(batch): upsweep (partially) failed OR" $$
text " Main.main not exported; not linking.")
return Succeeded
linkingNeeded :: DynFlags -> Bool -> [Linkable] -> [PackageKey] -> IO Bool
linkingNeeded dflags staticLink linkables pkg_deps = do
-- if the modification time on the executable is later than the
-- modification times on all of the objects and libraries, then omit
-- linking (unless the -fforce-recomp flag was given).
let exe_file = exeFileName staticLink dflags
e_exe_time <- tryIO $ getModificationUTCTime exe_file
case e_exe_time of
Left _ -> return True
Right t -> do
-- first check object files and extra_ld_inputs
let extra_ld_inputs = [ f | FileOption _ f <- ldInputs dflags ]
e_extra_times <- mapM (tryIO . getModificationUTCTime) extra_ld_inputs
let (errs,extra_times) = splitEithers e_extra_times
let obj_times = map linkableTime linkables ++ extra_times
if not (null errs) || any (t <) obj_times
then return True
else do
-- next, check libraries. XXX this only checks Haskell libraries,
-- not extra_libraries or -l things from the command line.
let pkg_hslibs = [ (libraryDirs c, lib)
| Just c <- map (lookupPackage dflags) pkg_deps,
lib <- packageHsLibs dflags c ]
pkg_libfiles <- mapM (uncurry (findHSLib dflags)) pkg_hslibs
if any isNothing pkg_libfiles then return True else do
e_lib_times <- mapM (tryIO . getModificationUTCTime)
(catMaybes pkg_libfiles)
let (lib_errs,lib_times) = splitEithers e_lib_times
if not (null lib_errs) || any (t <) lib_times
then return True
else checkLinkInfo dflags pkg_deps exe_file
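-- The timestamp rule used above, in miniature (illustrative only):
--
-- >   relink_needed exe_time input_times = any (exe_time <) input_times
--
-- where the inputs are the object files, extra ld inputs and Haskell
-- libraries; a failed lookup of any of them also forces a relink.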
-- Returns 'False' if the link info stored in the existing binary matches
-- the current link options, in which case we can avoid relinking.
checkLinkInfo :: DynFlags -> [PackageKey] -> FilePath -> IO Bool
checkLinkInfo dflags pkg_deps exe_file
| not (platformSupportsSavingLinkOpts (platformOS (targetPlatform dflags)))
-- ToDo: Windows and OS X do not use the ELF binary format, so
-- readelf does not work there. We need to find another way to do
-- this.
= return False -- conservatively we should return True, but not
-- linking in this case was the behaviour for a long
-- time so we leave it as-is.
| otherwise
= do
link_info <- getLinkInfo dflags pkg_deps
debugTraceMsg dflags 3 $ text ("Link info: " ++ link_info)
m_exe_link_info <- readElfSection dflags ghcLinkInfoSectionName exe_file
debugTraceMsg dflags 3 $ text ("Exe link info: " ++ show m_exe_link_info)
return (Just link_info /= m_exe_link_info)
platformSupportsSavingLinkOpts :: OS -> Bool
platformSupportsSavingLinkOpts os
| os == OSSolaris2 = False -- see #5382
| otherwise = osElfTarget os
ghcLinkInfoSectionName :: String
ghcLinkInfoSectionName = ".debug-ghc-link-info"
-- if we use the ".debug" prefix, then strip will strip it by default
findHSLib :: DynFlags -> [String] -> String -> IO (Maybe FilePath)
findHSLib dflags dirs lib = do
let batch_lib_file = if gopt Opt_Static dflags
then "lib" ++ lib <.> "a"
else mkSOName (targetPlatform dflags) lib
found <- filterM doesFileExist (map (</> batch_lib_file) dirs)
case found of
[] -> return Nothing
(x:_) -> return (Just x)
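-- For example (illustrative, assuming an ELF platform): a request for the
-- library "HSfoo-1.0" searches the given directories for "libHSfoo-1.0.a"
-- under -static, or for something like "libHSfoo-1.0.so" otherwise, and the
-- first hit wins.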
-- -----------------------------------------------------------------------------
-- Compile files in one-shot mode.
oneShot :: HscEnv -> Phase -> [(String, Maybe Phase)] -> IO ()
oneShot hsc_env stop_phase srcs = do
o_files <- mapM (compileFile hsc_env stop_phase) srcs
doLink (hsc_dflags hsc_env) stop_phase o_files
compileFile :: HscEnv -> Phase -> (FilePath, Maybe Phase) -> IO FilePath
compileFile hsc_env stop_phase (src, mb_phase) = do
exists <- doesFileExist src
when (not exists) $
throwGhcExceptionIO (CmdLineError ("does not exist: " ++ src))
let
dflags = hsc_dflags hsc_env
split = gopt Opt_SplitObjs dflags
mb_o_file = outputFile dflags
ghc_link = ghcLink dflags -- Set by -c or -no-link
-- When linking, the -o argument refers to the linker's output.
        -- Otherwise, we use it as the name for the pipeline's output.
output
          -- If we are doing -fno-code, then act as if the output is
-- 'Temporary'. This stops GHC trying to copy files to their
-- final location.
| HscNothing <- hscTarget dflags = Temporary
| StopLn <- stop_phase, not (isNoLink ghc_link) = Persistent
-- -o foo applies to linker
| isJust mb_o_file = SpecificFile
-- -o foo applies to the file we are compiling now
| otherwise = Persistent
stop_phase' = case stop_phase of
As _ | split -> SplitAs
_ -> stop_phase
( _, out_file) <- runPipeline stop_phase' hsc_env
(src, fmap RealPhase mb_phase) Nothing output
Nothing{-no ModLocation-} Nothing
return out_file
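-- Illustrative consequences of the output rules above (command lines
-- hypothetical):
--
-- >   ghc -c Foo.hs -o foo.o    -- -o names this pipeline's output (SpecificFile)
-- >   ghc Foo.hs -o prog        -- -o names the linked program; Foo.o is Persistent
-- >   ghc -fno-code Foo.hs      -- output is Temporary, nothing is copied back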
doLink :: DynFlags -> Phase -> [FilePath] -> IO ()
doLink dflags stop_phase o_files
| not (isStopLn stop_phase)
= return () -- We stopped before the linking phase
| otherwise
= case ghcLink dflags of
NoLink -> return ()
LinkBinary -> linkBinary dflags o_files []
LinkStaticLib -> linkStaticLibCheck dflags o_files []
LinkDynLib -> linkDynLibCheck dflags o_files []
other -> panicBadLink other
-- ---------------------------------------------------------------------------
-- | Run a compilation pipeline, consisting of multiple phases.
--
-- This is the interface to the compilation pipeline, which runs
-- a series of compilation steps on a single source file, specifying
-- at which stage to stop.
--
-- The DynFlags can be modified by phases in the pipeline (eg. by
-- OPTIONS_GHC pragmas), and the changes affect later phases in the
-- pipeline.
runPipeline
:: Phase -- ^ When to stop
-> HscEnv -- ^ Compilation environment
-> (FilePath,Maybe PhasePlus) -- ^ Input filename (and maybe -x suffix)
-> Maybe FilePath -- ^ original basename (if different from ^^^)
-> PipelineOutput -- ^ Output filename
-> Maybe ModLocation -- ^ A ModLocation, if this is a Haskell module
-> Maybe FilePath -- ^ stub object, if we have one
-> IO (DynFlags, FilePath) -- ^ (final flags, output filename)
runPipeline stop_phase hsc_env0 (input_fn, mb_phase)
mb_basename output maybe_loc maybe_stub_o
= do let
dflags0 = hsc_dflags hsc_env0
-- Decide where dump files should go based on the pipeline output
dflags = dflags0 { dumpPrefix = Just (basename ++ ".") }
hsc_env = hsc_env0 {hsc_dflags = dflags}
(input_basename, suffix) = splitExtension input_fn
suffix' = drop 1 suffix -- strip off the .
basename | Just b <- mb_basename = b
| otherwise = input_basename
-- If we were given a -x flag, then use that phase to start from
start_phase = fromMaybe (RealPhase (startPhase suffix')) mb_phase
isHaskell (RealPhase (Unlit _)) = True
isHaskell (RealPhase (Cpp _)) = True
isHaskell (RealPhase (HsPp _)) = True
isHaskell (RealPhase (Hsc _)) = True
isHaskell (HscOut {}) = True
isHaskell _ = False
isHaskellishFile = isHaskell start_phase
env = PipeEnv{ pe_isHaskellishFile = isHaskellishFile,
stop_phase,
src_filename = input_fn,
src_basename = basename,
src_suffix = suffix',
output_spec = output }
-- We want to catch cases of "you can't get there from here" before
-- we start the pipeline, because otherwise it will just run off the
-- end.
--
-- There is a partial ordering on phases, where A < B iff A occurs
-- before B in a normal compilation pipeline.
let happensBefore' = happensBefore dflags
case start_phase of
RealPhase start_phase' ->
when (not (start_phase' `happensBefore'` stop_phase)) $
throwGhcExceptionIO (UsageError
("cannot compile this file to desired target: "
++ input_fn))
HscOut {} -> return ()
debugTraceMsg dflags 4 (text "Running the pipeline")
r <- runPipeline' start_phase hsc_env env input_fn
maybe_loc maybe_stub_o
-- If we are compiling a Haskell module, and doing
-- -dynamic-too, but couldn't do the -dynamic-too fast
-- path, then rerun the pipeline for the dyn way
let dflags = extractDynFlags hsc_env
-- NB: Currently disabled on Windows (ref #7134, #8228, and #5987)
when (not $ platformOS (targetPlatform dflags) == OSMinGW32) $ do
when isHaskellishFile $ whenCannotGenerateDynamicToo dflags $ do
debugTraceMsg dflags 4
(text "Running the pipeline again for -dynamic-too")
let dflags' = dynamicTooMkDynamicDynFlags dflags
hsc_env' <- newHscEnv dflags'
_ <- runPipeline' start_phase hsc_env' env input_fn
maybe_loc maybe_stub_o
return ()
return r
runPipeline'
:: PhasePlus -- ^ When to start
-> HscEnv -- ^ Compilation environment
-> PipeEnv
-> FilePath -- ^ Input filename
-> Maybe ModLocation -- ^ A ModLocation, if this is a Haskell module
-> Maybe FilePath -- ^ stub object, if we have one
-> IO (DynFlags, FilePath) -- ^ (final flags, output filename)
runPipeline' start_phase hsc_env env input_fn
maybe_loc maybe_stub_o
= do
-- Execute the pipeline...
let state = PipeState{ hsc_env, maybe_loc, maybe_stub_o = maybe_stub_o }
evalP (pipeLoop start_phase input_fn) env state
-- ---------------------------------------------------------------------------
-- outer pipeline loop
-- | pipeLoop runs phases until we reach the stop phase
pipeLoop :: PhasePlus -> FilePath -> CompPipeline (DynFlags, FilePath)
pipeLoop phase input_fn = do
env <- getPipeEnv
dflags <- getDynFlags
let happensBefore' = happensBefore dflags
stopPhase = stop_phase env
case phase of
RealPhase realPhase | realPhase `eqPhase` stopPhase -- All done
-> -- Sometimes, a compilation phase doesn't actually generate any output
-- (eg. the CPP phase when -fcpp is not turned on). If we end on this
-- stage, but we wanted to keep the output, then we have to explicitly
-- copy the file, remembering to prepend a {-# LINE #-} pragma so that
-- further compilation stages can tell what the original filename was.
case output_spec env of
Temporary ->
return (dflags, input_fn)
output ->
do pst <- getPipeState
final_fn <- liftIO $ getOutputFilename
stopPhase output (src_basename env)
dflags stopPhase (maybe_loc pst)
when (final_fn /= input_fn) $ do
let msg = ("Copying `" ++ input_fn ++"' to `" ++ final_fn ++ "'")
line_prag = Just ("{-# LINE 1 \"" ++ src_filename env ++ "\" #-}\n")
liftIO $ copyWithHeader dflags msg line_prag input_fn final_fn
return (dflags, final_fn)
| not (realPhase `happensBefore'` stopPhase)
-- Something has gone wrong. We'll try to cover all the cases when
-- this could happen, so if we reach here it is a panic.
-- eg. it might happen if the -C flag is used on a source file that
-- has {-# OPTIONS -fasm #-}.
-> panic ("pipeLoop: at phase " ++ show realPhase ++
" but I wanted to stop at phase " ++ show stopPhase)
_
-> do liftIO $ debugTraceMsg dflags 4
(ptext (sLit "Running phase") <+> ppr phase)
(next_phase, output_fn) <- runHookedPhase phase input_fn dflags
r <- pipeLoop next_phase output_fn
case phase of
HscOut {} ->
whenGeneratingDynamicToo dflags $ do
setDynFlags $ dynamicTooMkDynamicDynFlags dflags
-- TODO shouldn't ignore result:
_ <- pipeLoop phase input_fn
return ()
_ ->
return ()
return r
runHookedPhase :: PhasePlus -> FilePath -> DynFlags
-> CompPipeline (PhasePlus, FilePath)
runHookedPhase pp input dflags =
lookupHook runPhaseHook runPhase dflags pp input dflags
-- -----------------------------------------------------------------------------
-- In each phase, we need to know into what filename to generate the
-- output. All the logic about which filenames we generate output
-- into is embodied in the following function.
phaseOutputFilename :: Phase{-next phase-} -> CompPipeline FilePath
phaseOutputFilename next_phase = do
PipeEnv{stop_phase, src_basename, output_spec} <- getPipeEnv
PipeState{maybe_loc, hsc_env} <- getPipeState
let dflags = hsc_dflags hsc_env
liftIO $ getOutputFilename stop_phase output_spec
src_basename dflags next_phase maybe_loc
getOutputFilename
:: Phase -> PipelineOutput -> String
-> DynFlags -> Phase{-next phase-} -> Maybe ModLocation -> IO FilePath
getOutputFilename stop_phase output basename dflags next_phase maybe_location
| is_last_phase, Persistent <- output = persistent_fn
| is_last_phase, SpecificFile <- output = case outputFile dflags of
Just f -> return f
Nothing ->
panic "SpecificFile: No filename"
| keep_this_output = persistent_fn
| otherwise = newTempName dflags suffix
where
hcsuf = hcSuf dflags
odir = objectDir dflags
osuf = objectSuf dflags
keep_hc = gopt Opt_KeepHcFiles dflags
keep_s = gopt Opt_KeepSFiles dflags
keep_bc = gopt Opt_KeepLlvmFiles dflags
myPhaseInputExt HCc = hcsuf
myPhaseInputExt MergeStub = osuf
myPhaseInputExt StopLn = osuf
myPhaseInputExt other = phaseInputExt other
is_last_phase = next_phase `eqPhase` stop_phase
-- sometimes, we keep output from intermediate stages
keep_this_output =
case next_phase of
As _ | keep_s -> True
LlvmOpt | keep_bc -> True
HCc | keep_hc -> True
_other -> False
suffix = myPhaseInputExt next_phase
-- persistent object files get put in odir
persistent_fn
| StopLn <- next_phase = return odir_persistent
| otherwise = return persistent
persistent = basename <.> suffix
odir_persistent
| Just loc <- maybe_location = ml_obj_file loc
| Just d <- odir = d </> persistent
| otherwise = persistent
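-- An illustrative mapping (flags hypothetical): compiling Foo.hs with
-- "-odir build" and stopping at StopLn puts the persistent object at
-- build/Foo.o (or at the ModLocation's ml_obj_file when one is supplied);
-- an intermediate .s file is only kept persistently under -keep-s-files,
-- and otherwise goes to a temporary file.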
-- -----------------------------------------------------------------------------
-- | Each phase in the pipeline returns the next phase to execute, and the
-- name of the file in which the output was placed.
--
-- We must do things dynamically this way, because we often don't know
-- what the rest of the phases will be until part-way through the
-- compilation: for example, an {-# OPTIONS -fasm #-} at the beginning
-- of a source file can change the latter stages of the pipeline from
-- taking the LLVM route to using the native code generator.
--
runPhase :: PhasePlus -- ^ Run this phase
-> FilePath -- ^ name of the input file
-> DynFlags -- ^ for convenience, we pass the current dflags in
-> CompPipeline (PhasePlus, -- next phase to run
FilePath) -- output filename
-- Invariant: the output filename always contains the output
-- Interesting case: Hsc when there is no recompilation to do
-- Then the output filename is still a .o file
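-- An illustrative step (the temporary file name is made up): running the
-- Cpp phase on a plain Haskell source file might return
--
-- >   (RealPhase (HsPp HsSrcFile), "/tmp/ghc1234_0/ghc1234_0.hspp")
--
-- i.e. the phase to run next, together with the file holding this
-- phase's output.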
-------------------------------------------------------------------------------
-- Unlit phase
runPhase (RealPhase (Unlit sf)) input_fn dflags
= do
output_fn <- phaseOutputFilename (Cpp sf)
let flags = [ -- The -h option passes the file name for unlit to
-- put in a #line directive
SysTools.Option "-h"
, SysTools.Option $ escape $ normalise input_fn
, SysTools.FileOption "" input_fn
, SysTools.FileOption "" output_fn
]
liftIO $ SysTools.runUnlit dflags flags
return (RealPhase (Cpp sf), output_fn)
where
-- escape the characters \, ", and ', but don't try to escape
-- Unicode or anything else (so we don't use Util.charToC
-- here). If we get this wrong, then in
-- Coverage.addTicksToBinds where we check that the filename in
    -- a SrcLoc is the same as the source filename, the two will
-- look bogusly different. See test:
-- libraries/hpc/tests/function/subdir/tough2.lhs
escape ('\\':cs) = '\\':'\\': escape cs
escape ('\"':cs) = '\\':'\"': escape cs
escape ('\'':cs) = '\\':'\'': escape cs
escape (c:cs) = c : escape cs
escape [] = []
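-- For instance (illustrative): an input path such as src\Foo.lhs comes out
-- as src\\Foo.lhs, so the backslash survives intact inside the quoted
-- #line directive.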
-------------------------------------------------------------------------------
-- Cpp phase : (a) gets OPTIONS out of file
-- (b) runs cpp if necessary
runPhase (RealPhase (Cpp sf)) input_fn dflags0
= do
src_opts <- liftIO $ getOptionsFromFile dflags0 input_fn
(dflags1, unhandled_flags, warns)
<- liftIO $ parseDynamicFilePragma dflags0 src_opts
setDynFlags dflags1
liftIO $ checkProcessArgsResult dflags1 unhandled_flags
if not (xopt Opt_Cpp dflags1) then do
-- we have to be careful to emit warnings only once.
unless (gopt Opt_Pp dflags1) $
liftIO $ handleFlagWarnings dflags1 warns
        -- no need to run CPP, just pass the input file along
-- to the next phase of the pipeline.
return (RealPhase (HsPp sf), input_fn)
else do
output_fn <- phaseOutputFilename (HsPp sf)
liftIO $ doCpp dflags1 True{-raw-}
input_fn output_fn
-- re-read the pragmas now that we've preprocessed the file
-- See #2464,#3457
src_opts <- liftIO $ getOptionsFromFile dflags0 output_fn
(dflags2, unhandled_flags, warns)
<- liftIO $ parseDynamicFilePragma dflags0 src_opts
liftIO $ checkProcessArgsResult dflags2 unhandled_flags
unless (gopt Opt_Pp dflags2) $
liftIO $ handleFlagWarnings dflags2 warns
-- the HsPp pass below will emit warnings
setDynFlags dflags2
return (RealPhase (HsPp sf), output_fn)
-------------------------------------------------------------------------------
-- HsPp phase
runPhase (RealPhase (HsPp sf)) input_fn dflags
= do
if not (gopt Opt_Pp dflags) then
-- no need to preprocess, just pass input file along
-- to the next phase of the pipeline.
return (RealPhase (Hsc sf), input_fn)
else do
PipeEnv{src_basename, src_suffix} <- getPipeEnv
let orig_fn = src_basename <.> src_suffix
output_fn <- phaseOutputFilename (Hsc sf)
liftIO $ SysTools.runPp dflags
( [ SysTools.Option orig_fn
, SysTools.Option input_fn
, SysTools.FileOption "" output_fn
]
)
-- re-read pragmas now that we've parsed the file (see #3674)
src_opts <- liftIO $ getOptionsFromFile dflags output_fn
(dflags1, unhandled_flags, warns)
<- liftIO $ parseDynamicFilePragma dflags src_opts
setDynFlags dflags1
liftIO $ checkProcessArgsResult dflags1 unhandled_flags
liftIO $ handleFlagWarnings dflags1 warns
return (RealPhase (Hsc sf), output_fn)
-----------------------------------------------------------------------------
-- Hsc phase
-- Compilation of a single module, in "legacy" mode (_not_ under
-- the direction of the compilation manager).
runPhase (RealPhase (Hsc src_flavour)) input_fn dflags0
= do -- normal Hsc mode, not mkdependHS
PipeEnv{ stop_phase=stop,
src_basename=basename,
src_suffix=suff } <- getPipeEnv
-- we add the current directory (i.e. the directory in which
        -- the .hs file resides) to the include path, since this is
-- what gcc does, and it's probably what you want.
let current_dir = takeDirectory basename
paths = includePaths dflags0
dflags = dflags0 { includePaths = current_dir : paths }
setDynFlags dflags
-- gather the imports and module name
(hspp_buf,mod_name,imps,src_imps) <- liftIO $ do
do
buf <- hGetStringBuffer input_fn
(src_imps,imps,L _ mod_name) <- getImports dflags buf input_fn (basename <.> suff)
return (Just buf, mod_name, imps, src_imps)
-- Take -o into account if present
-- Very like -ohi, but we must *only* do this if we aren't linking
-- (If we're linking then the -o applies to the linked thing, not to
-- the object file for one module.)
-- Note the nasty duplication with the same computation in compileFile above
location <- getLocation src_flavour mod_name
let o_file = ml_obj_file location -- The real object file
-- Figure out if the source has changed, for recompilation avoidance.
--
-- Setting source_unchanged to True means that M.o seems
-- to be up to date wrt M.hs; so no need to recompile unless imports have
-- changed (which the compiler itself figures out).
-- Setting source_unchanged to False tells the compiler that M.o is out of
-- date wrt M.hs (or M.o doesn't exist) so we must recompile regardless.
src_timestamp <- liftIO $ getModificationUTCTime (basename <.> suff)
source_unchanged <- liftIO $
if not (isStopLn stop)
-- SourceModified unconditionally if
-- (a) recompilation checker is off, or
-- (b) we aren't going all the way to .o file (e.g. ghc -S)
then return SourceModified
-- Otherwise look at file modification dates
else do o_file_exists <- doesFileExist o_file
if not o_file_exists
then return SourceModified -- Need to recompile
else do t2 <- getModificationUTCTime o_file
if t2 > src_timestamp
then return SourceUnmodified
else return SourceModified
PipeState{hsc_env=hsc_env'} <- getPipeState
-- Tell the finder cache about this module
mod <- liftIO $ addHomeModuleToFinder hsc_env' mod_name location
-- Make the ModSummary to hand to hscMain
let
mod_summary = ModSummary { ms_mod = mod,
ms_hsc_src = src_flavour,
ms_hspp_file = input_fn,
ms_hspp_opts = dflags,
ms_hspp_buf = hspp_buf,
ms_location = location,
ms_hs_date = src_timestamp,
ms_obj_date = Nothing,
ms_textual_imps = imps,
ms_srcimps = src_imps }
-- run the compiler!
result <- liftIO $ hscCompileOneShot hsc_env'
mod_summary source_unchanged
return (HscOut src_flavour mod_name result,
panic "HscOut doesn't have an input filename")
runPhase (HscOut src_flavour mod_name result) _ dflags = do
location <- getLocation src_flavour mod_name
setModLocation location
let o_file = ml_obj_file location -- The real object file
hsc_lang = hscTarget dflags
next_phase = hscPostBackendPhase dflags src_flavour hsc_lang
case result of
HscNotGeneratingCode ->
return (RealPhase next_phase,
panic "No output filename from Hsc when no-code")
HscUpToDate ->
do liftIO $ touchObjectFile dflags o_file
-- The .o file must have a later modification date
-- than the source file (else we wouldn't get Nothing)
-- but we touch it anyway, to keep 'make' happy (we think).
return (RealPhase StopLn, o_file)
HscUpdateBoot ->
do -- In the case of hs-boot files, generate a dummy .o-boot
-- stamp file for the benefit of Make
liftIO $ touchObjectFile dflags o_file
return (RealPhase next_phase, o_file)
HscRecomp cgguts mod_summary
-> do output_fn <- phaseOutputFilename next_phase
PipeState{hsc_env=hsc_env'} <- getPipeState
(outputFilename, mStub) <- liftIO $ hscGenHardCode hsc_env' cgguts mod_summary output_fn
case mStub of
Nothing -> return ()
Just stub_c ->
do stub_o <- liftIO $ compileStub hsc_env' stub_c
setStubO stub_o
return (RealPhase next_phase, outputFilename)
-----------------------------------------------------------------------------
-- Cmm phase
runPhase (RealPhase CmmCpp) input_fn dflags
= do
output_fn <- phaseOutputFilename Cmm
liftIO $ doCpp dflags False{-not raw-}
input_fn output_fn
return (RealPhase Cmm, output_fn)
runPhase (RealPhase Cmm) input_fn dflags
= do
let hsc_lang = hscTarget dflags
let next_phase = hscPostBackendPhase dflags HsSrcFile hsc_lang
output_fn <- phaseOutputFilename next_phase
PipeState{hsc_env} <- getPipeState
liftIO $ hscCompileCmmFile hsc_env input_fn output_fn
return (RealPhase next_phase, output_fn)
-----------------------------------------------------------------------------
-- Cc phase
-- we don't support preprocessing .c files (with -E) now. Doing so introduces
-- way too many hacks, and I can't say I've ever used it anyway.
runPhase (RealPhase cc_phase) input_fn dflags
| any (cc_phase `eqPhase`) [Cc, Ccpp, HCc, Cobjc, Cobjcpp]
= do
let platform = targetPlatform dflags
hcc = cc_phase `eqPhase` HCc
let cmdline_include_paths = includePaths dflags
-- HC files have the dependent packages stamped into them
pkgs <- if hcc then liftIO $ getHCFilePackages input_fn else return []
-- add package include paths even if we're just compiling .c
-- files; this is the Value Add(TM) that using ghc instead of
-- gcc gives you :)
pkg_include_dirs <- liftIO $ getPackageIncludePath dflags pkgs
let include_paths = foldr (\ x xs -> ("-I" ++ x) : xs) []
(cmdline_include_paths ++ pkg_include_dirs)
let gcc_extra_viac_flags = extraGccViaCFlags dflags
let pic_c_flags = picCCOpts dflags
let verbFlags = getVerbFlags dflags
-- cc-options are not passed when compiling .hc files. Our
        -- hc code doesn't #include any header files anyway, so these
-- options aren't necessary.
pkg_extra_cc_opts <- liftIO $
if cc_phase `eqPhase` HCc
then return []
else getPackageExtraCcOpts dflags pkgs
framework_paths <-
if platformUsesFrameworks platform
then do pkgFrameworkPaths <- liftIO $ getPackageFrameworkPath dflags pkgs
let cmdlineFrameworkPaths = frameworkPaths dflags
return $ map ("-F"++)
(cmdlineFrameworkPaths ++ pkgFrameworkPaths)
else return []
let split_objs = gopt Opt_SplitObjs dflags
split_opt | hcc && split_objs = [ "-DUSE_SPLIT_MARKERS" ]
| otherwise = [ ]
let cc_opt | optLevel dflags >= 2 = [ "-O2" ]
| optLevel dflags >= 1 = [ "-O" ]
| otherwise = []
-- Decide next phase
let next_phase = As False
output_fn <- phaseOutputFilename next_phase
let
more_hcc_opts =
-- on x86 the floating point regs have greater precision
-- than a double, which leads to unpredictable results.
-- By default, we turn this off with -ffloat-store unless
-- the user specified -fexcess-precision.
(if platformArch platform == ArchX86 &&
not (gopt Opt_ExcessPrecision dflags)
then [ "-ffloat-store" ]
else []) ++
-- gcc's -fstrict-aliasing allows two accesses to memory
-- to be considered non-aliasing if they have different types.
-- This interacts badly with the C code we generate, which is
-- very weakly typed, being derived from C--.
["-fno-strict-aliasing"]
let gcc_lang_opt | cc_phase `eqPhase` Ccpp = "c++"
| cc_phase `eqPhase` Cobjc = "objective-c"
| cc_phase `eqPhase` Cobjcpp = "objective-c++"
| otherwise = "c"
liftIO $ SysTools.runCc dflags (
-- force the C compiler to interpret this file as C when
-- compiling .hc files, by adding the -x c option.
-- Also useful for plain .c files, just in case GHC saw a
-- -x c option.
[ SysTools.Option "-x", SysTools.Option gcc_lang_opt
, SysTools.FileOption "" input_fn
, SysTools.Option "-o"
, SysTools.FileOption "" output_fn
]
++ map SysTools.Option (
pic_c_flags
          -- Stub files generated for foreign exports reference the runIO_closure
-- and runNonIO_closure symbols, which are defined in the base package.
-- These symbols are imported into the stub.c file via RtsAPI.h, and the
-- way we do the import depends on whether we're currently compiling
-- the base package or not.
++ (if platformOS platform == OSMinGW32 &&
thisPackage dflags == basePackageKey
then [ "-DCOMPILING_BASE_PACKAGE" ]
else [])
-- We only support SparcV9 and better because V8 lacks an atomic CAS
-- instruction. Note that the user can still override this
-- (e.g., -mcpu=ultrasparc) as GCC picks the "best" -mcpu flag
-- regardless of the ordering.
--
-- This is a temporary hack. See #2872, commit
-- 5bd3072ac30216a505151601884ac88bf404c9f2
++ (if platformArch platform == ArchSPARC
then ["-mcpu=v9"]
else [])
-- GCC 4.6+ doesn't like -Wimplicit when compiling C++.
++ (if (cc_phase /= Ccpp && cc_phase /= Cobjcpp)
then ["-Wimplicit"]
else [])
++ (if hcc
then gcc_extra_viac_flags ++ more_hcc_opts
else [])
++ verbFlags
++ [ "-S" ]
++ cc_opt
++ [ "-D__GLASGOW_HASKELL__="++cProjectVersionInt ]
++ framework_paths
++ split_opt
++ include_paths
++ pkg_extra_cc_opts
))
return (RealPhase next_phase, output_fn)
-----------------------------------------------------------------------------
-- Splitting phase
runPhase (RealPhase Splitter) input_fn dflags
= do -- tmp_pfx is the prefix used for the split .s files
split_s_prefix <- liftIO $ SysTools.newTempName dflags "split"
let n_files_fn = split_s_prefix
liftIO $ SysTools.runSplit dflags
[ SysTools.FileOption "" input_fn
, SysTools.FileOption "" split_s_prefix
, SysTools.FileOption "" n_files_fn
]
          -- Save the number of split files for future reference
s <- liftIO $ readFile n_files_fn
let n_files = read s :: Int
dflags' = dflags { splitInfo = Just (split_s_prefix, n_files) }
setDynFlags dflags'
-- Remember to delete all these files
liftIO $ addFilesToClean dflags'
[ split_s_prefix ++ "__" ++ show n ++ ".s"
| n <- [1..n_files]]
return (RealPhase SplitAs,
"**splitter**") -- we don't use the filename in SplitAs
-----------------------------------------------------------------------------
-- As, SplitAs phase : Assembler
-- This is for calling the assembler on a regular assembly file (not split).
runPhase (RealPhase (As with_cpp)) input_fn dflags
= do
-- LLVM from version 3.0 onwards doesn't support the OS X system
-- assembler, so we use clang as the assembler instead. (#5636)
let whichAsProg | hscTarget dflags == HscLlvm &&
platformOS (targetPlatform dflags) == OSDarwin
= do
-- be careful what options we call clang with
-- see #5903 and #7617 for bugs caused by this.
llvmVer <- liftIO $ figureLlvmVersion dflags
return $ case llvmVer of
Just n | n >= 30 -> SysTools.runClang
_ -> SysTools.runAs
| otherwise = return SysTools.runAs
as_prog <- whichAsProg
let cmdline_include_paths = includePaths dflags
next_phase <- maybeMergeStub
output_fn <- phaseOutputFilename next_phase
-- we create directories for the object file, because it
-- might be a hierarchical module.
liftIO $ createDirectoryIfMissing True (takeDirectory output_fn)
ccInfo <- liftIO $ getCompilerInfo dflags
let runAssembler inputFilename outputFilename
= liftIO $ as_prog dflags
([ SysTools.Option ("-I" ++ p) | p <- cmdline_include_paths ]
-- We only support SparcV9 and better because V8 lacks an atomic CAS
-- instruction so we have to make sure that the assembler accepts the
-- instruction set. Note that the user can still override this
-- (e.g., -mcpu=ultrasparc). GCC picks the "best" -mcpu flag
-- regardless of the ordering.
--
-- This is a temporary hack.
++ (if platformArch (targetPlatform dflags) == ArchSPARC
then [SysTools.Option "-mcpu=v9"]
else [])
++ (if any (ccInfo ==) [Clang, AppleClang, AppleClang51]
then [SysTools.Option "-Qunused-arguments"]
else [])
++ [ SysTools.Option "-x"
, if with_cpp
then SysTools.Option "assembler-with-cpp"
else SysTools.Option "assembler"
, SysTools.Option "-c"
, SysTools.FileOption "" inputFilename
, SysTools.Option "-o"
, SysTools.FileOption "" outputFilename
])
liftIO $ debugTraceMsg dflags 4 (text "Running the assembler")
runAssembler input_fn output_fn
return (RealPhase next_phase, output_fn)
-- This is for calling the assembler on a split assembly file (so a collection
-- of assembly files)
runPhase (RealPhase SplitAs) _input_fn dflags
= do
-- we'll handle the stub_o file in this phase, so don't MergeStub,
-- just jump straight to StopLn afterwards.
let next_phase = StopLn
output_fn <- phaseOutputFilename next_phase
let base_o = dropExtension output_fn
osuf = objectSuf dflags
split_odir = base_o ++ "_" ++ osuf ++ "_split"
-- this also creates the hierarchy
liftIO $ createDirectoryIfMissing True split_odir
-- remove M_split/ *.o, because we're going to archive M_split/ *.o
-- later and we don't want to pick up any old objects.
fs <- liftIO $ getDirectoryContents split_odir
liftIO $ mapM_ removeFile $
map (split_odir </>) $ filter (osuf `isSuffixOf`) fs
let (split_s_prefix, n) = case splitInfo dflags of
Nothing -> panic "No split info"
Just x -> x
let split_s n = split_s_prefix ++ "__" ++ show n <.> "s"
split_obj :: Int -> FilePath
split_obj n = split_odir </>
takeFileName base_o ++ "__" ++ show n <.> osuf
let assemble_file n
= SysTools.runAs dflags (
-- We only support SparcV9 and better because V8 lacks an atomic CAS
-- instruction so we have to make sure that the assembler accepts the
-- instruction set. Note that the user can still override this
-- (e.g., -mcpu=ultrasparc). GCC picks the "best" -mcpu flag
-- regardless of the ordering.
--
-- This is a temporary hack.
(if platformArch (targetPlatform dflags) == ArchSPARC
then [SysTools.Option "-mcpu=v9"]
else []) ++
[ SysTools.Option "-c"
, SysTools.Option "-o"
, SysTools.FileOption "" (split_obj n)
, SysTools.FileOption "" (split_s n)
])
liftIO $ mapM_ assemble_file [1..n]
-- Note [pipeline-split-init]
-- If we have a stub file, it may contain constructor
-- functions for initialisation of this module. We can't
-- simply leave the stub as a separate object file, because it
-- will never be linked in: nothing refers to it. We need to
-- ensure that if we ever refer to the data in this module
-- that needs initialisation, then we also pull in the
-- initialisation routine.
--
-- To that end, we make a DANGEROUS ASSUMPTION here: the data
-- that needs to be initialised is all in the FIRST split
-- object. See Note [codegen-split-init].
PipeState{maybe_stub_o} <- getPipeState
case maybe_stub_o of
Nothing -> return ()
Just stub_o -> liftIO $ do
tmp_split_1 <- newTempName dflags osuf
let split_1 = split_obj 1
copyFile split_1 tmp_split_1
removeFile split_1
joinObjectFiles dflags [tmp_split_1, stub_o] split_1
-- join them into a single .o file
liftIO $ joinObjectFiles dflags (map split_obj [1..n]) output_fn
return (RealPhase next_phase, output_fn)
-----------------------------------------------------------------------------
-- LlvmOpt phase
runPhase (RealPhase LlvmOpt) input_fn dflags
= do
ver <- liftIO $ readIORef (llvmVersion dflags)
let opt_lvl = max 0 (min 2 $ optLevel dflags)
-- don't specify anything if user has specified commands. We do this
-- for opt but not llc since opt is very specifically for optimisation
-- passes only, so if the user is passing us extra options we assume
-- they know what they are doing and don't get in the way.
optFlag = if null (getOpts dflags opt_lo)
then map SysTools.Option $ words (llvmOpts ver !! opt_lvl)
else []
tbaa | ver < 29 = "" -- no tbaa in 2.8 and earlier
| gopt Opt_LlvmTBAA dflags = "--enable-tbaa=true"
| otherwise = "--enable-tbaa=false"
output_fn <- phaseOutputFilename LlvmLlc
liftIO $ SysTools.runLlvmOpt dflags
([ SysTools.FileOption "" input_fn,
SysTools.Option "-o",
SysTools.FileOption "" output_fn]
++ optFlag
++ [SysTools.Option tbaa])
return (RealPhase LlvmLlc, output_fn)
where
-- we always (unless -optlo specified) run Opt since we rely on it to
-- fix up some pretty big deficiencies in the code we generate
llvmOpts ver = [ "-mem2reg -globalopt"
, if ver >= 34 then "-O1 -globalopt" else "-O1"
-- LLVM 3.4 -O1 doesn't eliminate aliases reliably (bug #8855)
, "-O2"
]
-----------------------------------------------------------------------------
-- LlvmLlc phase
runPhase (RealPhase LlvmLlc) input_fn dflags
= do
ver <- liftIO $ readIORef (llvmVersion dflags)
let opt_lvl = max 0 (min 2 $ optLevel dflags)
-- iOS requires external references to be loaded indirectly from the
-- DATA segment or dyld traps at runtime writing into TEXT: see #7722
rmodel | platformOS (targetPlatform dflags) == OSiOS = "dynamic-no-pic"
| gopt Opt_PIC dflags = "pic"
| not (gopt Opt_Static dflags) = "dynamic-no-pic"
| otherwise = "static"
tbaa | ver < 29 = "" -- no tbaa in 2.8 and earlier
| gopt Opt_LlvmTBAA dflags = "--enable-tbaa=true"
| otherwise = "--enable-tbaa=false"
-- hidden debugging flag '-dno-llvm-mangler' to skip mangling
let next_phase = case gopt Opt_NoLlvmMangler dflags of
False -> LlvmMangle
True | gopt Opt_SplitObjs dflags -> Splitter
True -> As False
output_fn <- phaseOutputFilename next_phase
liftIO $ SysTools.runLlvmLlc dflags
([ SysTools.Option (llvmOpts !! opt_lvl),
SysTools.Option $ "-relocation-model=" ++ rmodel,
SysTools.FileOption "" input_fn,
SysTools.Option "-o", SysTools.FileOption "" output_fn]
++ [SysTools.Option tbaa]
++ map SysTools.Option fpOpts
++ map SysTools.Option abiOpts
++ map SysTools.Option sseOpts
++ map SysTools.Option avxOpts
++ map SysTools.Option avx512Opts
++ map SysTools.Option stackAlignOpts)
return (RealPhase next_phase, output_fn)
where
-- Bug in LLVM at O3 on OSX.
llvmOpts = if platformOS (targetPlatform dflags) == OSDarwin
then ["-O1", "-O2", "-O2"]
else ["-O1", "-O2", "-O3"]
-- On ARMv7 using LLVM, LLVM fails to allocate floating point registers
        -- while compiling GHC source code. It's probably due to the fact that it
-- does not enable VFP by default. Let's do this manually here
fpOpts = case platformArch (targetPlatform dflags) of
ArchARM ARMv7 ext _ -> if (elem VFPv3 ext)
then ["-mattr=+v7,+vfp3"]
else if (elem VFPv3D16 ext)
then ["-mattr=+v7,+vfp3,+d16"]
else []
ArchARM ARMv6 ext _ -> if (elem VFPv2 ext)
then ["-mattr=+v6,+vfp2"]
else ["-mattr=+v6"]
_ -> []
        -- On Ubuntu/Debian with the ARM hard-float ABI, LLVM's llc still
        -- compiles into the soft-float ABI. We need to explicitly set the
        -- ABI to hard.
abiOpts = case platformArch (targetPlatform dflags) of
ArchARM _ _ HARD -> ["-float-abi=hard"]
ArchARM _ _ _ -> []
_ -> []
sseOpts | isSse4_2Enabled dflags = ["-mattr=+sse42"]
| isSse2Enabled dflags = ["-mattr=+sse2"]
| isSseEnabled dflags = ["-mattr=+sse"]
| otherwise = []
avxOpts | isAvx512fEnabled dflags = ["-mattr=+avx512f"]
| isAvx2Enabled dflags = ["-mattr=+avx2"]
| isAvxEnabled dflags = ["-mattr=+avx"]
| otherwise = []
avx512Opts =
[ "-mattr=+avx512cd" | isAvx512cdEnabled dflags ] ++
[ "-mattr=+avx512er" | isAvx512erEnabled dflags ] ++
[ "-mattr=+avx512pf" | isAvx512pfEnabled dflags ]
stackAlignOpts =
case platformArch (targetPlatform dflags) of
ArchX86_64 | isAvxEnabled dflags -> ["-stack-alignment=32"]
_ -> []
-----------------------------------------------------------------------------
-- LlvmMangle phase
runPhase (RealPhase LlvmMangle) input_fn dflags
= do
let next_phase = if gopt Opt_SplitObjs dflags then Splitter else As False
output_fn <- phaseOutputFilename next_phase
liftIO $ llvmFixupAsm dflags input_fn output_fn
return (RealPhase next_phase, output_fn)
-----------------------------------------------------------------------------
-- merge in stub objects
runPhase (RealPhase MergeStub) input_fn dflags
= do
PipeState{maybe_stub_o} <- getPipeState
output_fn <- phaseOutputFilename StopLn
liftIO $ createDirectoryIfMissing True (takeDirectory output_fn)
case maybe_stub_o of
Nothing ->
panic "runPhase(MergeStub): no stub"
Just stub_o -> do
liftIO $ joinObjectFiles dflags [input_fn, stub_o] output_fn
return (RealPhase StopLn, output_fn)
-- warning suppression
runPhase (RealPhase other) _input_fn _dflags =
panic ("runPhase: don't know how to run phase " ++ show other)
maybeMergeStub :: CompPipeline Phase
maybeMergeStub
= do
PipeState{maybe_stub_o} <- getPipeState
if isJust maybe_stub_o then return MergeStub else return StopLn
getLocation :: HscSource -> ModuleName -> CompPipeline ModLocation
getLocation src_flavour mod_name = do
dflags <- getDynFlags
PipeEnv{ src_basename=basename,
src_suffix=suff } <- getPipeEnv
-- Build a ModLocation to pass to hscMain.
-- The source filename is rather irrelevant by now, but it's used
-- by hscMain for messages. hscMain also needs
-- the .hi and .o filenames, and this is as good a way
-- as any to generate them, and better than most. (e.g. takes
-- into account the -osuf flags)
location1 <- liftIO $ mkHomeModLocation2 dflags mod_name basename suff
-- Boot-ify it if necessary
let location2 | isHsBoot src_flavour = addBootSuffixLocn location1
| otherwise = location1
-- Take -ohi into account if present
-- This can't be done in mkHomeModuleLocation because
    -- it only applies to the module being compiled
let ohi = outputHi dflags
location3 | Just fn <- ohi = location2{ ml_hi_file = fn }
| otherwise = location2
-- Take -o into account if present
-- Very like -ohi, but we must *only* do this if we aren't linking
-- (If we're linking then the -o applies to the linked thing, not to
-- the object file for one module.)
-- Note the nasty duplication with the same computation in compileFile above
let expl_o_file = outputFile dflags
location4 | Just ofile <- expl_o_file
, isNoLink (ghcLink dflags)
= location3 { ml_obj_file = ofile }
| otherwise = location3
return location4
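-- Illustrative effect of the adjustments above (flags hypothetical):
-- "ghc -c A.hs -ohi iface/A.hi -o obj/A.o" yields a ModLocation whose
-- ml_hi_file is iface/A.hi and whose ml_obj_file is obj/A.o, whereas the
-- same -o given to a linking invocation leaves ml_obj_file untouched.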
-----------------------------------------------------------------------------
-- MoveBinary sort-of-phase
-- After having produced a binary, move it somewhere else and generate a
-- wrapper script calling the binary. Currently, we need this only in
-- a parallel setting (i.e. in GUM), because PVM expects the binary in a
-- central directory.
-- This is called from linkBinary below, after linking. I haven't made it
-- a separate phase to minimise interfering with other modules, and
-- we don't need the generality of a phase (MoveBinary is always
-- done after linking and only makes sense in a parallel setup) -- HWL
runPhase_MoveBinary :: DynFlags -> FilePath -> IO Bool
runPhase_MoveBinary dflags input_fn
| WayPar `elem` ways dflags && not (gopt Opt_Static dflags) =
panic ("Don't know how to combine PVM wrapper and dynamic wrapper")
| WayPar `elem` ways dflags = do
let sysMan = pgm_sysman dflags
pvm_root <- getEnv "PVM_ROOT"
pvm_arch <- getEnv "PVM_ARCH"
let
pvm_executable_base = "=" ++ input_fn
pvm_executable = pvm_root ++ "/bin/" ++ pvm_arch ++ "/" ++ pvm_executable_base
      -- nuke old binary; maybe use configured names for cp and rm?
_ <- tryIO (removeFile pvm_executable)
-- move the newly created binary into PVM land
copy dflags "copying PVM executable" input_fn pvm_executable
      -- generate a wrapper script for running a parallel program under PVM
writeFile input_fn (mk_pvm_wrapper_script pvm_executable pvm_executable_base sysMan)
return True
| otherwise = return True
mkExtraObj :: DynFlags -> Suffix -> String -> IO FilePath
mkExtraObj dflags extn xs
= do cFile <- newTempName dflags extn
oFile <- newTempName dflags "o"
writeFile cFile xs
let rtsDetails = getPackageDetails dflags rtsPackageKey
SysTools.runCc dflags
([Option "-c",
FileOption "" cFile,
Option "-o",
FileOption "" oFile]
++ map (FileOption "-I") (includeDirs rtsDetails))
return oFile
-- When linking a binary, we need to create a C main() function that
-- starts everything off. This used to be compiled statically as part
-- of the RTS, but that made it hard to change the -rtsopts setting,
-- so now we generate and compile a main() stub as part of every
-- binary and pass the -rtsopts setting directly to the RTS (#5373)
--
mkExtraObjToLinkIntoBinary :: DynFlags -> IO FilePath
mkExtraObjToLinkIntoBinary dflags = do
when (gopt Opt_NoHsMain dflags && haveRtsOptsFlags dflags) $ do
log_action dflags dflags SevInfo noSrcSpan defaultUserStyle
(text "Warning: -rtsopts and -with-rtsopts have no effect with -no-hs-main." $$
text " Call hs_init_ghc() from your main() function to set these options.")
mkExtraObj dflags "c" (showSDoc dflags main)
where
main
| gopt Opt_NoHsMain dflags = empty
| otherwise = vcat [
ptext (sLit "#include \"Rts.h\""),
ptext (sLit "extern StgClosure ZCMain_main_closure;"),
ptext (sLit "int main(int argc, char *argv[])"),
char '{',
ptext (sLit " RtsConfig __conf = defaultRtsConfig;"),
ptext (sLit " __conf.rts_opts_enabled = ")
<> text (show (rtsOptsEnabled dflags)) <> semi,
case rtsOpts dflags of
Nothing -> empty
Just opts -> ptext (sLit " __conf.rts_opts= ") <>
text (show opts) <> semi,
ptext (sLit " __conf.rts_hs_main = rtsTrue;"),
ptext (sLit " return hs_main(argc, argv, &ZCMain_main_closure,__conf);"),
char '}',
char '\n' -- final newline, to keep gcc happy
]
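-- As an illustration (the exact text depends on the flags): linking with
-- "-with-rtsopts=-N2" produces a stub whose body contains
--
-- >   __conf.rts_opts= "-N2";
-- >   __conf.rts_hs_main = rtsTrue;
--
-- just before the call to hs_main.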
-- Write out the link info section into a new assembly file. Previously
-- this was included as inline assembly in the main.c file but this
-- is pretty fragile. gas gets upset trying to calculate relative offsets
-- that span the .note section (notably .text) when debug info is present
mkNoteObjsToLinkIntoBinary :: DynFlags -> [PackageKey] -> IO [FilePath]
mkNoteObjsToLinkIntoBinary dflags dep_packages = do
link_info <- getLinkInfo dflags dep_packages
if (platformSupportsSavingLinkOpts (platformOS (targetPlatform dflags)))
then fmap (:[]) $ mkExtraObj dflags "s" (showSDoc dflags (link_opts link_info))
else return []
where
link_opts info = hcat [
text "\t.section ", text ghcLinkInfoSectionName,
text ",\"\",",
text elfSectionNote,
text "\n",
text "\t.ascii \"", info', text "\"\n",
-- ALL generated assembly must have this section to disable
-- executable stacks. See also
-- compiler/nativeGen/AsmCodeGen.lhs for another instance
-- where we need to do this.
(if platformHasGnuNonexecStack (targetPlatform dflags)
then text ".section .note.GNU-stack,\"\",@progbits\n"
else empty)
]
where
info' = text $ escape info
escape :: String -> String
escape = concatMap (charToC.fromIntegral.ord)
elfSectionNote :: String
elfSectionNote = case platformArch (targetPlatform dflags) of
ArchARM _ _ _ -> "%note"
_ -> "@note"
-- The "link info" is a string representing the parameters of the
-- link. We save this information in the binary, and the next time we
-- link, if nothing else has changed, we use the link info stored in
-- the existing binary to decide whether to re-link or not.
getLinkInfo :: DynFlags -> [PackageKey] -> IO String
getLinkInfo dflags dep_packages = do
package_link_opts <- getPackageLinkOpts dflags dep_packages
pkg_frameworks <- if platformUsesFrameworks (targetPlatform dflags)
then getPackageFrameworks dflags dep_packages
else return []
let extra_ld_inputs = ldInputs dflags
let
link_info = (package_link_opts,
pkg_frameworks,
rtsOpts dflags,
rtsOptsEnabled dflags,
gopt Opt_NoHsMain dflags,
map showOpt extra_ld_inputs,
getOpts dflags opt_l)
--
return (show link_info)
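-- Illustratively, the stored string is simply 'show' applied to the
-- 7-tuple above, so any change to the package link options, frameworks,
-- RTS options, -no-hs-main, extra ld inputs or -optl flags changes the
-- string, and checkLinkInfo then reports that relinking is needed.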
-- generates a Perl script that starts a parallel program under PVM
mk_pvm_wrapper_script :: String -> String -> String -> String
mk_pvm_wrapper_script pvm_executable pvm_executable_base sysMan = unlines $
[
"eval 'exec perl -S $0 ${1+\"$@\"}'",
" if $running_under_some_shell;",
"# =!=!=!=!=!=!=!=!=!=!=!",
"# This script is automatically generated: DO NOT EDIT!!!",
"# Generated by Glasgow Haskell Compiler",
"# ngoqvam choHbogh vaj' vIHoHnISbej !!!!",
"#",
"$pvm_executable = '" ++ pvm_executable ++ "';",
"$pvm_executable_base = '" ++ pvm_executable_base ++ "';",
"$SysMan = '" ++ sysMan ++ "';",
"",
{- ToDo: add the magical shortcuts again iff we actually use them -- HWL
"# first, some magical shortcuts to run "commands" on the binary",
"# (which is hidden)",
"if ($#ARGV == 1 && $ARGV[0] eq '+RTS' && $ARGV[1] =~ /^--((size|file|strip|rm|nm).*)/ ) {",
" local($cmd) = $1;",
" system("$cmd $pvm_executable");",
" exit(0); # all done",
"}", -}
"",
"# Now, run the real binary; process the args first",
"$ENV{'PE'} = $pvm_executable_base;", -- ++ pvm_executable_base,
"$debug = '';",
"$nprocessors = 0; # the default: as many PEs as machines in PVM config",
"@nonPVM_args = ();",
"$in_RTS_args = 0;",
"",
"args: while ($a = shift(@ARGV)) {",
" if ( $a eq '+RTS' ) {",
" $in_RTS_args = 1;",
" } elsif ( $a eq '-RTS' ) {",
" $in_RTS_args = 0;",
" }",
" if ( $a eq '-d' && $in_RTS_args ) {",
" $debug = '-';",
" } elsif ( $a =~ /^-qN(\\d+)/ && $in_RTS_args ) {",
" $nprocessors = $1;",
" } elsif ( $a =~ /^-qp(\\d+)/ && $in_RTS_args ) {",
" $nprocessors = $1;",
" } else {",
" push(@nonPVM_args, $a);",
" }",
"}",
"",
"local($return_val) = 0;",
"# Start the parallel execution by calling SysMan",
"system(\"$SysMan $debug $pvm_executable $nprocessors @nonPVM_args\");",
"$return_val = $?;",
"# ToDo: fix race condition moving files and flushing them!!",
"system(\"cp $ENV{'HOME'}/$pvm_executable_base.???.gr .\") if -f \"$ENV{'HOME'}/$pvm_executable_base.002.gr\";",
"exit($return_val);"
]
-----------------------------------------------------------------------------
-- Look for the /* GHC_PACKAGES ... */ comment at the top of a .hc file
getHCFilePackages :: FilePath -> IO [PackageKey]
getHCFilePackages filename =
Exception.bracket (openFile filename ReadMode) hClose $ \h -> do
l <- hGetLine h
case l of
'/':'*':' ':'G':'H':'C':'_':'P':'A':'C':'K':'A':'G':'E':'S':rest ->
return (map stringToPackageKey (words rest))
_other ->
return []
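-- For example (illustrative; the exact package names depend on the build),
-- a header line of the form "/* GHC_PACKAGES base ghc-prim" makes this
-- function return the corresponding package keys, while an .hc file
-- without such a header is treated as depending on no packages.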
-----------------------------------------------------------------------------
-- Static linking, of .o files
-- The list of packages passed to link is the list of packages on
-- which this program depends, as discovered by the compilation
-- manager. It is combined with the list of packages that the user
-- specifies on the command line with -package flags.
--
-- In one-shot linking mode, we can't discover the package
-- dependencies (because we haven't actually done any compilation or
-- read any interface files), so the user must explicitly specify all
-- the packages.
linkBinary :: DynFlags -> [FilePath] -> [PackageKey] -> IO ()
linkBinary = linkBinary' False
linkBinary' :: Bool -> DynFlags -> [FilePath] -> [PackageKey] -> IO ()
linkBinary' staticLink dflags o_files dep_packages = do
let platform = targetPlatform dflags
mySettings = settings dflags
verbFlags = getVerbFlags dflags
output_fn = exeFileName staticLink dflags
-- get the full list of packages to link with, by combining the
-- explicit packages with the auto packages and all of their
-- dependencies, and eliminating duplicates.
full_output_fn <- if isAbsolute output_fn
then return output_fn
else do d <- getCurrentDirectory
return $ normalise (d </> output_fn)
pkg_lib_paths <- getPackageLibraryPath dflags dep_packages
let pkg_lib_path_opts = concatMap get_pkg_lib_path_opts pkg_lib_paths
get_pkg_lib_path_opts l
| osElfTarget (platformOS platform) &&
dynLibLoader dflags == SystemDependent &&
not (gopt Opt_Static dflags)
= let libpath = if gopt Opt_RelativeDynlibPaths dflags
then "$ORIGIN" </>
(l `makeRelativeTo` full_output_fn)
else l
rpath = if gopt Opt_RPath dflags
then ["-Wl,-rpath", "-Wl," ++ libpath]
else []
                 -- Solaris 11's linker does not support the -rpath-link option. It
                 -- silently ignores it and then complains about the next option,
                 -- -l<some dir>, being a directory rather than the expected object file, e.g.
-- ld: elf error: file
-- /tmp/ghc-src/libraries/base/dist-install/build:
-- elf_begin: I/O error: region read: Is a directory
rpathlink = if (platformOS platform) == OSSolaris2
then []
else ["-Wl,-rpath-link", "-Wl," ++ l]
in ["-L" ++ l] ++ rpathlink ++ rpath
| osMachOTarget (platformOS platform) &&
dynLibLoader dflags == SystemDependent &&
not (gopt Opt_Static dflags) &&
gopt Opt_RPath dflags
= let libpath = if gopt Opt_RelativeDynlibPaths dflags
then "@loader_path" </>
(l `makeRelativeTo` full_output_fn)
else l
in ["-L" ++ l] ++ ["-Wl,-rpath", "-Wl," ++ libpath]
| otherwise = ["-L" ++ l]
let lib_paths = libraryPaths dflags
let lib_path_opts = map ("-L"++) lib_paths
extraLinkObj <- mkExtraObjToLinkIntoBinary dflags
noteLinkObjs <- mkNoteObjsToLinkIntoBinary dflags dep_packages
pkg_link_opts <- do
(package_hs_libs, extra_libs, other_flags) <- getPackageLinkOpts dflags dep_packages
return $ if staticLink
then package_hs_libs -- If building an executable really means making a static
-- library (e.g. iOS), then we only keep the -l options for
-- HS packages, because libtool doesn't accept other options.
-- In the case of iOS these need to be added by hand to the
-- final link in Xcode.
else other_flags ++ package_hs_libs ++ extra_libs -- -Wl,-u,<sym> contained in other_flags
-- needs to be put before -l<package>,
-- otherwise Solaris linker fails linking
-- a binary with unresolved symbols in RTS
-- which are defined in base package
-- the reason for this is a note in ld(1) about
-- '-u' option: "The placement of this option
-- on the command line is significant.
-- This option must be placed before the library
-- that defines the symbol."
pkg_framework_path_opts <-
if platformUsesFrameworks platform
then do pkg_framework_paths <- getPackageFrameworkPath dflags dep_packages
return $ map ("-F" ++) pkg_framework_paths
else return []
framework_path_opts <-
if platformUsesFrameworks platform
then do let framework_paths = frameworkPaths dflags
return $ map ("-F" ++) framework_paths
else return []
pkg_framework_opts <-
if platformUsesFrameworks platform
then do pkg_frameworks <- getPackageFrameworks dflags dep_packages
return $ concat [ ["-framework", fw] | fw <- pkg_frameworks ]
else return []
framework_opts <-
if platformUsesFrameworks platform
then do let frameworks = cmdlineFrameworks dflags
-- reverse because they're added in reverse order from
-- the cmd line:
return $ concat [ ["-framework", fw]
| fw <- reverse frameworks ]
else return []
-- probably _stub.o files
let extra_ld_inputs = ldInputs dflags
-- Here are some libs that need to be linked at the *end* of
-- the command line, because they contain symbols that are referred to
-- by the RTS. We can't therefore use the ordinary way opts for these.
let
debug_opts | WayDebug `elem` ways dflags = [
#if defined(HAVE_LIBBFD)
"-lbfd", "-liberty"
#endif
]
| otherwise = []
let thread_opts
| WayThreaded `elem` ways dflags =
let os = platformOS (targetPlatform dflags)
in if os == OSOsf3 then ["-lpthread", "-lexc"]
else if os `elem` [OSMinGW32, OSFreeBSD, OSOpenBSD,
OSNetBSD, OSHaiku, OSQNXNTO, OSiOS, OSDarwin]
then []
else ["-lpthread"]
| otherwise = []
rc_objs <- maybeCreateManifest dflags output_fn
let link = if staticLink
then SysTools.runLibtool
else SysTools.runLink
link dflags (
map SysTools.Option verbFlags
++ [ SysTools.Option "-o"
, SysTools.FileOption "" output_fn
]
++ map SysTools.Option (
[]
-- Permit the linker to auto link _symbol to _imp_symbol.
-- This lets us link against DLLs without needing an "import library".
++ (if platformOS platform == OSMinGW32
then ["-Wl,--enable-auto-import"]
else [])
-- '-no_compact_unwind'
-- C++/Objective-C exceptions cannot use optimised
-- stack unwinding code. The optimised form is the
-- default in Xcode 4 on at least x86_64, and
-- without this flag we're also seeing warnings
-- like
-- ld: warning: could not create compact unwind for .LFB3: non-standard register 5 being saved in prolog
-- on x86.
++ (if sLdSupportsCompactUnwind mySettings &&
not staticLink &&
(platformOS platform == OSDarwin || platformOS platform == OSiOS) &&
case platformArch platform of
ArchX86 -> True
ArchX86_64 -> True
ArchARM {} -> True
_ -> False
then ["-Wl,-no_compact_unwind"]
else [])
-- '-no_pie'
-- iOS uses 'dynamic-no-pic', so we must pass this to ld to suppress a warning; see #7722
++ (if platformOS platform == OSiOS &&
not staticLink
then ["-Wl,-no_pie"]
else [])
-- '-Wl,-read_only_relocs,suppress'
-- ld gives loads of warnings like:
-- ld: warning: text reloc in _base_GHCziArr_unsafeArray_info to _base_GHCziArr_unsafeArray_closure
-- when linking any program. We're not sure
-- whether this is something we ought to fix, but
       -- for now this flag silences them.
++ (if platformOS platform == OSDarwin &&
platformArch platform == ArchX86 &&
not staticLink
then ["-Wl,-read_only_relocs,suppress"]
else [])
++ o_files
++ lib_path_opts)
++ extra_ld_inputs
++ map SysTools.Option (
rc_objs
++ framework_path_opts
++ framework_opts
++ pkg_lib_path_opts
++ extraLinkObj:noteLinkObjs
++ pkg_link_opts
++ pkg_framework_path_opts
++ pkg_framework_opts
++ debug_opts
++ thread_opts
))
-- parallel only: move binary to another dir -- HWL
success <- runPhase_MoveBinary dflags output_fn
unless success $
throwGhcExceptionIO (InstallationError ("cannot move binary"))
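-- | Work out the output file name: honour -o (appending ".exe" or ".a" only
-- when the given name has no extension), otherwise fall back to "main.exe",
-- "liba.a" or "a.out" depending on platform and link mode.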
exeFileName :: Bool -> DynFlags -> FilePath
exeFileName staticLink dflags
| Just s <- outputFile dflags =
case platformOS (targetPlatform dflags) of
OSMinGW32 -> s <?.> "exe"
_ -> if staticLink
then s <?.> "a"
else s
| otherwise =
if platformOS (targetPlatform dflags) == OSMinGW32
then "main.exe"
else if staticLink
then "liba.a"
else "a.out"
where s <?.> ext | null (takeExtension s) = s <.> ext
| otherwise = s
maybeCreateManifest
:: DynFlags
-> FilePath -- filename of executable
-> IO [FilePath] -- extra objects to embed, maybe
maybeCreateManifest dflags exe_filename
| platformOS (targetPlatform dflags) == OSMinGW32 &&
gopt Opt_GenManifest dflags
= do let manifest_filename = exe_filename <.> "manifest"
writeFile manifest_filename $
"<?xml version=\"1.0\" encoding=\"UTF-8\" standalone=\"yes\"?>\n"++
" <assembly xmlns=\"urn:schemas-microsoft-com:asm.v1\" manifestVersion=\"1.0\">\n"++
" <assemblyIdentity version=\"1.0.0.0\"\n"++
" processorArchitecture=\"X86\"\n"++
" name=\"" ++ dropExtension exe_filename ++ "\"\n"++
" type=\"win32\"/>\n\n"++
" <trustInfo xmlns=\"urn:schemas-microsoft-com:asm.v3\">\n"++
" <security>\n"++
" <requestedPrivileges>\n"++
" <requestedExecutionLevel level=\"asInvoker\" uiAccess=\"false\"/>\n"++
" </requestedPrivileges>\n"++
" </security>\n"++
" </trustInfo>\n"++
"</assembly>\n"
-- Windows will find the manifest file if it is named
-- foo.exe.manifest. However, for extra robustness, and so that
-- we can move the binary around, we can embed the manifest in
-- the binary itself using windres:
if not (gopt Opt_EmbedManifest dflags) then return [] else do
rc_filename <- newTempName dflags "rc"
rc_obj_filename <- newTempName dflags (objectSuf dflags)
writeFile rc_filename $
"1 24 MOVEABLE PURE " ++ show manifest_filename ++ "\n"
-- magic numbers :-)
-- show is a bit hackish above, but we need to escape the
-- backslashes in the path.
runWindres dflags $ map SysTools.Option $
["--input="++rc_filename,
"--output="++rc_obj_filename,
"--output-format=coff"]
-- no FileOptions here: windres doesn't like seeing
-- backslashes, apparently
removeFile manifest_filename
return [rc_obj_filename]
| otherwise = return []
linkDynLibCheck :: DynFlags -> [String] -> [PackageKey] -> IO ()
linkDynLibCheck dflags o_files dep_packages
= do
when (haveRtsOptsFlags dflags) $ do
log_action dflags dflags SevInfo noSrcSpan defaultUserStyle
(text "Warning: -rtsopts and -with-rtsopts have no effect with -shared." $$
text " Call hs_init_ghc() from your main() function to set these options.")
linkDynLib dflags o_files dep_packages
linkStaticLibCheck :: DynFlags -> [String] -> [PackageKey] -> IO ()
linkStaticLibCheck dflags o_files dep_packages
= do
when (platformOS (targetPlatform dflags) `notElem` [OSiOS, OSDarwin]) $
throwGhcExceptionIO (ProgramError "Static archive creation only supported on Darwin/OS X/iOS")
linkBinary' True dflags o_files dep_packages
-- -----------------------------------------------------------------------------
-- Running CPP
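-- | Run the C preprocessor over a file; the Bool selects the standalone cpp
-- program rather than the C compiler in -E mode.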
doCpp :: DynFlags -> Bool -> FilePath -> FilePath -> IO ()
doCpp dflags raw input_fn output_fn = do
let hscpp_opts = picPOpts dflags
let cmdline_include_paths = includePaths dflags
pkg_include_dirs <- getPackageIncludePath dflags []
let include_paths = foldr (\ x xs -> "-I" : x : xs) []
(cmdline_include_paths ++ pkg_include_dirs)
let verbFlags = getVerbFlags dflags
let cpp_prog args | raw = SysTools.runCpp dflags args
| otherwise = SysTools.runCc dflags (SysTools.Option "-E" : args)
let target_defs =
[ "-D" ++ HOST_OS ++ "_BUILD_OS=1",
"-D" ++ HOST_ARCH ++ "_BUILD_ARCH=1",
"-D" ++ TARGET_OS ++ "_HOST_OS=1",
"-D" ++ TARGET_ARCH ++ "_HOST_ARCH=1" ]
          -- remember, in code we *compile*, the HOST is the same as our TARGET,
          -- and BUILD is the same as our HOST.
let sse_defs =
[ "-D__SSE__=1" | isSseEnabled dflags ] ++
[ "-D__SSE2__=1" | isSse2Enabled dflags ] ++
[ "-D__SSE4_2__=1" | isSse4_2Enabled dflags ]
let avx_defs =
[ "-D__AVX__=1" | isAvxEnabled dflags ] ++
[ "-D__AVX2__=1" | isAvx2Enabled dflags ] ++
[ "-D__AVX512CD__=1" | isAvx512cdEnabled dflags ] ++
[ "-D__AVX512ER__=1" | isAvx512erEnabled dflags ] ++
[ "-D__AVX512F__=1" | isAvx512fEnabled dflags ] ++
[ "-D__AVX512PF__=1" | isAvx512pfEnabled dflags ]
backend_defs <- getBackendDefs dflags
cpp_prog ( map SysTools.Option verbFlags
++ map SysTools.Option include_paths
++ map SysTools.Option hsSourceCppOpts
++ map SysTools.Option target_defs
++ map SysTools.Option backend_defs
++ map SysTools.Option hscpp_opts
++ map SysTools.Option sse_defs
++ map SysTools.Option avx_defs
-- Set the language mode to assembler-with-cpp when preprocessing. This
-- alleviates some of the C99 macro rules relating to whitespace and the hash
-- operator, which we tend to abuse. Clang in particular is not very happy
-- about this.
++ [ SysTools.Option "-x"
, SysTools.Option "assembler-with-cpp"
, SysTools.Option input_fn
-- We hackily use Option instead of FileOption here, so that the file
-- name is not back-slashed on Windows. cpp is capable of
-- dealing with / in filenames, so it works fine. Furthermore
-- if we put in backslashes, cpp outputs #line directives
-- with *double* backslashes. And that in turn means that
-- our error messages get double backslashes in them.
-- In due course we should arrange that the lexer deals
-- with these \\ escapes properly.
, SysTools.Option "-o"
, SysTools.FileOption "" output_fn
])
getBackendDefs :: DynFlags -> IO [String]
getBackendDefs dflags | hscTarget dflags == HscLlvm = do
llvmVer <- figureLlvmVersion dflags
return $ case llvmVer of
Just n -> [ "-D__GLASGOW_HASKELL_LLVM__="++show n ]
_ -> []
getBackendDefs _ =
return []
hsSourceCppOpts :: [String]
-- Default CPP defines in Haskell source
hsSourceCppOpts =
[ "-D__GLASGOW_HASKELL__="++cProjectVersionInt ]
-- ---------------------------------------------------------------------------
-- join object files into a single relocatable object file, using ld -r
joinObjectFiles :: DynFlags -> [FilePath] -> FilePath -> IO ()
joinObjectFiles dflags o_files output_fn = do
let mySettings = settings dflags
ldIsGnuLd = sLdIsGnuLd mySettings
osInfo = platformOS (targetPlatform dflags)
ld_r args cc = SysTools.runLink dflags ([
SysTools.Option "-nostdlib",
SysTools.Option "-Wl,-r"
]
++ (if any (cc ==) [Clang, AppleClang, AppleClang51]
then []
else [SysTools.Option "-nodefaultlibs"])
++ (if osInfo == OSFreeBSD
then [SysTools.Option "-L/usr/lib"]
else [])
-- gcc on sparc sets -Wl,--relax implicitly, but
-- -r and --relax are incompatible for ld, so
-- disable --relax explicitly.
++ (if platformArch (targetPlatform dflags) == ArchSPARC
&& ldIsGnuLd
then [SysTools.Option "-Wl,-no-relax"]
else [])
++ map SysTools.Option ld_build_id
++ [ SysTools.Option "-o",
SysTools.FileOption "" output_fn ]
++ args)
-- suppress the generation of the .note.gnu.build-id section,
-- which we don't need and sometimes causes ld to emit a
-- warning:
ld_build_id | sLdSupportsBuildId mySettings = ["-Wl,--build-id=none"]
| otherwise = []
ccInfo <- getCompilerInfo dflags
if ldIsGnuLd
then do
script <- newTempName dflags "ldscript"
cwd <- getCurrentDirectory
let o_files_abs = map (cwd </>) o_files
writeFile script $ "INPUT(" ++ unwords o_files_abs ++ ")"
ld_r [SysTools.FileOption "" script] ccInfo
else if sLdSupportsFilelist mySettings
then do
filelist <- newTempName dflags "filelist"
writeFile filelist $ unlines o_files
ld_r [SysTools.Option "-Wl,-filelist",
SysTools.FileOption "-Wl," filelist] ccInfo
else do
ld_r (map (SysTools.FileOption "") o_files) ccInfo
-- -----------------------------------------------------------------------------
-- Misc.
-- | What phase to run after one of the backend code generators has run
hscPostBackendPhase :: DynFlags -> HscSource -> HscTarget -> Phase
hscPostBackendPhase _ HsBootFile _ = StopLn
hscPostBackendPhase dflags _ hsc_lang =
case hsc_lang of
HscC -> HCc
HscAsm | gopt Opt_SplitObjs dflags -> Splitter
| otherwise -> As False
HscLlvm -> LlvmOpt
HscNothing -> StopLn
HscInterpreted -> StopLn
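-- | Update an object file's timestamp, creating its directory first if needed.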
touchObjectFile :: DynFlags -> FilePath -> IO ()
touchObjectFile dflags path = do
createDirectoryIfMissing True $ takeDirectory path
SysTools.touch dflags "Touching object file" path
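-- | True if the command line carries RTS-options flags: an explicit
-- -with-rtsopts value, or an -rtsopts setting other than the safe-only one.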
haveRtsOptsFlags :: DynFlags -> Bool
haveRtsOptsFlags dflags =
isJust (rtsOpts dflags) || case rtsOptsEnabled dflags of
RtsOptsSafeOnly -> False
_ -> True
| holzensp/ghc | compiler/main/DriverPipeline.hs | bsd-3-clause | 97,028 | 0 | 31 | 33,916 | 15,933 | 8,042 | 7,891 | 1,383 | 44 |
-- Copyright (c) 2016-present, Facebook, Inc.
-- All rights reserved.
--
-- This source code is licensed under the BSD-style license found in the
-- LICENSE file in the root directory of this source tree. An additional grant
-- of patent rights can be found in the PATENTS file in the same directory.
{-# LANGUAGE GADTs #-}
{-# LANGUAGE OverloadedStrings #-}
module Duckling.Rules.IT
( rules
) where
import Duckling.Dimensions.Types
import qualified Duckling.Duration.IT.Rules as Duration
import qualified Duckling.Email.IT.Rules as Email
import qualified Duckling.Numeral.IT.Rules as Numeral
import qualified Duckling.Ordinal.IT.Rules as Ordinal
import qualified Duckling.Temperature.IT.Rules as Temperature
import qualified Duckling.Time.IT.Rules as Time
import qualified Duckling.TimeGrain.IT.Rules as TimeGrain
import qualified Duckling.Volume.IT.Rules as Volume
import Duckling.Types
rules :: Some Dimension -> [Rule]
rules (This Distance) = []
rules (This Duration) = Duration.rules
rules (This Numeral) = Numeral.rules
rules (This Email) = Email.rules
rules (This AmountOfMoney) = []
rules (This Ordinal) = Ordinal.rules
rules (This PhoneNumber) = []
rules (This Quantity) = []
rules (This RegexMatch) = []
rules (This Temperature) = Temperature.rules
rules (This Time) = Time.rules
rules (This TimeGrain) = TimeGrain.rules
rules (This Url) = []
rules (This Volume) = Volume.rules
| rfranek/duckling | Duckling/Rules/IT.hs | bsd-3-clause | 1,397 | 0 | 7 | 196 | 350 | 207 | 143 | 29 | 1 |
-- Copyright (c) 2016-present, Facebook, Inc.
-- All rights reserved.
--
-- This source code is licensed under the BSD-style license found in the
-- LICENSE file in the root directory of this source tree. An additional grant
-- of patent rights can be found in the PATENTS file in the same directory.
{-# LANGUAGE OverloadedStrings #-}
module Duckling.Numeral.CS.Corpus
( corpus ) where
import Data.String
import Prelude
import Duckling.Lang
import Duckling.Numeral.Types
import Duckling.Resolve
import Duckling.Testing.Types
corpus :: Corpus
corpus = (testContext {lang = CS}, allExamples)
allExamples :: [Example]
allExamples = concat
[ examples (NumeralValue 0)
[ "0"
, "nula"
]
, examples (NumeralValue 1)
[ "1"
, "jeden"
, "jedna"
, "jedno"
]
, examples (NumeralValue 2)
[ "dva"
, "dvĕ"
]
, examples (NumeralValue 3)
[ "tři"
]
, examples (NumeralValue 4)
[ "čtyři"
]
]
| rfranek/duckling | Duckling/Numeral/CS/Corpus.hs | bsd-3-clause | 1,094 | 0 | 9 | 353 | 190 | 114 | 76 | 28 | 1 |
{-# LANGUAGE OverloadedStrings #-}
module Is_Point_In_Path where
import Graphics.Blank
import Wiki -- (400,400)
main :: IO ()
main = blankCanvas 3000 $ \ context -> do
send context $ do
strokeStyle "blue";
beginPath();
rect(100,100,200,200)
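        -- Query isPointInPath for a grid of points while the rectangle is
        -- still the current path; the drawing commands are collected and run
        -- later so they don't replace that path before the tests finish.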
cmds <- sequence [ do
b <- isPointInPath (x,y)
return $ do
beginPath()
fillStyle $ if b then "red" else "green"
arc(x, y, 5, 0, pi*2, False)
fill()
| x <- take 8 [25,25+50..]
, y <- take 8 [25,25+50..]
]
stroke()
-- Now draw the points
sequence_ cmds
wiki $ snapShot context "images/Is_Point_In_Path.png"
wiki $ close context
| ku-fpg/blank-canvas | wiki-suite/Is_Point_In_Path.hs | bsd-3-clause | 824 | 0 | 22 | 349 | 263 | 131 | 132 | 23 | 2 |
module ScrabbleScoreKata.Day8Spec (spec) where
import Test.Hspec
import ScrabbleScoreKata.Day8 (score)
spec :: Spec
spec = do
it "is zero when given an empty input" $ do
score "" `shouldBe` 0
it "is 1 when given lowercase 'a'" $ do
score "a" `shouldBe` 1
it "is 1 when given uppercase 'A'" $ do
score "A" `shouldBe` 1
it "is 4 when given 'f'" $ do
score "f" `shouldBe` 4
it "is 2 when given the word 'at'" $ do
score "at" `shouldBe` 2
it "is 12 when given the word 'zoo'" $ do
score "zoo" `shouldBe` 12
it "is 6 when given the word 'street'" $ do
score "street" `shouldBe` 6
it "is 22 when given the word 'quirky'" $ do
score "quirky" `shouldBe` 22
it "is 41 when given the word 'OxyphenButazone'" $ do
score "OxyphenButazone" `shouldBe` 41
it "scores only english-like letters" $ do
score "pinata" `shouldBe` 8
score "piñata" `shouldBe` 7
| Alex-Diez/haskell-tdd-kata | old-katas/test/ScrabbleScoreKata/Day8Spec.hs | bsd-3-clause | 1,080 | 0 | 11 | 391 | 268 | 128 | 140 | 26 | 1 |
{-# LANGUAGE FlexibleContexts #-}
import Plots
import Plots.Axis
import Plots.Types hiding (B)
import Data.List
import Dataset
import Diagrams.Prelude
import Diagrams.Backend.Rasterific
mydata1 = [(1,3), (2,5.5), (3.2, 6), (3.5, 6.1)]
mydata2 = mydata1 & each . _1 *~ 0.5
mydata3 = [V2 1.2 2.7, V2 2 5.1, V2 3.2 2.6, V2 3.5 5]
myaxis :: Axis B V2 Double
myaxis = r2Axis &~ do
linerangevPlotL "vertical linerange" (1,3) 1
errorbarvPlotwithPoint (3.2, 6) 0.2 0.4
errorbarhPlot (3.5, 6.1) 0.5 0.5
crossbarvPlotwithPoint (2, 1) 0.2 0.4
crossbarhPlot (1.2, 4) 0.2 0.4
boxplotvPlot (0.5, 1.2) 0.2 0.4 0.7
_LinePlot' :: Plotable (LinePlot v n) b => Traversal' (Plot' b v n) (LinePlot v n)
_LinePlot' = _Plot'
make :: Diagram B -> IO ()
make = renderRasterific "test.png" (mkWidth 600) . frame 20
main :: IO ()
main = make $ renderAxis myaxis
foo1 = ([(111.0,0.1),(140.0,1.2),(150.0,2.3)],"typeA")
foo2 = ([(155.0,3.5),(167.0,5.1),(200.0,6.4),(211.0,7.5)],"typeB")
foo3 = ([(191.0,5.8),(233.0,8.5),(250.0,9.1),(270.0,9.6)],"typeC")
| bergey/plots | examples/others.hs | bsd-3-clause | 1,113 | 8 | 9 | 236 | 527 | 283 | 244 | -1 | -1 |
{-#Language DeriveFunctor
, DeriveFoldable
, DeriveTraversable
#-}
module Language.TheExperiment.AST.Module where
import Text.Parsec.Pos
import Data.Foldable
import Data.Traversable
import Language.TheExperiment.AST.Statement
data Module a = Module SourcePos [Definition a]
deriving (Show, Eq, Ord, Functor, Foldable, Traversable)
| jvranish/TheExperiment | src/Language/TheExperiment/AST/Module.hs | bsd-3-clause | 370 | 0 | 8 | 72 | 77 | 46 | 31 | 10 | 0 |
import Test.DocTest
main = doctest ["-isrc:console", "Main"] | garethrowlands/marsrover | src/Doctests.hs | bsd-3-clause | 61 | 0 | 6 | 7 | 20 | 11 | 9 | 2 | 1 |
-- Applicative parser for infix arithmetic expressions without any
-- dependency on hackage. Builds an explicit representation of the
-- syntax tree to fold over using client-supplied semantics.
module Spring13.Week5.Parser (parseExp) where
import Control.Applicative hiding (Const)
import Control.Arrow
import Data.Char
import Data.Monoid
import Data.List (foldl')
-- Building block of a computation with some state of type @s@
-- threaded through it, possibly resulting in a value of type @r@
-- along with some updated state.
newtype State s r = State (s -> Maybe (r, s))
-- Expressions
data Expr = Const Integer
| Add Expr Expr
| Mul Expr Expr
deriving Show
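-- Functor/Applicative/Alternative instances thread the parser state;
-- 'empty' always fails and '<|>' tries the second parser only when the
-- first one fails.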
instance Functor (State s) where
fmap f (State g) = State $ fmap (first f) . g
instance Applicative (State s) where
pure x = State $ \s -> Just (x, s)
State f <*> State g = State $ \s ->
case f s of
Nothing -> Nothing
Just (r, s') -> fmap (first r) . g $ s'
instance Alternative (State s) where
empty = State $ const Nothing
State f <|> State g = State $ \s -> maybe (g s) Just (f s)
-- A parser threads some 'String' state through a computation that
-- produces some value of type @a@.
type Parser a = State String a
-- Parse one numerical digit.
digit :: Parser Integer
digit = State $ parseDigit
where parseDigit [] = Nothing
parseDigit s@(c:cs)
| isDigit c = Just (fromIntegral $ digitToInt c, cs)
| otherwise = Nothing
-- Parse an integer. The integer may be prefixed with a negative sign.
num :: Parser Integer
num = maybe id (const negate) <$> optional (char '-') <*> (toInteger <$> some digit)
where toInteger = foldl' ((+) . (* 10)) 0
-- Parse a single white space character.
space :: Parser ()
space = State $ parseSpace
where parseSpace [] = Nothing
parseSpace s@(c:cs)
| isSpace c = Just ((), cs)
| otherwise = Nothing
-- Consume zero or more white space characters.
eatSpace :: Parser ()
eatSpace = const () <$> many space
-- Parse a specific character.
char :: Char -> Parser Char
char c = State parseChar
where parseChar [] = Nothing
parseChar (x:xs) | x == c = Just (c, xs)
| otherwise = Nothing
-- Parse one of our two supported operator symbols.
op :: Parser (Expr -> Expr -> Expr)
op = const Add <$> (char '+') <|> const Mul <$> (char '*')
-- Succeed only if the end of the input has been reached.
eof :: Parser ()
eof = State parseEof
where parseEof [] = Just ((),[])
parseEof _ = Nothing
-- Parse an infix arithmetic expression consisting of integers, plus
-- signs, multiplication signs, and parentheses.
parseExpr :: Parser Expr
parseExpr = eatSpace *>
((buildOp <$> nonOp <*> (eatSpace *> op) <*> parseExpr) <|> nonOp)
where buildOp x op y = x `op` y
nonOp = char '(' *> parseExpr <* char ')' <|> Const <$> num
-- Run a parser over a 'String' returning the parsed value and the
-- remaining 'String' data.
execParser :: Parser a -> String -> Maybe (a, String)
execParser (State f) = f
-- Run a parser over a 'String' returning the parsed value.
evalParser :: Parser a -> String -> Maybe a
evalParser = (fmap fst .) . execParser
-- Parse an arithmetic expression using the supplied semantics for
-- integral constants, addition, and multiplication.
parseExp :: (Integer -> a) -> (a -> a -> a) -> (a -> a -> a) -> String -> Maybe a
parseExp con add mul = (convert <$>) . evalParser (parseExpr <* eof)
where convert (Const x) = con x
convert (Add x y) = add (convert x) (convert y)
convert (Mul x y) = mul (convert x) (convert y)
| bibaijin/cis194 | src/Spring13/Week5/Parser.hs | bsd-3-clause | 3,758 | 0 | 15 | 977 | 1,144 | 598 | 546 | 65 | 3 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE TypeOperators #-}
-----------------------------------------------------------------------------
-- |
-- Module : Servant.Matlab
-- License : BSD3
-- Maintainer : Greg Hale <[email protected]>
-- Stability : experimental
-- Portability : non-portable
--
-- Generating Matlab code to query your APIs
--
-- Using this package is very simple. Say you have this API type around:
--
-- > type API = "users" :> Get '[JSON] [Users]
-- > :<|> "messages" :> Get '[JSON] [Message]
--
-- All you need to do to generate the Javascript code is to write a 'Proxy'
-- for this API type:
--
-- > api :: Proxy API
-- > api = Proxy
--
--
-- @
-- matlab :: String
-- matlab = 'matlabForAPI' api defaultOptions
-- @
--
-- That's it! If you want to write that code to a file:
--
-- @
-- writeMatlabCode :: IO ()
-- writeMatlabCode = 'writeMatlabForAPI' api defaultOptions "./my_api.m"
-- @
--
-- -- TODO change this section
-- If you want to customize the rendering options, take a look
-- at 'CommonGeneratorOptions' which are generic options common to all the
-- generators. the /xxxWith/ variants all take 'CommonGeneratorOptions' whereas
-- the /xxx/ versions use 'defCommonGeneratorOptions'. Once you have some custom
--
-- > myOptions :: 'CommonGeneratorOptions'
--
-- All you need to do to use it is to use 'vanillaJSWith' and pass it @myOptions@.
--
-- @
-- jsCodeWithMyOptions :: String
-- jsCodeWithMyOptions = 'jsForAPI' api ('vanillaJSWith' myOptions)
-- @
--
-- Follow the same pattern for any other generator.
--
-- /Note/: The Angular generators take an additional type of options,
-- namely 'AngularOptions', to let you tweak aspects of the code generation
-- that are specific to /Angular.js/.
module Servant.Matlab
( -- * Generating javascript code from an API type
matlabForAPI
, writeMatlabForAPI
, MatlabGenerator
, -- * Options common to all generators
CommonGeneratorOptions
, defCommonGeneratorOptions
, -- * Function renamers
concatCase
, snakeCase
, camelCase
, -- * Misc.
listFromAPI
, matlab
, GenerateList(..)
) where
import Data.Bool (bool)
import Data.Proxy
import Servant.Foreign
import Servant.Matlab.Internal
import System.Directory (createDirectory)
import System.FilePath ((</>))
-- | Generate the data necessary to generate javascript code
-- for all the endpoints of an API, as ':<|>'-separated values
-- of type 'AjaxReq'.
matlab :: HasForeign layout => Proxy layout -> Foreign layout
matlab p = foreignFor p defReq
-- | Directly generate all the javascript functions for your API
-- from a 'Proxy' for your API type. You can then write it to
-- a file or integrate it in a page, for example.
matlabForAPI :: (HasForeign api, GenerateList (Foreign api))
=> Proxy api
-- ^ proxy for your API type
-> MatlabGenerator
-- ^ matlab code generator to use
-> [(String, String)]
-- ^ a string that you can embed in your pages or write to a file
matlabForAPI p gen = gen (listFromAPI p)
-- | Directly generate all the javascript functions for your API
-- from a 'Proxy' for your API type using the given generator
-- and write the resulting code to a file at the given path.
writeMatlabForAPI :: (HasForeign api, GenerateList (Foreign api))
=> Proxy api
-- ^ proxy for your API type
-> MatlabGenerator
-- ^ matlab code generator to use
-> FilePath
-- ^ path to the file you want to write the resulting matlab code into
-> IO ()
writeMatlabForAPI p gen fp = do
mapM_ (\(f,c) -> writeFile (fp </> f) c) (matlabForAPI p gen)
-- | Utility class used by 'matlabForAPI' which computes
-- the data needed to generate a function for each endpoint
-- and hands it all back in a list.
class GenerateList reqs where
generateList :: reqs -> [AjaxReq]
instance GenerateList AjaxReq where
generateList r = [r]
instance (GenerateList start, GenerateList rest) => GenerateList (start :<|> rest) where
generateList (start :<|> rest) = (generateList start) ++ (generateList rest)
-- | Generate the necessary data for JS codegen as a list, each 'AjaxReq'
-- describing one endpoint from your API type.
listFromAPI :: (HasForeign api, GenerateList (Foreign api)) => Proxy api -> [AjaxReq]
listFromAPI p = generateList (matlab p)
| imalsogreg/servant-matlab | src/Servant/Matlab.hs | bsd-3-clause | 4,665 | 0 | 12 | 1,130 | 529 | 323 | 206 | 45 | 1 |
{-# LANGUAGE RecordWildCards #-}
module System.IO.Streams.Realtime where
------------------------------------------------------------------------------
import Control.Concurrent (threadDelay)
import Control.Monad (when,(>=>))
import Data.Time as Time
import System.Random
------------------------------------------------------------------------------
import System.IO.Streams (InputStream)
import qualified System.IO.Streams as Streams
import System.IO.Streams.Realtime.Internal (TimeOpts(..), runOpts)
------------------------------------------------------------------------------
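-- | Release each element of the first stream at the corresponding
-- wall-clock time drawn from the second stream.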
atTimes :: InputStream a -> InputStream UTCTime -> IO (InputStream a)
atTimes = Streams.zipWithM returnAt
------------------------------------------------------------------------------
atTimes' :: TimeOpts
-> InputStream a
-> InputStream UTCTime
-> IO (InputStream a)
atTimes' opt inS tS = do
stampedStream <- Streams.zip inS tS
runOpts opt (\_ a -> return $ snd a) stampedStream >>= Streams.map fst
------------------------------------------------------------------------------
advance :: Double -> TimeOpts
advance dt = TimeOpts (realToFrac $ dt - 1)
(return . addUTCTime (realToFrac $ -1*dt))
------------------------------------------------------------------------------
delay :: Double -> TimeOpts
delay dt = TimeOpts (realToFrac $ dt + 1)
(return . addUTCTime (realToFrac dt))
------------------------------------------------------------------------------
compress :: UTCTime -> Double -> TimeOpts
compress t0 x = TimeOpts 1
(\t -> let dt = diffUTCTime t t0
in return $ addUTCTime (dt / realToFrac x) t0)
{-
(\f ->
(\t0 -> do
t' <- f t0
let dt' = diffUTCTime t' t0 / realToFrac x :: NominalDiffTime
return $ addUTCTime dt' t0))
-}
------------------------------------------------------------------------------
jitter :: Double -> TimeOpts -- TODO Fix this up
jitter stDev = TimeOpts (realToFrac (5 * stDev))
(\t0 -> do dt <- randomRIO (-2*stDev, 2*stDev)
return $ addUTCTime (realToFrac dt) t0)
{- (\f t0 -> do dt <- randomRIO (-2*stDev,2*stDev)
return $ addUTCTime (realToFrac dt) t
)-}
------------------------------------------------------------------------------
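-- | Release elements at a fixed rate (elements per second), starting from
-- the current time.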
steady :: Double -> InputStream a -> IO (InputStream a)
steady rate inStream = do
t0 <- getCurrentTime
releaseTimes <- Streams.fromList
[addUTCTime (realToFrac $ n/rate) t0 | n <- [0..]]
atTimes inStream releaseTimes
------------------------------------------------------------------------------
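-- | Wait until the given time has passed (if it is still in the future),
-- then return the value.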
returnAt :: a -> UTCTime -> IO a
returnAt a t = do
tNow <- getCurrentTime
let dt = diffUTCTime t tNow
when (dt > 0) $ threadDelay (floor $ dt * 1e6)
return a
| imalsogreg/realtime-streams | src/System/IO/Streams/Realtime.hs | bsd-3-clause | 3,100 | 0 | 14 | 796 | 670 | 350 | 320 | 44 | 1 |
{-
SockeyeASTMeta.hs: AST metadata for Sockeye
Part of Sockeye
Copyright (c) 2018, ETH Zurich.
All rights reserved.
This file is distributed under the terms in the attached LICENSE file.
If you do not find this file, copies can be found by writing to:
ETH Zurich D-INFK, CAB F.78, Universitaetstrasse 6, CH-8092 Zurich,
Attn: Systems Group.
-}
module SockeyeASTMeta where
import Data.List (intercalate)
import Text.Parsec.Pos
newtype ASTMeta = ParserMeta SourcePos
deriving (Eq, Ord)
instance Show ASTMeta where
show (ParserMeta pos) = intercalate ":" [sourceName pos, show $ sourceLine pos, show $ sourceColumn pos]
class MetaAST a where
meta :: a -> ASTMeta
| kishoredbn/barrelfish | tools/sockeye/SockeyeASTMeta.hs | mit | 741 | 0 | 9 | 182 | 111 | 60 | 51 | 9 | 0 |
{-# OPTIONS_GHC -F -pgmF htfpp -fno-warn-missing-signatures #-}
module MultiTrieTest where
import Prelude hiding (null, repeat, map)
import Data.MultiTrie
import Data.Int
import qualified Data.Map as M
import qualified Data.List as L
import Test.Framework
{-# ANN module "HLint: ignore Use camelCase" #-}
type TestMultiTrie = MultiTrie Char Int8
-- | properties of the empty MT
test_empty =
do
assertBool (L.null $ values u)
assertBool (M.null $ children u)
assertEqual 0 (size u)
assertBool (null u)
assertEqual u v
assertBool (null v)
assertEqual u w
assertBool (null w)
assertEqual u x
assertBool (null x)
assertEqual u y
assertBool (null y)
assertEqual u z
assertBool (null z)
assertEqual u t
assertBool (null t)
where
u = empty :: TestMultiTrie
v = leaf []
w = union u u
x = intersection u u
y = subnode "abc" u
z = subnodeReplace "abc" u u
t = fromList []
-- | properties of the singleton MT
test_singleton =
do
assertEqual (values u) [x]
assertBool (M.null $ children u)
assertBool (not $ null u)
assertEqual 1 (size u)
assertEqual u (fromList [("", x)])
assertEqual u (addValue x empty)
assertEqual u (union empty u)
assertEqual u (intersection u u)
assertBool (null $ subnode "abc" u)
assertEqual (subnodeDelete "" u) empty
assertEqual u (subnodeDelete "abc" u)
where
u = singleton x :: TestMultiTrie
x = 0
-- | properties of a leaf MT
test_leaf =
do
assertEqual l (values u)
assertBool (M.null $ children u)
assertEqual (length l) (size u)
assertEqual u (foldr addValue (empty :: TestMultiTrie) l)
assertEqual u (fromList $ L.map (\a -> ("", a)) l)
assertEqual (leaf $ 0 : l) (addValue 0 u)
assertEqual u (intersection u u)
assertEqual u (intersection u $ leaf [0..20])
assertEqual u (union empty u)
assertEqual u (union (leaf [1..5]) (leaf [6..10]))
assertEqual u (subnodeReplace "abc" empty u)
where
u = leaf l :: TestMultiTrie
l = [1..10]
-- | basic properties of a general case MT
test_general_basic =
do
assertBool (not $ null u)
assertEqual [0, 1, 2] (values u)
assertEqual ['a', 'b'] (M.keys $ children u)
assertEqual (length l) (size u)
assertEqual u (fromList $ q ++ p)
assertEqual u (subnode "" u)
assertEqual empty (subnode "zzz" u)
assertEqual (subnode "a" u) t
assertEqual u (subnodeDelete "zzz" u)
assertEqual v (subnodeDelete "a" u)
assertEqual u (subnodeReplace "a" t u)
assertEqual u (subnodeReplace "a" t v)
assertEqual u (union v w)
assertBool (u /= (union u u))
assertEqual empty (intersection v w)
assertEqual w (intersection u w)
assertEqual u (intersection u (union u u))
assertEqual y (map (+1) u)
assertEqual u (fromList $ toList u)
assertBool (listAsMultiSetEquals l $ toList u)
where
u = fromList l :: TestMultiTrie
v = fromList p
w = fromList q
t = fromList $ L.map (\(_:ns, x) -> (ns, x)) q
y = fromList $ L.map (\(ns, x) -> (ns, x + 1)) l
l = p ++ q
p = [("", 0), ("b", 9), ("", 1), ("b", 8), ("", 2), ("b", 7)]
q = [("a", 1), ("aa", 2), ("ab", 3), ("aaa", 4), ("aba", 5)]
-- | properties of an infinite MT
test_repeat =
do
assertBool (not $ null u)
assertEqual l (values u)
assertEqual s (M.keys $ children u)
assertEqual l (values v)
assertEqual s (M.keys $ children v)
assertEqual w (subnodeDelete "a" $ subnodeDelete "b" u)
assertEqual w (intersection w u)
assertEqual w (intersection u w)
where
u = repeat s l :: TestMultiTrie
v = subnode "baabbab" u
w = leaf l
l = [0, 1]
s = ['a', 'b']
-- | map a function over a multi-trie
test_mtmap =
do
assertEqual v (map f u)
assertEqual w (mapWithName g u)
where
u = fromList p :: TestMultiTrie
v = fromList q
w = fromList r
p = [("", 1), ("abc", 2), ("a", 3), ("", 4),
("ab", 5), ("b", 6), ("bc", 7)]
q = L.map (\(n, x) -> (n, f x)) p
r = L.map (\(n, x) -> (n, g n x)) p
f = (+7) . (*13)
g n x = (fromIntegral $ L.length n) + x
-- | union, intersection and cartesian product
test_binop =
do
assertEqual w (union u v)
assertEqual v (union empty v)
assertEqual u (union u empty)
assertEqual x (intersection u v)
assertBool (null $ intersection u empty)
assertBool (null $ intersection empty v)
assertEqual y (cartesian u v)
assertBool (null $ cartesian u empty)
assertBool (null $ cartesian empty v)
assertEqual u (map snd (cartesian z u))
assertEqual u (map fst (cartesian u z))
where
u = fromList p :: TestMultiTrie
v = fromList q
w = fromList (p ++ q)
x = fromList (L.intersect p q)
y = fromList (listProduct (toList u) (toList v))
z = leaf [()]
p = [("", 1), ("abc", 2), ("a", 3), ("", 4),
("ab", 5), ("b", 6), ("bc", 7)]
q = [("pqr", 9), ("ac", 8), ("bc", 7), ("", 6),
("", 4), ("abc", 3), ("abc", 2), ("p", 1)]
test_flatten =
do
assertEqual u (flatten v)
where
u = fromList p :: TestMultiTrie
v = fromList q
p = [(n1 ++ n2, x2) | (n1, l1) <- r, (n2, x2) <- l1]
q = L.map (\(n, l) -> (n, fromList l)) r
r = [
("", [("", 0), ("ab", 1), ("abcba", 2), ("", 3), ("abc", 4)]),
("ab", [("c", 1), ("", 2), ("b", 3), ("cba", 4)]),
("abcb", []),
("abc", [("", 2), ("b", 1), ("ba", 0)])
]
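-- | Name-concatenating cartesian product of two association lists; used to
-- build the expected result of 'cartesian' in 'test_binop'.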
listProduct l1 l2 = [(n1 ++ n2, (v1, v2)) | (n1, v1) <- l1, (n2, v2) <- l2]
| vadimvinnik/multi-trie | tests/MultiTrieTest.hs | mit | 6,168 | 0 | 13 | 2,128 | 2,643 | 1,381 | 1,262 | 159 | 1 |
-- | Open a window and get an OpenGL context.
module Window (UI(..), initGL, terminate, EventKey(..)) where
import Control.Concurrent.STM (TQueue, atomically, newTQueueIO, tryReadTQueue, writeTQueue)
import Prelude hiding (init)
import Control.Applicative
import Control.Monad (when)
import Data.IORef
import Data.Maybe (isNothing)
import Data.Set (Set)
import qualified Data.Set as S
import Data.Time.Clock
import Graphics.UI.GLFW
import Linear
import System.Directory (getCurrentDirectory, setCurrentDirectory)
import FRP.Elerea.Simple
data EventKey = EventKey !Window !Key !Int !KeyState !ModifierKeys
-- | Interface updates provided to the party responsible for
-- generating each frame.
data UI = UI {
window :: Window
-- ^ window handle
, timeStep :: Double
-- ^ Time in seconds since last frame
, keys :: TQueue EventKey
    -- ^ Stream of keyboard events
, keysPressed :: Set Key
-- ^ All keys currently pressed
, buttonsPressed :: Set MouseButton
-- ^ All mouse buttons currently pressed
, mousePos :: V2 Double
-- ^ Current mouse position
, windowSize :: V2 Int
-- ^ Current window size
}
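-- GLFW callbacks: they record key, mouse-button, cursor, and window-size
-- events into IORefs (and key events into a TQueue) so that each frame can
-- take a snapshot of the input state.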
keyCallback :: IORef (Set Key) -> KeyCallback
keyCallback keys _w k _ KeyState'Pressed _mods = modifyIORef' keys (S.insert k)
keyCallback keys _w k _ KeyState'Released _mods = modifyIORef' keys (S.delete k)
keyCallback _ _ _ _ _ _ = return ()
keyCallback' :: TQueue EventKey -> KeyCallback
keyCallback' tc win k sc ka mk = atomically $ writeTQueue tc $ EventKey win k sc ka mk
mbCallback :: IORef (Set MouseButton) -> MouseButtonCallback
mbCallback mbs _w b MouseButtonState'Pressed _ = modifyIORef' mbs (S.insert b)
mbCallback mbs _w b MouseButtonState'Released _ = modifyIORef' mbs (S.delete b)
mpCallback :: IORef (V2 Double) -> CursorPosCallback
mpCallback mp _w x y = writeIORef mp (V2 x y)
wsCallback :: IORef (V2 Int) -> WindowSizeCallback
wsCallback ws _w w h = writeIORef ws (V2 w h)
-- | @initGL windowTitle width height@ creates a window with the given
-- title and dimensions. The action returned polls pending events and
-- produces an updated snapshot of the user interface for the next frame;
-- rendering and the buffer swap are left to the caller.
initGL :: String -> Int -> Int -> IO (IO UI)
initGL windowTitle width height =
do currDir <- getCurrentDirectory
setErrorCallback $ Just simpleErrorCallback
r <- init
when (not r) (error "Error initializing GLFW!")
windowHint $ WindowHint'ClientAPI ClientAPI'OpenGL
windowHint $ WindowHint'OpenGLForwardCompat True
windowHint $ WindowHint'OpenGLProfile OpenGLProfile'Core
windowHint $ WindowHint'ContextVersionMajor 3
windowHint $ WindowHint'ContextVersionMinor 2
m@(~(Just w)) <- createWindow width height windowTitle Nothing Nothing
when (isNothing m) (error "Couldn't create window!")
makeContextCurrent m
kbState <- newIORef S.empty
mbState <- newIORef S.empty
mpState <- getCursorPos w >>= newIORef . uncurry V2
wsState <- getWindowSize w >>= newIORef . uncurry V2
lastTick <- getCurrentTime >>= newIORef
keyEventsChan <- newTQueueIO :: IO (TQueue EventKey)
-- setKeyCallback w (Just $ keyCallback kbState)
setKeyCallback w (Just $ keyCallback' keyEventsChan)
setMouseButtonCallback w (Just $ mbCallback mbState)
setCursorPosCallback w (Just $ mpCallback mpState)
setWindowSizeCallback w (Just $ wsCallback wsState)
setCurrentDirectory currDir
return $ do
pollEvents
t <- getCurrentTime
dt <- realToFrac . diffUTCTime t <$> readIORef lastTick
writeIORef lastTick t
ui <- UI w dt keyEventsChan <$> readIORef kbState
<*> readIORef mbState
<*> readIORef mpState
<*> readIORef wsState
return ui
where simpleErrorCallback e s = putStrLn $ unwords [show e, show s]
| bgaster/blocks | Window.hs | mit | 3,987 | 0 | 15 | 900 | 1,052 | 523 | 529 | 82 | 1 |
{-# LANGUAGE CPP #-}
{-
Copyright (C) 2009 John MacFarlane <[email protected]>,
Anton van Straaten <[email protected]>
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
-}
{- Functions for content conversion.
-}
module Network.Gitit.ContentTransformer
(
-- * ContentTransformer runners
runPageTransformer
, runFileTransformer
-- * Gitit responders
, showRawPage
, showFileAsText
, showPage
, exportPage
, showHighlightedSource
, showFile
, preview
, applyPreCommitPlugins
-- * Cache support for transformers
, cacheHtml
, cachedHtml
-- * Content retrieval combinators
, rawContents
-- * Response-generating combinators
, textResponse
, mimeFileResponse
, mimeResponse
, exportPandoc
, applyWikiTemplate
-- * Content-type transformation combinators
, pageToWikiPandoc
, pageToPandoc
, pandocToHtml
, highlightSource
-- * Content or context augmentation combinators
, applyPageTransforms
, wikiDivify
, addPageTitleToPandoc
, addMathSupport
, addScripts
-- * ContentTransformer context API
, getFileName
, getPageName
, getLayout
, getParams
, getCacheable
-- * Pandoc and wiki content conversion support
, inlinesToURL
, inlinesToString
)
where
import qualified Control.Exception as E
import Control.Monad.State
import Control.Monad.Reader (ask)
import Data.Foldable (traverse_)
import Data.List (stripPrefix)
import Data.Maybe (isNothing, mapMaybe)
import Network.Gitit.Cache (lookupCache, cacheContents)
import Network.Gitit.Export (exportFormats)
import Network.Gitit.Framework hiding (uriPath)
import Network.Gitit.Layout
import Network.Gitit.Page (stringToPage)
import Network.Gitit.Server
import Network.Gitit.State
import Network.Gitit.Types
import Network.HTTP (urlDecode)
import Network.URI (isUnescapedInURI)
import Network.URL (encString)
import System.FilePath
import qualified Text.Pandoc.Builder as B
import Text.HTML.SanitizeXSS (sanitizeBalance)
import Text.Highlighting.Kate
import Text.Pandoc hiding (MathML, WebTeX, MathJax)
import Text.XHtml hiding ( (</>), dir, method, password, rev )
import Text.XHtml.Strict (stringToHtmlString)
#if MIN_VERSION_blaze_html(0,5,0)
import Text.Blaze.Html.Renderer.String as Blaze ( renderHtml )
#else
import Text.Blaze.Renderer.String as Blaze ( renderHtml )
#endif
import qualified Data.Text as T
import qualified Data.Set as Set
import qualified Data.ByteString as S (concat)
import qualified Data.ByteString.Char8 as SC (unpack)
import qualified Data.ByteString.Lazy as L (toChunks, fromChunks)
import qualified Data.FileStore as FS
import qualified Text.Pandoc as Pandoc
import Text.URI (parseURI, URI(..), uriQueryItems)
#if MIN_VERSION_pandoc(1,14,0)
import Text.Pandoc.Error (handleError)
#else
handleError :: Pandoc -> Pandoc
handleError = id
#endif
--
-- ContentTransformer runners
--
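-- | Run a 'ContentTransformer' whose context is set up for a wiki page:
-- the file name is derived from the page name plus the configured default
-- extension.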
runPageTransformer :: ToMessage a
=> ContentTransformer a
-> GititServerPart a
runPageTransformer xform = withData $ \params -> do
page <- getPage
cfg <- getConfig
evalStateT xform Context{ ctxFile = pathForPage page (defaultExtension cfg)
, ctxLayout = defaultPageLayout{
pgPageName = page
, pgTitle = page
, pgPrintable = pPrintable params
, pgMessages = pMessages params
, pgRevision = pRevision params
, pgLinkToFeed = useFeed cfg }
, ctxCacheable = True
, ctxTOC = tableOfContents cfg
, ctxBirdTracks = showLHSBirdTracks cfg
, ctxCategories = []
, ctxMeta = [] }
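-- | Run a 'ContentTransformer' on a non-page resource: the requested name
-- is used as the file name unchanged.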
runFileTransformer :: ToMessage a
=> ContentTransformer a
-> GititServerPart a
runFileTransformer xform = withData $ \params -> do
page <- getPage
cfg <- getConfig
evalStateT xform Context{ ctxFile = id page
, ctxLayout = defaultPageLayout{
pgPageName = page
, pgTitle = page
, pgPrintable = pPrintable params
, pgMessages = pMessages params
, pgRevision = pRevision params
, pgLinkToFeed = useFeed cfg }
, ctxCacheable = True
, ctxTOC = tableOfContents cfg
, ctxBirdTracks = showLHSBirdTracks cfg
, ctxCategories = []
, ctxMeta = [] }
-- | Converts a @ContentTransformer@ into a @GititServerPart@;
-- specialized to wiki pages.
-- runPageTransformer :: ToMessage a
-- => ContentTransformer a
-- -> GititServerPart a
-- runPageTransformer = runTransformer pathForPage
-- | Converts a @ContentTransformer@ into a @GititServerPart@;
-- specialized to non-pages.
-- runFileTransformer :: ToMessage a
-- => ContentTransformer a
-- -> GititServerPart a
-- runFileTransformer = runTransformer id
--
-- Gitit responders
--
-- | Responds with raw page source.
showRawPage :: Handler
showRawPage = runPageTransformer rawTextResponse
-- | Responds with raw source (for non-pages such as source
-- code files).
showFileAsText :: Handler
showFileAsText = runFileTransformer rawTextResponse
-- | Responds with rendered wiki page.
showPage :: Handler
showPage = runPageTransformer htmlViaPandoc
-- | Responds with page exported into selected format.
exportPage :: Handler
exportPage = runPageTransformer exportViaPandoc
-- | Responds with highlighted source code.
showHighlightedSource :: Handler
showHighlightedSource = runFileTransformer highlightRawSource
-- | Responds with non-highlighted source code.
showFile :: Handler
showFile = runFileTransformer (rawContents >>= mimeFileResponse)
-- | Responds with rendered page derived from form data.
preview :: Handler
preview = runPageTransformer $
liftM (filter (/= '\r') . pRaw) getParams >>=
contentsToPage >>=
pageToWikiPandoc >>=
pandocToHtml >>=
return . toResponse . renderHtmlFragment
-- | Applies pre-commit plugins to raw page source, possibly
-- modifying it.
applyPreCommitPlugins :: String -> GititServerPart String
applyPreCommitPlugins = runPageTransformer . applyPreCommitTransforms
--
-- Top level, composed transformers
--
-- | Responds with raw source.
rawTextResponse :: ContentTransformer Response
rawTextResponse = rawContents >>= textResponse
-- | Responds with a wiki page in the format specified
-- by the @format@ parameter.
exportViaPandoc :: ContentTransformer Response
exportViaPandoc = rawContents >>=
maybe mzero return >>=
contentsToPage >>=
pageToWikiPandoc >>=
exportPandoc
-- | Responds with a wiki page. Uses the cache when
-- possible and caches the rendered page when appropriate.
htmlViaPandoc :: ContentTransformer Response
htmlViaPandoc = cachedHtml `mplus`
(rawContents >>=
maybe mzero return >>=
contentsToPage >>=
handleRedirects >>=
either return
(pageToWikiPandoc >=>
addMathSupport >=>
pandocToHtml >=>
wikiDivify >=>
applyWikiTemplate >=>
cacheHtml))
-- | Responds with highlighted source code in a wiki
-- page template. Uses the cache when possible and
-- caches the rendered page when appropriate.
highlightRawSource :: ContentTransformer Response
highlightRawSource =
cachedHtml `mplus`
(updateLayout (\l -> l { pgTabs = [ViewTab,HistoryTab] }) >>
rawContents >>=
highlightSource >>=
applyWikiTemplate >>=
cacheHtml)
--
-- Cache support for transformers
--
-- | Caches a response (actually just the response body) on disk,
-- unless the context indicates that the page is not cacheable.
cacheHtml :: Response -> ContentTransformer Response
cacheHtml resp' = do
params <- getParams
file <- getFileName
cacheable <- getCacheable
cfg <- lift getConfig
when (useCache cfg && cacheable && isNothing (pRevision params) && not (pPrintable params)) $
lift $ cacheContents file $ S.concat $ L.toChunks $ rsBody resp'
return resp'
-- | Returns cached page if available, otherwise mzero.
cachedHtml :: ContentTransformer Response
cachedHtml = do
file <- getFileName
params <- getParams
cfg <- lift getConfig
if useCache cfg && not (pPrintable params) && isNothing (pRevision params)
then do mbCached <- lift $ lookupCache file
let emptyResponse = setContentType "text/html; charset=utf-8" . toResponse $ ()
maybe mzero (\(_modtime, contents) -> lift . ok $ emptyResponse{rsBody = L.fromChunks [contents]}) mbCached
else mzero
--
-- Content retrieval combinators
--
-- | Returns raw file contents.
rawContents :: ContentTransformer (Maybe String)
rawContents = do
params <- getParams
file <- getFileName
fs <- lift getFileStore
let rev = pRevision params
liftIO $ E.catch (liftM Just $ FS.retrieve fs file rev)
(\e -> if e == FS.NotFound then return Nothing else E.throwIO e)
--
-- Response-generating combinators
--
-- | Converts raw contents to a text/plain response.
textResponse :: Maybe String -> ContentTransformer Response
textResponse Nothing = mzero -- fail quietly if file not found
textResponse (Just c) = mimeResponse c "text/plain; charset=utf-8"
-- | Converts raw contents to a response that is appropriate with
-- a mime type derived from the page's extension.
mimeFileResponse :: Maybe String -> ContentTransformer Response
mimeFileResponse Nothing = error "Unable to retrieve file contents."
mimeFileResponse (Just c) =
mimeResponse c =<< lift . getMimeTypeForExtension . takeExtension =<< getFileName
mimeResponse :: Monad m
=> String -- ^ Raw contents for response body
-> String -- ^ Mime type
-> m Response
mimeResponse c mimeType =
return . setContentType mimeType . toResponse $ c
-- | Converts Pandoc to response using format specified in parameters.
exportPandoc :: Pandoc -> ContentTransformer Response
exportPandoc doc = do
params <- getParams
page <- getPageName
cfg <- lift getConfig
let format = pFormat params
case lookup format (exportFormats cfg) of
Nothing -> error $ "Unknown export format: " ++ format
Just writer -> lift (writer page doc)
-- | Adds the sidebar, page tabs, and other elements of the wiki page
-- layout to the raw content.
applyWikiTemplate :: Html -> ContentTransformer Response
applyWikiTemplate c = do
Context { ctxLayout = layout } <- get
lift $ formattedPage layout c
--
-- Content-type transformation combinators
--
-- | Converts Page to Pandoc, applies page transforms, and adds page
-- title.
pageToWikiPandoc :: Page -> ContentTransformer Pandoc
pageToWikiPandoc page' =
pageToWikiPandoc' page' >>= addPageTitleToPandoc (pageTitle page')
pageToWikiPandoc' :: Page -> ContentTransformer Pandoc
pageToWikiPandoc' = applyPreParseTransforms >=>
pageToPandoc >=> applyPageTransforms
-- | Converts source text to Pandoc using default page type.
pageToPandoc :: Page -> ContentTransformer Pandoc
pageToPandoc page' = do
modifyContext $ \ctx -> ctx{ ctxTOC = pageTOC page'
, ctxCategories = pageCategories page'
, ctxMeta = pageMeta page' }
return $ readerFor (pageFormat page') (pageLHS page') (pageText page')
-- | Detects if the page is a redirect page and handles accordingly. The exact
-- behaviour is as follows:
--
-- If the page is /not/ a redirect page (the most common case), then check the
-- referer to see if the client came to this page as a result of a redirect
-- from another page. If so, then add a notice to the messages to notify the
-- user that they were redirected from another page, and provide a link back
-- to the original page, with an extra parameter to disable redirection
-- (e.g., to allow the original page to be edited).
--
-- If the page /is/ a redirect page, then check the query string for the
-- @redirect@ parameter. This can modify the behaviour of the redirect as
-- follows:
--
-- 1. If the @redirect@ parameter is unset, then check the referer to see if
-- client came to this page as a result of a redirect from another page. If
-- so, then do not redirect, and add a notice to the messages explaining
-- that this page is a redirect page, that would have redirected to the
-- destination given in the metadata (and provide a link thereto), but this
-- was stopped because a double-redirect was detected. This is a simple way
-- to prevent cyclical redirects and other abuses enabled by redirects.
-- If the client did /not/ come to this page as
-- a result of a redirect, then redirect back to the same page, except with
-- the redirect parameter set to @\"yes\"@.
--
-- 2. If the @redirect@ parameter is set to \"yes\", then redirect to the
-- destination specificed in the metadata. This uses a client-side (meta
-- refresh + javascript backup) redirect to make sure the referer is set to
-- this URL.
--
-- 3. If the @redirect@ parameter is set to \"no\", then do not redirect, but
-- add a notice to the messages that this page /would/ have redirected to
-- the destination given in the metadata had it not been disabled, and
-- provide a link to the destination given in the metadata. This behaviour
-- is the default when the @revision@ parameter is present in the query string.
handleRedirects :: Page -> ContentTransformer (Either Response Page)
handleRedirects page = case lookup "redirect" (pageMeta page) of
Nothing -> isn'tRedirect
Just destination -> isRedirect destination
where
addMessage message = modifyContext $ \context -> context
{ ctxLayout = (ctxLayout context)
{ pgMessages = pgMessages (ctxLayout context) ++ [message]
}
}
redirectedFrom source = do
(url, html) <- processSource source
return $ concat
[ "Redirected from <a href=\""
, url
, "?redirect=no\" title=\"Go to original page\">"
, html
, "</a>"
]
doubleRedirect source destination = do
(url, html) <- processSource source
(url', html') <- processDestination destination
return $ concat
[ "This page normally redirects to <a href=\""
, url'
, "\" title=\"Continue to destination\">"
, html'
, "</a>, but as you were already redirected from <a href=\""
, url
, "?redirect=no\" title=\"Go to original page\">"
, html
, "</a>"
, ", this was stopped to prevent a double-redirect."
]
cancelledRedirect destination = do
(url', html') <- processDestination destination
return $ concat
[ "This page redirects to <a href=\""
, url'
, "\" title=\"Continue to destination\">"
, html'
, "</a>."
]
processSource source = do
base' <- getWikiBase
let url = stringToHtmlString $ base' ++ urlForPage source
let html = stringToHtmlString source
return (url, html)
processDestination destination = do
base' <- getWikiBase
let (page', fragment) = break (== '#') destination
let url = stringToHtmlString $ concat
[ base'
, urlForPage page'
, fragment
]
let html = stringToHtmlString page'
return (url, html)
getSource = do
cfg <- lift getConfig
base' <- getWikiBase
request <- askRq
return $ do
uri <- getHeader "referer" request >>= parseURI . SC.unpack
let params = uriQueryItems uri
redirect' <- lookup "redirect" params
guard $ redirect' == "yes"
path' <- stripPrefix (base' ++ "/") (uriPath uri)
let path'' = if null path' then frontPage cfg else urlDecode path'
guard $ isPage path''
return path''
withBody = setContentType "text/html; charset=utf-8" . toResponse
isn'tRedirect = do
getSource >>= traverse_ (redirectedFrom >=> addMessage)
return (Right page)
isRedirect destination = do
params <- getParams
case maybe (pRedirect params) (\_ -> Just False) (pRevision params) of
Nothing -> do
source <- getSource
case source of
Just source' -> do
doubleRedirect source' destination >>= addMessage
return (Right page)
Nothing -> fmap Left $ do
base' <- getWikiBase
let url' = concat
[ base'
, urlForPage (pageName page)
, "?redirect=yes"
]
lift $ seeOther url' $ withBody $ concat
[ "<!doctype html><html><head><title>307 Redirect"
, "</title></head><body><p>You are being <a href=\""
, stringToHtmlString url'
, "\">redirected</a>.</body></p></html>"
]
Just True -> fmap Left $ do
(url', html') <- processDestination destination
lift $ ok $ withBody $ concat
[ "<!doctype html><html><head><title>Redirecting to "
, html'
, "</title><meta http-equiv=\"refresh\" contents=\"0; url="
, url'
, "\" /><script type=\"text/javascript\">window.location=\""
, url'
, "\"</script></head><body><p>Redirecting to <a href=\""
, url'
, "\">"
, html'
, "</a>...</p></body></html>"
]
Just False -> do
cancelledRedirect destination >>= addMessage
return (Right page)
-- | Converts contents of page file to Page object.
contentsToPage :: String -> ContentTransformer Page
contentsToPage s = do
cfg <- lift getConfig
pn <- getPageName
return $ stringToPage cfg pn s
-- | Converts pandoc document to HTML.
pandocToHtml :: Pandoc -> ContentTransformer Html
pandocToHtml pandocContents = do
base' <- lift getWikiBase
toc <- liftM ctxTOC get
bird <- liftM ctxBirdTracks get
cfg <- lift getConfig
return $ primHtml $ T.unpack .
(if xssSanitize cfg then sanitizeBalance else id) . T.pack $
writeHtmlString def{
writerStandalone = True
, writerTemplate = "$if(toc)$<div id=\"TOC\">\n$toc$\n</div>\n$endif$\n$body$"
, writerHTMLMathMethod =
case mathMethod cfg of
MathML -> Pandoc.MathML Nothing
WebTeX u -> Pandoc.WebTeX u
MathJax u -> Pandoc.MathJax u
_ -> JsMath (Just $ base' ++
"/js/jsMath/easy/load.js")
, writerTableOfContents = toc
, writerHighlight = True
, writerExtensions = if bird
then Set.insert
Ext_literate_haskell
$ writerExtensions def
else writerExtensions def
-- note: javascript obfuscation gives problems on preview
, writerEmailObfuscation = ReferenceObfuscation
} pandocContents
-- | Returns highlighted source code.
highlightSource :: Maybe String -> ContentTransformer Html
highlightSource Nothing = mzero
highlightSource (Just source) = do
file <- getFileName
let formatOpts = defaultFormatOpts { numberLines = True, lineAnchors = True }
case languagesByExtension $ takeExtension file of
[] -> mzero
(l:_) -> return $ primHtml $ Blaze.renderHtml
$ formatHtmlBlock formatOpts
$! highlightAs l $ filter (/='\r') source
--
-- Plugin combinators
--
getPageTransforms :: ContentTransformer [Pandoc -> PluginM Pandoc]
getPageTransforms = liftM (mapMaybe pageTransform) $ queryGititState plugins
where pageTransform (PageTransform x) = Just x
pageTransform _ = Nothing
getPreParseTransforms :: ContentTransformer [String -> PluginM String]
getPreParseTransforms = liftM (mapMaybe preParseTransform) $
queryGititState plugins
where preParseTransform (PreParseTransform x) = Just x
preParseTransform _ = Nothing
getPreCommitTransforms :: ContentTransformer [String -> PluginM String]
getPreCommitTransforms = liftM (mapMaybe preCommitTransform) $
queryGititState plugins
where preCommitTransform (PreCommitTransform x) = Just x
preCommitTransform _ = Nothing
-- | @applyTransform a t@ applies the transform @t@ to input @a@.
applyTransform :: a -> (a -> PluginM a) -> ContentTransformer a
applyTransform inp transform = do
context <- get
conf <- lift getConfig
user <- lift getLoggedInUser
fs <- lift getFileStore
req <- lift askRq
let pluginData = PluginData{ pluginConfig = conf
, pluginUser = user
, pluginRequest = req
, pluginFileStore = fs }
(result', context') <- liftIO $ runPluginM (transform inp) pluginData context
put context'
return result'
-- | Applies all the page transform plugins to a Pandoc document.
applyPageTransforms :: Pandoc -> ContentTransformer Pandoc
applyPageTransforms c = do
xforms <- getPageTransforms
foldM applyTransform c (wikiLinksTransform : xforms)
-- | Applies all the pre-parse transform plugins to a Page object.
applyPreParseTransforms :: Page -> ContentTransformer Page
applyPreParseTransforms page' = getPreParseTransforms >>= foldM applyTransform (pageText page') >>=
(\t -> return page'{ pageText = t })
-- | Applies all the pre-commit transform plugins to a raw string.
applyPreCommitTransforms :: String -> ContentTransformer String
applyPreCommitTransforms c = getPreCommitTransforms >>= foldM applyTransform c
--
-- Content or context augmentation combinators
--
-- | Puts rendered page content into a wikipage div, adding
-- categories.
wikiDivify :: Html -> ContentTransformer Html
wikiDivify c = do
categories <- liftM ctxCategories get
base' <- lift getWikiBase
let categoryLink ctg = li (anchor ! [href $ base' ++ "/_category/" ++ ctg] << ctg)
let htmlCategories = if null categories
then noHtml
else thediv ! [identifier "categoryList"] << ulist << map categoryLink categories
return $ thediv ! [identifier "wikipage"] << [c, htmlCategories]
-- | Adds page title to a Pandoc document.
addPageTitleToPandoc :: String -> Pandoc -> ContentTransformer Pandoc
addPageTitleToPandoc title' (Pandoc _ blocks) = do
updateLayout $ \layout -> layout{ pgTitle = title' }
return $ if null title'
then Pandoc nullMeta blocks
else Pandoc (B.setMeta "title" (B.str title') nullMeta) blocks
-- | Adds javascript links for math support.
addMathSupport :: a -> ContentTransformer a
addMathSupport c = do
conf <- lift getConfig
updateLayout $ \l ->
case mathMethod conf of
JsMathScript -> addScripts l ["jsMath/easy/load.js"]
MathML -> addScripts l ["MathMLinHTML.js"]
WebTeX _ -> l
MathJax u -> addScripts l [u]
RawTeX -> l
return c
-- | Adds javascripts to page layout.
addScripts :: PageLayout -> [String] -> PageLayout
addScripts layout scriptPaths =
layout{ pgScripts = scriptPaths ++ pgScripts layout }
--
-- ContentTransformer context API
--
getParams :: ContentTransformer Params
getParams = lift (withData return)
getFileName :: ContentTransformer FilePath
getFileName = liftM ctxFile get
getPageName :: ContentTransformer String
getPageName = liftM (pgPageName . ctxLayout) get
getLayout :: ContentTransformer PageLayout
getLayout = liftM ctxLayout get
getCacheable :: ContentTransformer Bool
getCacheable = liftM ctxCacheable get
-- | Updates the layout with the result of applying f to the current layout
updateLayout :: (PageLayout -> PageLayout) -> ContentTransformer ()
updateLayout f = do
ctx <- get
let l = ctxLayout ctx
put ctx { ctxLayout = f l }
--
-- Pandoc and wiki content conversion support
--
readerFor :: PageType -> Bool -> String -> Pandoc
readerFor pt lhs =
let defPS = def{ readerSmart = True
, readerExtensions = if lhs
then Set.insert Ext_literate_haskell
$ readerExtensions def
else readerExtensions def
, readerParseRaw = True
}
in handleError . case pt of
RST -> readRST defPS
Markdown -> readMarkdown defPS
#if MIN_VERSION_pandoc(1,14,0)
CommonMark -> readCommonMark defPS
#else
CommonMark -> error "CommonMark input requires pandoc 1.14"
#endif
LaTeX -> readLaTeX defPS
HTML -> readHtml defPS
Textile -> readTextile defPS
Org -> readOrg defPS
DocBook -> readDocBook defPS
MediaWiki -> readMediaWiki defPS
wikiLinksTransform :: Pandoc -> PluginM Pandoc
wikiLinksTransform pandoc
= do cfg <- liftM pluginConfig ask -- Can't use askConfig from Interface due to circular dependencies.
return (bottomUp (convertWikiLinks cfg) pandoc)
-- | Convert links with no URL to wikilinks.
convertWikiLinks :: Config -> Inline -> Inline
#if MIN_VERSION_pandoc(1,16,0)
convertWikiLinks cfg (Link attr ref ("", "")) | useAbsoluteUrls cfg =
Link attr ref ("/" </> baseUrl cfg </> inlinesToURL ref, "Go to wiki page")
convertWikiLinks _cfg (Link attr ref ("", "")) =
Link attr ref (inlinesToURL ref, "Go to wiki page")
#else
convertWikiLinks cfg (Link ref ("", "")) | useAbsoluteUrls cfg =
Link ref ("/" </> baseUrl cfg </> inlinesToURL ref, "Go to wiki page")
convertWikiLinks _cfg (Link ref ("", "")) =
Link ref (inlinesToURL ref, "Go to wiki page")
#endif
convertWikiLinks _cfg x = x
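-- Illustrative sketch (not part of the original source): a wiki link is a pandoc
-- link whose target is empty, e.g. written as [Front Page]() in markdown. Assuming
-- the pandoc >= 1.16 constructor shape used above, a link such as
--   Link nullAttr [Str "Front", Space, Str "Page"] ("", "")
-- would be rewritten to
--   Link nullAttr [Str "Front", Space, Str "Page"] ("Front%20Page", "Go to wiki page")
-- since inlinesToURL (below) flattens the inlines to "Front Page" and percent-encodes it.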
-- | Derives a URL from a list of Pandoc Inline elements.
inlinesToURL :: [Inline] -> String
inlinesToURL = encString False isUnescapedInURI . inlinesToString
-- | Convert a list of inlines into a string.
inlinesToString :: [Inline] -> String
inlinesToString = concatMap go
where go x = case x of
Str s -> s
Emph xs -> concatMap go xs
Strong xs -> concatMap go xs
Strikeout xs -> concatMap go xs
Superscript xs -> concatMap go xs
Subscript xs -> concatMap go xs
SmallCaps xs -> concatMap go xs
Quoted DoubleQuote xs -> '"' : (concatMap go xs ++ "\"")
Quoted SingleQuote xs -> '\'' : (concatMap go xs ++ "'")
Cite _ xs -> concatMap go xs
Code _ s -> s
Space -> " "
#if MIN_VERSION_pandoc(1,16,0)
SoftBreak -> " "
#endif
LineBreak -> " "
Math DisplayMath s -> "$$" ++ s ++ "$$"
Math InlineMath s -> "$" ++ s ++ "$"
RawInline (Format "tex") s -> s
RawInline _ _ -> ""
#if MIN_VERSION_pandoc(1,16,0)
Link _ xs _ -> concatMap go xs
Image _ xs _ -> concatMap go xs
#else
Link xs _ -> concatMap go xs
Image xs _ -> concatMap go xs
#endif
Note _ -> ""
Span _ xs -> concatMap go xs
| bergmannf/gitit | src/Network/Gitit/ContentTransformer.hs | gpl-2.0 | 29,756 | 0 | 27 | 9,057 | 5,384 | 2,815 | 2,569 | 507 | 21 |
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<helpset version="2.0" xml:lang="fr-FR">
<title>Python Scripting</title>
<maps>
<homeID>top</homeID>
<mapref location="map.jhm"/>
</maps>
<view>
<name>TOC</name>
<label>Contents</label>
<type>org.zaproxy.zap.extension.help.ZapTocView</type>
<data>toc.xml</data>
</view>
<view>
<name>Index</name>
<label>Indice</label>
<type>javax.help.IndexView</type>
<data>index.xml</data>
</view>
<view>
<name>Search</name>
<label>Rechercher</label>
<type>javax.help.SearchView</type>
<data engine="com.sun.java.help.search.DefaultSearchEngine">
JavaHelpSearch
</data>
</view>
<view>
<name>Favorites</name>
<label>Favorites</label>
<type>javax.help.FavoritesView</type>
</view>
</helpset>
| veggiespam/zap-extensions | addOns/jython/src/main/javahelp/org/zaproxy/zap/extension/jython/resources/help_fr_FR/helpset_fr_FR.hs | apache-2.0 | 967 | 79 | 66 | 157 | 409 | 207 | 202 | -1 | -1
{-# LANGUAGE DeriveFoldable, DeriveFunctor, DeriveTraversable #-}
module Ermine.Core.Module
( Module(Module)
, definitions
, termExports
, instances
, types
, dataDecls
) where
import Control.Applicative
import Control.Lens
import Data.Binary as Binary
import Data.Bytes.Serial
import Data.ByteString
import Data.Foldable
import Data.Map
import Data.Serialize as Serialize
import Data.Void
import Ermine.Syntax.Data
import Ermine.Syntax.Global
import Ermine.Syntax.ModuleName
import Ermine.Syntax.Name
import Ermine.Syntax.Type
data Module a = Module
{ _name :: ModuleName
, _dependencies :: [ModuleName]
, _definitions :: [a]
, _termExports :: Map Global (Either Global Int)
, _instances :: Map ByteString Int
, _types :: Map Global (Type Void Void)
, _dataDecls :: [DataType Void Void]
} deriving (Eq,Show,Foldable,Functor,Traversable)
instance HasName (Module a) where
name = moduleName.name
instance HasModuleName (Module a) where
moduleName f m@Module{_name = nm} = f nm <&> \nm' -> m { _name = nm' }
dependencies :: Lens' (Module a) [ModuleName]
dependencies f (Module n deps defs ts is tys d) = f deps <&> \deps' -> Module n deps' defs ts is tys d
definitions :: Lens (Module a) (Module b) [a] [b]
definitions f (Module n deps defs ts is tys d) = f defs <&> \defs' -> Module n deps defs' ts is tys d
termExports :: Lens' (Module a) (Map Global (Either Global Int))
termExports f (Module n deps defs ts is tys d) = f ts <&> \ts' -> Module n deps defs ts' is tys d
instances :: Lens' (Module a) (Map ByteString Int)
instances f (Module n deps defs ts is tys d) = f is <&> \is' -> Module n deps defs ts is' tys d
types :: Lens' (Module a) (Map Global (Type Void Void))
types f (Module n deps defs ts is tys d) = f tys <&> \tys' -> Module n deps defs ts is tys' d
dataDecls :: Lens' (Module a) [DataType Void Void]
dataDecls f (Module n deps defs ts is tys d) = f d <&> \d' -> Module n deps defs ts is tys d'
instance Serial1 Module where
serializeWith f m =
serialize (m^.moduleName) >>
serialize (m^.dependencies) >>
serializeWith f (m^.definitions) >>
serialize (m^.termExports) >>
serialize (m^.instances) >>
serialize (m^.types) >>
serialize (m^.dataDecls)
deserializeWith g =
Module <$> deserialize <*> deserialize <*> deserializeWith g <*> deserialize <*> deserialize <*> deserialize <*> deserialize
instance Serial a => Serial (Module a) where
serialize = serializeWith serialize
deserialize = deserializeWith deserialize
instance Binary a => Binary (Module a) where
put = serializeWith Binary.put
get = deserializeWith Binary.get
instance Serialize a => Serialize (Module a) where
put = serializeWith Serialize.put
get = deserializeWith Serialize.get
| PipocaQuemada/ermine | src/Ermine/Core/Module.hs | bsd-2-clause | 2,792 | 0 | 14 | 552 | 1,074 | 562 | 512 | 70 | 1 |
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE PolyKinds #-}
{-# LANGUAGE Safe #-}
-----------------------------------------------------------------------------
-- |
-- Module : Data.Functor.Compose
-- Copyright : (c) Ross Paterson 2010
-- License : BSD-style (see the file LICENSE)
--
-- Maintainer : [email protected]
-- Stability : experimental
-- Portability : portable
--
-- Composition of functors.
--
-- @since 4.9.0.0
-----------------------------------------------------------------------------
module Data.Functor.Compose (
Compose(..),
) where
import Data.Functor.Classes
import Control.Applicative
import Data.Data (Data)
import Data.Foldable (Foldable(foldMap))
import Data.Traversable (Traversable(traverse))
import GHC.Generics (Generic, Generic1)
infixr 9 `Compose`
-- | Right-to-left composition of functors.
-- The composition of applicative functors is always applicative,
-- but the composition of monads is not always a monad.
newtype Compose f g a = Compose { getCompose :: f (g a) }
deriving (Data, Generic, Generic1)
-- Instances of lifted Prelude classes
-- | @since 4.9.0.0
instance (Eq1 f, Eq1 g) => Eq1 (Compose f g) where
liftEq eq (Compose x) (Compose y) = liftEq (liftEq eq) x y
-- | @since 4.9.0.0
instance (Ord1 f, Ord1 g) => Ord1 (Compose f g) where
liftCompare comp (Compose x) (Compose y) =
liftCompare (liftCompare comp) x y
-- | @since 4.9.0.0
instance (Read1 f, Read1 g) => Read1 (Compose f g) where
liftReadsPrec rp rl = readsData $
readsUnaryWith (liftReadsPrec rp' rl') "Compose" Compose
where
rp' = liftReadsPrec rp rl
rl' = liftReadList rp rl
-- | @since 4.9.0.0
instance (Show1 f, Show1 g) => Show1 (Compose f g) where
liftShowsPrec sp sl d (Compose x) =
showsUnaryWith (liftShowsPrec sp' sl') "Compose" d x
where
sp' = liftShowsPrec sp sl
sl' = liftShowList sp sl
-- Instances of Prelude classes
-- | @since 4.9.0.0
instance (Eq1 f, Eq1 g, Eq a) => Eq (Compose f g a) where
(==) = eq1
-- | @since 4.9.0.0
instance (Ord1 f, Ord1 g, Ord a) => Ord (Compose f g a) where
compare = compare1
-- | @since 4.9.0.0
instance (Read1 f, Read1 g, Read a) => Read (Compose f g a) where
readsPrec = readsPrec1
-- | @since 4.9.0.0
instance (Show1 f, Show1 g, Show a) => Show (Compose f g a) where
showsPrec = showsPrec1
-- Functor instances
-- | @since 4.9.0.0
instance (Functor f, Functor g) => Functor (Compose f g) where
fmap f (Compose x) = Compose (fmap (fmap f) x)
-- | @since 4.9.0.0
instance (Foldable f, Foldable g) => Foldable (Compose f g) where
foldMap f (Compose t) = foldMap (foldMap f) t
-- | @since 4.9.0.0
instance (Traversable f, Traversable g) => Traversable (Compose f g) where
traverse f (Compose t) = Compose <$> traverse (traverse f) t
-- | @since 4.9.0.0
instance (Applicative f, Applicative g) => Applicative (Compose f g) where
pure x = Compose (pure (pure x))
Compose f <*> Compose x = Compose ((<*>) <$> f <*> x)
-- | @since 4.9.0.0
instance (Alternative f, Applicative g) => Alternative (Compose f g) where
empty = Compose empty
Compose x <|> Compose y = Compose (x <|> y)
| vTurbine/ghc | libraries/base/Data/Functor/Compose.hs | bsd-3-clause | 3,246 | 0 | 10 | 676 | 985 | 532 | 453 | 50 | 0 |
{-# LANGUAGE NamedFieldPuns #-}
-- | Pure functions for working with CompileState
module Fay.Compiler.State where
import Fay.Compiler.Misc
import Fay.Compiler.QName
import qualified Fay.Exts.NoAnnotation as N
import Fay.Types
import qualified Data.Map as M
import Data.Set (Set)
import qualified Data.Set as S
import Language.Haskell.Names (sv_origName, Symbols (Symbols), SymValueInfo (SymValue, SymMethod, SymSelector, SymConstructor), OrigName, sv_typeName)
-- | Get all non local identifiers that should be exported in the JS module scope.
getNonLocalExportsWithoutNewtypes :: N.ModuleName -> CompileState -> Maybe (Set N.QName)
getNonLocalExportsWithoutNewtypes modName cs =
fmap ( S.filter (not . isLocal)
. S.map (origName2QName . sv_origName)
. S.filter (not . (`isNewtype` cs))
. (\(Symbols exports _) -> exports)
)
. M.lookup modName . stateInterfaces $ cs
where
isLocal = (Just modName ==) . qModName
getLocalExportsWithoutNewtypes :: N.ModuleName -> CompileState -> Maybe (Set N.QName)
getLocalExportsWithoutNewtypes modName cs =
fmap ( S.filter isLocal
. S.map (origName2QName . sv_origName)
. S.filter (not . (`isNewtype` cs))
. (\(Symbols exports _) -> exports)
)
. M.lookup modName . stateInterfaces $ cs
where
isLocal = (Just modName ==) . qModName
-- | Is this *resolved* name a new type constructor or destructor?
isNewtype :: SymValueInfo OrigName -> CompileState -> Bool
isNewtype s cs = case s of
SymValue{} -> False
SymMethod{} -> False
SymSelector { sv_typeName = tn } -> not . (`isNewtypeDest` cs) . origName2QName $ tn
SymConstructor { sv_typeName = tn } -> not . (`isNewtypeCons` cs) . origName2QName $ tn
-- | Is this *resolved* name a new type destructor?
isNewtypeDest :: N.QName -> CompileState -> Bool
isNewtypeDest o = any (\(_,mdest,_) -> mdest == Just o) . stateNewtypes
-- | Is this *resolved* name a new type constructor?
isNewtypeCons :: N.QName -> CompileState -> Bool
isNewtypeCons o = any (\(cons,_,_) -> cons == o) . stateNewtypes
-- | Add a ModulePath to CompileState, meaning it has been printed.
addModulePath :: ModulePath -> CompileState -> CompileState
addModulePath mp cs = cs { stateJsModulePaths = mp `S.insert` stateJsModulePaths cs }
-- | Has this ModulePath been added/printed?
addedModulePath :: ModulePath -> CompileState -> Bool
addedModulePath mp CompileState { stateJsModulePaths } = mp `S.member` stateJsModulePaths
-- | Find the type signature of a top level name
findTypeSig :: N.QName -> CompileState -> Maybe N.Type
findTypeSig n = M.lookup n . stateTypeSigs
| beni55/fay | src/Fay/Compiler/State.hs | bsd-3-clause | 2,774 | 0 | 15 | 622 | 729 | 408 | 321 | 42 | 4 |
-- |
-- Module : Crypto.Cipher.Types
-- License : BSD-style
-- Maintainer : Vincent Hanquez <[email protected]>
-- Stability : Stable
-- Portability : Excellent
--
-- Symmetric cipher basic types
--
{-# LANGUAGE DeriveDataTypeable #-}
module Crypto.Cipher.Types
(
-- * Cipher classes
Cipher(..)
, BlockCipher(..)
, BlockCipher128(..)
, StreamCipher(..)
, DataUnitOffset
, KeySizeSpecifier(..)
-- , cfb8Encrypt
-- , cfb8Decrypt
-- * AEAD functions
, AEADMode(..)
, CCM_M(..)
, CCM_L(..)
, module Crypto.Cipher.Types.AEAD
-- * Initial Vector type and constructor
, IV
, makeIV
, nullIV
, ivAdd
    -- * Authentication Tag
, AuthTag(..)
) where
import Crypto.Cipher.Types.Base
import Crypto.Cipher.Types.Block
import Crypto.Cipher.Types.Stream
import Crypto.Cipher.Types.AEAD
| vincenthz/cryptonite | Crypto/Cipher/Types.hs | bsd-3-clause | 878 | 0 | 5 | 210 | 139 | 103 | 36 | 22 | 0 |
{-# LANGUAGE UndecidableInstances #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE KindSignatures #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE DataKinds #-}
module Haskell.Ide.Engine.Transport.JsonHttp.Undecidable where
import Data.Singletons.Prelude
import Haskell.Ide.Engine.PluginDescriptor
data ContextMappingFun :: (TyFun AcceptedContext [ParamDescType]) -> *
type instance Apply ContextMappingFun a = ContextMapping a
type family CommandParams cxts params :: [ParamDescType] where
CommandParams cxts params = ConcatMap ContextMappingFun cxts :++ params
| JPMoresmau/haskell-ide-engine | src/Haskell/Ide/Engine/Transport/JsonHttp/Undecidable.hs | bsd-3-clause | 567 | 0 | 8 | 67 | 97 | 61 | 36 | -1 | -1 |
--
-- Copyright © 2013-2015 Anchor Systems, Pty Ltd and Others
--
-- The code in this file, and the program it is a part of, is
-- made available to you by its authors as open source software:
-- you can redistribute it and/or modify it under the terms of
-- the 3-clause BSD licence.
--
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE ScopedTypeVariables #-}
-- | Description: Types representing OAuth2 clients
--
-- Types representing OAuth2 clients
module Network.OAuth2.Server.Types.Client (
-- * Types
ClientState,
ClientStatus,
ClientDetails(..),
-- * ByteString Encoding and Decoding
clientState,
clientStatus,
) where
import Control.Applicative ((<$>), (<*>))
import Control.Lens.Fold (preview, (^?))
import Control.Lens.Operators ((^.))
import Control.Lens.Prism (Prism', prism')
import Control.Lens.Review (re, review)
import Control.Monad (guard)
import Crypto.Scrypt (EncryptedPass (..))
import Data.Aeson (FromJSON (..),
ToJSON (..),
Value (String),
withText)
import Data.ByteString (ByteString)
import qualified Data.ByteString as B (all, null)
import qualified Data.ByteString.Char8 as B (unpack)
import Data.Monoid ((<>))
import Data.Text (Text)
import qualified Data.Text as T (unpack)
import qualified Data.Text.Encoding as T (decodeUtf8,
encodeUtf8)
import Data.Typeable (Typeable)
import qualified Data.Vector as V
import Database.PostgreSQL.Simple.FromField
import Database.PostgreSQL.Simple.FromRow
import Database.PostgreSQL.Simple.ToField
import URI.ByteString (URI)
import Yesod.Core (PathPiece (..))
import Network.OAuth2.Server.Types.Auth
import Network.OAuth2.Server.Types.Common
import Network.OAuth2.Server.Types.Scope
--------------------------------------------------------------------------------
-- * Types
-- | Opaque value used by the client to maintain state between request and
-- response. Used to prevent cross-site request forgery as defined here:
--
-- https://tools.ietf.org/html/rfc6749#section-10.12
newtype ClientState = ClientState { unClientState :: ByteString }
deriving (Eq, Typeable)
-- | The activity status of the client.
--
-- Deleted clients do not show up in lookups and their tokens are invalid.
data ClientStatus = ClientActive
| ClientDeleted
deriving (Bounded, Enum, Eq, Typeable)
-- | Details relevant to a client.
data ClientDetails = ClientDetails
{ clientClientId :: ClientID -- ^ Unique identifier for client
, clientSecret :: EncryptedPass -- ^ Client secret
, clientConfidential :: Bool -- ^ Whether the client is confidential or not
, clientRedirectURI :: [RedirectURI] -- ^ The registered redirection URIs for the client
, clientName :: Text -- ^ The human readable name for the client
, clientDescription :: Text -- ^ The human readable description for the client
, clientAppUrl :: URI -- ^ The URL for the client application
, clientScope :: Scope -- ^ The scopes the client is registered for.
, clientActivity :: ClientStatus -- ^ Whether the client is active/deleted etc.
}
deriving (Eq, Show)
--------------------------------------------------------------------------------
-- * ByteString Encoding and Decoding
-- | Client state is an opaque non-empty value as defined here:
--
-- https://tools.ietf.org/html/rfc6749#appendix-A.5
--
-- state = 1*VSCHAR
clientState :: Prism' ByteString ClientState
clientState = prism' cs2b b2cs
where
cs2b = unClientState
b2cs b = do
guard . not $ B.null b
guard $ B.all vschar b
return (ClientState b)
-- | Simple prism for safe construction/deconstruction of client statuses for
-- all uses (Postgresql, HTTP, etc.)
clientStatus :: Prism' ByteString ClientStatus
clientStatus = prism' cs2b b2cs
where
cs2b ClientActive = "active"
cs2b ClientDeleted = "deleted"
b2cs b = case b of
"active" -> Just ClientActive
"deleted" -> Just ClientDeleted
_ -> Nothing
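-- Illustrative usage sketch (not part of the original source; "~>" marks the
-- expected result): the prisms return Nothing on malformed input instead of
-- throwing, e.g.
--   preview clientStatus ("active" :: ByteString)  ~> Just ClientActive
--   preview clientStatus "enabled"                 ~> Nothing
--   preview clientState  ""                        ~> Nothing   (state must be non-empty)
--   review  clientStatus ClientDeleted             ~> "deleted"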
--------------------------------------------------------------------------------
-- String Encoding and Decoding
instance Show ClientState where
show = show . review clientState
instance Show ClientStatus where
show = B.unpack . review clientStatus
--------------------------------------------------------------------------------
-- Yesod Encoding and Decoding
instance PathPiece ClientState where
fromPathPiece t = T.encodeUtf8 t ^? clientState
toPathPiece cs = T.decodeUtf8 $ cs ^.re clientState
--------------------------------------------------------------------------------
-- Postgres Encoding and Decoding
instance FromField ClientState where
fromField f bs = do
s <- fromField f bs
case preview clientState s of
Nothing -> returnError ConversionFailed f "Unable to parse ClientState"
Just state -> return state
instance ToField ClientState where
toField x = toField $ x ^.re clientState
instance FromField ClientStatus where
fromField f bs = do
(s :: Text) <- fromField f bs
case s of
"active" -> return ClientActive
"deleted" -> return ClientDeleted
x -> returnError ConversionFailed f $ show x <> " is an invalid ClientStatus"
instance ToField ClientStatus where
toField = toField . show
instance FromRow ClientDetails where
fromRow = ClientDetails <$> field
<*> (EncryptedPass <$> field)
<*> field
<*> (V.toList <$> field)
<*> field
<*> field
<*> fieldWith fromFieldURI
<*> field
<*> field
--------------------------------------------------------------------------------
-- JSON/Aeson Encoding and Decoding
instance ToJSON ClientState where
toJSON c = String . T.decodeUtf8 $ c ^.re clientState
instance FromJSON ClientState where
parseJSON = withText "ClientState" $ \t ->
case T.encodeUtf8 t ^? clientState of
Nothing -> fail $ T.unpack t <> " is not a valid ClientState."
Just s -> return s
--------------------------------------------------------------------------------
| jasonzoladz/oauth2-server | lib/Network/OAuth2/Server/Types/Client.hs | bsd-3-clause | 7,280 | 0 | 15 | 2,299 | 1,127 | 654 | 473 | 111 | 4 |
{- |
Module : $Header$
Description : convert global annotations to a list of annotations
Copyright : (c) Carsten Fischer and Uni Bremen 2003-2006
License : GPLv2 or higher, see LICENSE.txt
Maintainer : [email protected]
Stability : provisional
Portability : portable
Convert global annotations to a list of annotations
-}
module Common.ConvertGlobalAnnos
( mergeGlobalAnnos
, convertGlobalAnnos
, convertLiteralAnnos
, removeHetCASLprefixes
) where
import Common.Id
import Common.IRI
import Common.GlobalAnnotations
import Common.AS_Annotation
import qualified Common.Lib.Rel as Rel
import Common.AnalyseAnnos
import Common.Result
import Common.DocUtils
import qualified Data.Map as Map
import qualified Data.Set as Set
import Data.List (partition, groupBy, sortBy)
import Data.Ord
import Data.Function (on)
instance Pretty GlobalAnnos where
pretty = printAnnotationList . convertGlobalAnnos . removeHetCASLprefixes
removeHetCASLprefixes :: GlobalAnnos -> GlobalAnnos
removeHetCASLprefixes ga = ga
{ prefix_map = Map.filter (not . null . iriScheme) $ prefix_map ga }
convertGlobalAnnos :: GlobalAnnos -> [Annotation]
convertGlobalAnnos ga = convertPrefixMap (prefix_map ga)
++ convertPrec (prec_annos ga)
++ convertAssoc (assoc_annos ga)
++ convertDispl (display_annos ga)
++ convertLiteralAnnos (literal_annos ga)
mergeGlobalAnnos :: GlobalAnnos -> GlobalAnnos -> Result GlobalAnnos
mergeGlobalAnnos ga1 = addGlobalAnnos ga1 . convertGlobalAnnos
convertPrec :: PrecedenceGraph -> [Annotation]
convertPrec pg =
let cs = Rel.sccOfClosure pg
in map (\ l -> let (f, r) = splitAt (div (length l) 2) l in
Prec_anno BothDirections f r nullRange)
(filter ((> 1) . length) $ map Set.toList cs)
++ map (\ l ->
Prec_anno Lower (map fst l) (Set.toList $ snd $ head l) nullRange)
(groupBy ((==) `on` snd)
$ sortBy (comparing snd)
$ Map.toList $ Rel.toMap
$ Rel.rmNullSets $ Rel.transReduce $ Rel.irreflex
$ Rel.collaps cs pg)
convertAssoc :: AssocMap -> [Annotation]
convertAssoc am =
let (i1s, i2s) = partition ((== ALeft) . snd) $ Map.toList am
-- [(Id,assEith)]
in [Assoc_anno ALeft (map fst i1s) nullRange | not $ null i1s]
++ [Assoc_anno ARight (map fst i2s) nullRange | not $ null i2s ]
convertDispl :: DisplayMap -> [Annotation]
convertDispl dm =
let m1 = Map.toList dm -- m1::[(Id,Map.Map Display_format [Token])]
toStrTup (x, y) = (x, concatMap tokStr y)
m2 = map (\ (x, m) -> (x, map toStrTup $ Map.toList m)) m1
-- m2::[(ID,[(Display_format,String)])]
in map (\ (i, x) -> Display_anno i x nullRange) m2
convertLiteralAnnos :: LiteralAnnos -> [Annotation]
convertLiteralAnnos la = let
str = case string_lit la of
Just (x, y) -> [String_anno x y nullRange]
_ -> []
lis = map (\ (br, (n, con)) -> List_anno br n con nullRange)
$ Map.toList $ list_lit la
number = case number_lit la of
Just x -> [Number_anno x nullRange]
_ -> []
flo = case float_lit la of
Just (a, b) -> [Float_anno a b nullRange]
_ -> []
in str ++ lis ++ number ++ flo
convertPrefixMap :: PrefixMap -> [Annotation]
convertPrefixMap pm =
if Map.null pm then [] else [Prefix_anno (Map.toList pm) nullRange]
| keithodulaigh/Hets | Common/ConvertGlobalAnnos.hs | gpl-2.0 | 3,437 | 0 | 19 | 819 | 1,058 | 558 | 500 | 72 | 4 |
module Prolog.Substitution where
import Prolog.Data
import Data.Map (Map)
import qualified Data.Map as M
import Data.Set (Set)
import qualified Data.Set as S
import Control.Monad ( guard )
type Substitution = Map Identifier Term
apply :: Substitution -> Term -> Term
apply sub t = case t of
Variable v -> case M.lookup v sub of
Just t' -> t'
Nothing -> t
Apply f xs -> Apply f $ map ( apply sub ) xs
times :: Substitution
-> Substitution
-> Substitution
times s t = M.unions
[ chained s t
, s `without` M.keysSet t
, t `without` M.keysSet s
]
f `without` ks =
M.filterWithKey ( \ k v -> S.notMember k ks ) f
chained s t = M.fromList $ do
( k, v ) <- M.toList s
let w = apply t v
guard $ v /= Variable k
return ( k, w )
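-- Illustrative sketch (not part of the original source), writing terms informally:
-- with s = {X -> f(Y)} and t = {Y -> a},
--   s `times` t  ~>  {X -> f(a), Y -> a}
-- so applying the composed substitution is intended to behave like applying s
-- first and then t:  apply (s `times` t) v == apply t (apply s v).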
| Erdwolf/autotool-bonn | src/Prolog/Substitution.hs | gpl-2.0 | 806 | 0 | 11 | 234 | 334 | 174 | 160 | 28 | 3 |
-----------------------------------------------------------------------------
-- Dummy module to import all of the Hugs libraries; programmers should
-- normally be more selective than this when it comes to specifying the
-- modules that a particular program depends on.
--
-- Suitable for use with Hugs 98
-----------------------------------------------------------------------------
module HugsLibs where
import StdLibs
import Trace
import Number
import ParseLib
import Interact
import AnsiScreen
import AnsiInteract
import IOExtensions
import ListUtils
import Dynamic
-----------------------------------------------------------------------------
| OS2World/DEV-UTIL-HUGS | oldlib/HugsLibs.hs | bsd-3-clause | 652 | 0 | 3 | 70 | 42 | 31 | 11 | 11 | 0 |
module SortTest where
import Util.Sort
import Test.HUnit
tests = test [ "test1" ~: "quicksort [0]," ~: [0] ~=? (quicksort [0]),
"test2" ~: "quicksort [1,0,2]," ~: [0,1,2] ~=? (quicksort [1,0,2]) ]
-- test1 = TestCase (assertEqual "quicksort [0]," [0] (quicksort [0]))
-- test2 = TestCase (assertEqual "quicksort [1,0,2]," [0,1,2] (quicksort [1,0,2]))
-- tests = TestList [TestLabel "test1" test1, TestLabel "test2" test2]
| tohosokawa/haskell-dev-env | Test/SortTest.hs | bsd-3-clause | 440 | 0 | 10 | 78 | 91 | 54 | 37 | 5 | 1 |
<?xml version="1.0" encoding="UTF-8"?><!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<helpset version="2.0" xml:lang="da-DK">
<title>SVN Digger Files</title>
<maps>
<homeID>svndigger</homeID>
<mapref location="map.jhm"/>
</maps>
<view>
<name>TOC</name>
<label>Contents</label>
<type>org.zaproxy.zap.extension.help.ZapTocView</type>
<data>toc.xml</data>
</view>
<view>
<name>Index</name>
<label>Index</label>
<type>javax.help.IndexView</type>
<data>index.xml</data>
</view>
<view>
<name>Search</name>
<label>Search</label>
<type>javax.help.SearchView</type>
<data engine="com.sun.java.help.search.DefaultSearchEngine">
JavaHelpSearch
</data>
</view>
<view>
<name>Favorites</name>
<label>Favorites</label>
<type>javax.help.FavoritesView</type>
</view>
</helpset>
| thc202/zap-extensions | addOns/svndigger/src/main/javahelp/help_da_DK/helpset_da_DK.hs | apache-2.0 | 967 | 77 | 66 | 157 | 409 | 207 | 202 | -1 | -1
<?xml version="1.0" encoding="UTF-8"?><!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<helpset version="2.0" xml:lang="es-ES">
<title>DOM XSS Active Scan Rule | ZAP Extension</title>
<maps>
<homeID>top</homeID>
<mapref location="map.jhm"/>
</maps>
<view>
<name>TOC</name>
<label>Contents</label>
<type>org.zaproxy.zap.extension.help.ZapTocView</type>
<data>toc.xml</data>
</view>
<view>
<name>Index</name>
<label>Index</label>
<type>javax.help.IndexView</type>
<data>index.xml</data>
</view>
<view>
<name>Search</name>
<label>Search</label>
<type>javax.help.SearchView</type>
<data engine="com.sun.java.help.search.DefaultSearchEngine">
JavaHelpSearch
</data>
</view>
<view>
<name>Favorites</name>
<label>Favorites</label>
<type>javax.help.FavoritesView</type>
</view>
</helpset>
| kingthorin/zap-extensions | addOns/domxss/src/main/javahelp/org/zaproxy/zap/extension/domxss/resources/help_es_ES/helpset_es_ES.hs | apache-2.0 | 985 | 78 | 66 | 162 | 419 | 212 | 207 | -1 | -1
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE OverloadedStrings #-}
-- | Test suite for "Stack.Solver"
module Stack.SolverSpec where
import Data.Text (unpack)
import Stack.Prelude
import Stack.Types.FlagName
import Stack.Types.PackageName
import Stack.Types.Version
import Test.Hspec
import qualified Data.Map as Map
import Stack.Solver
spec :: Spec
spec =
describe "Stack.Solver" $ do
successfulExample
"text-1.2.1.1 (latest: 1.2.2.0) -integer-simple (via: parsec-3.1.9) (new package)"
$(mkPackageName "text")
$(mkVersion "1.2.1.1")
[ ($(mkFlagName "integer-simple"), False)
]
successfulExample
"hspec-snap-1.0.0.0 *test (via: servant-snap-0.5) (new package)"
$(mkPackageName "hspec-snap")
$(mkVersion "1.0.0.0")
[]
successfulExample
"time-locale-compat-0.1.1.1 -old-locale (via: http-api-data-0.2.2) (new package)"
$(mkPackageName "time-locale-compat")
$(mkVersion "0.1.1.1")
[ ($(mkFlagName "old-locale"), False)
]
successfulExample
"flowdock-rest-0.2.0.0 -aeson-compat *test (via: haxl-fxtra-0.0.0.0) (new package)"
$(mkPackageName "flowdock-rest")
$(mkVersion "0.2.0.0")
[ ($(mkFlagName "aeson-compat"), False)
]
where
successfulExample input pkgName pkgVersion flags =
it ("parses " ++ unpack input) $
parseCabalOutputLine input `shouldBe` Right (pkgName, (pkgVersion, Map.fromList flags))
| MichielDerhaeg/stack | src/test/Stack/SolverSpec.hs | bsd-3-clause | 1,546 | 0 | 13 | 350 | 294 | 154 | 140 | 38 | 1 |
module Case3 where
data T = C2 Int
caseIt x
= case x of
42 -> 1 + (f ((error
"C1 no longer defined for T at line: 3")
1
2))
where f x = 9
| kmate/HaRe | old/testing/removeCon/Case3AST.hs | bsd-3-clause | 280 | 0 | 14 | 178 | 65 | 34 | 31 | 9 | 1 |
module AddOneParameter.C2 where
import AddOneParameter.D2
sumSquares1 (x:xs) = (sq sq_f) x + sumSquares1 xs
sumSquares1 [] = 0
sq_f_1 = 0
| RefactoringTools/HaRe | test/testdata/AddOneParameter/C2.expected.hs | bsd-3-clause | 145 | 0 | 8 | 28 | 57 | 30 | 27 | 5 | 1 |
{-# LANGUAGE Trustworthy #-}
{-# LANGUAGE CPP, MagicHash, NoImplicitPrelude #-}
-----------------------------------------------------------------------------
-- |
-- Module : GHC.Event.IntMap
-- Copyright : (c) Daan Leijen 2002
-- (c) Andriy Palamarchuk 2008
-- License : BSD-style
-- Maintainer : [email protected]
-- Stability : provisional
-- Portability : portable
--
-- An efficient implementation of maps from integer keys to values.
--
-- Since many function names (but not the type name) clash with
-- "Prelude" names, this module is usually imported @qualified@, e.g.
--
-- > import Data.IntMap (IntMap)
-- > import qualified Data.IntMap as IntMap
--
-- The implementation is based on /big-endian patricia trees/. This data
-- structure performs especially well on binary operations like 'union'
-- and 'intersection'. However, my benchmarks show that it is also
-- (much) faster on insertions and deletions when compared to a generic
-- size-balanced map implementation (see "Data.Map").
--
-- * Chris Okasaki and Andy Gill, \"/Fast Mergeable Integer Maps/\",
-- Workshop on ML, September 1998, pages 77-86,
-- <http://citeseer.ist.psu.edu/okasaki98fast.html>
--
-- * D.R. Morrison, \"/PATRICIA -- Practical Algorithm To Retrieve
-- Information Coded In Alphanumeric/\", Journal of the ACM, 15(4),
-- October 1968, pages 514-534.
--
-- Operation comments contain the operation time complexity in
-- the Big-O notation <http://en.wikipedia.org/wiki/Big_O_notation>.
-- Many operations have a worst-case complexity of /O(min(n,W))/.
-- This means that the operation can become linear in the number of
-- elements with a maximum of /W/ -- the number of bits in an 'Int'
-- (32 or 64).
--
-----------------------------------------------------------------------------
module GHC.Event.IntMap
(
-- * Map type
IntMap
, Key
-- * Query
, lookup
, member
-- * Construction
, empty
-- * Insertion
, insertWith
-- * Delete\/Update
, delete
, updateWith
-- * Traversal
-- ** Fold
, foldWithKey
-- * Conversion
, keys
) where
import Data.Bits
import Data.Maybe (Maybe(..))
import GHC.Base hiding (foldr)
import GHC.Num (Num(..))
import GHC.Real (fromIntegral)
import GHC.Show (Show(showsPrec), showParen, shows, showString)
#if __GLASGOW_HASKELL__
import GHC.Word (Word(..))
#else
import Data.Word
#endif
-- | A @Nat@ is a natural machine word (an unsigned Int)
type Nat = Word
natFromInt :: Key -> Nat
natFromInt i = fromIntegral i
intFromNat :: Nat -> Key
intFromNat w = fromIntegral w
shiftRL :: Nat -> Key -> Nat
#if __GLASGOW_HASKELL__
-- GHC: use unboxing to get @shiftRL@ inlined.
shiftRL (W# x) (I# i) = W# (shiftRL# x i)
#else
shiftRL x i = shiftR x i
#endif
------------------------------------------------------------------------
-- Types
-- | A map of integers to values @a@.
data IntMap a = Nil
| Tip {-# UNPACK #-} !Key !a
| Bin {-# UNPACK #-} !Prefix
{-# UNPACK #-} !Mask
!(IntMap a)
!(IntMap a)
type Prefix = Int
type Mask = Int
type Key = Int
------------------------------------------------------------------------
-- Query
-- | /O(min(n,W))/ Lookup the value at a key in the map. See also
-- 'Data.Map.lookup'.
lookup :: Key -> IntMap a -> Maybe a
lookup k t = let nk = natFromInt k in seq nk (lookupN nk t)
lookupN :: Nat -> IntMap a -> Maybe a
lookupN k t
= case t of
Bin _ m l r
| zeroN k (natFromInt m) -> lookupN k l
| otherwise -> lookupN k r
Tip kx x
| (k == natFromInt kx) -> Just x
| otherwise -> Nothing
Nil -> Nothing
-- | /O(min(n,W))/. Is the key a member of the map?
--
-- > member 5 (fromList [(5,'a'), (3,'b')]) == True
-- > member 1 (fromList [(5,'a'), (3,'b')]) == False
member :: Key -> IntMap a -> Bool
member k m
= case lookup k m of
Nothing -> False
Just _ -> True
------------------------------------------------------------------------
-- Construction
-- | /O(1)/ The empty map.
--
-- > empty == fromList []
-- > size empty == 0
empty :: IntMap a
empty = Nil
------------------------------------------------------------------------
-- Insert
-- | /O(min(n,W))/ Insert with a function, combining new value and old
-- value. @insertWith f key value mp@ will insert the pair (key,
-- value) into @mp@ if key does not exist in the map. If the key does
-- exist, the function will insert the pair (key, f new_value
-- old_value). The result is a pair where the first element is the
-- old value, if one was present, and the second is the modified map.
insertWith :: (a -> a -> a) -> Key -> a -> IntMap a -> (Maybe a, IntMap a)
insertWith f k x t = case t of
Bin p m l r
| nomatch k p m -> (Nothing, join k (Tip k x) p t)
| zero k m -> let (found, l') = insertWith f k x l
in (found, Bin p m l' r)
| otherwise -> let (found, r') = insertWith f k x r
in (found, Bin p m l r')
Tip ky y
| k == ky -> (Just y, Tip k (f x y))
| otherwise -> (Nothing, join k (Tip k x) ky t)
Nil -> (Nothing, Tip k x)
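-- Illustrative usage (not part of the original source; the Show instance below
-- renders maps as "fromList ..."):
--   insertWith (+) 5 1 empty       ~> (Nothing, fromList [(5,1)])
--   insertWith (+) 5 10 (Tip 5 1)  ~> (Just 1, fromList [(5,11)])   -- f new old = 10 + 1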
------------------------------------------------------------------------
-- Delete/Update
-- | /O(min(n,W))/. Delete a key and its value from the map. When the
-- key is not a member of the map, the original map is returned. The
-- result is a pair where the first element is the value associated
-- with the deleted key, if one existed, and the second element is the
-- modified map.
delete :: Key -> IntMap a -> (Maybe a, IntMap a)
delete k t = case t of
Bin p m l r
| nomatch k p m -> (Nothing, t)
| zero k m -> let (found, l') = delete k l
in (found, bin p m l' r)
| otherwise -> let (found, r') = delete k r
in (found, bin p m l r')
Tip ky y
| k == ky -> (Just y, Nil)
| otherwise -> (Nothing, t)
Nil -> (Nothing, Nil)
updateWith :: (a -> Maybe a) -> Key -> IntMap a -> (Maybe a, IntMap a)
updateWith f k t = case t of
Bin p m l r
| nomatch k p m -> (Nothing, t)
| zero k m -> let (found, l') = updateWith f k l
in (found, bin p m l' r)
| otherwise -> let (found, r') = updateWith f k r
in (found, bin p m l r')
Tip ky y
| k == ky -> case (f y) of
Just y' -> (Just y, Tip ky y')
Nothing -> (Just y, Nil)
| otherwise -> (Nothing, t)
Nil -> (Nothing, Nil)
-- | /O(n)/. Fold the keys and values in the map, such that
-- @'foldWithKey' f z == 'Prelude.foldr' ('uncurry' f) z . 'toAscList'@.
-- For example,
--
-- > keys map = foldWithKey (\k x ks -> k:ks) [] map
--
-- > let f k a result = result ++ "(" ++ (show k) ++ ":" ++ a ++ ")"
-- > foldWithKey f "Map: " (fromList [(5,"a"), (3,"b")]) == "Map: (5:a)(3:b)"
foldWithKey :: (Key -> a -> b -> b) -> b -> IntMap a -> b
foldWithKey f z t
= foldr f z t
-- | /O(n)/. Convert the map to a list of key\/value pairs.
--
-- > toList (fromList [(5,"a"), (3,"b")]) == [(3,"b"), (5,"a")]
-- > toList empty == []
toList :: IntMap a -> [(Key,a)]
toList t
= foldWithKey (\k x xs -> (k,x):xs) [] t
foldr :: (Key -> a -> b -> b) -> b -> IntMap a -> b
foldr f z t
= case t of
Bin 0 m l r | m < 0 -> foldr' f (foldr' f z l) r -- put negative numbers before.
Bin _ _ _ _ -> foldr' f z t
Tip k x -> f k x z
Nil -> z
foldr' :: (Key -> a -> b -> b) -> b -> IntMap a -> b
foldr' f z t
= case t of
Bin _ _ l r -> foldr' f (foldr' f z r) l
Tip k x -> f k x z
Nil -> z
-- | /O(n)/. Return all keys of the map in ascending order.
--
-- > keys (fromList [(5,"a"), (3,"b")]) == [3,5]
-- > keys empty == []
keys :: IntMap a -> [Key]
keys m
= foldWithKey (\k _ ks -> k:ks) [] m
------------------------------------------------------------------------
-- Eq
instance Eq a => Eq (IntMap a) where
t1 == t2 = equal t1 t2
t1 /= t2 = nequal t1 t2
equal :: Eq a => IntMap a -> IntMap a -> Bool
equal (Bin p1 m1 l1 r1) (Bin p2 m2 l2 r2)
= (m1 == m2) && (p1 == p2) && (equal l1 l2) && (equal r1 r2)
equal (Tip kx x) (Tip ky y)
= (kx == ky) && (x==y)
equal Nil Nil = True
equal _ _ = False
nequal :: Eq a => IntMap a -> IntMap a -> Bool
nequal (Bin p1 m1 l1 r1) (Bin p2 m2 l2 r2)
= (m1 /= m2) || (p1 /= p2) || (nequal l1 l2) || (nequal r1 r2)
nequal (Tip kx x) (Tip ky y)
= (kx /= ky) || (x/=y)
nequal Nil Nil = False
nequal _ _ = True
instance Show a => Show (IntMap a) where
showsPrec d m = showParen (d > 10) $
showString "fromList " . shows (toList m)
------------------------------------------------------------------------
-- Utility functions
join :: Prefix -> IntMap a -> Prefix -> IntMap a -> IntMap a
join p1 t1 p2 t2
| zero p1 m = Bin p m t1 t2
| otherwise = Bin p m t2 t1
where
m = branchMask p1 p2
p = mask p1 m
-- | @bin@ assures that we never have empty trees within a tree.
bin :: Prefix -> Mask -> IntMap a -> IntMap a -> IntMap a
bin _ _ l Nil = l
bin _ _ Nil r = r
bin p m l r = Bin p m l r
------------------------------------------------------------------------
-- Endian independent bit twiddling
zero :: Key -> Mask -> Bool
zero i m = (natFromInt i) .&. (natFromInt m) == 0
nomatch :: Key -> Prefix -> Mask -> Bool
nomatch i p m = (mask i m) /= p
mask :: Key -> Mask -> Prefix
mask i m = maskW (natFromInt i) (natFromInt m)
zeroN :: Nat -> Nat -> Bool
zeroN i m = (i .&. m) == 0
------------------------------------------------------------------------
-- Big endian operations
maskW :: Nat -> Nat -> Prefix
maskW i m = intFromNat (i .&. (complement (m-1) `xor` m))
branchMask :: Prefix -> Prefix -> Mask
branchMask p1 p2
= intFromNat (highestBitMask (natFromInt p1 `xor` natFromInt p2))
{-
Finding the highest bit mask in a word [x] can be done efficiently in
three ways:
* convert to a floating point value and the mantissa tells us the
[log2(x)] that corresponds with the highest bit position. The mantissa
is retrieved either via the standard C function [frexp] or by some bit
twiddling on IEEE compatible numbers (float). Note that one needs to
use at least [double] precision for an accurate mantissa of 32 bit
numbers.
* use bit twiddling, a logarithmic sequence of bitwise or's and shifts (bit).
* use processor specific assembler instruction (asm).
The most portable way would be [bit], but is it efficient enough?
I have measured the cycle counts of the different methods on an AMD
Athlon-XP 1800 (~ Pentium III 1.8Ghz) using the RDTSC instruction:
highestBitMask: method cycles
--------------
frexp 200
float 33
bit 11
asm 12
Wow, the bit twiddling is on today's RISC like machines even faster
than a single CISC instruction (BSR)!
-}
-- | @highestBitMask@ returns a word where only the highest bit is
-- set. It is found by first setting all bits in lower positions than
-- the highest bit and then taking an exclusive or with the original
-- value. Although the function may look expensive, GHC compiles
-- this into excellent C code that is subsequently compiled into highly
-- efficient machine code. The algorithm is derived from Jorg Arndt's
-- FXT library.
highestBitMask :: Nat -> Nat
highestBitMask x0
= case (x0 .|. shiftRL x0 1) of
x1 -> case (x1 .|. shiftRL x1 2) of
x2 -> case (x2 .|. shiftRL x2 4) of
x3 -> case (x3 .|. shiftRL x3 8) of
x4 -> case (x4 .|. shiftRL x4 16) of
x5 -> case (x5 .|. shiftRL x5 32) of -- for 64 bit platforms
x6 -> (x6 `xor` (shiftRL x6 1))
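-- Illustrative worked example (not part of the original source): for x0 = 10
-- (binary 1010) the or/shift cascade saturates every bit below the highest set
-- bit, giving 15 (binary 1111); the final xor with its own right shift then
-- keeps only the top bit, so highestBitMask 10 evaluates to 8 (binary 1000).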
| mightymoose/liquidhaskell | benchmarks/ghc-7.4.1/Event/IntMap.hs | bsd-3-clause | 12,045 | 0 | 26 | 3,282 | 2,960 | 1,547 | 1,413 | 174 | 4 |
f :: Int -> Int -> Int
f x y = x + y
g :: (Int, Int) -> Int
g (x,y) = x + y
main = do
print $ curry g 3 4
print $ uncurry f (3,4)
| manyoo/ghcjs | test/fay/curry.hs | mit | 137 | 0 | 9 | 47 | 98 | 51 | 47 | 7 | 1 |
{-# LANGUAGE FlexibleContexts, MultiParamTypeClasses, FlexibleInstances #-}
module T13526 where
class C a where
op :: a -> a
instance {-# OVERLAPPING #-} C [Char] where
op x = x
instance C a => C [a] where
op (x:xs) = [op x]
instance C a => C (Maybe a) where
op x = error "urk"
-- We should get no complaint
foo :: C [a] => a -> [a]
foo x = op [x]
bar :: C (Maybe a) => a -> Maybe a
bar x = op (Just x)
| ezyang/ghc | testsuite/tests/typecheck/should_compile/T13526.hs | bsd-3-clause | 418 | 0 | 8 | 104 | 193 | 99 | 94 | 14 | 1 |
----------------------------------------------------------------------------
-- demo_cmq_recipient.hs: demonstrator code showing how to use CMQ.
-- Copyright : (c) 2012 Joerg Fritsch
--
-- License : BSD-style
-- Maintainer : [email protected]
-- Stability : experimental
-- Portability : GHC
--
-- Receives and prints UDP messages.
-- Messages can be shown on the screen with e.g. grep -v Nothing
-----------------------------------------------------------------------------
{-# LANGUAGE OverloadedStrings #-}
import CMQ
import Network.Socket hiding (send, sendTo, recv, recvFrom)
import Control.Monad
import Data.Maybe
main = withSocketsDo $ do
qs <- socket AF_INET Datagram defaultProtocol
hostAddr <- inet_addr "192.168.35.85"
bindSocket qs (SockAddrInet 4711 hostAddr)
(token) <- newRq qs 512 200
forever $ do
msg <- cwPop token
print msg
| viloocity/CMQ | examples/0.0.1/demo_cmq_recipient.hs | mit | 893 | 0 | 12 | 159 | 136 | 73 | 63 | 13 | 1 |
{-# LANGUAGE DeriveGeneric, ViewPatterns, RecordWildCards, OverloadedStrings, CPP #-}
module ReportTypes where
import qualified Data.Map as M
import Data.Aeson
import Data.Aeson.Types
import GHC.Generics
import Text.Printf
import Data.List
import Data.Char
import Paths
import ReadResult
import qualified BenchmarkSettings as S
data ClientSettings = ClientSettings
{ title :: String
, revisionInfo :: String
, diffLink :: Maybe String
}
deriving (Generic)
instance ToJSON ClientSettings
data GlobalReport = GlobalReport
{ settings :: Maybe ClientSettings
, benchmarks :: Maybe (M.Map BenchName ())
, revisions :: Maybe (M.Map Hash RevReport)
, benchGroups :: Maybe [BenchGroup]
, branches :: Maybe (M.Map BranchName BranchReport)
}
instance ToJSON GlobalReport where
toJSON (GlobalReport {..}) = object $
( "settings" .=? settings ) ++
( "benchmarks" .=? benchmarks ) ++
( "revisions" .=? revisions ) ++
( "benchGroups" .=? benchGroups ) ++
( "branches" .=? branches )
where
k .=? Just v = [ k .= toJSON v ]
_ .=? Nothing = []
emptyGlobalReport :: GlobalReport
emptyGlobalReport = GlobalReport Nothing Nothing Nothing Nothing Nothing
data SummaryStats = SummaryStats
{ totalCount :: Int
, improvementCount :: Int
, regressionCount :: Int
, summaryDesc :: String
}
deriving (Show, Generic)
instance ToJSON SummaryStats
instance FromJSON SummaryStats
{-
sumStats :: [SummaryStats] -> SummaryStats
sumStats = foldl' go (SummaryStats 0 0 0)
where go (SummaryStats a b c) (SummaryStats a' b' c') =
SummaryStats (a + a') (b + b') (c + c')
-}
data Status = Built | Failed | Waiting
deriving (Show, Generic)
instance ToJSON Status
instance FromJSON Status
data Summary = Summary
{ hash :: Hash
, gitDate :: Integer
, gitSubject :: String
, stats :: SummaryStats
, parents :: [Hash]
-- , status :: Status
}
deriving (Generic)
instance ToJSON Summary
instance FromJSON Summary
data RevReport = RevReport
{ summary :: Summary
, gitLog :: String
, benchResults :: M.Map BenchName BenchResult
}
deriving (Generic)
instance ToJSON RevReport
instance FromJSON RevReport
data BranchReport = BranchReport
{ branchHash :: Hash
, mergeBaseHash :: Hash
, branchStats :: SummaryStats
, commitCount :: Int
}
deriving (Generic)
instance ToJSON BranchReport
instance FromJSON BranchReport
data ChangeType = Improvement | Boring | Regression
deriving (Eq, Generic)
instance ToJSON ChangeType
instance FromJSON ChangeType
data BenchGroup = BenchGroup
{ groupName :: String
, groupMembers :: [BenchName]
, groupUnitFull :: Maybe String
}
deriving (Generic)
instance ToJSON BenchGroup
instance FromJSON BenchGroup
data BenchResult = BenchResult
{ name :: String
, value :: BenchValue
, unit :: String
, important :: Bool
}
deriving (Generic)
instance ToJSON BenchResult where
toJSON = genericToJSON defaultOptions
#if MIN_VERSION_aeson(0,10,0)
toEncoding = genericToEncoding defaultOptions
#endif
instance FromJSON BenchResult where
parseJSON = genericParseJSON defaultOptions
data BenchComparison = BenchComparison
{ changeName :: String
, change :: String
, changeType :: ChangeType
, changeImportant :: Bool
}
-- A smaller BenchResult
-- (This is a hack: BenchResult no longer carries a changeType. But it is convenient to
-- have that in the graph report, for the tallying for the graph summary. But all not very
-- satisfactory.)
data GraphPoint = GraphPoint
{ gpValue :: BenchValue
, gpChangeType :: ChangeType
}
deriving (Generic)
instance ToJSON GraphPoint where
toJSON = genericToJSON graphPointOptions
#if MIN_VERSION_aeson(0,10,0)
toEncoding = genericToEncoding graphPointOptions
#endif
instance FromJSON GraphPoint where
parseJSON = genericParseJSON graphPointOptions
graphPointOptions = defaultOptions { fieldLabelModifier = fixup }
where fixup ('g':'p':c:cs) = toLower c : cs
invertChangeType :: ChangeType -> ChangeType
invertChangeType Improvement = Regression
invertChangeType Boring = Boring
invertChangeType Regression = Improvement
type Explanation = (String, ChangeType)
noExplanation :: Explanation
noExplanation = ("", Boring)
equalExplanation :: Explanation
equalExplanation = ("=", Boring)
explainSmallInt :: S.BenchSettings -> Integer -> Integer -> Explanation
explainSmallInt _ i1 i2
| i2 == i1 = equalExplanation
| i2 > i1 = ("+ " ++ show (i2 - i1), Improvement)
| i2 < i1 = ("- " ++ show (i1 - i2), Regression)
explainInt :: S.BenchSettings -> Integer -> Integer -> Explanation
explainInt s i1 i2 = explainFloat s (fromIntegral i1) (fromIntegral i2)
explainFloat :: S.BenchSettings -> Double -> Double -> Explanation
explainFloat _ 0 0 = equalExplanation
explainFloat _ 0 _ = ("+ ∞", Improvement)
explainFloat s f1 f2 = (change, typ)
where
change | abs perc < 0.01 = "="
| perc >= 0 = printf "+ %.2f%%" perc
| perc < 0 = printf "- %.2f%%" (-perc)
typ | perc >= 0, perc < th_up = Boring
| perc < 0, -perc < th_down = Boring
| perc >= 0 = Improvement
| perc < 0 = Regression
perc = 100 * ((f2 - f1) / f1)
th_up = S.threshold s
-- Adjusted threshold, to make sure that the inverse change is flagged
-- equivalently
th_down = (1-(1/(1+S.threshold s/100)))*100
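-- Illustrative worked example (not part of the original source): with a 3%
-- threshold, a change from 100 to 104 is +4.00% and crosses the threshold,
-- while the inverse change from 104 to 100 is only -3.85%; the regression side
-- therefore uses the adjusted threshold (1 - 1/(1 + 3/100)) * 100 ~ 2.91%,
-- which the inverse change still crosses.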
toFloat :: BenchValue -> Double
toFloat (I i) = fromIntegral i
toFloat (F f) = f
explain :: S.BenchSettings -> BenchValue -> BenchValue -> (String, ChangeType)
explain s@(S.numberType -> S.SmallIntegralNT) (I i1) (I i2) = explainSmallInt s i1 i2
explain s@(S.numberType -> S.IntegralNT) (I i1) (I i2) = explainInt s i1 i2
-- Treat everything else as Floats, so that we do something sensible
-- even if the user did not set the numberType correctly:
explain s v1 v2 = explainFloat s (toFloat v1) (toFloat v2)
toResult :: S.BenchSettings -> String -> BenchValue -> BenchResult
toResult s name value = BenchResult
{ name = name
, value = value
, unit = S.unit s
, important = S.important s
}
makeComparison :: S.BenchSettings -> String -> BenchValue -> Maybe BenchValue -> BenchComparison
makeComparison s name value prev = BenchComparison
{ changeName = name
, change = change
, changeType = changeType
, changeImportant = S.important s
}
where
(change, changeType') =
case prev of
Just p -> explain s p value
Nothing -> noExplanation
changeType | S.smallerIsBetter s = invertChangeType changeType'
| otherwise = changeType'
toSummaryStats :: [BenchComparison] -> SummaryStats
toSummaryStats comps = SummaryStats
{ totalCount = length comps
, improvementCount = length
[ () | comp <- importantComps , changeType comp == Improvement ]
, regressionCount = length
[ () | comp <- importantComps , changeType comp == Regression ]
, summaryDesc = andMore "No significant changes" 5
[ changeName comp ++ ": " ++ change comp
| comp <- importantComps
, changeType comp `elem` [Improvement, Regression]
]
}
where importantComps = filter changeImportant comps
andMore :: String -> Int -> [String] -> String
andMore def _ [] = def
andMore _ n xs = intercalate "\n" (take n xs) ++ rest
where rest | length xs > n = "\nand " ++ show (length xs - n) ++ " more"
| otherwise = ""
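-- Illustrative example (not part of the original source):
--   andMore "No significant changes" 2 []                           ~> "No significant changes"
--   andMore "No significant changes" 2 ["a: +5%", "b: -3%", "c: ="] ~> "a: +5%\nb: -3%\nand 1 more"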
{-
toGroup :: String -> [BenchResult] -> BenchGroup
toGroup n res = BenchGroup
{ groupName = n
, benchResults = res
, groupStats = SummaryStats
{ totalCount = length res
, improvementCount = length [ () | BenchResult { changeType = Improvement } <- res ]
, regressionCount = length [ () | BenchResult { changeType = Regression } <- res ]
}
}
-}
createBranchReport ::
S.Settings -> Hash -> Hash ->
ResultMap -> ResultMap ->
Int ->
BranchReport
createBranchReport settings this other thisM otherM commitCount = BranchReport
{ branchHash = this
, mergeBaseHash = other
, branchStats = toSummaryStats comparisons
, commitCount = commitCount
}
where
comparisons =
[ makeComparison s name value (M.lookup name otherM)
| (name, value) <- M.toAscList thisM
, let s = S.benchSettings settings name
]
createReport ::
S.Settings -> Hash -> [Hash] ->
ResultMap -> ResultMap ->
String -> String -> Integer ->
RevReport
createReport settings this parents thisM parentM log subject date = RevReport
{ summary = Summary
{ hash = this
, parents = parents
, stats = toSummaryStats comparisons
, gitSubject = subject
, gitDate = date
}
--, benchGroups = benchGroups
, benchResults = results
, gitLog = log
}
where
results = M.fromList
[ (name, toResult s name value)
| (name, value) <- M.toAscList thisM
, let s = S.benchSettings settings name
]
comparisons =
[ makeComparison s name value (M.lookup name parentM)
| (name, value) <- M.toAscList thisM
, let s = S.benchSettings settings name
]
summarize :: RevReport -> Summary
summarize (RevReport {..}) = summary
| nomeata/gipeda | src/ReportTypes.hs | mit | 9,495 | 0 | 16 | 2,369 | 2,523 | 1,356 | 1,167 | 217 | 2 |
-- If we want to map an IO function over a list, we might try:
-- map print [1, 2, 3], which only builds the list of actions [print 1, print 2, print 3]
-- without running them. We need to sequence the actions instead:
-- sequence (map print [1, 2, 3, 4, 5])
-- or better yet:
main = mapM print [1, 2, 3]
| fabriceleal/learn-you-a-haskell | 09/mapMs.hs | mit | 255 | 0 | 6 | 67 | 26 | 17 | 9 | 1 | 1
{-# htermination addListToFM_C :: Ord a => (b -> b -> b) -> FiniteMap a b -> [(a,b)] -> FiniteMap a b #-}
import FiniteMap
| ComputationWithBoundedResources/ara-inference | doc/tpdb_trs/Haskell/full_haskell/FiniteMap_addListToFM_C_1.hs | mit | 123 | 0 | 3 | 25 | 5 | 3 | 2 | 1 | 0 |
{-# LANGUAGE ViewPatterns #-}
module Core.LambdaLift.Abstract.Naive
( abstract
) where
import Common
import Core.AST
import Core.AnnotAST
import qualified Data.Set as S
abstract :: AnnotProgram (S.Set Name) Name -> Program Name
abstract = Program . map abstractSC . getProgramF
where abstractSC (SupercombF name args body) = Supercomb name args (abstractExpr body)
abstractExpr :: AnnotExpr (S.Set Name) Name -> Expr Name
abstractExpr (Annot (S.toList -> fv, e)) = case e of
EVarF v -> EVar v
ENumF n -> ENum n
EConstrF tag arity -> EConstr tag arity
EApF e1 e2 -> EAp (abstractExpr e1) (abstractExpr e2)
ELetF rec defs body -> ELet rec defs' body'
where defs' = [(x, abstractExpr e) | (x, e) <- defs]
body' = abstractExpr body
ECaseF e alts -> ECase (abstractExpr e) (map abstractAlter alts)
EAbsF args body -> foldl EAp e' (map EVar fv)
where e' = ELet False [(anonym, EAbs (fv ++ args) body')] (EVar anonym)
body' = abstractExpr body
abstractAlter :: AnnotAlter (S.Set Name) Name -> Alter Name
abstractAlter (AlterF tag xs body) = Alter tag xs (abstractExpr body)
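-- Illustrative sketch (not part of the original source), written in source-level
-- syntax: a lambda (\x -> x + y) whose free-variable annotation is {y} is
-- rewritten by the EAbsF case above to
--   (let anonym = \y x -> x + y in anonym) y
-- (anonym here stands for the fresh binder imported from Common): the free
-- variables become extra leading parameters and are immediately re-applied, so
-- the now-closed lambda can later be lifted out as a supercombinator.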
| meimisaki/Rin | src/Core/LambdaLift/Abstract/Naive.hs | mit | 1,117 | 0 | 15 | 226 | 454 | 229 | 225 | 25 | 7 |