code (string, lengths 5..1.03M) | repo_name (string, lengths 5..90) | path (string, lengths 4..158) | license (string, 15 classes) | size (int64, 5..1.03M) | n_ast_errors (int64, 0..53.9k) | ast_max_depth (int64, 2..4.17k) | n_whitespaces (int64, 0..365k) | n_ast_nodes (int64, 3..317k) | n_ast_terminals (int64, 1..171k) | n_ast_nonterminals (int64, 1..146k) | loc (int64, -1..37.3k) | cycloplexity (int64, -1..1.31k)
---|---|---|---|---|---|---|---|---|---|---|---|---|
{-# LANGUAGE OverloadedStrings #-}
module Data.Tournament (
Tournament(..),
TournamentRank(..),
TournamentType(..),
ScoringMethod(..)
) where
import Data.Dates (DateTime(..), parseDate)
import Data.Text (Text, pack, unpack, append)
import Data.Aeson (ToJSON(..), FromJSON(..), Value(..), object, (.=), (.:))
import Data.Aeson.Types (typeMismatch)
import Control.Applicative (Applicative(..), (<$>), (<*>))
import Data.ID(ID(..))
import Data.Location (Location)
data TournamentRank = Local | Districtal | Regional | National | International deriving (Eq, Show, Ord)
data TournamentType = Individual | Pairs | Teams deriving (Eq, Show)
data ScoringMethod = TopScore | IMP | Patton deriving (Eq, Show)
data Tournament = Tournament {
_id :: ID Tournament,
date :: DateTime,
name :: Text,
location :: Location,
tournamentType :: TournamentType,
scoring :: ScoringMethod,
rank :: TournamentRank,
fee :: Integer
}
instance ToJSON TournamentRank where
toJSON Local = String "L"
toJSON Districtal = String "D"
toJSON Regional = String "R"
toJSON National = String "N"
toJSON International = String "I"
instance FromJSON TournamentRank where
parseJSON (String "L") = pure Local
parseJSON (String "D") = pure Districtal
parseJSON (String "R") = pure Regional
parseJSON (String "N") = pure National
parseJSON (String "I") = pure International
parseJSON v = typeMismatch "TournamentRank" v
instance ToJSON TournamentType where
toJSON Individual = String "individual"
toJSON Pairs = String "pairs"
toJSON Teams = String "teams"
instance FromJSON TournamentType where
parseJSON (String "individual") = pure Individual
parseJSON (String "pairs") = pure Pairs
parseJSON (String "teams") = pure Teams
parseJSON v = typeMismatch "TournamentType" v
instance ToJSON ScoringMethod where
toJSON TopScore = String "MAX"
toJSON IMP = String "IMP"
toJSON Patton = String "PAT"
instance FromJSON ScoringMethod where
parseJSON (String "MAX") = pure TopScore
parseJSON (String "IMP") = pure IMP
parseJSON (String "PAT") = pure Patton
parseJSON v = typeMismatch "ScoringMethod" v
instance ToJSON DateTime where
toJSON d = String $ asText year d `append` "/" `append` asText month d
`append` "/" `append` asText day d
instance FromJSON DateTime where
parseJSON (Object o) = DateTime <$> o .: "year"
<*> o .: "month"
<*> o .: "day"
<*> pure 0 <*> pure 0 <*> pure 0
parseJSON txt@(String str) = case parseDate epoch (unpack str) of
Right d -> pure d
Left err -> typeMismatch (show err) txt
parseJSON v = typeMismatch "Date" v
instance ToJSON Tournament where
toJSON tournament = object [
"id" .= (toJSON $ _id tournament),
"date" .= (toJSON $ date tournament),
"name" .= (toJSON $ name tournament),
"location" .= (toJSON $ location tournament),
"type" .= (toJSON $ tournamentType tournament),
"scoring" .= (toJSON $ scoring tournament),
"rank" .= (toJSON $ rank tournament),
"fee" .= (toJSON $ fee tournament)
]
instance FromJSON Tournament where
parseJSON (Object o) = Tournament <$> o .: "id"
<*> o .: "date"
<*> o .: "name"
<*> o .: "location"
<*> o .: "type"
<*> o .: "scoring"
<*> o .: "rank"
<*> o .: "fee"
parseJSON val = typeMismatch "Tournament" val
asText :: (Show b) => (a -> b) -> a -> Text
asText transform = pack . show . transform
epoch :: DateTime
epoch = DateTime 1970 1 1 0 0 0
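-- A minimal sketch (not part of the original module) of how the JSON instances
-- round-trip the enumeration types; `fromJSON` and `Result` come from Data.Aeson
-- (not imported above):
--
-- >>> toJSON Districtal
-- String "D"
-- >>> fromJSON (String "D") :: Result TournamentRank
-- Success Districtal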
|
Sventimir/turniejowo
|
src/Data/Tournament.hs
|
apache-2.0
| 4,151 | 0 | 21 | 1,383 | 1,235 | 654 | 581 | 92 | 1 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE StandaloneDeriving #-}
{-# LANGUAGE TupleSections #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE DeriveFunctor #-}
{-# LANGUAGE LambdaCase #-}
-- | Parsing the Skladnica constituency treebank.
module NLP.Skladnica
(
-- * Types
NID
, IsHead (..)
, Node (..)
, Label
, NonTerm (..)
, Term (..)
-- * Parsing
, parseTop
, readTop
-- * Conversion
-- ** Tree
, Tree
, Edge (..)
, modifyEdge
, modifyNode
-- , modifyRootEdge
-- , modifyRootNode
, mapFst
, simplify
, purge
, drawTree
-- ** DAG
, DAG
, mkDAG
, forest
-- ** Utils
, printChosen
) where
import Control.Applicative ((<|>))
import Control.Arrow ((&&&))
import qualified Control.Arrow as Arr
import Control.Monad (guard)
import qualified Data.Foldable as F
import qualified Data.Map.Strict as M
import Data.Maybe (maybeToList)
import Data.Text (Text)
import qualified Data.Text as T
import qualified Data.Text.Lazy as L
import qualified Data.Text.Lazy.IO as L
import qualified Data.Tree as R
import qualified Text.HTML.TagSoup as TagSoup
import Text.XML.PolySoup hiding (P, Q)
import qualified Text.XML.PolySoup as PolySoup
-------------------------------------------------
-- Data types
-------------------------------------------------
-- | Parsing predicates.
type P a = PolySoup.P (XmlTree L.Text) a
type Q a = PolySoup.Q (XmlTree L.Text) a
-- | Node ID.
type NID = Int
-- | Is it head or not?
data IsHead = HeadYes | HeadNo
deriving (Show, Eq, Ord)
-- | A label stored in a tree node.
type Label = Either NonTerm Term
-- | A node of the parsed forest.
data Node = Node
{ nid :: NID
-- ^ ID of the node.
, chosen :: Bool
, label :: Label
, children :: [[(NID, IsHead)]]
-- ^ Note potential non-determinism!
} deriving (Show, Eq, Ord)
-- | Non-terminal
data NonTerm = NonTerm
{ cat :: Text
-- ^ Category
, morph :: M.Map Text Text
} deriving (Show, Eq, Ord)
-- | Terminal
data Term = Term
{ orth :: Text
, base :: Text
, tag :: Text
} deriving (Show, Eq, Ord)
-------------------------------------------------
-- Parsing
-------------------------------------------------
-- | Top-level parser
-- topP :: P (Text, [Node])
topP :: P [Node]
topP = concat <$> every' nodesQ
-- | Text (input sentence) extractor.
-- :: Q Text
-- = named "text" `joinR` first (node text)
-- | Nodes parser
nodesQ :: Q [Node]
-- nodesQ = concat <$> (true //> nodeQ)
nodesQ = true //> nodeQ
-- | Node parser
nodeQ :: Q Node
nodeQ = (named "node" *> ((,) <$> nidP <*> attr "chosen")) `join`
\(theNid, chosenText) -> do
theLabel <- first labelQ
childrenAlt <- every' childrenQ
return $ Node
{ nid = theNid
, chosen = chosenText == "true"
, label = theLabel
, children = childrenAlt }
labelQ :: Q Label
labelQ = (Left <$> nonTermQ) <|> (Right <$> termQ)
-- nonTermQ :: Q NonTerm
-- nonTermQ = named "nonterminal" `joinR` first
-- (named "category" `joinR` first
-- (node text))
nonTermQ :: Q NonTerm
nonTermQ = named "nonterminal" `joinR` do
cat_ <- first $ named "category" `joinR` first (node text)
mor_ <- every' $ (named "f" *> attr "type") `join`
\typ -> ((typ,) <$> first (node text))
return $ NonTerm
{ cat = L.toStrict cat_
, morph = M.fromList (map strictPair mor_) }
where
strictPair = Arr.first L.toStrict . Arr.second L.toStrict
termQ :: Q Term
termQ = named "terminal" `joinR` do
orth_ <- first $ named "orth" `joinR` first (node text)
base_ <- first $ named "base" `joinR` first (node text)
tag_ <- first $ (named "f" *> hasAttrVal "type" "tag") `joinR` first (node text)
return $ Term
{ orth = L.toStrict orth_
, base = L.toStrict base_
, tag = L.toStrict tag_ }
childrenQ :: Q [(NID, IsHead)]
childrenQ = named "children" `joinR` every' childQ
childQ :: Q (NID, IsHead)
childQ = node $ named "child" *> ((,) <$> nidP <*> isHeadP)
nidP :: PolySoup.Q (TagSoup.Tag L.Text) NID
nidP = read . L.unpack <$> attr "nid"
isHeadP :: PolySoup.Q (TagSoup.Tag L.Text) IsHead
isHeadP =
let f x = if x == "true" then HeadYes else HeadNo
in f <$> attr "head"
-- | Parse an XML string (in the Skladnica format) into a sequence of `Node`s.
parseTop :: L.Text -> [Node]
parseTop =
F.concat . evalP topP . parseForest . TagSoup.parseTags
-- | Read a Skladnica XML file and parse it into a sequence of `Node`s.
readTop :: FilePath -> IO [Node]
readTop path = parseTop <$> L.readFile path
-------------------------------------------------
-- Tree
-------------------------------------------------
-- | To distinguish edge labels from the node (child) labels.
data Edge a b = Edge
{ edgeLabel :: b
-- ^ Label assigned to the in-going edge
, nodeLabel :: a
-- ^ Label assigned to the node itself
} deriving (Show, Eq, Ord)
-- | Modify edge label value.
modifyEdge
:: (b -> c)
-> Edge a b
-> Edge a c
modifyEdge f e@Edge{..} =
e {edgeLabel = f edgeLabel}
-- | Modify node label value.
modifyNode
:: (a -> b)
-> Edge a c
-> Edge b c
modifyNode f e@Edge{..} =
e {nodeLabel = f nodeLabel}
-- | Skladnica tree.
type Tree a b = R.Tree (Edge a b)
-- -- | Modify root's edge label value.
-- modifyRootEdge
-- :: (b -> c)
-- -> Tree a b
-- -> Tree a c
-- modifyRootEdge = undefined
-- -- | Modify root's node label value.
-- modifyRootNode
-- :: (a -> b)
-- -> Tree a c
-- -> Tree b c
-- modifyRootNode f R.Node{..} = R.Node
-- { R.rootLabel = modifyNode f rootLabel
-- ,
-- | Map a function over labels attached to tree nodes.
mapFst :: (a -> c) -> Tree a b -> Tree c b
mapFst f t = R.Node
{ R.rootLabel =
let x = R.rootLabel t
in x {nodeLabel = f (nodeLabel x)}
, R.subForest = map (mapFst f) (R.subForest t) }
-- | Simplify a tree to a regular rose tree (i.e., from the containers package).
simplify :: Tree a b -> R.Tree a
simplify R.Node{..} = R.Node
{ R.rootLabel = nodeLabel rootLabel
, R.subForest = map simplify subForest }
-- | Draw the tree.
drawTree :: Tree String String -> String
drawTree = unlines . treeLines
-- | Draw tree lines.
treeLines :: Tree String String -> [String]
treeLines R.Node{..} =
show rootLabel : concat
[ map indent
(treeLines child)
| child <- subForest ]
where
indent = (" " ++)
-- | Purge the nodes which satisfy the predicate.
purge :: (a -> Bool) -> R.Tree a -> R.Forest a
purge p t =
let x = R.rootLabel t
ts = concatMap (purge p) (R.subForest t)
in case p x of
True -> ts
False -> [R.Node x ts]
-- -- | Purge the nodes which satisfy the predicate. However, if the discarded node
-- -- contains more than one child, we take its first non-terminal trunk child and
-- -- copy it to the place of the discarded node. Finally, if all trunk children
-- -- are terminals, we don't do anything.
-- purge
-- :: (a -> Bool)
-- -- ^ Predicate which indicates nodes to be discarded
-- -> (a -> Bool)
-- -- ^ Predicate which tells wheter a given node is a trunk (head) non-terminal
-- -> R.Tree a
-- -> R.Tree a
-- purge p isTrunkNT root =
-- let x = R.rootLabel root
-- ts = map (purge p isTrunkNT) (R.subForest root)
-- in case p x of
-- False -> R.Node x ts
-- True -> case ts of
-- [t] -> t -- discarding `x` node
-- _ -> case findTrunkChild (map R.rootLabel ts) of
-- Just y -> R.Node y ts -- copying trunk non-terminal `y` in place of `x`
-- -- Just _ -> error "Skladnica.purge: why not, let's fail..."
-- Nothing -> error "Skladnica.purge: don't know what to do..."
-- where
-- -- find the first non-terminal trunk child
-- findTrunkChild = \case
-- [] -> Nothing
-- (x:xs) -> x <$ guard (isTrunkNT x) <|> findTrunkChild xs
-- -- isTrunkNT x = do
-- -- guard $ edgeLabel x == HeadYes
-- -- guard $ case label (nodeLabel x) of
-- -- Left _nonTerm -> True
-- -- Right _term -> False
-------------------------------------------------
-- Conversion
-------------------------------------------------
-- | A full parse forest stored in a form of a directed acyclic graph (DAG)
type DAG = M.Map NID Node
-- | Construct DAG from the list of nodes.
mkDAG :: [Node] -> DAG
mkDAG = M.fromList . map (nid &&& id)
-- | Extract the forest of trees encoded in the given DAG.
forest
:: (Node -> Bool) -- ^ Nodes which do not satisfy the predicate will be ignored
-> NID -- ^ Root node
-> DAG -- ^ The DAG
-> [Tree Node IsHead]
forest nodePred rootID dag =
go rootID HeadYes
where
go i isHead = do
n@Node{..} <- maybeToList $ M.lookup i dag
guard $ nodePred n
if null children then do
-- return $ Tree n []
return $ R.Node (Edge isHead n) []
else do
-- take one of the children alternatives
childrenD <- children
-- for the given children list, determine the
-- corresponding forests
sub <- mapM (uncurry go) childrenD
-- the result
return $ R.Node (Edge isHead n) sub
-- | Print the chosen (simplified) tree represented in the DAG.
printChosen :: DAG -> IO ()
printChosen dag =
let t0 = simplify $ forest chosen 0 dag !! 0
simpLab = T.unpack . either cat orth . label
in putStrLn . R.drawTree . fmap simpLab $ t0
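-- | A minimal usage sketch (not part of the original module): read a Skladnica
-- XML file and print the chosen (simplified) tree it encodes.
printFile :: FilePath -> IO ()
printFile path = do
  nodes <- readTop path
  printChosen (mkDAG nodes)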
-- -- | A version of `Node` adapted to trees (i.e., it doesn't contain information
-- -- already available in the structure of the tree).
-- data TreeNode = TreeNode
-- { nid :: NID
-- -- ^ ID of the node.
-- , chosen :: Bool
-- , label :: Label
-- , children :: [[(NID, IsHead)]]
-- -- ^ Note potential non-determinism!
-- } deriving (Show, Eq, Ord)
-- -------------------------------------------------
-- -- Utils
-- -------------------------------------------------
--
--
-- -- -- | Obtain non-terminal from source tree's root.
-- -- getNT' :: Tree Node b -> L.Text
-- -- getNT' t =
-- -- case label (rootLabel t) of
-- -- Left (NonTerm{..}) -> cat
-- -- _ -> error "getNT': invalid source tree"
-- --
-- --
-- -- -- | Extract the non-terminal from the label (raise an error if not possible).
-- -- labelNT :: Label -> NonTerm
-- -- labelNT x = case x of
-- -- Left x -> x
-- -- _ -> error "labelNT: not a non-terminal"
-- --
-- --
-- -- -- | Obtain non-terminal from source tree's root.
-- -- getTerm' :: R.Tree (Label, a) -> L.Text
-- -- getTerm' l =
-- -- case fst (R.rootLabel l) of
-- -- Right (Term{..}) -> base
-- -- _ -> error "getT': invalid source tree"
|
kawu/skladnica-parser
|
src/NLP/Skladnica.hs
|
bsd-2-clause
| 10,921 | 0 | 15 | 2,843 | 2,268 | 1,298 | 970 | 181 | 2 |
-- | Display a model in 2D after PCA analysis.
--
-- Principal component analysis extracts from a matrix its "most significant dimensions",
-- that is, the orthogonal directions along which the data varies the most (the directions
-- of maximal variance).
-- We extract the two main principal components from the feature matrix of a model and generate
-- a 2d picture of the most frequent words from the dictionary.
module Display where
import qualified Data.Array.IO as A
-- hmatrix package
import qualified Data.Packed.Matrix as M
import qualified Data.Packed.Vector as V
-- Module containing code for PCA computation
import qualified Numeric.LinearAlgebra.NIPALS as P
import Words
import Model
import Graphics.Rendering.Chart
import Data.Default.Class
import Data.Colour
import Data.Colour.Names
import Control.Lens
-- | Compute 2D mapping of words from a model.
--
-- We first transform the syn0 values of model into a Matrix of doubles
-- then compute 2 first PCA from this matrix. The first 2 PCAs are zipped along with each corresponding
-- word from the vocabulary to produce a vector of tuples with coordinates
pcaAnalysis :: Model -> IO [(String, Double, Double)]
pcaAnalysis m = do
matrix <- toMatrix (numberOfWords m) (modelSize m) (syn0 m)
let (pc1, _ , residual) = P.firstPC matrix
let (pc2, _ , _) = P.firstPC residual
let indexedWords = orderedWords (vocabulary m)
return $ zip3 indexedWords (V.toList pc1) (V.toList pc2)
-- |Draw a chart of the X most frequent words in a model using PCA dimensions.
drawMostFrequentWords :: Int -- ^Limit number of words to display
-> Model -- ^The model to draw frequencies from
-> [(String,Double,Double)] -- ^Result of PCA analysis from model
-> Renderable () -- ^The output from Chart
drawMostFrequentWords limit model vectors = let
points = plot_points_style .~ filledCircles 2 (opaque red)
$ plot_points_values .~ [(x,y) | (_,x,y) <- take limit vectors]
$ def
labels = plot_annotation_values .~ [(x + 0.001,y + 0.001,l) | (l,x,y) <- take limit vectors]
$ def
layout = layout_title .~ "Words Vector Space"
$ layout_plots .~ [toPlot points, toPlot labels]
$ def
in
toRenderable layout
toMatrix :: Int -> Int -> Layer -> IO (M.Matrix Double)
toMatrix r c l = A.getElems l >>= return . M.trans . (r M.>< c)
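-- | A minimal usage sketch (not part of the original module): run the PCA
-- analysis on a model and print the first @n@ word coordinates to stdout.
printPCA :: Int -> Model -> IO ()
printPCA n m = do
  coords <- pcaAnalysis m
  mapM_ print (take n coords)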
|
RayRacine/hs-word2vec
|
Display.hs
|
bsd-3-clause
| 2,499 | 0 | 15 | 596 | 527 | 295 | 232 | 35 | 1 |
module Args
( Command(..)
, Options(..)
, argsParser
) where
import Options.Applicative
data Command
= Xml
| XmlListFiles
| Admin
| Cache
| Cms
| Config
| Customer
| Db
| Design
| Dev
| Extension
| Index
| Sys
| Help
deriving Show
--data Verbosity = Normal | Verbose
data Options = Options { optDebug :: Bool
, optVerbose :: Bool
, optRoot :: String
, subCommand :: Command
} deriving Show
withInfo :: Parser a -> String -> ParserInfo a
withInfo opts desc = info (helper <*> opts) $ progDesc desc
makeCommand :: String -> String -> Parser a -> Mod CommandFields a
makeCommand cmd desc p = command cmd (p `withInfo` desc)
parseCommand :: Parser Command
parseCommand = subparser $ xmlCmd <> helpCmd
where
xmlCmd = makeCommand "xml" "Do xml stuff" $
subparser $ mconcat [ makeCommand "list-files" "List XML files" (pure XmlListFiles) ]
helpCmd = makeCommand "help" "Halp!" (pure Help)
parseOptions :: Parser Options
parseOptions = Options
<$> debugOpt
<*> verboseOpt
<*> rootOpt
<*> parseCommand
where
debugOpt = switch $
long "debug" <> short 'd' <> help "Debug mode"
verboseOpt = switch $
long "verbose" <> short 'v' <> help "Verbose mode"
rootOpt = strOption $
long "root" <> short 'r' <> metavar "ROOT" <> value "./"
parser :: ParserInfo Options
parser = info (helper <*> parseOptions) $
mconcat [fullDesc, progDesc "Do Magento stuff", header "v0.0.1"]
argsParser :: (Options -> IO ()) -> IO ()
argsParser cmd = customExecParser (prefs showHelpOnError) parser >>= cmd
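-- A minimal usage sketch (not part of the original module): a hypothetical
-- @main@ that dispatches on the parsed sub-command (handler bodies are placeholders).
demoMain :: IO ()
demoMain = argsParser $ \opts ->
  case subCommand opts of
    XmlListFiles -> putStrLn "would list XML files here"
    Help         -> putStrLn "try --help"
    _            -> print opts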
|
dxtr/hagento
|
src/Args.hs
|
bsd-3-clause
| 1,666 | 0 | 12 | 445 | 499 | 264 | 235 | 52 | 1 |
{-#LANGUAGE OverloadedStrings, ScopedTypeVariables #-}
module Text.Aztex.Processing where
import qualified Data.Map as Map
import Control.Monad.RWS
import Control.Applicative
import Text.Aztex.Helpers
import Text.Aztex.Types
-- Could use another data type?
expand :: Aztex -> RWS AztexStyle AztexError AztexState Aztex
expand Empty = return Empty
expand Whitespace = return Whitespace
expand EOL = return EOL
expand (CommandBlock aztex) = CommandBlock <$> expand aztex
expand (TextBlock aztex) = TextBlock <$> expand aztex
expand (MathBlock aztex) = MathBlock <$> expand aztex
expand (Binding name fcn) = do
st <- get
put $ st{bindings = Map.insert name fcn (bindings st)}
return Empty
expand (CallBinding name args) = do
st <- get
case Map.lookup name (bindings st) of
Nothing -> tell ["Identifier " ++ name ++ " used out of scope."] >> return Empty
Just (AztexFunction argNames fcnBody) -> do
-- Evaluate all arguments.
expandedArgs :: [Aztex] <- mapM expand args
-- Bind local arguments for this function's body.
-- Note: Map.union is left-biased.
let bindingsWithLocal = Map.union (Map.fromList $ zip argNames $ map aztexToFunction expandedArgs) (bindings st)
{-
let bindingsWithLocal = Map.union (Map.fromList $ uncurry makeBinding <$> zip argNames args) (bindings st)
makeBinding bindingName arg = case Map.lookup bindingName (bindings st) of
Nothing -> (bindingName, aztexToFunction arg)
Just alreadyBound -> (bindingName, alreadyBound)
-}
put $ st{bindings = bindingsWithLocal}
fcnResult <- expand fcnBody
-- Pop bindings.
st' <- get
put st'{bindings = bindings st}
return fcnResult
expand (Block l) = Block <$> mapM expand l
expand (Import imp) = do
st <- get
put $ st {bindings = Map.union (bindings st) imp}
return Empty
expand t@(Token _) = return t
expand (Parens a) = Parens <$> expand a
expand (Brackets a) = Brackets <$> expand a
expand (Subscript a) = Subscript <$> expand a
expand (Superscript a) = Superscript <$> expand a
expand (Quoted a) = return $ Quoted a
expand a@(ImplicitModeSwitch _) = return a
expand (TitlePage title_a author_a) = do
st <- get
title_l <- expand title_a
author_l <- expand author_a
put st {titlePage = Just (title_l, author_l)}
return Empty
|
nelk/aztex-compiler
|
src/Text/Aztex/Processing.hs
|
bsd-3-clause
| 2,452 | 0 | 19 | 598 | 717 | 350 | 367 | 48 | 2 |
-- | Position in the source code and in the intermediate representation
module VSim.Data.Loc where
import Data.Maybe
import VSim.Data.Line (Line(..))
-- | Position in the source (and in the intermediate format)
data Loc
= Loc
{ locSrcLine :: !(Maybe Line)
, locLine :: !Int
, locStartChar :: !Int
, locEndChar :: !Int
, locFn :: !String
}
deriving (Show, Eq, Ord)
unknownLoc :: Loc
unknownLoc = Loc Nothing 0 0 0 "<unknown>"
data WithLoc a
= WithLoc
{ withLocLoc :: Loc
, withLocVal :: a
}
deriving Show
instance Functor WithLoc where
fmap f (WithLoc l x) = WithLoc l (f x)
getLocLine :: Loc -> Line
getLocLine loc = fromMaybe (getLocIrLine loc) (locSrcLine loc)
getLocIrLine :: Loc -> Line
getLocIrLine loc = Line (locFn loc) (locLine loc) (locStartChar loc)
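-- A minimal usage sketch (not part of the original module): attach a location
-- to a value and map over the payload via the Functor instance.
demoWithLoc :: WithLoc Int
demoWithLoc = fmap (+ 1) (WithLoc unknownLoc 41)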
|
ierton/vsim
|
src/VSim/Data/Loc.hs
|
bsd-3-clause
| 850 | 0 | 11 | 207 | 230 | 126 | 104 | 31 | 1 |
{-# LANGUAGE CPP #-}
import Control.Monad
import Data.IORef
import Distribution.Simple
import Distribution.Simple.InstallDirs as I
import Distribution.Simple.LocalBuildInfo as L
import qualified Distribution.Simple.Setup as S
import qualified Distribution.Simple.Program as P
import Distribution.PackageDescription
import Distribution.Text
import System.Exit
import System.FilePath ((</>), splitDirectories)
import System.Directory
import qualified System.FilePath.Posix as Px
import System.Process
import qualified Data.Text as T
import qualified Data.Text.IO as TIO
-- After Idris is built, we need to check and install the prelude and other libs
make verbosity = P.runProgramInvocation verbosity . P.simpleProgramInvocation "make"
mvn verbosity = P.runProgramInvocation verbosity . P.simpleProgramInvocation "mvn"
#ifdef mingw32_HOST_OS
-- make on mingw32 expects unix style separators
(<//>) = (Px.</>)
idrisCmd local = Px.joinPath $ splitDirectories $
".." <//> buildDir local <//> "idris" <//> "idris"
rtsDir local = Px.joinPath $ splitDirectories $
".." <//> buildDir local <//> "rts" <//> "libidris_rts"
#else
idrisCmd local = ".." </> buildDir local </> "idris" </> "idris"
rtsDir local = ".." </> buildDir local </> "rts" </> "libidris_rts"
#endif
cleanStdLib verbosity
= do make verbosity [ "-C", "lib", "clean", "IDRIS=idris" ]
make verbosity [ "-C", "effects", "clean", "IDRIS=idris" ]
make verbosity [ "-C", "javascript", "clean", "IDRIS=idris" ]
cleanJavaLib verbosity
= do dirty <- doesDirectoryExist ("java" </> "target")
when dirty $ mvn verbosity [ "-f", "java/pom.xml", "clean" ]
pomExists <- doesFileExist ("java" </> "pom.xml")
when pomExists $ removeFile ("java" </> "pom.xml")
execPomExists <- doesFileExist ("java" </> "executable_pom.xml")
when pomExists $ removeFile ("java" </> "executable_pom.xml")
installStdLib pkg local withoutEffects verbosity copy
= do let dirs = L.absoluteInstallDirs pkg local copy
let idir = datadir dirs
let icmd = idrisCmd local
putStrLn $ "Installing libraries in " ++ idir
make verbosity
[ "-C", "lib", "install"
, "TARGET=" ++ idir
, "IDRIS=" ++ icmd
]
unless withoutEffects $
make verbosity
[ "-C", "effects", "install"
, "TARGET=" ++ idir
, "IDRIS=" ++ icmd
]
make verbosity
[ "-C", "javascript", "install"
, "TARGET=" ++ idir
, "IDRIS=" ++ icmd
]
let idirRts = idir </> "rts"
putStrLn $ "Installing run time system in " ++ idirRts
make verbosity
[ "-C", "rts", "install"
, "TARGET=" ++ idirRts
, "IDRIS=" ++ icmd
]
installJavaLib pkg local verbosity copy version = do
let rtsFile = "idris-" ++ display version ++ ".jar"
putStrLn $ "Installing java libraries"
mvn verbosity [ "install:install-file"
, "-Dfile=" ++ ("java" </> "target" </> rtsFile)
, "-DgroupId=org.idris-lang"
, "-DartifactId=idris"
, "-Dversion=" ++ display version
, "-Dpackaging=jar"
, "-DgeneratePom=True"
]
let dir = datadir $ L.absoluteInstallDirs pkg local copy
copyFile ("java" </> "executable_pom.xml") (dir </> "executable_pom.xml")
-- This is a hack. I don't know how to tell cabal that a data file needs
-- installing but shouldn't be in the distribution. And it won't make the
-- distribution if it's not there, so instead I just delete
-- the file after configure.
removeLibIdris local verbosity
= do let icmd = idrisCmd local
make verbosity
[ "-C", "rts", "clean"
, "IDRIS=" ++ icmd
]
checkStdLib local withoutEffects verbosity
= do let icmd = idrisCmd local
putStrLn $ "Building libraries..."
make verbosity
[ "-C", "lib", "check"
, "IDRIS=" ++ icmd
]
unless withoutEffects $
make verbosity
[ "-C", "effects", "check"
, "IDRIS=" ++ icmd
]
make verbosity
[ "-C", "javascript", "check"
, "IDRIS=" ++ icmd
]
make verbosity
[ "-C", "rts", "check"
, "IDRIS=" ++ icmd
]
checkJavaLib verbosity = mvn verbosity [ "-f", "java" </> "pom.xml", "package" ]
javaFlag flags =
case lookup (FlagName "java") (S.configConfigurationsFlags flags) of
Just True -> True
Just False -> False
Nothing -> False
noEffectsFlag flags =
case lookup (FlagName "noeffects") (S.configConfigurationsFlags flags) of
Just True -> True
Just False -> False
Nothing -> False
preparePoms version
= do pomTemplate <- TIO.readFile ("java" </> "pom_template.xml")
TIO.writeFile ("java" </> "pom.xml") (insertVersion pomTemplate)
execPomTemplate <- TIO.readFile ("java" </> "executable_pom_template.xml")
TIO.writeFile ("java" </> "executable_pom.xml") (insertVersion execPomTemplate)
where
insertVersion template =
T.replace (T.pack "$RTS-VERSION$") (T.pack $ display version) template
-- Install libraries during both copy and install
-- See http://hackage.haskell.org/trac/hackage/ticket/718
main = do
defaultMainWithHooks $ simpleUserHooks
{ postCopy = \ _ flags pkg lbi -> do
let verb = S.fromFlag $ S.copyVerbosity flags
let withoutEffects = noEffectsFlag $ configFlags lbi
installStdLib pkg lbi withoutEffects verb
(S.fromFlag $ S.copyDest flags)
, postInst = \ _ flags pkg lbi -> do
let verb = (S.fromFlag $ S.installVerbosity flags)
let withoutEffects = noEffectsFlag $ configFlags lbi
installStdLib pkg lbi withoutEffects verb
NoCopyDest
when (javaFlag $ configFlags lbi)
(installJavaLib pkg
lbi
verb
NoCopyDest
(pkgVersion . package $ localPkgDescr lbi)
)
, postConf = \ _ flags _ lbi -> do
removeLibIdris lbi (S.fromFlag $ S.configVerbosity flags)
when (javaFlag $ configFlags lbi)
(preparePoms . pkgVersion . package $ localPkgDescr lbi)
, postClean = \ _ flags _ _ -> do
let verb = S.fromFlag $ S.cleanVerbosity flags
cleanStdLib verb
cleanJavaLib verb
, postBuild = \ _ flags _ lbi -> do
let verb = S.fromFlag $ S.buildVerbosity flags
let withoutEffects = noEffectsFlag $ configFlags lbi
checkStdLib lbi withoutEffects verb
when (javaFlag $ configFlags lbi) (checkJavaLib verb)
}
|
byorgey/Idris-dev
|
Setup.hs
|
bsd-3-clause
| 7,210 | 0 | 19 | 2,325 | 1,671 | 853 | 818 | 138 | 3 |
{-# LANGUAGE TypeSynonymInstances, MultiParamTypeClasses, FlexibleInstances#-}
module Course.DataTypes where
import Data.Monoid hiding ((<>))
{- This is the haskell code for purely functional data types -}
-- Queues
-- The queue has two parts: the front part `runF` and the rear part `runR`, which is stored in reverse; e.g. Queue [1,2,3] [6,5,4] represents the sequence 1,2,3,4,5,6
data Queue a = Queue {runF :: [a]
,runR :: [a]}
qhead :: Queue a -> a
qhead (Queue (a : _) _) = a
qhead (Queue [] _) = error "qhead: front part is empty"
qtail :: Queue a -> Queue a
qtail (Queue [] _) = error "qtail: front part is empty"
qtail (Queue [_] b) = Queue (reverse b) [] -- the rear is stored reversed, so restore order when it becomes the new front
qtail (Queue (_ : a) b) = Queue a b
qsnoc :: Queue a -> a -> Queue a
qsnoc (Queue [] _) x = Queue [x] []
qsnoc (Queue r l) x = Queue r (x : l) -- qsnoc is `cons` spelled backward
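-- A minimal usage sketch (not part of the original module): enqueue 1..3 and
-- read the elements back, which yields them in FIFO order ([1,2,3]).
demoQueue :: [Int]
demoQueue = drain (foldl qsnoc (Queue [] []) [1, 2, 3])
  where
    drain (Queue [] []) = []
    drain q             = qhead q : drain (qtail q)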
-- 2-3 finger tree
-- type Size = Int
-- data Tree v a = Leaf v a | Branch v (Tree v a) (Tree v a) deriving Show
-- tag :: Tree v a -> v
-- tag (Leaf v a) = v
-- tag (Branch v _ _) = v
-- instance Monoid Size where
-- mempty = 0
-- mappend = (+)
-- class Monoid v => Measured a v where
-- measure :: a -> v
-- instance Measured a Size where
-- measure _ = 1
-- instance Measured a v => Measured (Tree v a) v where
-- measure = tag
-- x <> y = tag x `mappend` tag y
-- leaf :: Measured a v => a -> Tree v a
-- leaf a = Leaf (measure a) a
-- branch :: (Monoid v) => Tree v a -> Tree v a -> Tree v a
-- branch x y = Branch (x <> y) x y
|
niexshao/AlgorithmEx
|
src/Course/DataTypes.hs
|
bsd-3-clause
| 1,473 | 0 | 9 | 359 | 297 | 167 | 130 | 15 | 1 |
module Language.Java.Paragon.NameResolution.ExpansionRecord
(
-- * The @Expansion@ mapping
ExpansionRecord(..)
-- * Functionality
, emptyExpansionRecord
, expandAll
) where
import Language.Java.Paragon.Syntax
import Language.Java.Paragon.NameResolution.Expansion
-- | A record for collecting various types of idents that need to be expanded.
data ExpansionRecord = ExpansionRecord
{ expandTypes :: [Id]
, expandMethods :: [Id]
, expandLocks :: [Id]
, expandExps :: [Id]
}
-- | Empty expansion record.
emptyExpansionRecord :: ExpansionRecord
emptyExpansionRecord = ExpansionRecord
{ expandTypes = []
, expandMethods = []
, expandLocks = []
, expandExps = []
}
-- | Perform expansion of all the idents in the expansion record.
expandAll :: ExpansionRecord -> Expansion
expandAll rec = expansionUnion $
map (mkExpExpansion . idIdent) (expandExps rec) ++
map (mkLockExpansion . idIdent) (expandLocks rec) ++
map (mkMethodExpansion . idIdent) (expandMethods rec) ++
map (mkTypeExpansion . idIdent) (expandTypes rec)
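-- | A minimal usage sketch (not part of the original module): build a record
-- that expands only the given type identifiers and turn it into an 'Expansion'.
expandTypesOnly :: [Id] -> Expansion
expandTypesOnly tys = expandAll emptyExpansionRecord { expandTypes = tys }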
|
bvdelft/paragon
|
src/Language/Java/Paragon/NameResolution/ExpansionRecord.hs
|
bsd-3-clause
| 1,086 | 0 | 11 | 210 | 238 | 142 | 96 | 24 | 1 |
module Sodium (translate) where
import Sodium.Chloride.Vectorize (vectorize)
import Sodium.Chloride.Flatten (flatten)
import Sodium.Chloride.JoinMultiIf (joinMultiIf)
import Sodium.Chloride.IOMagic (uncurse)
import Sodium.Chloride.Inline (inline)
import Sodium.Chloride.FoldMatch (foldMatch)
import Sodium.Chloride.Side (side)
import Sodium.Pascal.Parse (parse)
import Sodium.Haskell.Render (render)
import qualified Sodium.Pascal.Convert as P (convert)
import qualified Sodium.Haskell.Convert as H (convert)
import Data.Profunctor
translate :: String -> String
translate = dimap fromPascal toHaskell onChloride where
fromPascal = P.convert . parse
toHaskell = render . H.convert
onChloride = dimap onScalar onVector vectorize
onScalar = uncurse . fff (flatten . joinMultiIf) . side
onVector = fff (inline . foldMatch)
fff f = (!!42) . iterate f
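-- A minimal usage sketch (not part of the original module): a hypothetical
-- @main@ that reads Pascal source from stdin and writes the generated Haskell to stdout.
demoMain :: IO ()
demoMain = interact translate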
|
kirbyfan64/sodium
|
src/Sodium.hs
|
bsd-3-clause
| 870 | 0 | 11 | 120 | 255 | 152 | 103 | 21 | 1 |
{-# LANGUAGE
EmptyDataDecls
, FlexibleContexts
, FlexibleInstances
, FunctionalDependencies
, MultiParamTypeClasses
, TypeFamilies
, TypeSynonymInstances
, UndecidableInstances #-}
module Control.Monad.Code.Class.Typed
( module Data.ClassFile.Desc.Typed
, MonadCode (..)
, ldc
) where
import qualified Control.Monad.Indexed as Indexed
import Data.ClassFile.Desc.Typed
import Data.Int hiding (Int)
import Data.Word
import Prelude hiding (Double, Float, Int, return)
import qualified Prelude
data Z
data S a
type One = S Z
type Two = S One
type Three = S Two
type Four = S Three
class Category a b | a -> b
instance Category Int One
instance Category Long Two
instance Category Float One
instance Category Double Two
instance Category ReturnAddress One
instance Category Reference One
class Add2 a b c | a b -> c, c a -> b
instance Add2 Z a a
instance Add2 a b c => Add2 (S a) b (S c)
class Add a b c | a b -> c, b c -> a, c a -> b
instance (Add2 a b c, Add2 b a c) => Add a b c
class Subtract2 a b c | a b -> c, b c -> a
instance Subtract2 a Z a
instance Subtract2 a b c => Subtract2 (S a) (S b) c
class Subtract a b c | a b -> c, b c -> a, c a -> b
instance (Subtract2 a b c, Add b c a) => Subtract a b c
class Take a b c | a b -> c
instance Take Z xs ()
instance ( Category x cat
, Subtract (S n) cat n'
, Take n' xs ys
) => Take (S n) (x, xs) (x, ys)
class Concat a b c | a b -> c, c a -> b
instance Concat () xs xs
instance Concat xs ys xs' => Concat (x, xs) ys (x, xs')
class Drop a b c | a b -> c
instance Drop Z xs xs
instance ( Category x cat
, Subtract (S n) cat n'
, Drop n' xs ys
) => Drop (S n) (x, xs) ys
class ParameterDesc a => Pop a b c | a b -> c
instance Pop () a a
instance Pop Int (Int, a) a
instance Pop Long (Long, a) a
instance Pop Float (Float, a) a
instance Pop Double (Double, a) a
instance Pop Reference (Reference, a) a
instance ParameterDesc (a, b) => Pop (a, b) (a, (b, c)) c
class ReturnDesc a => Push a b c | a b -> c, b c -> a, c a -> b
instance Push Int a (Int, a)
instance Push Long a (Long, a)
instance Push Float a (Float, a)
instance Push Double a (Double, a)
instance Push Reference a (Reference, a)
instance Push Void a a
class ReturnAddressOrReference a
instance ReturnAddressOrReference ReturnAddress
instance ReturnAddressOrReference Reference
type Operation m p q = m p q (Label m p)
class Indexed.Monad m => MonadCode m where
data ArrayType m
type Label m :: * -> *
boolean :: ArrayType m
char :: ArrayType m
float :: ArrayType m
double :: ArrayType m
byte :: ArrayType m
short :: ArrayType m
int :: ArrayType m
long :: ArrayType m
aaload :: Operation m (Int, (Reference, xs)) (Reference, xs)
aastore :: Operation m (Reference, (Int, (Reference, xs))) xs
aconst_null :: Operation m xs (Reference, xs)
aload :: Word16 -> Operation m xs (Reference, xs)
anewarray :: String -> Operation m (Int, xs) (Reference, xs)
areturn :: Operation m (Reference, xs) xs
arraylength :: Operation m (Reference, xs) (Int, xs)
astore :: ReturnAddressOrReference x =>
Word16 ->
Operation m (x, xs) xs
athrow :: Operation m (Reference, xs) xs
baload :: Operation m (Int, (Reference, xs)) (Int, xs)
bastore :: Operation m (Int, (Int, (Reference, xs))) xs
checkcast :: String -> Operation m (Reference, xs) (Reference, xs)
dup :: ( Take One xs x
, Concat x xs ys
) => Operation m xs ys
dup_x1 :: ( Take One xs x
, Take Two xs y
, Drop Two xs xs'
, Concat x xs' xs''
, Concat y xs'' ys
) => Operation m xs ys
dup_x2 :: ( Take One xs x
, Take Three xs y
, Drop Three xs xs'
, Concat x xs' xs''
, Concat y xs'' ys
) => Operation m xs ys
dup2 :: ( Take Two xs x
, Concat x xs ys
) => Operation m xs ys
dup2_x1 :: ( Take Two xs x
, Take Three xs y
, Drop Three xs xs'
, Concat x xs' xs''
, Concat y xs'' ys
) => Operation m xs ys
dup2_x2 :: ( Take Two xs x
, Take Four xs y
, Drop Four xs xs'
, Concat x xs' xs''
, Concat y xs'' ys
) => Operation m xs ys
getfield :: ( FieldDesc x
, Pop Reference xs xs'
, Push x xs' ys
) =>
String ->
String ->
x ->
Operation m xs ys
getstatic :: ( FieldDesc x
, Push x xs ys
) =>
String ->
String ->
x ->
Operation m xs ys
goto :: Label m xs -> Operation m xs xs
i2b :: Operation m (Int, xs) (Int, xs)
iadd :: Operation m (Int, (Int, xs)) (Int, xs)
iinc :: Word16 -> Int32 -> Operation m xs xs
ifeq :: Label m xs -> Operation m (Int, xs) xs
ifne :: Label m xs -> Operation m (Int, xs) xs
iload :: Word16 -> Operation m xs (Int, xs)
invokeinterface :: ( ParameterDesc parameters
, ReturnDesc return
, Pop parameters xs xs'
, Pop Reference xs' xs''
, Push return xs'' ys
) =>
String ->
String ->
parameters ->
return ->
Operation m xs ys
invokespecial :: ( ParameterDesc parameters
, ReturnDesc return
, Pop parameters xs xs'
, Pop Reference xs' xs''
, Push return xs'' ys
) =>
String ->
String ->
parameters ->
return ->
Operation m xs ys
invokestatic :: ( ParameterDesc parameters
, ReturnDesc return
, Pop parameters xs xs'
, Push return xs' ys
) =>
String ->
String ->
parameters ->
return ->
Operation m xs ys
invokevirtual :: ( ParameterDesc parameters
, ReturnDesc return
, Pop parameters xs xs'
, Pop Reference xs' xs''
, Push return xs'' ys
) =>
String ->
String ->
parameters ->
return ->
Operation m xs ys
istore :: Word16 -> Operation m (Int, xs) xs
isub :: Operation m (Int, (Int, xs)) (Int, xs)
ldcInt :: Int32 -> Operation m xs (Int, xs)
ldcFloat :: Prelude.Float -> Operation m xs (Float, xs)
ldcString :: String -> Operation m xs (Reference, xs)
ldcClass :: String -> Operation m xs (Reference, xs)
ldcLong :: Int64 -> Operation m xs (Long, xs)
ldcDouble :: Prelude.Double -> Operation m xs (Double, xs)
new :: String -> Operation m xs (Reference, xs)
newarray :: ArrayType m -> Operation m (Int, xs) (Reference, xs)
nop :: Operation m xs xs
pop :: Category x One => Operation m (x, xs) xs
return :: Operation m xs xs
swap :: ( Category x One
, Category y One
) => Operation m (x, (y, xs)) (y, (x, xs))
class MonadCode m => Ldc a b m | a -> b, b -> a where
ldc :: b -> Operation m xs (a, xs)
instance MonadCode m => Ldc Int Int32 m where
ldc = ldcInt
instance MonadCode m => Ldc Float Prelude.Float m where
ldc = ldcFloat
instance MonadCode m => Ldc Reference String m where
ldc = ldcString
instance MonadCode m => Ldc Long Int64 m where
ldc = ldcLong
instance MonadCode m => Ldc Double Prelude.Double m where
ldc = ldcDouble
|
sonyandy/tnt
|
Control/Monad/Code/Class/Typed.hs
|
bsd-3-clause
| 7,896 | 0 | 12 | 2,891 | 2,867 | 1,541 | 1,326 | -1 | -1 |
{-# LANGUAGE NamedFieldPuns #-}
{-# LANGUAGE OverloadedStrings #-}
module Floskell.Styles ( Style(..), styles ) where
import qualified Data.Map as Map
import qualified Data.Set as Set
import Data.Text ( Text )
import Floskell.Config
-- | A printer style.
data Style =
Style { styleName :: !Text -- ^ Name of the style, used in the commandline interface.
, styleAuthor :: !Text -- ^ Author of the style definition.
, styleDescription :: !Text -- ^ Description of the style.
, styleConfig :: !Config -- ^ Style definition.
}
chrisDoneCfg :: Config
chrisDoneCfg =
defaultConfig { cfgIndent, cfgLayout, cfgOp, cfgGroup, cfgOptions }
where
cfgIndent =
IndentConfig { cfgIndentOnside = 2
, cfgIndentDeriving = 2
, cfgIndentWhere = 2
, cfgIndentApp = Align
, cfgIndentCase = IndentBy 2
, cfgIndentClass = IndentBy 2
, cfgIndentDo = Align
, cfgIndentIf = IndentBy 3
, cfgIndentLet = Align
, cfgIndentLetBinds = Align
, cfgIndentLetIn = Align
, cfgIndentMultiIf = IndentBy 2
, cfgIndentTypesig = Align
, cfgIndentWhereBinds = Align
, cfgIndentExportSpecList = IndentBy 2
, cfgIndentImportSpecList = AlignOrIndentBy 7
}
cfgLayout = LayoutConfig { cfgLayoutApp = TryOneline
, cfgLayoutConDecls = Vertical
, cfgLayoutDeclaration = TryOneline
, cfgLayoutExportSpecList = TryOneline
, cfgLayoutIf = Vertical
, cfgLayoutImportSpecList = Flex
, cfgLayoutInfixApp = TryOneline
, cfgLayoutLet = Vertical
, cfgLayoutListComp = Flex
, cfgLayoutRecord = Vertical
, cfgLayoutType = TryOneline
}
cfgOp =
OpConfig ConfigMap { cfgMapDefault = Whitespace WsBoth WsBefore False
, cfgMapOverrides = Map.fromList opWsOverrides
}
opWsOverrides =
[ (ConfigMapKey (Just ",") Nothing, Whitespace WsNone WsBefore False)
, ( ConfigMapKey (Just "record") Nothing
, Whitespace WsAfter WsNone False
)
, ( ConfigMapKey (Just ".") (Just Type)
, Whitespace WsAfter WsAfter False
)
, (ConfigMapKey (Just "=") Nothing, Whitespace WsBoth WsAfter False)
, (ConfigMapKey (Just "<-") Nothing, Whitespace WsBoth WsAfter False)
, (ConfigMapKey (Just ":") Nothing, Whitespace WsNone WsBefore False)
]
cfgGroup =
GroupConfig ConfigMap { cfgMapDefault =
Whitespace WsNone WsNone False
, cfgMapOverrides = Map.fromList groupWsOverrides
}
groupWsOverrides = []
cfgOptions = OptionConfig { cfgOptionSortPragmas = False
, cfgOptionSplitLanguagePragmas = False
, cfgOptionSortImports = NoImportSort
, cfgOptionSortImportLists = False
, cfgOptionAlignSumTypeDecl = True
, cfgOptionFlexibleOneline = False
, cfgOptionPreserveVerticalSpace = False
, cfgOptionDeclNoBlankLines = Set.empty
, cfgOptionAlignLetBindsAndInExpr = False
}
cramerCfg :: Config
cramerCfg = defaultConfig { cfgAlign
, cfgIndent
, cfgLayout
, cfgOp
, cfgGroup
, cfgOptions
}
where
cfgAlign = AlignConfig { cfgAlignLimits = (10, 25)
, cfgAlignCase = False
, cfgAlignClass = False
, cfgAlignImportModule = True
, cfgAlignImportSpec = True
, cfgAlignLetBinds = False
, cfgAlignMatches = False
, cfgAlignRecordFields = True
, cfgAlignWhere = False
}
cfgIndent =
IndentConfig { cfgIndentOnside = 4
, cfgIndentDeriving = 4
, cfgIndentWhere = 2
, cfgIndentApp = Align
, cfgIndentCase = IndentBy 4
, cfgIndentClass = IndentBy 4
, cfgIndentDo = IndentBy 4
, cfgIndentIf = Align
, cfgIndentLet = Align
, cfgIndentLetBinds = Align
, cfgIndentLetIn = IndentBy 4
, cfgIndentMultiIf = IndentBy 4
, cfgIndentTypesig = Align
, cfgIndentWhereBinds = IndentBy 2
, cfgIndentExportSpecList = IndentBy 4
, cfgIndentImportSpecList = AlignOrIndentBy 17
}
cfgLayout = LayoutConfig { cfgLayoutApp = TryOneline
, cfgLayoutConDecls = TryOneline
, cfgLayoutDeclaration = Flex
, cfgLayoutExportSpecList = TryOneline
, cfgLayoutIf = TryOneline
, cfgLayoutImportSpecList = Flex
, cfgLayoutInfixApp = Flex
, cfgLayoutLet = TryOneline
, cfgLayoutListComp = TryOneline
, cfgLayoutRecord = TryOneline
, cfgLayoutType = TryOneline
}
cfgOp =
OpConfig ConfigMap { cfgMapDefault = Whitespace WsBoth WsBefore False
, cfgMapOverrides = Map.fromList opWsOverrides
}
opWsOverrides =
[ (ConfigMapKey (Just ",") Nothing, Whitespace WsAfter WsBefore False)
, ( ConfigMapKey (Just "record") Nothing
, Whitespace WsAfter WsNone False
)
, ( ConfigMapKey (Just ".") (Just Type)
, Whitespace WsAfter WsAfter False
)
, (ConfigMapKey (Just "=") Nothing, Whitespace WsBoth WsAfter False)
, (ConfigMapKey (Just "$") Nothing, Whitespace WsBoth WsAfter False)
, (ConfigMapKey (Just "@") Nothing, Whitespace WsNone WsNone False)
, ( ConfigMapKey (Just "->") (Just Expression)
, Whitespace WsBoth WsAfter False
)
, ( ConfigMapKey (Just "record") (Just Pattern)
, Whitespace WsNone WsNone False
)
]
cfgGroup =
GroupConfig ConfigMap { cfgMapDefault =
Whitespace WsBoth WsAfter False
, cfgMapOverrides = Map.fromList groupWsOverrides
}
groupWsOverrides =
[ (ConfigMapKey Nothing (Just Type), Whitespace WsNone WsAfter False)
, ( ConfigMapKey Nothing (Just Pattern)
, Whitespace WsNone WsAfter False
)
, (ConfigMapKey (Just "$(") Nothing, Whitespace WsNone WsNone False)
, (ConfigMapKey (Just "[|") Nothing, Whitespace WsNone WsNone False)
, (ConfigMapKey (Just "[d|") Nothing, Whitespace WsNone WsNone False)
, (ConfigMapKey (Just "[p|") Nothing, Whitespace WsNone WsNone False)
, (ConfigMapKey (Just "[t|") Nothing, Whitespace WsNone WsNone False)
, (ConfigMapKey (Just "(") Nothing, Whitespace WsNone WsAfter False)
, ( ConfigMapKey (Just "(") (Just Other)
, Whitespace WsBoth WsAfter False
)
, ( ConfigMapKey (Just "[") (Just Pattern)
, Whitespace WsBoth WsAfter False
)
, (ConfigMapKey (Just "[") (Just Type), Whitespace WsNone WsNone False)
]
cfgOptions =
OptionConfig { cfgOptionSortPragmas = True
, cfgOptionSplitLanguagePragmas = True
, cfgOptionSortImports = SortImportsByPrefix
, cfgOptionSortImportLists = True
, cfgOptionAlignSumTypeDecl = False
, cfgOptionFlexibleOneline = False
, cfgOptionPreserveVerticalSpace = True
, cfgOptionDeclNoBlankLines = Set.empty
, cfgOptionAlignLetBindsAndInExpr = False
}
gibianskyCfg :: Config
gibianskyCfg = defaultConfig { cfgAlign
, cfgIndent
, cfgLayout
, cfgOp
, cfgGroup
, cfgOptions
}
where
cfgAlign = AlignConfig { cfgAlignLimits = (10, 25)
, cfgAlignCase = True
, cfgAlignClass = False
, cfgAlignImportModule = True
, cfgAlignImportSpec = False
, cfgAlignLetBinds = False
, cfgAlignMatches = False
, cfgAlignRecordFields = False
, cfgAlignWhere = False
}
cfgIndent =
IndentConfig { cfgIndentOnside = 2
, cfgIndentDeriving = 2
, cfgIndentWhere = 2
, cfgIndentApp = IndentBy 2
, cfgIndentCase = IndentBy 2
, cfgIndentClass = IndentBy 2
, cfgIndentDo = IndentBy 2
, cfgIndentIf = Align
, cfgIndentLet = Align
, cfgIndentLetBinds = Align
, cfgIndentLetIn = Align
, cfgIndentMultiIf = IndentBy 2
, cfgIndentTypesig = Align
, cfgIndentWhereBinds = IndentBy 2
, cfgIndentExportSpecList = IndentBy 4
, cfgIndentImportSpecList = Align
}
cfgLayout = LayoutConfig { cfgLayoutApp = TryOneline
, cfgLayoutConDecls = Vertical
, cfgLayoutDeclaration = Flex
, cfgLayoutExportSpecList = TryOneline
, cfgLayoutIf = Vertical
, cfgLayoutImportSpecList = Flex
, cfgLayoutInfixApp = TryOneline
, cfgLayoutLet = Vertical
, cfgLayoutListComp = TryOneline
, cfgLayoutRecord = TryOneline
, cfgLayoutType = TryOneline
}
cfgOp =
OpConfig ConfigMap { cfgMapDefault = Whitespace WsBoth WsBefore False
, cfgMapOverrides = Map.fromList opWsOverrides
}
opWsOverrides =
[ (ConfigMapKey (Just ",") Nothing, Whitespace WsAfter WsBefore False)
, ( ConfigMapKey (Just "record") Nothing
, Whitespace WsAfter WsNone False
)
, ( ConfigMapKey (Just ".") (Just Type)
, Whitespace WsAfter WsAfter False
)
, (ConfigMapKey (Just "=") Nothing, Whitespace WsBoth WsAfter False)
, (ConfigMapKey (Just ":") Nothing, Whitespace WsNone WsBefore False)
]
cfgGroup =
GroupConfig ConfigMap { cfgMapDefault =
Whitespace WsNone WsNone False
, cfgMapOverrides = Map.fromList groupWsOverrides
}
groupWsOverrides =
[ (ConfigMapKey (Just "{") Nothing, Whitespace WsBoth WsAfter False) ]
cfgOptions = OptionConfig { cfgOptionSortPragmas = False
, cfgOptionSplitLanguagePragmas = False
, cfgOptionSortImports = NoImportSort
, cfgOptionSortImportLists = False
, cfgOptionAlignSumTypeDecl = False
, cfgOptionFlexibleOneline = False
, cfgOptionPreserveVerticalSpace = False
, cfgOptionDeclNoBlankLines = Set.empty
, cfgOptionAlignLetBindsAndInExpr = False
}
johanTibellCfg :: Config
johanTibellCfg =
defaultConfig { cfgIndent, cfgLayout, cfgOp, cfgGroup, cfgOptions }
where
cfgIndent =
IndentConfig { cfgIndentOnside = 4
, cfgIndentDeriving = 4
, cfgIndentWhere = 2
, cfgIndentApp = IndentBy 4
, cfgIndentCase = IndentBy 4
, cfgIndentClass = IndentBy 4
, cfgIndentDo = IndentBy 4
, cfgIndentIf = IndentBy 4
, cfgIndentLet = Align
, cfgIndentLetBinds = Align
, cfgIndentLetIn = Align
, cfgIndentMultiIf = IndentBy 2
, cfgIndentTypesig = Align
, cfgIndentWhereBinds = IndentBy 2
, cfgIndentExportSpecList = IndentBy 2
, cfgIndentImportSpecList = AlignOrIndentBy 7
}
cfgLayout = LayoutConfig { cfgLayoutApp = TryOneline
, cfgLayoutConDecls = Vertical
, cfgLayoutDeclaration = TryOneline
, cfgLayoutExportSpecList = TryOneline
, cfgLayoutIf = Vertical
, cfgLayoutImportSpecList = Flex
, cfgLayoutInfixApp = TryOneline
, cfgLayoutLet = Vertical
, cfgLayoutListComp = Flex
, cfgLayoutRecord = Vertical
, cfgLayoutType = TryOneline
}
cfgOp =
OpConfig ConfigMap { cfgMapDefault = Whitespace WsBoth WsBefore False
, cfgMapOverrides = Map.fromList opWsOverrides
}
opWsOverrides =
[ (ConfigMapKey (Just ",") Nothing, Whitespace WsAfter WsBefore False)
, ( ConfigMapKey (Just "record") Nothing
, Whitespace WsAfter WsAfter True
)
, ( ConfigMapKey (Just ".") (Just Type)
, Whitespace WsAfter WsAfter False
)
, (ConfigMapKey (Just "=") Nothing, Whitespace WsBoth WsAfter False)
, ( ConfigMapKey (Just ":") (Just Pattern)
, Whitespace WsNone WsBefore False
)
, ( ConfigMapKey (Just ",") (Just Pattern)
, Whitespace WsNone WsBefore False
)
, ( ConfigMapKey (Just ",") (Just Other)
, Whitespace WsNone WsBefore False
)
, ( ConfigMapKey (Just "record") (Just Pattern)
, Whitespace WsAfter WsAfter False
)
]
cfgGroup =
GroupConfig ConfigMap { cfgMapDefault =
Whitespace WsNone WsNone False
, cfgMapOverrides = Map.fromList groupWsOverrides
}
groupWsOverrides =
[ (ConfigMapKey (Just "{") Nothing, Whitespace WsBoth WsAfter False)
, ( ConfigMapKey (Just "{") (Just Pattern)
, Whitespace WsNone WsNone False
)
]
cfgOptions = OptionConfig { cfgOptionSortPragmas = False
, cfgOptionSplitLanguagePragmas = False
, cfgOptionSortImports = NoImportSort
, cfgOptionSortImportLists = False
, cfgOptionAlignSumTypeDecl = True
, cfgOptionFlexibleOneline = True
, cfgOptionPreserveVerticalSpace = False
, cfgOptionDeclNoBlankLines = Set.empty
, cfgOptionAlignLetBindsAndInExpr = False
}
-- | Base style definition.
base :: Style
base = Style { styleName = "base"
, styleAuthor = "Enno Cramer"
, styleDescription = "Configurable formatting style"
, styleConfig = defaultConfig
}
chrisDone :: Style
chrisDone = Style { styleName = "chris-done"
, styleAuthor = "Chris Done"
, styleDescription = "Chris Done's style"
, styleConfig = chrisDoneCfg
}
cramer :: Style
cramer = Style { styleName = "cramer"
, styleAuthor = "Enno Cramer"
, styleDescription = "Enno Cramer's style"
, styleConfig = cramerCfg
}
gibiansky :: Style
gibiansky = Style { styleName = "gibiansky"
, styleAuthor = "Andrew Gibiansky"
, styleDescription = "Andrew Gibiansky's style"
, styleConfig = gibianskyCfg
}
johanTibell :: Style
johanTibell = Style { styleName = "johan-tibell"
, styleAuthor = "Johan Tibell"
, styleDescription = "Johan Tibell's style"
, styleConfig = johanTibellCfg
}
-- | Styles list, useful for programmatically choosing.
styles :: [Style]
styles = [ base, chrisDone, johanTibell, gibiansky, cramer ]
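-- | A minimal helper sketch (not part of the original module): look up a style
-- by name, e.g. @lookupStyle "cramer"@.
lookupStyle :: Text -> Maybe Style
lookupStyle name = case filter ((== name) . styleName) styles of
    (s : _) -> Just s
    []      -> Nothing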
|
ennocramer/floskell
|
src/Floskell/Styles.hs
|
bsd-3-clause
| 18,473 | 0 | 11 | 8,519 | 3,110 | 1,803 | 1,307 | 329 | 1 |
{-#LANGUAGE RecordWildCards #-}
{-# LANGUAGE LambdaCase #-}
module FileServer where
import Network hiding (accept, sClose)
import Network.Socket hiding (send, recv, sendTo, recvFrom, Broadcast)
import Network.Socket.ByteString
import Data.ByteString.Char8 (pack, unpack)
import System.Environment
import System.IO
import Control.Concurrent
import Control.Concurrent.STM
import Control.Exception
import Control.Monad (forever, when, join)
import Data.List.Split
import Data.Word
import Text.Printf (printf)
import System.Directory
--Server data type allows me to pass address and port details easily
data FileServer = FileServer { address :: String, port :: String }
--Constructor
newFileServer :: String -> String -> IO FileServer
newFileServer address port = return $ FileServer address port
--A small maximum (4) makes the thread pooling easy to test
maxnumThreads = 4
serverport :: String
serverport = "7007"
serverhost :: String
serverhost = "localhost"
run:: IO ()
run = withSocketsDo $ do
--Command line arguments for port and address
--args <- getArgs
createDirectoryIfMissing True "/distserver/"
setCurrentDirectory "/distserver/"
server <- newFileServer serverhost serverport
--sock <- listenOn (PortNumber (fromIntegral serverport))
addrinfos <- getAddrInfo
(Just (defaultHints {addrFlags = [AI_PASSIVE]}))
Nothing (Just serverport)
let serveraddr = head addrinfos
sock <- socket (addrFamily serveraddr) Stream defaultProtocol
bindSocket sock (addrAddress serveraddr)
listen sock 5
_ <- printf "Listening on port %s\n" serverport
--Listen on port from command line argument
--New Abstract FIFO Channel
chan <- newChan
--Tvars are variables Stored in memory, this way we can access the numThreads from any method
numThreads <- atomically $ newTVar 0
--Spawns a new thread to handle the clientconnectHandler method, passes socket, channel, numThreads and server
forkIO $ clientconnectHandler sock chan numThreads server
--Calls the mainHandler which will monitor the FIFO channel
mainHandler sock chan
mainHandler :: Socket -> Chan String -> IO ()
mainHandler sock chan = do
--Read current message on the FIFO channel
chanMsg <- readChan chan
--If KILL_SERVICE, stop mainHandler running, If anything else, call mainHandler again, keeping the service running
case (chanMsg) of
("KILL_SERVICE") -> putStrLn "Terminating the Service!"
_ -> mainHandler sock chan
clientconnectHandler :: Socket -> Chan String -> TVar Int -> FileServer -> IO ()
clientconnectHandler sock chan numThreads server = do
--Accept the socket which returns a handle, host and port
--(handle, host, port) <- accept sock
(s,a) <- accept sock
--handle <- socketToHandle s ReadWriteMode
--Read numThreads from memory and print it on server console
count <- atomically $ readTVar numThreads
putStrLn $ "numThreads = " ++ show count
--If there are still threads remaining create new thread and increment (exception if thread is lost -> decrement), else tell user capacity has been reached
if (count < maxnumThreads) then do
forkFinally (clientHandler s chan server) (\_ -> atomically $ decrementTVar numThreads)
atomically $ incrementTVar numThreads
else do
send s (pack ("Maximum number of threads in use. try again soon"++"\n\n"))
sClose s
clientconnectHandler sock chan numThreads server
clientHandler :: Socket -> Chan String -> FileServer -> IO ()
clientHandler sock chan server@FileServer{..} =
forever $ do
message <- recv sock 1024
let msg = unpack message
print $ msg ++ "!ENDLINE!"
let cmd = head $ words $ head $ splitOn ":" msg
print cmd
case cmd of
("HELO") -> heloCommand sock server $ (words msg) !! 1
("KILL_SERVICE") -> killCommand chan sock
("DOWNLOAD") -> downloadCommand sock server msg
_ -> do send sock (pack ("Unknown Command - " ++ msg ++ "\n\n")) ; return ()
--Function called when HELO text command received
heloCommand :: Socket -> FileServer -> String -> IO ()
heloCommand sock FileServer{..} msg = do
send sock $ pack $ "HELO " ++ msg ++ "\n" ++
"IP:" ++ "192.168.6.129" ++ "\n" ++
"Port:" ++ port ++ "\n" ++
"StudentID:12306421\n\n"
return ()
killCommand :: Chan String -> Socket -> IO ()
killCommand chan sock = do
send sock $ pack $ "Service is now terminating!"
writeChan chan "KILL_SERVICE"
downloadCommand :: Socket -> FileServer -> String -> IO ()
downloadCommand sock server@FileServer{..} command = do
let clines = splitOn "\\n" command
filename = (splitOn ":" $ clines !! 0) !! 1
doesFileExist filename >>= \case
    True -> do fdata <- readFile filename -- read the file contents before sending them
               send sock $ pack $ "DOWNLOAD: " ++ filename ++ "\n" ++
                                  "DATA: " ++ fdata ++ "\n\n"
    False -> send sock $ pack $ "DOWNLOAD: " ++ filename ++ "\n" ++
                                "DATA: " ++ "File not Found!!" ++ "\n\n"
return ()
--Increment Tvar stored in memory i.e. numThreads
incrementTVar :: TVar Int -> STM ()
incrementTVar tv = modifyTVar tv ((+) 1)
--Decrement Tvar stored in memory i.e. numThreads
decrementTVar :: TVar Int -> STM ()
decrementTVar tv = modifyTVar tv (subtract 1)
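--A minimal client sketch (not part of the original module; the name clientDemo is hypothetical):
--connect to the server above, send a HELO command and print the reply.
clientDemo :: IO ()
clientDemo = withSocketsDo $ do
  addrinfos <- getAddrInfo Nothing (Just serverhost) (Just serverport)
  let serveraddr = head addrinfos
  sock <- socket (addrFamily serveraddr) Stream defaultProtocol
  connect sock (addrAddress serveraddr)
  _ <- send sock (pack "HELO test\n")
  reply <- recv sock 1024
  putStrLn (unpack reply)
  sClose sock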
|
Garygunn94/DFS
|
.stack-work/intero/intero26690As0.hs
|
bsd-3-clause
| 5,436 | 98 | 15 | 1,242 | 1,255 | 666 | 589 | 99 | 4 |
{-
(c) The University of Glasgow 2006-2008
(c) The GRASP/AQUA Project, Glasgow University, 1993-1998
-}
{-# LANGUAGE CPP, NondecreasingIndentation #-}
-- | Module for constructing @ModIface@ values (interface files),
-- writing them to disk and comparing two versions to see if
-- recompilation is required.
module MkIface (
mkUsedNames,
mkDependencies,
mkIface, -- Build a ModIface from a ModGuts,
-- including computing version information
mkIfaceTc,
writeIfaceFile, -- Write the interface file
checkOldIface, -- See if recompilation is required, by
-- comparing version information
RecompileRequired(..), recompileRequired,
tyThingToIfaceDecl -- Converting things to their Iface equivalents
) where
{-
-----------------------------------------------
Recompilation checking
-----------------------------------------------
A complete description of how recompilation checking works can be
found in the wiki commentary:
http://ghc.haskell.org/trac/ghc/wiki/Commentary/Compiler/RecompilationAvoidance
Please read the above page for a top-down description of how this all
works. Notes below cover specific issues related to the implementation.
Basic idea:
* In the mi_usages information in an interface, we record the
fingerprint of each free variable of the module
* In mkIface, we compute the fingerprint of each exported thing A.f.
For each external thing that A.f refers to, we include the fingerprint
of the external reference when computing the fingerprint of A.f. So
if anything that A.f depends on changes, then A.f's fingerprint will
change.
Also record any dependent files added with
* addDependentFile
* #include
* -optP-include
* In checkOldIface we compare the mi_usages for the module with
the actual fingerprint for each thing recorded in mi_usages
-}
#include "HsVersions.h"
import IfaceSyn
import LoadIface
import FlagChecker
import Id
import IdInfo
import Demand
import Coercion( tidyCo )
import Annotations
import CoreSyn
import CoreFVs
import Class
import Kind
import TyCon
import CoAxiom
import ConLike
import DataCon
import PatSyn
import Type
import TcType
import TysPrim ( alphaTyVars )
import InstEnv
import FamInstEnv
import TcRnMonad
import HsSyn
import HscTypes
import Finder
import DynFlags
import VarEnv
import VarSet
import Var
import Name
import Avail
import RdrName
import NameEnv
import NameSet
import Module
import BinIface
import ErrUtils
import Digraph
import SrcLoc
import Outputable
import BasicTypes hiding ( SuccessFlag(..) )
import UniqFM
import Unique
import Util hiding ( eqListBy )
import FastString
import Maybes
import ListSetOps
import Binary
import Fingerprint
import Bag
import Exception
import Control.Monad
import Data.Function
import Data.List
import Data.Map (Map)
import qualified Data.Map as Map
import Data.Ord
import Data.IORef
import System.Directory
import System.FilePath
{-
************************************************************************
* *
\subsection{Completing an interface}
* *
************************************************************************
-}
mkIface :: HscEnv
-> Maybe Fingerprint -- The old fingerprint, if we have it
-> ModDetails -- The trimmed, tidied interface
-> ModGuts -- Usages, deprecations, etc
-> IO (Messages,
Maybe (ModIface, -- The new one
Bool)) -- True <=> there was an old Iface, and the
-- new one is identical, so no need
-- to write it
mkIface hsc_env maybe_old_fingerprint mod_details
ModGuts{ mg_module = this_mod,
mg_boot = is_boot,
mg_used_names = used_names,
mg_used_th = used_th,
mg_deps = deps,
mg_dir_imps = dir_imp_mods,
mg_rdr_env = rdr_env,
mg_fix_env = fix_env,
mg_warns = warns,
mg_hpc_info = hpc_info,
mg_safe_haskell = safe_mode,
mg_trust_pkg = self_trust,
mg_dependent_files = dependent_files
}
= mkIface_ hsc_env maybe_old_fingerprint
this_mod is_boot used_names used_th deps rdr_env fix_env
warns hpc_info dir_imp_mods self_trust dependent_files
safe_mode mod_details
-- | make an interface from the results of typechecking only. Useful
-- for non-optimising compilation, or where we aren't generating any
-- object code at all ('HscNothing').
mkIfaceTc :: HscEnv
-> Maybe Fingerprint -- The old fingerprint, if we have it
-> SafeHaskellMode -- The safe haskell mode
-> ModDetails -- gotten from mkBootModDetails, probably
-> TcGblEnv -- Usages, deprecations, etc
-> IO (Messages, Maybe (ModIface, Bool))
mkIfaceTc hsc_env maybe_old_fingerprint safe_mode mod_details
tc_result@TcGblEnv{ tcg_mod = this_mod,
tcg_src = hsc_src,
tcg_imports = imports,
tcg_rdr_env = rdr_env,
tcg_fix_env = fix_env,
tcg_warns = warns,
tcg_hpc = other_hpc_info,
tcg_th_splice_used = tc_splice_used,
tcg_dependent_files = dependent_files
}
= do
let used_names = mkUsedNames tc_result
deps <- mkDependencies tc_result
let hpc_info = emptyHpcInfo other_hpc_info
used_th <- readIORef tc_splice_used
dep_files <- (readIORef dependent_files)
mkIface_ hsc_env maybe_old_fingerprint
this_mod (hsc_src == HsBootFile) used_names
used_th deps rdr_env
fix_env warns hpc_info (imp_mods imports)
(imp_trust_own_pkg imports) dep_files safe_mode mod_details
mkUsedNames :: TcGblEnv -> NameSet
mkUsedNames TcGblEnv{ tcg_dus = dus } = allUses dus
-- | Extract information from the rename and typecheck phases to produce
-- a dependencies information for the module being compiled.
mkDependencies :: TcGblEnv -> IO Dependencies
mkDependencies
TcGblEnv{ tcg_mod = mod,
tcg_imports = imports,
tcg_th_used = th_var
}
= do
-- Template Haskell used?
th_used <- readIORef th_var
let dep_mods = eltsUFM (delFromUFM (imp_dep_mods imports) (moduleName mod))
-- M.hi-boot can be in the imp_dep_mods, but we must remove
-- it before recording the modules on which this one depends!
-- (We want to retain M.hi-boot in imp_dep_mods so that
-- loadHiBootInterface can see if M's direct imports depend
-- on M.hi-boot, and hence that we should do the hi-boot consistency
-- check.)
pkgs | th_used = insertList thPackageKey (imp_dep_pkgs imports)
| otherwise = imp_dep_pkgs imports
-- Set the packages required to be Safe according to Safe Haskell.
-- See Note [RnNames . Tracking Trust Transitively]
sorted_pkgs = sortBy stablePackageKeyCmp pkgs
trust_pkgs = imp_trust_pkgs imports
dep_pkgs' = map (\x -> (x, x `elem` trust_pkgs)) sorted_pkgs
return Deps { dep_mods = sortBy (stableModuleNameCmp `on` fst) dep_mods,
dep_pkgs = dep_pkgs',
dep_orphs = sortBy stableModuleCmp (imp_orphs imports),
dep_finsts = sortBy stableModuleCmp (imp_finsts imports) }
-- sort to get into canonical order
-- NB. remember to use lexicographic ordering
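{- A small standalone illustration of the canonical-ordering point above:
   sorting on a stable key (here a plain String standing in for ModuleName)
   makes the recorded dependencies independent of the order in which they
   were discovered, so the interface file does not wobble between builds.

     import Data.List (sortBy)
     import Data.Ord (comparing)

     canonicaliseDeps :: [(String, Bool)] -> [(String, Bool)]
     canonicaliseDeps = sortBy (comparing fst)

     -- canonicaliseDeps [("M2",False),("M1",True)]
     --   == canonicaliseDeps [("M1",True),("M2",False)]
-}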
mkIface_ :: HscEnv -> Maybe Fingerprint -> Module -> IsBootInterface
-> NameSet -> Bool -> Dependencies -> GlobalRdrEnv
-> NameEnv FixItem -> Warnings -> HpcInfo
-> ImportedMods -> Bool
-> [FilePath]
-> SafeHaskellMode
-> ModDetails
-> IO (Messages, Maybe (ModIface, Bool))
mkIface_ hsc_env maybe_old_fingerprint
this_mod is_boot used_names used_th deps rdr_env fix_env src_warns
hpc_info dir_imp_mods pkg_trust_req dependent_files safe_mode
ModDetails{ md_insts = insts,
md_fam_insts = fam_insts,
md_rules = rules,
md_anns = anns,
md_vect_info = vect_info,
md_types = type_env,
md_exports = exports }
-- NB: notice that mkIface does not look at the bindings
-- only at the TypeEnv. The previous Tidy phase has
-- put exactly the info into the TypeEnv that we want
-- to expose in the interface
= do
usages <- mkUsageInfo hsc_env this_mod dir_imp_mods used_names dependent_files
let entities = typeEnvElts type_env
decls = [ tyThingToIfaceDecl entity
| entity <- entities,
let name = getName entity,
not (isImplicitTyThing entity),
-- No implicit Ids and class tycons in the interface file
not (isWiredInName name),
-- Nor wired-in things; the compiler knows about them anyhow
nameIsLocalOrFrom this_mod name ]
-- Sigh: see Note [Root-main Id] in TcRnDriver
fixities = [(occ,fix) | FixItem occ fix <- nameEnvElts fix_env]
warns = src_warns
iface_rules = map (coreRuleToIfaceRule this_mod) rules
iface_insts = map instanceToIfaceInst insts
iface_fam_insts = map famInstToIfaceFamInst fam_insts
iface_vect_info = flattenVectInfo vect_info
trust_info = setSafeMode safe_mode
annotations = map mkIfaceAnnotation anns
sig_of = getSigOf dflags (moduleName this_mod)
intermediate_iface = ModIface {
mi_module = this_mod,
mi_sig_of = sig_of,
mi_boot = is_boot,
mi_deps = deps,
mi_usages = usages,
mi_exports = mkIfaceExports exports,
-- Sort these lexicographically, so that
-- the result is stable across compilations
mi_insts = sortBy cmp_inst iface_insts,
mi_fam_insts = sortBy cmp_fam_inst iface_fam_insts,
mi_rules = sortBy cmp_rule iface_rules,
mi_vect_info = iface_vect_info,
mi_fixities = fixities,
mi_warns = warns,
mi_anns = annotations,
mi_globals = maybeGlobalRdrEnv rdr_env,
-- Left out deliberately: filled in by addFingerprints
mi_iface_hash = fingerprint0,
mi_mod_hash = fingerprint0,
mi_flag_hash = fingerprint0,
mi_exp_hash = fingerprint0,
mi_used_th = used_th,
mi_orphan_hash = fingerprint0,
mi_orphan = False, -- Always set by addFingerprints, but
-- it's a strict field, so we can't omit it.
mi_finsts = False, -- Ditto
mi_decls = deliberatelyOmitted "decls",
mi_hash_fn = deliberatelyOmitted "hash_fn",
mi_hpc = isHpcUsed hpc_info,
mi_trust = trust_info,
mi_trust_pkg = pkg_trust_req,
-- And build the cached values
mi_warn_fn = mkIfaceWarnCache warns,
mi_fix_fn = mkIfaceFixCache fixities }
(new_iface, no_change_at_all)
<- {-# SCC "versioninfo" #-}
addFingerprints hsc_env maybe_old_fingerprint
intermediate_iface decls
-- Warn about orphans
-- See Note [Orphans and auto-generated rules]
let warn_orphs = wopt Opt_WarnOrphans dflags
warn_auto_orphs = wopt Opt_WarnAutoOrphans dflags
orph_warnings --- Laziness means no work done unless -fwarn-orphans
| warn_orphs || warn_auto_orphs = rule_warns `unionBags` inst_warns
| otherwise = emptyBag
errs_and_warns = (orph_warnings, emptyBag)
unqual = mkPrintUnqualified dflags rdr_env
inst_warns = listToBag [ instOrphWarn dflags unqual d
| (d,i) <- insts `zip` iface_insts
, isOrphan (ifInstOrph i) ]
rule_warns = listToBag [ ruleOrphWarn dflags unqual this_mod r
| r <- iface_rules
, isOrphan (ifRuleOrph r)
, if ifRuleAuto r then warn_auto_orphs
else warn_orphs ]
if errorsFound dflags errs_and_warns
then return ( errs_and_warns, Nothing )
else do
-- Debug printing
dumpIfSet_dyn dflags Opt_D_dump_hi "FINAL INTERFACE"
(pprModIface new_iface)
-- bug #1617: on reload we weren't updating the PrintUnqualified
-- correctly. This stems from the fact that the interface had
-- not changed, so addFingerprints returns the old ModIface
-- with the old GlobalRdrEnv (mi_globals).
let final_iface = new_iface{ mi_globals = maybeGlobalRdrEnv rdr_env }
return (errs_and_warns, Just (final_iface, no_change_at_all))
where
cmp_rule = comparing ifRuleName
-- Compare these lexicographically by OccName, *not* by unique,
-- because the latter is not stable across compilations:
cmp_inst = comparing (nameOccName . ifDFun)
cmp_fam_inst = comparing (nameOccName . ifFamInstTcName)
dflags = hsc_dflags hsc_env
-- We only fill in mi_globals if the module was compiled to byte
-- code. Otherwise, the compiler may not have retained all the
-- top-level bindings and they won't be in the TypeEnv (see
-- Desugar.addExportFlagsAndRules). The mi_globals field is used
-- by GHCi to decide whether the module has its full top-level
-- scope available. (#5534)
maybeGlobalRdrEnv :: GlobalRdrEnv -> Maybe GlobalRdrEnv
maybeGlobalRdrEnv rdr_env
| targetRetainsAllBindings (hscTarget dflags) = Just rdr_env
| otherwise = Nothing
deliberatelyOmitted :: String -> a
deliberatelyOmitted x = panic ("Deliberately omitted: " ++ x)
ifFamInstTcName = ifFamInstFam
flattenVectInfo (VectInfo { vectInfoVar = vVar
, vectInfoTyCon = vTyCon
, vectInfoParallelVars = vParallelVars
, vectInfoParallelTyCons = vParallelTyCons
}) =
IfaceVectInfo
{ ifaceVectInfoVar = [Var.varName v | (v, _ ) <- varEnvElts vVar]
, ifaceVectInfoTyCon = [tyConName t | (t, t_v) <- nameEnvElts vTyCon, t /= t_v]
, ifaceVectInfoTyConReuse = [tyConName t | (t, t_v) <- nameEnvElts vTyCon, t == t_v]
, ifaceVectInfoParallelVars = [Var.varName v | v <- varSetElems vParallelVars]
, ifaceVectInfoParallelTyCons = nameSetElems vParallelTyCons
}
-----------------------------
writeIfaceFile :: DynFlags -> FilePath -> ModIface -> IO ()
writeIfaceFile dflags hi_file_path new_iface
= do createDirectoryIfMissing True (takeDirectory hi_file_path)
writeBinIface dflags hi_file_path new_iface
-- -----------------------------------------------------------------------------
-- Look up parents and versions of Names
-- This is like a global version of the mi_hash_fn field in each ModIface.
-- Given a Name, it finds the ModIface, and then uses mi_hash_fn to get
-- the parent and version info.
mkHashFun
:: HscEnv -- needed to look up versions
-> ExternalPackageState -- ditto
-> (Name -> Fingerprint)
mkHashFun hsc_env eps
= \name ->
let
mod = ASSERT2( isExternalName name, ppr name ) nameModule name
occ = nameOccName name
iface = lookupIfaceByModule (hsc_dflags hsc_env) hpt pit mod `orElse`
pprPanic "lookupVers2" (ppr mod <+> ppr occ)
in
snd (mi_hash_fn iface occ `orElse`
pprPanic "lookupVers1" (ppr mod <+> ppr occ))
where
hpt = hsc_HPT hsc_env
pit = eps_PIT eps
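{- A standalone sketch of the two-stage lookup mkHashFun performs, with
   Data.Map standing in for the HPT/PIT and Strings standing in for Module
   and OccName; the real code goes through lookupIfaceByModule and mi_hash_fn
   and panics instead of returning Nothing.

     import qualified Data.Map as Map
     import GHC.Fingerprint (Fingerprint)

     type FakeIface  = Map.Map String Fingerprint   -- OccName -> hash
     type FakeTables = Map.Map String FakeIface     -- Module  -> interface

     lookupHashSketch :: FakeTables -> String -> String -> Maybe Fingerprint
     lookupHashSketch tables mod_name occ = do
       iface <- Map.lookup mod_name tables  -- find the interface for the module
       Map.lookup occ iface                 -- then the per-entity fingerprint
-}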
-- ---------------------------------------------------------------------------
-- Compute fingerprints for the interface
addFingerprints
:: HscEnv
-> Maybe Fingerprint -- the old fingerprint, if any
-> ModIface -- The new interface (lacking decls)
-> [IfaceDecl] -- The new decls
-> IO (ModIface, -- Updated interface
Bool) -- True <=> no changes at all;
-- no need to write Iface
addFingerprints hsc_env mb_old_fingerprint iface0 new_decls
= do
eps <- hscEPS hsc_env
let
-- The ABI of a declaration represents everything that is made
-- visible about the declaration that a client can depend on.
-- see IfaceDeclABI below.
declABI :: IfaceDecl -> IfaceDeclABI
declABI decl = (this_mod, decl, extras)
where extras = declExtras fix_fn ann_fn non_orph_rules non_orph_insts
non_orph_fis decl
edges :: [(IfaceDeclABI, Unique, [Unique])]
edges = [ (abi, getUnique (ifName decl), out)
| decl <- new_decls
, let abi = declABI decl
, let out = localOccs $ freeNamesDeclABI abi
]
name_module n = ASSERT2( isExternalName n, ppr n ) nameModule n
localOccs = map (getUnique . getParent . getOccName)
. filter ((== this_mod) . name_module)
. nameSetElems
where getParent occ = lookupOccEnv parent_map occ `orElse` occ
-- maps OccNames to their parents in the current module.
-- e.g. a reference to a constructor must be turned into a reference
-- to the TyCon for the purposes of calculating dependencies.
parent_map :: OccEnv OccName
parent_map = foldr extend emptyOccEnv new_decls
where extend d env =
extendOccEnvList env [ (b,n) | b <- ifaceDeclImplicitBndrs d ]
where n = ifName d
-- strongly-connected groups of declarations, in dependency order
groups = stronglyConnCompFromEdgedVertices edges
global_hash_fn = mkHashFun hsc_env eps
-- how to output Names when generating the data to fingerprint.
-- Here we want to output the fingerprint for each top-level
-- Name, whether it comes from the current module or another
-- module. In this way, the fingerprint for a declaration will
-- change if the fingerprint for anything it refers to (transitively)
-- changes.
mk_put_name :: (OccEnv (OccName,Fingerprint))
-> BinHandle -> Name -> IO ()
mk_put_name local_env bh name
| isWiredInName name = putNameLiterally bh name
-- wired-in names don't have fingerprints
| otherwise
= ASSERT2( isExternalName name, ppr name )
let hash | nameModule name /= this_mod = global_hash_fn name
| otherwise = snd (lookupOccEnv local_env (getOccName name)
`orElse` pprPanic "urk! lookup local fingerprint"
(ppr name)) -- (undefined,fingerprint0))
-- This panic indicates that we got the dependency
-- analysis wrong, because we needed a fingerprint for
-- an entity that wasn't in the environment. To debug
-- it, turn the panic into a trace, uncomment the
-- pprTraces below, run the compile again, and inspect
-- the output and the generated .hi file with
-- --show-iface.
in put_ bh hash
-- take a strongly-connected group of declarations and compute
-- its fingerprint.
fingerprint_group :: (OccEnv (OccName,Fingerprint),
[(Fingerprint,IfaceDecl)])
-> SCC IfaceDeclABI
-> IO (OccEnv (OccName,Fingerprint),
[(Fingerprint,IfaceDecl)])
fingerprint_group (local_env, decls_w_hashes) (AcyclicSCC abi)
= do let hash_fn = mk_put_name local_env
decl = abiDecl abi
-- pprTrace "fingerprinting" (ppr (ifName decl) ) $ do
hash <- computeFingerprint hash_fn abi
env' <- extend_hash_env local_env (hash,decl)
return (env', (hash,decl) : decls_w_hashes)
fingerprint_group (local_env, decls_w_hashes) (CyclicSCC abis)
= do let decls = map abiDecl abis
local_env1 <- foldM extend_hash_env local_env
(zip (repeat fingerprint0) decls)
let hash_fn = mk_put_name local_env1
-- pprTrace "fingerprinting" (ppr (map ifName decls) ) $ do
let stable_abis = sortBy cmp_abiNames abis
-- put the cycle in a canonical order
hash <- computeFingerprint hash_fn stable_abis
let pairs = zip (repeat hash) decls
local_env2 <- foldM extend_hash_env local_env pairs
return (local_env2, pairs ++ decls_w_hashes)
-- we have fingerprinted the whole declaration, but we now need
-- to assign fingerprints to all the OccNames that it binds, to
-- use when referencing those OccNames in later declarations.
--
extend_hash_env :: OccEnv (OccName,Fingerprint)
-> (Fingerprint,IfaceDecl)
-> IO (OccEnv (OccName,Fingerprint))
extend_hash_env env0 (hash,d) = do
return (foldr (\(b,fp) env -> extendOccEnv env b (b,fp)) env0
(ifaceDeclFingerprints hash d))
--
(local_env, decls_w_hashes) <-
foldM fingerprint_group (emptyOccEnv, []) groups
-- when calculating fingerprints, we always need to use canonical
-- ordering for lists of things. In particular, the mi_deps has various
-- lists of modules and suchlike, so put these all in canonical order:
let sorted_deps = sortDependencies (mi_deps iface0)
-- the export hash of a module depends on the orphan hashes of the
-- orphan modules below us in the dependency tree. This is the way
-- that changes in orphans get propagated all the way up the
-- dependency tree. We only care about orphan modules in the current
-- package, because changes to orphans outside this package will be
-- tracked by the usage on the ABI hash of package modules that we import.
let orph_mods
= filter (/= this_mod) -- Note [Do not update EPS with your own hi-boot]
. filter ((== this_pkg) . modulePackageKey)
$ dep_orphs sorted_deps
dep_orphan_hashes <- getOrphanHashes hsc_env orph_mods
-- Note [Do not update EPS with your own hi-boot]
-- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-- (See also Trac #10182). When your hs-boot file includes an orphan
-- instance declaration, you may find that the dep_orphs of a module you
-- import contains reference to yourself. DO NOT actually load this module
-- or add it to the orphan hashes: you're going to provide the orphan
-- instances yourself, no need to consult hs-boot; if you do load the
-- interface into EPS, you will see a duplicate orphan instance.
orphan_hash <- computeFingerprint (mk_put_name local_env)
(map ifDFun orph_insts, orph_rules, orph_fis)
-- the export list hash doesn't depend on the fingerprints of
-- the Names it mentions, only the Names themselves, hence putNameLiterally.
export_hash <- computeFingerprint putNameLiterally
(mi_exports iface0,
orphan_hash,
dep_orphan_hashes,
dep_pkgs (mi_deps iface0),
-- dep_pkgs: see "Package Version Changes" on
-- wiki/Commentary/Compiler/RecompilationAvoidance
mi_trust iface0)
-- Make sure change of Safe Haskell mode causes recomp.
-- put the declarations in a canonical order, sorted by OccName
let sorted_decls = Map.elems $ Map.fromList $
[(ifName d, e) | e@(_, d) <- decls_w_hashes]
-- the flag hash depends on:
-- - (some of) dflags
-- it returns two hashes, one that shouldn't change
-- the abi hash and one that should
flag_hash <- fingerprintDynFlags dflags this_mod putNameLiterally
-- the ABI hash depends on:
-- - decls
-- - export list
-- - orphans
-- - deprecations
-- - vect info
-- - flag abi hash
mod_hash <- computeFingerprint putNameLiterally
(map fst sorted_decls,
export_hash, -- includes orphan_hash
mi_warns iface0,
mi_vect_info iface0)
-- The interface hash depends on:
-- - the ABI hash, plus
-- - the module level annotations,
-- - usages
-- - deps (home and external packages, dependent files)
-- - hpc
iface_hash <- computeFingerprint putNameLiterally
(mod_hash,
ann_fn (mkVarOcc "module"), -- See mkIfaceAnnCache
mi_usages iface0,
sorted_deps,
mi_hpc iface0)
let
no_change_at_all = Just iface_hash == mb_old_fingerprint
final_iface = iface0 {
mi_mod_hash = mod_hash,
mi_iface_hash = iface_hash,
mi_exp_hash = export_hash,
mi_orphan_hash = orphan_hash,
mi_flag_hash = flag_hash,
mi_orphan = not ( all ifRuleAuto orph_rules
-- See Note [Orphans and auto-generated rules]
&& null orph_insts
&& null orph_fis
&& isNoIfaceVectInfo (mi_vect_info iface0)),
mi_finsts = not . null $ mi_fam_insts iface0,
mi_decls = sorted_decls,
mi_hash_fn = lookupOccEnv local_env }
--
return (final_iface, no_change_at_all)
where
this_mod = mi_module iface0
dflags = hsc_dflags hsc_env
this_pkg = thisPackage dflags
(non_orph_insts, orph_insts) = mkOrphMap ifInstOrph (mi_insts iface0)
(non_orph_rules, orph_rules) = mkOrphMap ifRuleOrph (mi_rules iface0)
(non_orph_fis, orph_fis) = mkOrphMap ifFamInstOrph (mi_fam_insts iface0)
fix_fn = mi_fix_fn iface0
ann_fn = mkIfaceAnnCache (mi_anns iface0)
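{- A standalone sketch of the SCC grouping used above, with
   Data.Graph.stronglyConnComp from containers in place of GHC's Digraph;
   declarations are hypothetical (name, dependencies) pairs.

     import Data.Graph (SCC(..), stronglyConnComp)

     declGroups :: [(String, [String])] -> [SCC String]
     declGroups decls =
       stronglyConnComp [ (name, name, deps) | (name, deps) <- decls ]

     -- declGroups [("f",["g"]),("g",["f"]),("h",["f"])] yields a CyclicSCC
     -- for f/g followed by an AcyclicSCC for h: dependencies come first,
     -- mirroring the order in which fingerprint_group processes groups.
-}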
getOrphanHashes :: HscEnv -> [Module] -> IO [Fingerprint]
getOrphanHashes hsc_env mods = do
eps <- hscEPS hsc_env
let
hpt = hsc_HPT hsc_env
pit = eps_PIT eps
dflags = hsc_dflags hsc_env
get_orph_hash mod =
case lookupIfaceByModule dflags hpt pit mod of
Nothing -> pprPanic "moduleOrphanHash" (ppr mod)
Just iface -> mi_orphan_hash iface
--
return (map get_orph_hash mods)
sortDependencies :: Dependencies -> Dependencies
sortDependencies d
= Deps { dep_mods = sortBy (compare `on` (moduleNameFS.fst)) (dep_mods d),
dep_pkgs = sortBy (stablePackageKeyCmp `on` fst) (dep_pkgs d),
dep_orphs = sortBy stableModuleCmp (dep_orphs d),
dep_finsts = sortBy stableModuleCmp (dep_finsts d) }
-- | Creates cached lookup for the 'mi_anns' field of ModIface
-- Hackily, we use "module" as the OccName for any module-level annotations
mkIfaceAnnCache :: [IfaceAnnotation] -> OccName -> [AnnPayload]
mkIfaceAnnCache anns
= \n -> lookupOccEnv env n `orElse` []
where
pair (IfaceAnnotation target value) =
(case target of
NamedTarget occn -> occn
ModuleTarget _ -> mkVarOcc "module"
, [value])
-- flipping (++), so the first argument is always short
env = mkOccEnv_C (flip (++)) (map pair anns)
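{- A standalone version of the cached-lookup pattern used by mkIfaceAnnCache,
   with Data.Map in place of OccEnv and Strings standing in for
   OccName/AnnPayload.

     import qualified Data.Map as Map

     mkAnnCacheSketch :: [(String, String)] -> (String -> [String])
     mkAnnCacheSketch anns = \n -> Map.findWithDefault [] n env
       where
         -- group each annotation under its target; each freshly inserted
         -- singleton is the left operand of (++), so appends stay cheap
         env = Map.fromListWith (++)
                 [ (target, [payload]) | (target, payload) <- anns ]
-}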
{-
Note [Orphans and auto-generated rules]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
When we specialise an INLINEABLE function, or when we have
-fspecialise-aggressively, we auto-generate RULES that are orphans.
We don't want to warn about these, at least not by default, or we'd
generate a lot of warnings. Hence -fwarn-auto-orphans.
Indeed, we don't even treat the module as an orphan module if it has
auto-generated *rule* orphans. Orphan modules are read every time we
compile, so they are pretty obtrusive and slow down every compilation,
even non-optimised ones. (Reason: for type class instances it's a
type correctness issue.) But specialisation rules are strictly for
*optimisation* only so it's fine not to read the interface.
What this means is that a SPEC rule from auto-specialisation in
module M will be used in other modules only if M.hi has been read for
some other reason, which is actually pretty likely.
************************************************************************
* *
The ABI of an IfaceDecl
* *
************************************************************************
Note [The ABI of an IfaceDecl]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
The ABI of a declaration consists of:
(a) the full name of the identifier (inc. module and package,
because these are used to construct the symbol name by which
the identifier is known externally).
(b) the declaration itself, as exposed to clients. That is, the
definition of an Id is included in the fingerprint only if
it is made available as an unfolding in the interface.
(c) the fixity of the identifier
(d) for Ids: rules
(e) for classes: instances, fixity & rules for methods
(f) for datatypes: instances, fixity & rules for constrs
Items (c)-(f) are not stored in the IfaceDecl, but instead appear
elsewhere in the interface file. But they are *fingerprinted* with
the declaration itself. This is done by grouping (c)-(f) in IfaceDeclExtras,
and fingerprinting that as part of the declaration.
-}
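{- A standalone sketch of fingerprinting a declaration together with the
   out-of-line items (c)-(f) described above, using GHC.Fingerprint from
   base; the String arguments are hypothetical rendered stand-ins for the
   fields of IfaceDeclExtras.

     import GHC.Fingerprint (Fingerprint, fingerprintString, fingerprintFingerprints)

     abiFingerprint :: String    -- the declaration, as exposed to clients
                    -> String    -- its fixity
                    -> [String]  -- rules attached to it
                    -> Fingerprint
     abiFingerprint decl fixity rules =
       fingerprintFingerprints (map fingerprintString (decl : fixity : rules))

   Because the extras are hashed with the declaration, a change to, say, a
   RULE for an Id changes the fingerprint of the Id itself.
-}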
type IfaceDeclABI = (Module, IfaceDecl, IfaceDeclExtras)
data IfaceDeclExtras
= IfaceIdExtras IfaceIdExtras
| IfaceDataExtras
Fixity -- Fixity of the tycon itself
[IfaceInstABI] -- Local class and family instances of this tycon
-- See Note [Orphans] in InstEnv
[AnnPayload] -- Annotations of the type itself
[IfaceIdExtras] -- For each constructor: fixity, RULES and annotations
| IfaceClassExtras
Fixity -- Fixity of the class itself
[IfaceInstABI] -- Local instances of this class *or*
-- of its associated data types
-- See Note [Orphans] in InstEnv
[AnnPayload] -- Annotations of the type itself
[IfaceIdExtras] -- For each class method: fixity, RULES and annotations
| IfaceSynonymExtras Fixity [AnnPayload]
| IfaceFamilyExtras Fixity [IfaceInstABI] [AnnPayload]
| IfaceOtherDeclExtras
data IfaceIdExtras
= IdExtras
Fixity -- Fixity of the Id
[IfaceRule] -- Rules for the Id
[AnnPayload] -- Annotations for the Id
-- When hashing a class or family instance, we hash only the
-- DFunId or CoAxiom, because that depends on all the
-- information about the instance.
--
type IfaceInstABI = IfExtName -- Name of DFunId or CoAxiom that is evidence for the instance
abiDecl :: IfaceDeclABI -> IfaceDecl
abiDecl (_, decl, _) = decl
cmp_abiNames :: IfaceDeclABI -> IfaceDeclABI -> Ordering
cmp_abiNames abi1 abi2 = ifName (abiDecl abi1) `compare`
ifName (abiDecl abi2)
freeNamesDeclABI :: IfaceDeclABI -> NameSet
freeNamesDeclABI (_mod, decl, extras) =
freeNamesIfDecl decl `unionNameSet` freeNamesDeclExtras extras
freeNamesDeclExtras :: IfaceDeclExtras -> NameSet
freeNamesDeclExtras (IfaceIdExtras id_extras)
= freeNamesIdExtras id_extras
freeNamesDeclExtras (IfaceDataExtras _ insts _ subs)
= unionNameSets (mkNameSet insts : map freeNamesIdExtras subs)
freeNamesDeclExtras (IfaceClassExtras _ insts _ subs)
= unionNameSets (mkNameSet insts : map freeNamesIdExtras subs)
freeNamesDeclExtras (IfaceSynonymExtras _ _)
= emptyNameSet
freeNamesDeclExtras (IfaceFamilyExtras _ insts _)
= mkNameSet insts
freeNamesDeclExtras IfaceOtherDeclExtras
= emptyNameSet
freeNamesIdExtras :: IfaceIdExtras -> NameSet
freeNamesIdExtras (IdExtras _ rules _) = unionNameSets (map freeNamesIfRule rules)
instance Outputable IfaceDeclExtras where
ppr IfaceOtherDeclExtras = Outputable.empty
ppr (IfaceIdExtras extras) = ppr_id_extras extras
ppr (IfaceSynonymExtras fix anns) = vcat [ppr fix, ppr anns]
ppr (IfaceFamilyExtras fix finsts anns) = vcat [ppr fix, ppr finsts, ppr anns]
ppr (IfaceDataExtras fix insts anns stuff) = vcat [ppr fix, ppr_insts insts, ppr anns,
ppr_id_extras_s stuff]
ppr (IfaceClassExtras fix insts anns stuff) = vcat [ppr fix, ppr_insts insts, ppr anns,
ppr_id_extras_s stuff]
ppr_insts :: [IfaceInstABI] -> SDoc
ppr_insts _ = ptext (sLit "<insts>")
ppr_id_extras_s :: [IfaceIdExtras] -> SDoc
ppr_id_extras_s stuff = vcat (map ppr_id_extras stuff)
ppr_id_extras :: IfaceIdExtras -> SDoc
ppr_id_extras (IdExtras fix rules anns) = ppr fix $$ vcat (map ppr rules) $$ vcat (map ppr anns)
-- This instance is used only to compute fingerprints
instance Binary IfaceDeclExtras where
get _bh = panic "no get for IfaceDeclExtras"
put_ bh (IfaceIdExtras extras) = do
putByte bh 1; put_ bh extras
put_ bh (IfaceDataExtras fix insts anns cons) = do
putByte bh 2; put_ bh fix; put_ bh insts; put_ bh anns; put_ bh cons
put_ bh (IfaceClassExtras fix insts anns methods) = do
putByte bh 3; put_ bh fix; put_ bh insts; put_ bh anns; put_ bh methods
put_ bh (IfaceSynonymExtras fix anns) = do
putByte bh 4; put_ bh fix; put_ bh anns
put_ bh (IfaceFamilyExtras fix finsts anns) = do
putByte bh 5; put_ bh fix; put_ bh finsts; put_ bh anns
put_ bh IfaceOtherDeclExtras = putByte bh 6
instance Binary IfaceIdExtras where
get _bh = panic "no get for IfaceIdExtras"
put_ bh (IdExtras fix rules anns)= do { put_ bh fix; put_ bh rules; put_ bh anns }
declExtras :: (OccName -> Fixity)
-> (OccName -> [AnnPayload])
-> OccEnv [IfaceRule]
-> OccEnv [IfaceClsInst]
-> OccEnv [IfaceFamInst]
-> IfaceDecl
-> IfaceDeclExtras
declExtras fix_fn ann_fn rule_env inst_env fi_env decl
= case decl of
IfaceId{} -> IfaceIdExtras (id_extras n)
IfaceData{ifCons=cons} ->
IfaceDataExtras (fix_fn n)
(map ifFamInstAxiom (lookupOccEnvL fi_env n) ++
map ifDFun (lookupOccEnvL inst_env n))
(ann_fn n)
(map (id_extras . ifConOcc) (visibleIfConDecls cons))
IfaceClass{ifSigs=sigs, ifATs=ats} ->
IfaceClassExtras (fix_fn n)
(map ifDFun $ (concatMap at_extras ats)
++ lookupOccEnvL inst_env n)
-- Include instances of the associated types
-- as well as instances of the class (Trac #5147)
(ann_fn n)
[id_extras op | IfaceClassOp op _ _ <- sigs]
IfaceSynonym{} -> IfaceSynonymExtras (fix_fn n)
(ann_fn n)
IfaceFamily{} -> IfaceFamilyExtras (fix_fn n)
(map ifFamInstAxiom (lookupOccEnvL fi_env n))
(ann_fn n)
_other -> IfaceOtherDeclExtras
where
n = ifName decl
id_extras occ = IdExtras (fix_fn occ) (lookupOccEnvL rule_env occ) (ann_fn occ)
at_extras (IfaceAT decl _) = lookupOccEnvL inst_env (ifName decl)
lookupOccEnvL :: OccEnv [v] -> OccName -> [v]
lookupOccEnvL env k = lookupOccEnv env k `orElse` []
-- used when we want to fingerprint a structure without depending on the
-- fingerprints of external Names that it refers to.
putNameLiterally :: BinHandle -> Name -> IO ()
putNameLiterally bh name = ASSERT( isExternalName name )
do
put_ bh $! nameModule name
put_ bh $! nameOccName name
{-
-- for testing: use the md5sum command to generate fingerprints and
-- compare the results against our built-in version.
fp' <- oldMD5 dflags bh
if fp /= fp' then pprPanic "computeFingerprint" (ppr fp <+> ppr fp')
else return fp
oldMD5 dflags bh = do
tmp <- newTempName dflags "bin"
writeBinMem bh tmp
tmp2 <- newTempName dflags "md5"
let cmd = "md5sum " ++ tmp ++ " >" ++ tmp2
r <- system cmd
case r of
ExitFailure _ -> throwGhcExceptionIO (PhaseFailed cmd r)
ExitSuccess -> do
hash_str <- readFile tmp2
return $! readHexFingerprint hash_str
-}
instOrphWarn :: DynFlags -> PrintUnqualified -> ClsInst -> WarnMsg
instOrphWarn dflags unqual inst
= mkWarnMsg dflags (getSrcSpan inst) unqual $
hang (ptext (sLit "Orphan instance:")) 2 (pprInstanceHdr inst)
$$ text "To avoid this"
$$ nest 4 (vcat possibilities)
where
possibilities =
text "move the instance declaration to the module of the class or of the type, or" :
text "wrap the type with a newtype and declare the instance on the new type." :
[]
ruleOrphWarn :: DynFlags -> PrintUnqualified -> Module -> IfaceRule -> WarnMsg
ruleOrphWarn dflags unqual mod rule
= mkWarnMsg dflags silly_loc unqual $
ptext (sLit "Orphan rule:") <+> ppr rule
where
silly_loc = srcLocSpan (mkSrcLoc (moduleNameFS (moduleName mod)) 1 1)
-- We don't have a decent SrcSpan for a Rule, not even the CoreRule
-- Could readily be fixed by adding a SrcSpan to CoreRule, if we wanted to
----------------------
-- mkOrphMap partitions instance decls or rules into
-- (a) an OccEnv for ones that are not orphans,
-- mapping the local OccName to a list of its decls
-- (b) a list of orphan decls
mkOrphMap :: (decl -> IsOrphan) -- Extract orphan status from decl
-> [decl] -- Sorted into canonical order
-> (OccEnv [decl], -- Non-orphan decls associated with their key;
-- each sublist in canonical order
[decl]) -- Orphan decls; in canonical order
mkOrphMap get_key decls
= foldl go (emptyOccEnv, []) decls
where
go (non_orphs, orphs) d
| NotOrphan occ <- get_key d
= (extendOccEnv_Acc (:) singleton non_orphs occ d, orphs)
| otherwise = (non_orphs, d:orphs)
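{- A standalone analogue of mkOrphMap using Data.Map, partitioning items into
   a keyed map of non-orphans plus a plain list of orphans; Maybe String
   stands in for the IsOrphan/NotOrphan distinction.

     import qualified Data.Map as Map

     partitionOrphans :: (a -> Maybe String)        -- Just key <=> not an orphan
                      -> [a]
                      -> (Map.Map String [a], [a])  -- non-orphans by key, orphans
     partitionOrphans get_key = foldl go (Map.empty, [])
       where
         go (non_orphs, orphs) d =
           case get_key d of
             Just key -> (Map.insertWith (++) key [d] non_orphs, orphs)
             Nothing  -> (non_orphs, d : orphs)
-}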
{-
************************************************************************
* *
Keeping track of what we've slurped, and fingerprints
* *
************************************************************************
-}
mkUsageInfo :: HscEnv -> Module -> ImportedMods -> NameSet -> [FilePath] -> IO [Usage]
mkUsageInfo hsc_env this_mod dir_imp_mods used_names dependent_files
= do
eps <- hscEPS hsc_env
hashes <- mapM getFileHash dependent_files
let mod_usages = mk_mod_usage_info (eps_PIT eps) hsc_env this_mod
dir_imp_mods used_names
let usages = mod_usages ++ [ UsageFile { usg_file_path = f
, usg_file_hash = hash }
| (f, hash) <- zip dependent_files hashes ]
usages `seqList` return usages
-- seq the list of Usages returned: occasionally these
-- don't get evaluated for a while and we can end up hanging on to
-- the entire collection of Ifaces.
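{- A standalone sketch of the dependent-file half of mkUsageInfo: hash each
   file and pair it with its path (the real code wraps the pairs in
   UsageFile records). It assumes getFileHash from GHC.Fingerprint in base.

     import GHC.Fingerprint (Fingerprint, getFileHash)

     fileUsagesSketch :: [FilePath] -> IO [(FilePath, Fingerprint)]
     fileUsagesSketch files = do
       hashes <- mapM getFileHash files
       return (zip files hashes)
-}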
mk_mod_usage_info :: PackageIfaceTable
-> HscEnv
-> Module
-> ImportedMods
-> NameSet
-> [Usage]
mk_mod_usage_info pit hsc_env this_mod direct_imports used_names
= mapMaybe mkUsage usage_mods
where
hpt = hsc_HPT hsc_env
dflags = hsc_dflags hsc_env
this_pkg = thisPackage dflags
used_mods = moduleEnvKeys ent_map
dir_imp_mods = moduleEnvKeys direct_imports
all_mods = used_mods ++ filter (`notElem` used_mods) dir_imp_mods
usage_mods = sortBy stableModuleCmp all_mods
-- canonical order is important, to avoid interface-file
-- wobblage.
-- ent_map groups together all the things imported and used
-- from a particular module
ent_map :: ModuleEnv [OccName]
ent_map = foldNameSet add_mv emptyModuleEnv used_names
where
add_mv name mv_map
| isWiredInName name = mv_map -- ignore wired-in names
| otherwise
= case nameModule_maybe name of
Nothing -> ASSERT2( isSystemName name, ppr name ) mv_map
-- See Note [Internal used_names]
Just mod -> -- This lambda function is really just a
-- specialised (++); originally came about to
-- avoid quadratic behaviour (trac #2680)
extendModuleEnvWith (\_ xs -> occ:xs) mv_map mod [occ]
where occ = nameOccName name
-- We want to create a Usage for a home module if
-- a) we used something from it; has something in used_names
-- b) we imported it, even if we used nothing from it
-- (need to recompile if its export list changes: export_fprint)
mkUsage :: Module -> Maybe Usage
mkUsage mod
| isNothing maybe_iface -- We can't depend on it if we didn't
-- load its interface.
|| mod == this_mod -- We don't care about usages of
-- things in *this* module
= Nothing
| modulePackageKey mod /= this_pkg
= Just UsagePackageModule{ usg_mod = mod,
usg_mod_hash = mod_hash,
usg_safe = imp_safe }
-- for package modules, we record the module hash only
| (null used_occs
&& isNothing export_hash
&& not is_direct_import
&& not finsts_mod)
= Nothing -- Record no usage info
-- for directly-imported modules, we always want to record a usage
-- on the orphan hash. This is what triggers a recompilation if
-- an orphan is added or removed somewhere below us in the future.
| otherwise
= Just UsageHomeModule {
usg_mod_name = moduleName mod,
usg_mod_hash = mod_hash,
usg_exports = export_hash,
usg_entities = Map.toList ent_hashs,
usg_safe = imp_safe }
where
maybe_iface = lookupIfaceByModule dflags hpt pit mod
-- In one-shot mode, the interfaces for home-package
-- modules accumulate in the PIT not HPT. Sigh.
Just iface = maybe_iface
finsts_mod = mi_finsts iface
hash_env = mi_hash_fn iface
mod_hash = mi_mod_hash iface
export_hash | depend_on_exports = Just (mi_exp_hash iface)
| otherwise = Nothing
(is_direct_import, imp_safe)
= case lookupModuleEnv direct_imports mod of
Just ((_,_,_,safe):_xs) -> (True, safe)
Just _ -> pprPanic "mkUsage: empty direct import" Outputable.empty
Nothing -> (False, safeImplicitImpsReq dflags)
-- Nothing case is for implicit imports like 'System.IO' when 'putStrLn'
-- is used in the source code. We require them to be safe in Safe Haskell
used_occs = lookupModuleEnv ent_map mod `orElse` []
-- Making a Map here ensures that (a) we remove duplicates
-- when we have usages on several subordinates of a single parent,
-- and (b) that the usages emerge in a canonical order, which
-- is why we use Map rather than OccEnv: Map works
-- using Ord on the OccNames, which is a lexicographic ordering.
ent_hashs :: Map OccName Fingerprint
ent_hashs = Map.fromList (map lookup_occ used_occs)
lookup_occ occ =
case hash_env occ of
Nothing -> pprPanic "mkUsage" (ppr mod <+> ppr occ <+> ppr used_names)
Just r -> r
depend_on_exports = is_direct_import
{- True
Even if we used 'import M ()', we have to register a
usage on the export list because we are sensitive to
changes in orphan instances/rules.
False
In GHC 6.8.x we always returned true, and in
fact it recorded a dependency on *all* the
modules underneath in the dependency tree. This
happens to make orphans work right, but is too
expensive: it'll read too many interface files.
The 'isNothing maybe_iface' check above saved us
from generating many of these usages (at least in
one-shot mode), but that's even more bogus!
-}
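{- A standalone illustration of the point made for ent_hashs above: going
   through Data.Map both removes duplicate subordinate usages and emits them
   in a canonical Ord-based order, keeping the interface file stable.

     import qualified Data.Map as Map
     import GHC.Fingerprint (Fingerprint)

     canonicalEntities :: [(String, Fingerprint)] -> [(String, Fingerprint)]
     canonicalEntities = Map.toList . Map.fromList
     -- duplicate keys collapse and the result comes out sorted by key
-}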
mkIfaceAnnotation :: Annotation -> IfaceAnnotation
mkIfaceAnnotation (Annotation { ann_target = target, ann_value = payload })
= IfaceAnnotation {
ifAnnotatedTarget = fmap nameOccName target,
ifAnnotatedValue = payload
}
mkIfaceExports :: [AvailInfo] -> [IfaceExport] -- Sort to make canonical
mkIfaceExports exports
= sortBy stableAvailCmp (map sort_subs exports)
where
sort_subs :: AvailInfo -> AvailInfo
sort_subs (Avail n) = Avail n
sort_subs (AvailTC n []) = AvailTC n []
sort_subs (AvailTC n (m:ms))
| n==m = AvailTC n (m:sortBy stableNameCmp ms)
| otherwise = AvailTC n (sortBy stableNameCmp (m:ms))
-- Maintain the AvailTC Invariant
{-
Note [Original module]
~~~~~~~~~~~~~~~~~~~~~
Consider this:
module X where { data family T }
module Y( T(..) ) where { import X; data instance T Int = MkT Int }
The exported Avail from Y will look like
X.T{X.T, Y.MkT}
That is, in Y,
- only MkT is brought into scope by the data instance;
- but the parent (used for grouping and naming in T(..) exports) is X.T
- and in this case we export X.T too
In the result of MkIfaceExports, the names are grouped by defining module,
so we may need to split up a single Avail into multiple ones.
Note [Internal used_names]
~~~~~~~~~~~~~~~~~~~~~~~~~~
Most of the used_names are External Names, but we can have Internal
Names too: see Note [Binders in Template Haskell] in Convert, and
Trac #5362 for an example. Such Names are always:
- for locally-defined things, for which we don't gather usage info,
  so we can just ignore them in ent_map
- System Names, hence the assert, just as a double check.
************************************************************************
* *
Load the old interface file for this module (unless
we have it already), and check whether it is up to date
* *
************************************************************************
-}
data RecompileRequired
= UpToDate
-- ^ everything is up to date, recompilation is not required
| MustCompile
-- ^ The .hs file has been touched, or the .o/.hi file does not exist
| RecompBecause String
-- ^ The .o/.hi files are up to date, but something else has changed
-- to force recompilation; the String says what (one-line summary)
deriving Eq
recompileRequired :: RecompileRequired -> Bool
recompileRequired UpToDate = False
recompileRequired _ = True
-- | Top level function to check if the version of an old interface file
-- is equivalent to the current source file the user asked us to compile.
-- If the same, we can avoid recompilation. We return a tuple where the
-- first element says whether we should recompile the object file
-- and the second is maybe the interface file, where Nothing means to
-- rebuild the interface file rather than use the existing one.
checkOldIface
:: HscEnv
-> ModSummary
-> SourceModified
-> Maybe ModIface -- Old interface from compilation manager, if any
-> IO (RecompileRequired, Maybe ModIface)
checkOldIface hsc_env mod_summary source_modified maybe_iface
= do let dflags = hsc_dflags hsc_env
showPass dflags $
"Checking old interface for " ++
(showPpr dflags $ ms_mod mod_summary)
initIfaceCheck hsc_env $
check_old_iface hsc_env mod_summary source_modified maybe_iface
check_old_iface
:: HscEnv
-> ModSummary
-> SourceModified
-> Maybe ModIface
-> IfG (RecompileRequired, Maybe ModIface)
check_old_iface hsc_env mod_summary src_modified maybe_iface
= let dflags = hsc_dflags hsc_env
getIface =
case maybe_iface of
Just _ -> do
traceIf (text "We already have the old interface for" <+>
ppr (ms_mod mod_summary))
return maybe_iface
Nothing -> loadIface
loadIface = do
let iface_path = msHiFilePath mod_summary
read_result <- readIface (ms_mod mod_summary) iface_path
case read_result of
Failed err -> do
traceIf (text "FYI: cannot read old interface file:" $$ nest 4 err)
return Nothing
Succeeded iface -> do
traceIf (text "Read the interface file" <+> text iface_path)
return $ Just iface
src_changed
| gopt Opt_ForceRecomp (hsc_dflags hsc_env) = True
| SourceModified <- src_modified = True
| otherwise = False
in do
when src_changed $
traceHiDiffs (nest 4 $ text "Source file changed or recompilation check turned off")
case src_changed of
-- If the source has changed and we're in interactive mode,
-- avoid reading an interface; just return the one we might
-- have been supplied with.
True | not (isObjectTarget $ hscTarget dflags) ->
return (MustCompile, maybe_iface)
-- Try and read the old interface for the current module
-- from the .hi file left from the last time we compiled it
True -> do
maybe_iface' <- getIface
return (MustCompile, maybe_iface')
False -> do
maybe_iface' <- getIface
case maybe_iface' of
-- We can't retrieve the iface
Nothing -> return (MustCompile, Nothing)
-- We have got the old iface; check its versions
-- even in the SourceUnmodifiedAndStable case we
-- should check versions because some packages
-- might have changed or gone away.
Just iface -> checkVersions hsc_env mod_summary iface
-- | Check if a module is still the same 'version'.
--
-- This function is called in the recompilation checker after we have
-- determined that the module M being checked hasn't had any changes
-- to its source file since we last compiled M. So at this point in general
-- two things may have changed that mean we should recompile M:
-- * The interface exported by a dependency of M has changed.
-- * The compiler flags specified this time for M have changed
--   in a manner that is significant for recompilation.
-- We return not just if we should recompile the object file but also
-- if we should rebuild the interface file.
checkVersions :: HscEnv
-> ModSummary
-> ModIface -- Old interface
-> IfG (RecompileRequired, Maybe ModIface)
checkVersions hsc_env mod_summary iface
= do { traceHiDiffs (text "Considering whether compilation is required for" <+>
ppr (mi_module iface) <> colon)
; recomp <- checkFlagHash hsc_env iface
; if recompileRequired recomp then return (recomp, Nothing) else do {
; if getSigOf (hsc_dflags hsc_env) (moduleName (mi_module iface))
/= mi_sig_of iface
then return (RecompBecause "sig-of changed", Nothing) else do {
; recomp <- checkDependencies hsc_env mod_summary iface
; if recompileRequired recomp then return (recomp, Just iface) else do {
-- Source code unchanged and no errors yet... carry on
--
-- First put the dependent-module info, read from the old
-- interface, into the envt, so that when we look for
-- interfaces we look for the right one (.hi or .hi-boot)
--
-- It's just temporary because either the usage check will succeed
-- (in which case we are done with this module) or it'll fail (in which
-- case we'll compile the module from scratch anyhow).
--
-- We do this regardless of compilation mode, although in --make mode
-- all the dependent modules should be in the HPT already, so it's
-- quite redundant
; updateEps_ $ \eps -> eps { eps_is_boot = mod_deps }
; recomp <- checkList [checkModUsage this_pkg u | u <- mi_usages iface]
; return (recomp, Just iface)
}}}}
where
this_pkg = thisPackage (hsc_dflags hsc_env)
-- This is a bit of a hack really
mod_deps :: ModuleNameEnv (ModuleName, IsBootInterface)
mod_deps = mkModDeps (dep_mods (mi_deps iface))
-- | Check the flags haven't changed
checkFlagHash :: HscEnv -> ModIface -> IfG RecompileRequired
checkFlagHash hsc_env iface = do
let old_hash = mi_flag_hash iface
new_hash <- liftIO $ fingerprintDynFlags (hsc_dflags hsc_env)
(mi_module iface)
putNameLiterally
case old_hash == new_hash of
True -> up_to_date (ptext $ sLit "Module flags unchanged")
False -> out_of_date_hash "flags changed"
(ptext $ sLit " Module flags have changed")
old_hash new_hash
-- If the direct imports of this module are resolved to targets that
-- are not among the dependencies of the previous interface file,
-- then we definitely need to recompile. This catches cases like
-- - an exposed package has been upgraded
-- - we are compiling with different package flags
-- - a home module that was shadowing a package module has been removed
-- - a new home module has been added that shadows a package module
-- See bug #1372.
--
-- Returns True if recompilation is required.
checkDependencies :: HscEnv -> ModSummary -> ModIface -> IfG RecompileRequired
checkDependencies hsc_env summary iface
= checkList (map dep_missing (ms_imps summary ++ ms_srcimps summary))
where
prev_dep_mods = dep_mods (mi_deps iface)
prev_dep_pkgs = dep_pkgs (mi_deps iface)
this_pkg = thisPackage (hsc_dflags hsc_env)
dep_missing (L _ (ImportDecl { ideclName = L _ mod, ideclPkgQual = pkg })) = do
find_res <- liftIO $ findImportedModule hsc_env mod pkg
let reason = moduleNameString mod ++ " changed"
case find_res of
FoundModule h -> check_mod reason (fr_mod h)
FoundSigs hs _backing -> check_mods reason (map fr_mod hs)
_otherwise -> return (RecompBecause reason)
check_mods _ [] = return UpToDate
check_mods reason (m:ms) = do
r <- check_mod reason m
case r of
UpToDate -> check_mods reason ms
_otherwise -> return r
check_mod reason mod
| pkg == this_pkg
= if moduleName mod `notElem` map fst prev_dep_mods
then do traceHiDiffs $
text "imported module " <> quotes (ppr mod) <>
text " not among previous dependencies"
return (RecompBecause reason)
else
return UpToDate
| otherwise
= if pkg `notElem` (map fst prev_dep_pkgs)
then do traceHiDiffs $
text "imported module " <> quotes (ppr mod) <>
text " is from package " <> quotes (ppr pkg) <>
text ", which is not among previous dependencies"
return (RecompBecause reason)
else
return UpToDate
where pkg = modulePackageKey mod
needInterface :: Module -> (ModIface -> IfG RecompileRequired)
-> IfG RecompileRequired
needInterface mod continue
= do -- Load the imported interface if possible
let doc_str = sep [ptext (sLit "need version info for"), ppr mod]
traceHiDiffs (text "Checking usages for module" <+> ppr mod)
mb_iface <- loadInterface doc_str mod ImportBySystem
-- Load the interface, but don't complain on failure;
-- Instead, get an Either back which we can test
case mb_iface of
Failed _ -> do
traceHiDiffs (sep [ptext (sLit "Couldn't load interface for module"),
ppr mod])
return MustCompile
-- Couldn't find or parse a module mentioned in the
-- old interface file. Don't complain: it might
-- just be that the current module doesn't need that
-- import and it's been deleted
Succeeded iface -> continue iface
-- | Given the usage information extracted from the old
-- M.hi file for the module being compiled, figure out
-- whether M needs to be recompiled.
checkModUsage :: PackageKey -> Usage -> IfG RecompileRequired
checkModUsage _this_pkg UsagePackageModule{
usg_mod = mod,
usg_mod_hash = old_mod_hash }
= needInterface mod $ \iface -> do
let reason = moduleNameString (moduleName mod) ++ " changed"
checkModuleFingerprint reason old_mod_hash (mi_mod_hash iface)
-- We only track the ABI hash of package modules, rather than
-- individual entity usages, so if the ABI hash changes we must
-- recompile. This is safe but may entail more recompilation when
-- a dependent package has changed.
checkModUsage this_pkg UsageHomeModule{
usg_mod_name = mod_name,
usg_mod_hash = old_mod_hash,
usg_exports = maybe_old_export_hash,
usg_entities = old_decl_hash }
= do
let mod = mkModule this_pkg mod_name
needInterface mod $ \iface -> do
let
new_mod_hash = mi_mod_hash iface
new_decl_hash = mi_hash_fn iface
new_export_hash = mi_exp_hash iface
reason = moduleNameString mod_name ++ " changed"
-- CHECK MODULE
recompile <- checkModuleFingerprint reason old_mod_hash new_mod_hash
if not (recompileRequired recompile)
then return UpToDate
else do
-- CHECK EXPORT LIST
checkMaybeHash reason maybe_old_export_hash new_export_hash
(ptext (sLit " Export list changed")) $ do
-- CHECK ITEMS ONE BY ONE
recompile <- checkList [ checkEntityUsage reason new_decl_hash u
| u <- old_decl_hash]
if recompileRequired recompile
then return recompile -- This one failed, so just bail out now
else up_to_date (ptext (sLit " Great! The bits I use are up to date"))
checkModUsage _this_pkg UsageFile{ usg_file_path = file,
usg_file_hash = old_hash } =
liftIO $
handleIO handle $ do
new_hash <- getFileHash file
if (old_hash /= new_hash)
then return recomp
else return UpToDate
where
recomp = RecompBecause (file ++ " changed")
handle =
#ifdef DEBUG
\e -> pprTrace "UsageFile" (text (show e)) $ return recomp
#else
\_ -> return recomp -- if we can't find the file, just recompile, don't fail
#endif
------------------------
checkModuleFingerprint :: String -> Fingerprint -> Fingerprint
-> IfG RecompileRequired
checkModuleFingerprint reason old_mod_hash new_mod_hash
| new_mod_hash == old_mod_hash
= up_to_date (ptext (sLit "Module fingerprint unchanged"))
| otherwise
= out_of_date_hash reason (ptext (sLit " Module fingerprint has changed"))
old_mod_hash new_mod_hash
------------------------
checkMaybeHash :: String -> Maybe Fingerprint -> Fingerprint -> SDoc
-> IfG RecompileRequired -> IfG RecompileRequired
checkMaybeHash reason maybe_old_hash new_hash doc continue
| Just hash <- maybe_old_hash, hash /= new_hash
= out_of_date_hash reason doc hash new_hash
| otherwise
= continue
------------------------
checkEntityUsage :: String
-> (OccName -> Maybe (OccName, Fingerprint))
-> (OccName, Fingerprint)
-> IfG RecompileRequired
checkEntityUsage reason new_hash (name,old_hash)
= case new_hash name of
Nothing -> -- We used it before, but it ain't there now
out_of_date reason (sep [ptext (sLit "No longer exported:"), ppr name])
Just (_, new_hash) -- It's there, but is it up to date?
| new_hash == old_hash -> do traceHiDiffs (text " Up to date" <+> ppr name <+> parens (ppr new_hash))
return UpToDate
| otherwise -> out_of_date_hash reason (ptext (sLit " Out of date:") <+> ppr name)
old_hash new_hash
up_to_date :: SDoc -> IfG RecompileRequired
up_to_date msg = traceHiDiffs msg >> return UpToDate
out_of_date :: String -> SDoc -> IfG RecompileRequired
out_of_date reason msg = traceHiDiffs msg >> return (RecompBecause reason)
out_of_date_hash :: String -> SDoc -> Fingerprint -> Fingerprint -> IfG RecompileRequired
out_of_date_hash reason msg old_hash new_hash
= out_of_date reason (hsep [msg, ppr old_hash, ptext (sLit "->"), ppr new_hash])
----------------------
checkList :: [IfG RecompileRequired] -> IfG RecompileRequired
-- This helper is used in several places
checkList [] = return UpToDate
checkList (check:checks) = do recompile <- check
if recompileRequired recompile
then return recompile
else checkList checks
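{- A usage sketch for checkList: checks run left to right and stop at the
   first one that demands recompilation, e.g. (in the style of checkVersions
   above)

     recomp <- checkList [ checkFlagHash hsc_env iface
                         , checkDependencies hsc_env mod_summary iface ]

   behaves like running checkFlagHash first and consulting checkDependencies
   only if the flags were unchanged.
-}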
{-
************************************************************************
* *
Converting things to their Iface equivalents
* *
************************************************************************
-}
tyThingToIfaceDecl :: TyThing -> IfaceDecl
tyThingToIfaceDecl (AnId id) = idToIfaceDecl id
tyThingToIfaceDecl (ATyCon tycon) = snd (tyConToIfaceDecl emptyTidyEnv tycon)
tyThingToIfaceDecl (ACoAxiom ax) = coAxiomToIfaceDecl ax
tyThingToIfaceDecl (AConLike cl) = case cl of
RealDataCon dc -> dataConToIfaceDecl dc -- for ppr purposes only
PatSynCon ps -> patSynToIfaceDecl ps
--------------------------
idToIfaceDecl :: Id -> IfaceDecl
-- The Id is already tidied, so that locally-bound names
-- (lambdas, for-alls) already have non-clashing OccNames
-- We can't tidy it here, locally, because it may have
-- free variables in its type or IdInfo
idToIfaceDecl id
= IfaceId { ifName = getOccName id,
ifType = toIfaceType (idType id),
ifIdDetails = toIfaceIdDetails (idDetails id),
ifIdInfo = toIfaceIdInfo (idInfo id) }
--------------------------
dataConToIfaceDecl :: DataCon -> IfaceDecl
dataConToIfaceDecl dataCon
= IfaceId { ifName = getOccName dataCon,
ifType = toIfaceType (dataConUserType dataCon),
ifIdDetails = IfVanillaId,
ifIdInfo = NoInfo }
--------------------------
patSynToIfaceDecl :: PatSyn -> IfaceDecl
patSynToIfaceDecl ps
= IfacePatSyn { ifName = getOccName . getName $ ps
, ifPatMatcher = to_if_pr (patSynMatcher ps)
, ifPatBuilder = fmap to_if_pr (patSynBuilder ps)
, ifPatIsInfix = patSynIsInfix ps
, ifPatUnivTvs = toIfaceTvBndrs univ_tvs'
, ifPatExTvs = toIfaceTvBndrs ex_tvs'
, ifPatProvCtxt = tidyToIfaceContext env2 prov_theta
, ifPatReqCtxt = tidyToIfaceContext env2 req_theta
, ifPatArgs = map (tidyToIfaceType env2) args
, ifPatTy = tidyToIfaceType env2 rhs_ty
}
where
(univ_tvs, ex_tvs, prov_theta, req_theta, args, rhs_ty) = patSynSig ps
(env1, univ_tvs') = tidyTyVarBndrs emptyTidyEnv univ_tvs
(env2, ex_tvs') = tidyTyVarBndrs env1 ex_tvs
to_if_pr (id, needs_dummy) = (idName id, needs_dummy)
--------------------------
coAxiomToIfaceDecl :: CoAxiom br -> IfaceDecl
-- We *do* tidy Axioms, because they are not (and cannot
-- conveniently be) built in tidy form
coAxiomToIfaceDecl ax@(CoAxiom { co_ax_tc = tycon, co_ax_branches = branches
, co_ax_role = role })
= IfaceAxiom { ifName = name
, ifTyCon = toIfaceTyCon tycon
, ifRole = role
, ifAxBranches = brListMap (coAxBranchToIfaceBranch tycon
(brListMap coAxBranchLHS branches))
branches }
where
name = getOccName ax
-- 2nd parameter is the list of branch LHSs, for conversion from incompatible branches
-- to incompatible indices
-- See Note [Storing compatibility] in CoAxiom
coAxBranchToIfaceBranch :: TyCon -> [[Type]] -> CoAxBranch -> IfaceAxBranch
coAxBranchToIfaceBranch tc lhs_s
branch@(CoAxBranch { cab_incomps = incomps })
= (coAxBranchToIfaceBranch' tc branch) { ifaxbIncomps = iface_incomps }
where
iface_incomps = map (expectJust "iface_incomps"
. (flip findIndex lhs_s
. eqTypes)
. coAxBranchLHS) incomps
-- use this one for standalone branches without incompatibles
coAxBranchToIfaceBranch' :: TyCon -> CoAxBranch -> IfaceAxBranch
coAxBranchToIfaceBranch' tc (CoAxBranch { cab_tvs = tvs, cab_lhs = lhs
, cab_roles = roles, cab_rhs = rhs })
= IfaceAxBranch { ifaxbTyVars = toIfaceTvBndrs tv_bndrs
, ifaxbLHS = tidyToIfaceTcArgs env1 tc lhs
, ifaxbRoles = roles
, ifaxbRHS = tidyToIfaceType env1 rhs
, ifaxbIncomps = [] }
where
(env1, tv_bndrs) = tidyTyClTyVarBndrs emptyTidyEnv tvs
-- Don't re-bind in-scope tyvars
-- See Note [CoAxBranch type variables] in CoAxiom
-----------------
tyConToIfaceDecl :: TidyEnv -> TyCon -> (TidyEnv, IfaceDecl)
-- We *do* tidy TyCons, because they are not (and cannot
-- conveniently be) built in tidy form
-- The returned TidyEnv is the one after tidying the tyConTyVars
tyConToIfaceDecl env tycon
| Just clas <- tyConClass_maybe tycon
= classToIfaceDecl env clas
| Just syn_rhs <- synTyConRhs_maybe tycon
= ( tc_env1
, IfaceSynonym { ifName = getOccName tycon,
ifTyVars = if_tc_tyvars,
ifRoles = tyConRoles tycon,
ifSynRhs = if_syn_type syn_rhs,
ifSynKind = tidyToIfaceType tc_env1 (synTyConResKind tycon)
})
| Just fam_flav <- famTyConFlav_maybe tycon
= ( tc_env1
, IfaceFamily { ifName = getOccName tycon,
ifTyVars = if_tc_tyvars,
ifFamFlav = to_if_fam_flav fam_flav,
ifFamKind = tidyToIfaceType tc_env1 (synTyConResKind tycon)
})
| isAlgTyCon tycon
= ( tc_env1
, IfaceData { ifName = getOccName tycon,
ifCType = tyConCType tycon,
ifTyVars = if_tc_tyvars,
ifRoles = tyConRoles tycon,
ifCtxt = tidyToIfaceContext tc_env1 (tyConStupidTheta tycon),
ifCons = ifaceConDecls (algTyConRhs tycon),
ifRec = boolToRecFlag (isRecursiveTyCon tycon),
ifGadtSyntax = isGadtSyntaxTyCon tycon,
ifPromotable = isJust (promotableTyCon_maybe tycon),
ifParent = parent })
| otherwise -- FunTyCon, PrimTyCon, promoted TyCon/DataCon
-- For pretty printing purposes only.
= ( env
, IfaceData { ifName = getOccName tycon,
ifCType = Nothing,
ifTyVars = funAndPrimTyVars,
ifRoles = tyConRoles tycon,
ifCtxt = [],
ifCons = IfDataTyCon [],
ifRec = boolToRecFlag False,
ifGadtSyntax = False,
ifPromotable = False,
ifParent = IfNoParent })
where
(tc_env1, tc_tyvars) = tidyTyClTyVarBndrs env (tyConTyVars tycon)
if_tc_tyvars = toIfaceTvBndrs tc_tyvars
if_syn_type ty = tidyToIfaceType tc_env1 ty
funAndPrimTyVars = toIfaceTvBndrs $ take (tyConArity tycon) alphaTyVars
parent = case tyConFamInstSig_maybe tycon of
Just (tc, ty, ax) -> IfDataInstance (coAxiomName ax)
(toIfaceTyCon tc)
(tidyToIfaceTcArgs tc_env1 tc ty)
Nothing -> IfNoParent
to_if_fam_flav OpenSynFamilyTyCon = IfaceOpenSynFamilyTyCon
to_if_fam_flav (ClosedSynFamilyTyCon ax) = IfaceClosedSynFamilyTyCon axn ibr
where defs = fromBranchList $ coAxiomBranches ax
ibr = map (coAxBranchToIfaceBranch' tycon) defs
axn = coAxiomName ax
to_if_fam_flav AbstractClosedSynFamilyTyCon
= IfaceAbstractClosedSynFamilyTyCon
to_if_fam_flav (BuiltInSynFamTyCon {})
= IfaceBuiltInSynFamTyCon
ifaceConDecls (NewTyCon { data_con = con }) = IfNewTyCon (ifaceConDecl con)
ifaceConDecls (DataTyCon { data_cons = cons }) = IfDataTyCon (map ifaceConDecl cons)
ifaceConDecls (DataFamilyTyCon {}) = IfDataFamTyCon
ifaceConDecls (TupleTyCon { data_con = con }) = IfDataTyCon [ifaceConDecl con]
ifaceConDecls (AbstractTyCon distinct) = IfAbstractTyCon distinct
-- The AbstractTyCon case happens when a TyCon has been trimmed
-- during tidying.
-- Furthermore, tyThingToIfaceDecl is also used in TcRnDriver
-- for GHCi, when browsing a module, in which case the
-- AbstractTyCon and TupleTyCon cases are perfectly sensible.
-- (Tuple declarations are not serialised into interface files.)
ifaceConDecl data_con
= IfCon { ifConOcc = getOccName (dataConName data_con),
ifConInfix = dataConIsInfix data_con,
ifConWrapper = isJust (dataConWrapId_maybe data_con),
ifConExTvs = toIfaceTvBndrs ex_tvs',
ifConEqSpec = map to_eq_spec eq_spec,
ifConCtxt = tidyToIfaceContext con_env2 theta,
ifConArgTys = map (tidyToIfaceType con_env2) arg_tys,
ifConFields = map getOccName
(dataConFieldLabels data_con),
ifConStricts = map (toIfaceBang con_env2) (dataConImplBangs data_con) }
where
(univ_tvs, ex_tvs, eq_spec, theta, arg_tys, _) = dataConFullSig data_con
-- Tidy the univ_tvs of the data constructor to be identical
-- to the tyConTyVars of the type constructor. This means
-- (a) we don't need to redundantly put them into the interface file
-- (b) when pretty-printing an Iface data declaration in H98-style syntax,
-- we know that the type variables will line up
-- The latter (b) is important because we pretty-print type constructors
-- by converting to IfaceSyn and pretty-printing that
con_env1 = (fst tc_env1, mkVarEnv (zipEqual "ifaceConDecl" univ_tvs tc_tyvars))
-- A bit grimy, perhaps, but it's simple!
(con_env2, ex_tvs') = tidyTyVarBndrs con_env1 ex_tvs
to_eq_spec (tv,ty) = (toIfaceTyVar (tidyTyVar con_env2 tv), tidyToIfaceType con_env2 ty)
toIfaceBang :: TidyEnv -> HsImplBang -> IfaceBang
toIfaceBang _ HsNoBang = IfNoBang
toIfaceBang _ (HsUnpack Nothing) = IfUnpack
toIfaceBang env (HsUnpack (Just co)) = IfUnpackCo (toIfaceCoercion (tidyCo env co))
toIfaceBang _ HsStrict = IfStrict
toIfaceBang _ (HsSrcBang {}) = panic "toIfaceBang"
classToIfaceDecl :: TidyEnv -> Class -> (TidyEnv, IfaceDecl)
classToIfaceDecl env clas
= ( env1
, IfaceClass { ifCtxt = tidyToIfaceContext env1 sc_theta,
ifName = getOccName (classTyCon clas),
ifTyVars = toIfaceTvBndrs clas_tyvars',
ifRoles = tyConRoles (classTyCon clas),
ifFDs = map toIfaceFD clas_fds,
ifATs = map toIfaceAT clas_ats,
ifSigs = map toIfaceClassOp op_stuff,
ifMinDef = fmap getFS (classMinimalDef clas),
ifRec = boolToRecFlag (isRecursiveTyCon tycon) })
where
(clas_tyvars, clas_fds, sc_theta, _, clas_ats, op_stuff)
= classExtraBigSig clas
tycon = classTyCon clas
(env1, clas_tyvars') = tidyTyVarBndrs env clas_tyvars
toIfaceAT :: ClassATItem -> IfaceAT
toIfaceAT (ATI tc def)
= IfaceAT if_decl (fmap (tidyToIfaceType env2) def)
where
(env2, if_decl) = tyConToIfaceDecl env1 tc
toIfaceClassOp (sel_id, def_meth)
= ASSERT(sel_tyvars == clas_tyvars)
IfaceClassOp (getOccName sel_id) (toDmSpec def_meth)
(tidyToIfaceType env1 op_ty)
where
-- Be careful when splitting the type, because of things
-- like class Foo a where
-- op :: (?x :: String) => a -> a
-- and class Baz a where
-- op :: (Ord a) => a -> a
(sel_tyvars, rho_ty) = splitForAllTys (idType sel_id)
op_ty = funResultTy rho_ty
toDmSpec NoDefMeth = NoDM
toDmSpec (GenDefMeth _) = GenericDM
toDmSpec (DefMeth _) = VanillaDM
toIfaceFD (tvs1, tvs2) = (map (getFS . tidyTyVar env1) tvs1,
map (getFS . tidyTyVar env1) tvs2)
--------------------------
tidyToIfaceType :: TidyEnv -> Type -> IfaceType
tidyToIfaceType env ty = toIfaceType (tidyType env ty)
tidyToIfaceTcArgs :: TidyEnv -> TyCon -> [Type] -> IfaceTcArgs
tidyToIfaceTcArgs env tc tys = toIfaceTcArgs tc (tidyTypes env tys)
tidyToIfaceContext :: TidyEnv -> ThetaType -> IfaceContext
tidyToIfaceContext env theta = map (tidyToIfaceType env) theta
tidyTyClTyVarBndrs :: TidyEnv -> [TyVar] -> (TidyEnv, [TyVar])
tidyTyClTyVarBndrs env tvs = mapAccumL tidyTyClTyVarBndr env tvs
tidyTyClTyVarBndr :: TidyEnv -> TyVar -> (TidyEnv, TyVar)
-- If the type variable "binder" is in scope, don't re-bind it
-- In a class decl, for example, the ATD binders mention
-- (and must mention) the class tyvars
tidyTyClTyVarBndr env@(_, subst) tv
| Just tv' <- lookupVarEnv subst tv = (env, tv')
| otherwise = tidyTyVarBndr env tv
tidyTyVar :: TidyEnv -> TyVar -> TyVar
tidyTyVar (_, subst) tv = lookupVarEnv subst tv `orElse` tv
-- TcType.tidyTyVarOcc messes around with FlatSkols
getFS :: NamedThing a => a -> FastString
getFS x = occNameFS (getOccName x)
--------------------------
instanceToIfaceInst :: ClsInst -> IfaceClsInst
instanceToIfaceInst (ClsInst { is_dfun = dfun_id, is_flag = oflag
, is_cls_nm = cls_name, is_cls = cls
, is_tcs = mb_tcs
, is_orphan = orph })
= ASSERT( cls_name == className cls )
IfaceClsInst { ifDFun = dfun_name,
ifOFlag = oflag,
ifInstCls = cls_name,
ifInstTys = map do_rough mb_tcs,
ifInstOrph = orph }
where
do_rough Nothing = Nothing
do_rough (Just n) = Just (toIfaceTyCon_name n)
dfun_name = idName dfun_id
--------------------------
famInstToIfaceFamInst :: FamInst -> IfaceFamInst
famInstToIfaceFamInst (FamInst { fi_axiom = axiom,
fi_fam = fam,
fi_tcs = roughs })
= IfaceFamInst { ifFamInstAxiom = coAxiomName axiom
, ifFamInstFam = fam
, ifFamInstTys = map do_rough roughs
, ifFamInstOrph = orph }
where
do_rough Nothing = Nothing
do_rough (Just n) = Just (toIfaceTyCon_name n)
fam_decl = tyConName $ coAxiomTyCon axiom
mod = ASSERT( isExternalName (coAxiomName axiom) )
nameModule (coAxiomName axiom)
is_local name = nameIsLocalOrFrom mod name
lhs_names = filterNameSet is_local (orphNamesOfCoCon axiom)
orph | is_local fam_decl
= NotOrphan (nameOccName fam_decl)
| not (isEmptyNameSet lhs_names)
= NotOrphan (nameOccName (head (nameSetElems lhs_names)))
| otherwise
= IsOrphan
--------------------------
toIfaceLetBndr :: Id -> IfaceLetBndr
toIfaceLetBndr id = IfLetBndr (occNameFS (getOccName id))
(toIfaceType (idType id))
(toIfaceIdInfo (idInfo id))
-- Put into the interface file any IdInfo that CoreTidy.tidyLetBndr
-- has left on the Id. See Note [IdInfo on nested let-bindings] in IfaceSyn
--------------------------
toIfaceIdDetails :: IdDetails -> IfaceIdDetails
toIfaceIdDetails VanillaId = IfVanillaId
toIfaceIdDetails (DFunId {}) = IfDFunId
toIfaceIdDetails (RecSelId { sel_naughty = n
, sel_tycon = tc }) = IfRecSelId (toIfaceTyCon tc) n
toIfaceIdDetails other = pprTrace "toIfaceIdDetails" (ppr other)
IfVanillaId -- Unexpected
toIfaceIdInfo :: IdInfo -> IfaceIdInfo
toIfaceIdInfo id_info
= case catMaybes [arity_hsinfo, caf_hsinfo, strict_hsinfo,
inline_hsinfo, unfold_hsinfo] of
[] -> NoInfo
infos -> HasInfo infos
-- NB: strictness and arity must appear in the list before unfolding
-- See TcIface.tcUnfolding
where
------------ Arity --------------
arity_info = arityInfo id_info
arity_hsinfo | arity_info == 0 = Nothing
| otherwise = Just (HsArity arity_info)
------------ Caf Info --------------
caf_info = cafInfo id_info
caf_hsinfo = case caf_info of
NoCafRefs -> Just HsNoCafRefs
_other -> Nothing
------------ Strictness --------------
-- No point in explicitly exporting TopSig
sig_info = strictnessInfo id_info
strict_hsinfo | not (isNopSig sig_info) = Just (HsStrictness sig_info)
| otherwise = Nothing
------------ Unfolding --------------
unfold_hsinfo = toIfUnfolding loop_breaker (unfoldingInfo id_info)
loop_breaker = isStrongLoopBreaker (occInfo id_info)
------------ Inline prag --------------
inline_prag = inlinePragInfo id_info
inline_hsinfo | isDefaultInlinePragma inline_prag = Nothing
| otherwise = Just (HsInline inline_prag)
--------------------------
toIfUnfolding :: Bool -> Unfolding -> Maybe IfaceInfoItem
toIfUnfolding lb (CoreUnfolding { uf_tmpl = rhs
, uf_src = src
, uf_guidance = guidance })
= Just $ HsUnfold lb $
case src of
InlineStable
-> case guidance of
UnfWhen {ug_arity = arity, ug_unsat_ok = unsat_ok, ug_boring_ok = boring_ok }
-> IfInlineRule arity unsat_ok boring_ok if_rhs
_other -> IfCoreUnfold True if_rhs
InlineCompulsory -> IfCompulsory if_rhs
InlineRhs -> IfCoreUnfold False if_rhs
-- Yes, even if guidance is UnfNever, expose the unfolding
-- If we didn't want to expose the unfolding, TidyPgm would
-- have stuck in NoUnfolding. For supercompilation we want
-- to see that unfolding!
where
if_rhs = toIfaceExpr rhs
toIfUnfolding lb (DFunUnfolding { df_bndrs = bndrs, df_args = args })
= Just (HsUnfold lb (IfDFunUnfold (map toIfaceBndr bndrs) (map toIfaceExpr args)))
-- No need to serialise the data constructor;
-- we can recover it from the type of the dfun
toIfUnfolding _ _
= Nothing
--------------------------
coreRuleToIfaceRule :: Module -> CoreRule -> IfaceRule
coreRuleToIfaceRule _ (BuiltinRule { ru_fn = fn})
= pprTrace "toHsRule: builtin" (ppr fn) $
bogusIfaceRule fn
coreRuleToIfaceRule mod rule@(Rule { ru_name = name, ru_fn = fn,
ru_act = act, ru_bndrs = bndrs,
ru_args = args, ru_rhs = rhs,
ru_auto = auto })
= IfaceRule { ifRuleName = name, ifActivation = act,
ifRuleBndrs = map toIfaceBndr bndrs,
ifRuleHead = fn,
ifRuleArgs = map do_arg args,
ifRuleRhs = toIfaceExpr rhs,
ifRuleAuto = auto,
ifRuleOrph = orph }
where
-- For type args we must remove synonyms from the outermost
-- level. Reason: so that when we read it back in we'll
-- construct the same ru_rough field as we have right now;
-- see tcIfaceRule
do_arg (Type ty) = IfaceType (toIfaceType (deNoteType ty))
do_arg (Coercion co) = IfaceCo (toIfaceCoercion co)
do_arg arg = toIfaceExpr arg
-- Compute orphanhood. See Note [Orphans] in InstEnv
-- A rule is an orphan only if none of the variables
-- mentioned on its left-hand side are locally defined
lhs_names = nameSetElems (ruleLhsOrphNames rule)
orph = case filter (nameIsLocalOrFrom mod) lhs_names of
(n : _) -> NotOrphan (nameOccName n)
[] -> IsOrphan
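-- A worked illustration of the orphan check above (comment added here, not
-- from the original source): for a rule in module M whose LHS mentions a
-- function g defined in M, g survives the nameIsLocalOrFrom filter, so the
-- rule is NotOrphan keyed on g's OccName; if every name mentioned on the
-- LHS comes from other modules, the filter yields nothing and the rule is
-- IsOrphan.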
bogusIfaceRule :: Name -> IfaceRule
bogusIfaceRule id_name
= IfaceRule { ifRuleName = fsLit "bogus", ifActivation = NeverActive,
ifRuleBndrs = [], ifRuleHead = id_name, ifRuleArgs = [],
ifRuleRhs = IfaceExt id_name, ifRuleOrph = IsOrphan,
ifRuleAuto = True }
---------------------
toIfaceExpr :: CoreExpr -> IfaceExpr
toIfaceExpr (Var v) = toIfaceVar v
toIfaceExpr (Lit l) = IfaceLit l
toIfaceExpr (Type ty) = IfaceType (toIfaceType ty)
toIfaceExpr (Coercion co) = IfaceCo (toIfaceCoercion co)
toIfaceExpr (Lam x b) = IfaceLam (toIfaceBndr x, toIfaceOneShot x) (toIfaceExpr b)
toIfaceExpr (App f a) = toIfaceApp f [a]
toIfaceExpr (Case s x ty as)
| null as = IfaceECase (toIfaceExpr s) (toIfaceType ty)
| otherwise = IfaceCase (toIfaceExpr s) (getFS x) (map toIfaceAlt as)
toIfaceExpr (Let b e) = IfaceLet (toIfaceBind b) (toIfaceExpr e)
toIfaceExpr (Cast e co) = IfaceCast (toIfaceExpr e) (toIfaceCoercion co)
toIfaceExpr (Tick t e)
| Just t' <- toIfaceTickish t = IfaceTick t' (toIfaceExpr e)
| otherwise = toIfaceExpr e
toIfaceOneShot :: Id -> IfaceOneShot
toIfaceOneShot id | isId id
, OneShotLam <- oneShotInfo (idInfo id)
= IfaceOneShot
| otherwise
= IfaceNoOneShot
---------------------
toIfaceTickish :: Tickish Id -> Maybe IfaceTickish
toIfaceTickish (ProfNote cc tick push) = Just (IfaceSCC cc tick push)
toIfaceTickish (HpcTick modl ix) = Just (IfaceHpcTick modl ix)
toIfaceTickish (SourceNote src names) = Just (IfaceSource src names)
toIfaceTickish (Breakpoint {}) = Nothing
-- Ignore breakpoints, since they are relevant only to GHCi, and
-- should not be serialised (Trac #8333)
---------------------
toIfaceBind :: Bind Id -> IfaceBinding
toIfaceBind (NonRec b r) = IfaceNonRec (toIfaceLetBndr b) (toIfaceExpr r)
toIfaceBind (Rec prs) = IfaceRec [(toIfaceLetBndr b, toIfaceExpr r) | (b,r) <- prs]
---------------------
toIfaceAlt :: (AltCon, [Var], CoreExpr)
-> (IfaceConAlt, [FastString], IfaceExpr)
toIfaceAlt (c,bs,r) = (toIfaceCon c, map getFS bs, toIfaceExpr r)
---------------------
toIfaceCon :: AltCon -> IfaceConAlt
toIfaceCon (DataAlt dc) = IfaceDataAlt (getName dc)
toIfaceCon (LitAlt l) = IfaceLitAlt l
toIfaceCon DEFAULT = IfaceDefault
---------------------
toIfaceApp :: Expr CoreBndr -> [Arg CoreBndr] -> IfaceExpr
toIfaceApp (App f a) as = toIfaceApp f (a:as)
toIfaceApp (Var v) as
= case isDataConWorkId_maybe v of
-- We convert the *worker* for tuples into IfaceTuples
Just dc | saturated
, Just tup_sort <- tyConTuple_maybe tc
-> IfaceTuple tup_sort tup_args
where
val_args = dropWhile isTypeArg as
saturated = val_args `lengthIs` idArity v
tup_args = map toIfaceExpr val_args
tc = dataConTyCon dc
_ -> mkIfaceApps (toIfaceVar v) as
toIfaceApp e as = mkIfaceApps (toIfaceExpr e) as
mkIfaceApps :: IfaceExpr -> [CoreExpr] -> IfaceExpr
mkIfaceApps f as = foldl (\f a -> IfaceApp f (toIfaceExpr a)) f as
---------------------
toIfaceVar :: Id -> IfaceExpr
toIfaceVar v
| Just fcall <- isFCallId_maybe v = IfaceFCall fcall (toIfaceType (idType v))
-- Foreign calls have special syntax
| isExternalName name = IfaceExt name
| otherwise = IfaceLcl (getFS name)
where name = idName v
|
christiaanb/ghc
|
compiler/iface/MkIface.hs
|
bsd-3-clause
| 88,206 | 130 | 25 | 28,061 | 15,223 | 8,127 | 7,096 | -1 | -1 |
-- |
-- Module : System.Random.MRG32K3A.Simple
-- Copyright : (c) 2015 Mathias Koerner
-- License : BSD3
--
-- Maintainer : [email protected]
-- Stability : experimental
-- Portability : portable
--
-- Portable pseudo-random number generator with small state and
-- skip-ahead that is used on CPUs and GPUs. The purpose of the code
-- is to provide a reference to test faster generators and to generate
-- the skip matrices as currently none of the Haskell random number
-- generators provide a skip-ahead.
--
module System.Random.MRG32K3A.Simple (
State,
Step,
advance,
stepBy,
seed,
nextWith,
next
-- * References
-- $references
) where
import Data.Bits
import Data.Matrix
type HalfState = Matrix Integer
type HalfStep = Matrix Integer
-- | State of MRG32k3a pseudo-random number generator
newtype State = State (HalfState,HalfState)
deriving Show
-- | Step of MRG32k3a pseudo-random number generator
newtype Step = Step (HalfStep,HalfStep)
deriving Show
-- Constants required by the generator
m1, m2, a12, a13m, a13, a21, a23m, a23 :: Integer
m1 = 2 ^ 32 - 209
m2 = 2 ^ 32 - 22853
a12 = 1403580
a13m = -810728
a13 = m1 + a13m
a21 = 527612
a23m = -1370589
a23 = m2 + a23m
halfStep1, halfStep2 :: HalfStep
halfStep1 = fromLists
[[ 0,a12,a13],
[ 1, 0, 0],
[ 0, 1, 0]]
halfStep2 = fromLists
[[a21, 0,a23],
[ 1, 0, 0],
[ 0, 1, 0]]
gen :: Step
gen = Step (halfStep1,halfStep2)
-- Start states for the random number generator.
--
-- The seed gives the number of skip aheads to perform before
-- generating random numbers.
--
start1, start2 :: HalfState
start1 = fromList 3 1 [16 .. ]
start2 = fromList 3 1 [512 .. ]
start :: State
start = State (start1,start2)
(.*.) :: Num a => (Matrix a, Matrix a) -> (Matrix a, Matrix a) -> (Matrix a, Matrix a)
(x1,x2) .*. (y1,y2) = (x1*y1,x2*y2)
(.%.) :: Integral a => (Matrix a, Matrix a) -> (a,a) -> (Matrix a, Matrix a)
(x1,x2) .%. (m1,m2) = (fmap (flip mod m1) x1,fmap (flip mod m2) x2)
-- | Return step to jump ahead or for leapfrogging.
-- Provides an efficient way to generate the stepping information to
-- either advance the random number stream by @n@ numbers or to generate
-- every @n@-th number.
stepBy :: Integer -> Step
stepBy n = go n id gen
where
id = Step (identity 3,identity 3)
go :: Integer -> Step -> Step -> Step
go 0 accGen pow2Gen = accGen
go n (Step accGen) (Step pow2Gen) = go (shiftR n 1) (Step accGen') (Step pow2Gen')
where
accGen' = if even n then accGen else (accGen .*. pow2Gen) .%. (m1,m2)
pow2Gen' = (pow2Gen .*. pow2Gen) .%. (m1,m2)
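-- Sanity check of the skip-ahead (an illustrative doctest-style comment
-- added here; it is not part of the original module). Jumping five steps
-- at once should give the same draw as advancing four single steps and
-- then drawing once:
--
-- >>> let s = seed 0
-- >>> fst (nextWith (stepBy 5) s) == fst (next (advance 4 s))
-- True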
-- | Advance the state by @n@ random number generation steps.
advance :: Integer -> State -> State
advance n s = snd $ nextWith (stepBy n) s
-- | Advance the default start state by @n@ random number generation steps.
seed :: Integer -> State
seed n = advance n start
-- | Generate a random number.
-- Generates the next random number with the given step. The random number
-- is in the range @0 .. m1 - 1@.
nextWith :: Step -> State -> (Integer,State)
nextWith (Step g) (State s) = (r,State s')
where
s' = (g .*. s) .%. (m1,m2)
r = (fst s' ! (1,1) + snd s' ! (1,1)) `mod` m1
-- | Generate a random number.
-- Generates the next random number. The random number is in the range @0 .. m1 - 1@.
next :: State -> (Integer,State)
next = nextWith gen
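-- A minimal usage sketch (illustrative only; the concrete draws depend on
-- the start state defined above, so only the range is checked):
--
-- >>> let s0 = seed 12345
-- >>> let (r1, s1) = next s0
-- >>> let (r2, _ ) = next s1
-- >>> all (\r -> 0 <= r && r < m1) [r1, r2]
-- True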
-- $references
--
-- * L'Ecuyer, P. (1999) "Good parameter sets for combined multiple recursive random number generators",
-- Operations Research 47 (1) 159-164, <http://www.iro.umontreal.ca/~lecuyer/myftp/papers/combmrg2.ps>
--
-- * Bradley, T. et al. (2011) "Parallelization Techniques for Random Number Generators",
-- GPU Computing Gems, <http://dx.doi.org/10.1016/B978-0-12-384988-5.00016-4>
--
|
mkoerner/MRG32K3A
|
System/Random/MRG32K3A/Simple.hs
|
bsd-3-clause
| 3,821 | 0 | 12 | 810 | 982 | 577 | 405 | 63 | 3 |
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE OverloadedStrings #-}
module Translation where
import Common
import Control.Applicative ((<|>))
import Control.Monad (unless)
import Control.Monad.Trans.Maybe
import qualified Data.Text
import Env
import PrettyPrint (pprint)
import Source.Syntax as S
import Target.Syntax as T
import Unbound.Generics.LocallyNameless
type TMonad = TcMonad S.TmName S.Type
topLike :: S.Type -> Bool
topLike S.TopT = True
topLike (S.Inter a b) = topLike a && topLike b
topLike (S.Arr _ b) = topLike b
topLike _ = False
ordinary :: S.Type -> Bool
ordinary (S.Arr _ _) = True
ordinary S.IntT = True
ordinary S.BoolT = True
ordinary (S.Product _ _) = True
ordinary _ = False
transType :: S.Type -> T.Type
transType S.IntT = T.IntT
transType S.BoolT = T.BoolT
transType (S.Arr a b) = T.Arr (transType a) (transType b)
transType (S.Inter a b) = T.Product (transType a) (transType b)
transType (S.Product a b) = T.Product (transType a) (transType b)
transType S.TopT = T.UnitT
(<:) :: S.Type -> S.Type -> MaybeT TMonad T.Expr
(<:) S.IntT S.IntT = return $ T.elam ("x", T.IntT) (T.evar "x")
(<:) S.BoolT S.BoolT = return $ T.elam ("x", T.BoolT) (T.evar "x")
(<:) a@(S.Arr a1 a2) (S.Arr b1 b2) = do
c1 <- b1 <: a1
c2 <- a2 <: b2
let body = T.App c2 (T.App (T.evar "f") (T.App c1 (T.evar "x")))
return $ T.elam ("f", transType a) (T.elam ("x", transType b1) body)
(<:) t1 (S.Inter t2 t3) = do
c1 <- t1 <: t2
c2 <- t1 <: t3
let vx = T.evar "x"
return $ T.elam ("x", transType t1) (T.Pair (T.App c1 vx) (T.App c2 vx))
(<:) a@(S.Inter t1 t2) t3 =
let
f c i = return $ T.elam ("x", transType a) (coercion t3 (T.App c (T.Project (T.evar "x") i)))
f1 = do c1 <- t1 <: t3
f c1 1
f2 = do c2 <- t2 <: t3
f c2 2
in
if ordinary t3
then f1 <|> f2
else MaybeT $ return Nothing
(<:) a@(S.Product a1 a2) (S.Product b1 b2) = do
c1 <- a1 <: b1
c2 <- a2 <: b2
let vp = T.evar "p"
p1 = T.App c1 (T.Project vp 1)
p2 = T.App c2 (T.Project vp 2)
return $ T.elam ("p", transType a) (T.Pair p1 p2)
(<:) a S.TopT = return $ T.elam ("x", transType a) T.Unit
(<:) _ _ = MaybeT $ return Nothing
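-- A worked example of the coercions built above (comment added for clarity;
-- the lambda notation is only illustrative): IntT <: Inter IntT TopT goes
-- through the intersection case and produces roughly
--   \(x:Int). ((\(x:Int). x) x, (\(x:Int). ()) x)
-- i.e. a pair of the identity coercion for IntT <: IntT and the erasing
-- coercion for IntT <: TopT.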
coercion :: S.Type -> T.Expr -> T.Expr
coercion a c =
let
go S.TopT = T.Unit
go (S.Arr a1 a2) = T.elam ("x", transType a1) (go a2)
go _ = error "Impossible!"
in
if topLike a
then go a
else c
disjoint :: S.Type -> S.Type -> Bool
disjoint = go
where
isPrimitive S.IntT = True
isPrimitive S.BoolT = True
isPrimitive _ = False
isProduct (S.Product _ _) = True
isProduct _ = False
-- i
go (S.Inter a1 a2) b = disjoint a1 b && disjoint a2 b
go a (S.Inter b1 b2) = disjoint a b1 && disjoint a b2
go (S.Arr _ a) (S.Arr _ b) = disjoint a b
go (S.Product a1 a2) (S.Product b1 b2) = disjoint a1 b1 || disjoint a2 b2
-- ax
go (S.Arr _ b) t = not (topLike b) && (isPrimitive t || isProduct t)
go t (S.Arr _ b) = not (topLike b) && (isPrimitive t || isProduct t)
go (S.Product _ _) t = isPrimitive t
go t (S.Product _ _) = isPrimitive t
go a b | isPrimitive a && isPrimitive b = a /= b
go _ _ = False
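-- Some illustrative cases of the algorithm above (doctest-style comment
-- added here, not in the original source):
--
-- >>> disjoint S.IntT S.BoolT
-- True
-- >>> disjoint S.IntT (S.Inter S.IntT S.BoolT)
-- False
-- >>> disjoint (S.Arr S.IntT S.IntT) (S.Arr S.BoolT S.IntT)
-- False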
translate :: S.Expr -> Either Data.Text.Text (S.Type, T.Expr)
translate = runTcMonad . trans
trans :: S.Expr -> TMonad (S.Type, T.Expr)
trans expr = case expr of
(S.Var x) -> do
t <- lookupTy x
return (t, T.evar $ show x)
(S.IntV v) -> return (S.IntT, T.IntV v)
(S.BoolV v) -> return (S.BoolT, T.BoolV v)
(S.Anno e t) -> do
e' <- check e t
return (t, e')
(S.App f a) -> do
(arr, f') <- trans f
case arr of
(S.Arr t1 t2) -> do
a' <- check a t1
return (t2, T.App f' a')
_ -> throwStrErr $ pprint arr ++ " is not an arrow type"
(S.PrimOp op e1 e2) ->
let
ck ta ts = do
e1' <- check e1 ta
e2' <- check e2 ta
return (ts, T.PrimOp op e1' e2')
in
case op of
(Arith _) -> ck S.IntT S.IntT
(Logical _) -> ck S.IntT S.BoolT
(S.Merge e1 e2) -> do
(t1, e1') <- trans e1
(t2, e2') <- trans e2
if disjoint t1 t2
then return (S.Inter t1 t2, T.Pair e1' e2')
else throwStrErr $ pprint t1 ++ " and " ++ pprint t2 ++ " are not disjoint"
(S.If p e1 e2) -> do
p' <- check p S.BoolT
(t1, e1') <- trans e1
(t2, e2') <- trans e2
if aeq t1 t2
then return (t1, T.If p' e1' e2')
else throwStrErr $ pprint t1 ++ " and " ++ pprint t2 ++ " must be the same type"
(S.Let bnd) -> do
((x, Embed e), b) <- unbind bnd
(et, e') <- trans e
(t, b') <- extendCtx (x, et) (trans b)
return (t, T.App (T.elam (show x, transType et) b') e')
(S.Pair e1 e2) -> do
(t1, e1') <- trans e1
(t2, e2') <- trans e2
return (S.Product t1 t2, T.Pair e1' e2')
(S.Project e i) -> do
(t, e') <- trans e
unless (i == 1 || i == 2) (throwStrErr "Projection index must be 1 or 2")
case t of
(S.Product t1 t2) -> return ([t1, t2] !! (i - 1), T.Project e' i)
_ -> throwStrErr $ pprint t ++ " is not a pair type"
S.Top -> return (S.TopT, T.Unit)
_ -> throwStrErr $ "Cannot infer " ++ pprint expr
where
check :: S.Expr -> S.Type -> TMonad T.Expr
check (S.Lam bnd) (S.Arr t1 t2) = do
(x, body) <- unbind bnd
body' <- extendCtx (x, t1) (check body t2)
return $ T.elam (show x, transType t1) body'
check e t = do
(t2, e') <- trans e
if aeq t t2
then return e'
else do
cf <- runMaybeT (t2 <: t)
case cf of
Just c -> return $ T.App c e'
Nothing -> throwStrErr $ pprint e ++ " cannot be type " ++ pprint t
|
lihuanglx/disjoint-intersection
|
src/Translation.hs
|
bsd-3-clause
| 5,824 | 0 | 19 | 1,758 | 2,945 | 1,464 | 1,481 | 162 | 21 |
{-# language CPP #-}
-- | = Name
--
-- VK_KHR_bind_memory2 - device extension
--
-- == VK_KHR_bind_memory2
--
-- [__Name String__]
-- @VK_KHR_bind_memory2@
--
-- [__Extension Type__]
-- Device extension
--
-- [__Registered Extension Number__]
-- 158
--
-- [__Revision__]
-- 1
--
-- [__Extension and Version Dependencies__]
--
-- - Requires Vulkan 1.0
--
-- [__Deprecation state__]
--
-- - /Promoted/ to
-- <https://www.khronos.org/registry/vulkan/specs/1.2-extensions/html/vkspec.html#versions-1.1-promotions Vulkan 1.1>
--
-- [__Contact__]
--
-- - Tobias Hector
-- <https://github.com/KhronosGroup/Vulkan-Docs/issues/new?body=[VK_KHR_bind_memory2] @tobski%0A<<Here describe the issue or question you have about the VK_KHR_bind_memory2 extension>> >
--
-- == Other Extension Metadata
--
-- [__Last Modified Date__]
-- 2017-09-05
--
-- [__IP Status__]
-- No known IP claims.
--
-- [__Interactions and External Dependencies__]
--
-- - Promoted to Vulkan 1.1 Core
--
-- [__Contributors__]
--
-- - Jeff Bolz, NVIDIA
--
-- - Tobias Hector, Imagination Technologies
--
-- == Description
--
-- This extension provides versions of
-- 'Vulkan.Core10.MemoryManagement.bindBufferMemory' and
-- 'Vulkan.Core10.MemoryManagement.bindImageMemory' that allow multiple
-- bindings to be performed at once, and are extensible.
--
-- This extension also introduces 'IMAGE_CREATE_ALIAS_BIT_KHR', which
-- allows “identical” images that alias the same memory to interpret the
-- contents consistently, even across image layout changes.
--
-- == Promotion to Vulkan 1.1
--
-- All functionality in this extension is included in core Vulkan 1.1, with
-- the KHR suffix omitted. The original type, enum and command names are
-- still available as aliases of the core functionality.
--
-- == New Commands
--
-- - 'bindBufferMemory2KHR'
--
-- - 'bindImageMemory2KHR'
--
-- == New Structures
--
-- - 'BindBufferMemoryInfoKHR'
--
-- - 'BindImageMemoryInfoKHR'
--
-- == New Enum Constants
--
-- - 'KHR_BIND_MEMORY_2_EXTENSION_NAME'
--
-- - 'KHR_BIND_MEMORY_2_SPEC_VERSION'
--
-- - Extending
-- 'Vulkan.Core10.Enums.ImageCreateFlagBits.ImageCreateFlagBits':
--
-- - 'IMAGE_CREATE_ALIAS_BIT_KHR'
--
-- - Extending 'Vulkan.Core10.Enums.StructureType.StructureType':
--
-- - 'STRUCTURE_TYPE_BIND_BUFFER_MEMORY_INFO_KHR'
--
-- - 'STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO_KHR'
--
-- == Version History
--
-- - Revision 1, 2017-05-19 (Tobias Hector)
--
-- - Pulled bind memory functions into their own extension
--
-- == See Also
--
-- 'BindBufferMemoryInfoKHR', 'BindImageMemoryInfoKHR',
-- 'bindBufferMemory2KHR', 'bindImageMemory2KHR'
--
-- == Document Notes
--
-- For more information, see the
-- <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/html/vkspec.html#VK_KHR_bind_memory2 Vulkan Specification>
--
-- This page is a generated document. Fixes and changes should be made to
-- the generator scripts, not directly.
module Vulkan.Extensions.VK_KHR_bind_memory2 ( pattern STRUCTURE_TYPE_BIND_BUFFER_MEMORY_INFO_KHR
, pattern STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO_KHR
, pattern IMAGE_CREATE_ALIAS_BIT_KHR
, bindBufferMemory2KHR
, bindImageMemory2KHR
, BindBufferMemoryInfoKHR
, BindImageMemoryInfoKHR
, KHR_BIND_MEMORY_2_SPEC_VERSION
, pattern KHR_BIND_MEMORY_2_SPEC_VERSION
, KHR_BIND_MEMORY_2_EXTENSION_NAME
, pattern KHR_BIND_MEMORY_2_EXTENSION_NAME
) where
import Data.String (IsString)
import Vulkan.Core11.Promoted_From_VK_KHR_bind_memory2 (bindBufferMemory2)
import Vulkan.Core11.Promoted_From_VK_KHR_bind_memory2 (bindImageMemory2)
import Vulkan.Core11.Promoted_From_VK_KHR_bind_memory2 (BindBufferMemoryInfo)
import Vulkan.Core11.Promoted_From_VK_KHR_bind_memory2 (BindImageMemoryInfo)
import Vulkan.Core10.Enums.ImageCreateFlagBits (ImageCreateFlags)
import Vulkan.Core10.Enums.ImageCreateFlagBits (ImageCreateFlagBits(IMAGE_CREATE_ALIAS_BIT))
import Vulkan.Core10.Enums.StructureType (StructureType(STRUCTURE_TYPE_BIND_BUFFER_MEMORY_INFO))
import Vulkan.Core10.Enums.StructureType (StructureType(STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO))
-- No documentation found for TopLevel "VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_INFO_KHR"
pattern STRUCTURE_TYPE_BIND_BUFFER_MEMORY_INFO_KHR = STRUCTURE_TYPE_BIND_BUFFER_MEMORY_INFO
-- No documentation found for TopLevel "VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO_KHR"
pattern STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO_KHR = STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO
-- No documentation found for TopLevel "VK_IMAGE_CREATE_ALIAS_BIT_KHR"
pattern IMAGE_CREATE_ALIAS_BIT_KHR = IMAGE_CREATE_ALIAS_BIT
-- No documentation found for TopLevel "vkBindBufferMemory2KHR"
bindBufferMemory2KHR = bindBufferMemory2
-- No documentation found for TopLevel "vkBindImageMemory2KHR"
bindImageMemory2KHR = bindImageMemory2
-- No documentation found for TopLevel "VkBindBufferMemoryInfoKHR"
type BindBufferMemoryInfoKHR = BindBufferMemoryInfo
-- No documentation found for TopLevel "VkBindImageMemoryInfoKHR"
type BindImageMemoryInfoKHR = BindImageMemoryInfo
type KHR_BIND_MEMORY_2_SPEC_VERSION = 1
-- No documentation found for TopLevel "VK_KHR_BIND_MEMORY_2_SPEC_VERSION"
pattern KHR_BIND_MEMORY_2_SPEC_VERSION :: forall a . Integral a => a
pattern KHR_BIND_MEMORY_2_SPEC_VERSION = 1
type KHR_BIND_MEMORY_2_EXTENSION_NAME = "VK_KHR_bind_memory2"
-- No documentation found for TopLevel "VK_KHR_BIND_MEMORY_2_EXTENSION_NAME"
pattern KHR_BIND_MEMORY_2_EXTENSION_NAME :: forall a . (Eq a, IsString a) => a
pattern KHR_BIND_MEMORY_2_EXTENSION_NAME = "VK_KHR_bind_memory2"
|
expipiplus1/vulkan
|
src/Vulkan/Extensions/VK_KHR_bind_memory2.hs
|
bsd-3-clause
| 6,119 | 0 | 8 | 1,213 | 410 | 299 | 111 | -1 | -1 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE MultiWayIf #-}
module Game.GameUtil ( spawn
, initEdict
, clearEdict
, freeEdict
, range
, useTargets
, freeEdictA
, monsterUse
, mCheckAttack
, killBox
, visible
, findTarget
, inFront
, foundTarget
, attackFinished
, onSameTeam
, megaHealthThink
, validateSelectedItem
) where
import Control.Lens ((^.), use, (.=), (+=), zoom, (&), (.~), (%~), (+~), (-~), preuse, ix)
import Control.Monad (liftM, when, unless, void)
import Data.Bits ((.&.), (.|.), complement)
import Data.Char (toLower)
import Data.Maybe (isJust, isNothing, fromJust)
import Linear (norm, normalize, dot, _z)
import qualified Data.ByteString as B
import qualified Data.ByteString.Char8 as BC
import qualified Data.Vector.Mutable as MV
import qualified Data.Vector.Unboxed as UV
import {-# SOURCE #-} Game.GameImportT
import Game.LevelLocalsT
import Game.CVarT
import Game.GameLocalsT
import Game.TraceT
import Game.EntityStateT
import Game.EdictT
import Game.GClientT
import Game.MonsterInfoT
import Game.ClientPersistantT
import Types
import QuakeRef
import QuakeState
import CVarVariables
import Game.Adapters
import qualified Constants
import qualified Client.M as M
import {-# SOURCE #-} qualified Game.GameAI as GameAI
import {-# SOURCE #-} qualified Game.GameBase as GameBase
import {-# SOURCE #-} qualified Game.GameCombat as GameCombat
import qualified Game.GameItems as GameItems
import qualified Game.Info as Info
import qualified Util.Lib as Lib
import qualified Util.Math3D as Math3D
{-
- Either finds a free edict, or allocates a new one. Try to avoid reusing
- an entity that was recently freed, because it can cause the client to
- think the entity morphed into something else instead of being removed and
- recreated, which can cause interpolated angles and bad trails.
-}
spawn :: Quake (Ref EdictT)
spawn = do
maxClientsValue <- liftM (truncate . (^.cvValue)) maxClientsCVar
numEdicts <- use $ gameBaseGlobals.gbNumEdicts
edicts <- use $ gameBaseGlobals.gbGEdicts
time <- use $ gameBaseGlobals.gbLevel.llTime
-- search for a free edict starting from index (maxClientsValue+1) up
-- to (numEdicts-1)
foundIndex <- io $ findFreeEdict (MV.drop (maxClientsValue + 1) edicts) time maxClientsValue numEdicts
case foundIndex of
Just idx -> do
let newRef = Ref idx
writeRef newRef (newEdictT idx)
initEdict newRef
return newRef
Nothing -> do
maxEntities <- use $ gameBaseGlobals.gbGame.glMaxEntities
when (numEdicts == maxEntities) $ do
err <- use $ gameBaseGlobals.gbGameImport.giError
err "ED_Alloc: no free edicts"
let newRef = Ref numEdicts
writeRef newRef (newEdictT numEdicts)
gameBaseGlobals.gbNumEdicts += 1
initEdict newRef
return newRef
where findFreeEdict :: MV.IOVector EdictT -> Float -> Int -> Int -> IO (Maybe Int)
findFreeEdict edicts levelTime maxClientsValue numEdicts = do
found <- findIndex edicts (\edict -> (not $ edict^.eInUse) && ((edict^.eFreeTime) < 2 || levelTime - (edict^.eFreeTime) > 0.5)) 0 (MV.length edicts)
return $ case found of
Nothing -> Nothing
Just idx -> if idx + maxClientsValue + 1 >= numEdicts
then Nothing
else Just (idx + maxClientsValue + 1)
findIndex :: MV.IOVector EdictT -> (EdictT -> Bool) -> Int -> Int -> IO (Maybe Int)
findIndex edicts p idx maxIdx
| idx >= maxIdx = return Nothing
| otherwise = do
edict <- MV.read edicts idx
if p edict
then return (Just idx)
else findIndex edicts p (idx + 1) maxIdx
initEdict :: Ref EdictT -> Quake ()
initEdict edictRef = do
edict <- readRef edictRef
modifyRef edictRef (\v -> v & eInUse .~ True
& eClassName .~ "noclass"
& eGravity .~ 1.0
& eEntityState .~ (newEntityStateT (Just edictRef)) { _esNumber = (edict^.eIndex) })
{-
- Call after linking a new trigger in during gameplay to force all entities
- it covers to immediately touch it.
-}
clearEdict :: Ref EdictT -> Quake ()
clearEdict edictRef = do
edict <- readRef edictRef
writeRef edictRef (newEdictT (edict^.eIndex))
-- Marks the edict as free
freeEdict :: Ref EdictT -> Quake ()
freeEdict edictRef = do
edict <- readRef edictRef
unlinkEntity <- use $ gameBaseGlobals.gbGameImport.giUnlinkEntity
unlinkEntity edictRef
maxClientsValue <- liftM (truncate . (^.cvValue)) maxClientsCVar
when ((edict^.eIndex) > maxClientsValue + Constants.bodyQueueSize) $ do
levelTime <- use $ gameBaseGlobals.gbLevel.llTime
writeRef edictRef (newEdictT (edict^.eIndex)) { _eClassName = "freed", _eFreeTime = levelTime, _eInUse = False }
{-
- Returns the range categorization of an entity relative to self:
- 0 melee range, will become hostile even if back is turned,
- 1 visibility and infront, or visibility and show hostile,
- 2 infront and show hostile,
- 3 only triggered by damage.
-}
range :: EdictT -> EdictT -> Int
range self other =
let v = (self^.eEntityState.esOrigin) - (other^.eEntityState.esOrigin)
len = norm v
in if | len < (fromIntegral Constants.meleeDistance) -> Constants.rangeMelee
| len < 500 -> Constants.rangeNear
| len < 1000 -> Constants.rangeMid
| otherwise -> Constants.rangeFar
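-- Worked numbers for the thresholds above (comment added for clarity; it
-- assumes the stock Quake 2 value Constants.meleeDistance = 80): an enemy
-- 60 units away is rangeMelee, 300 units is rangeNear, 800 units is
-- rangeMid, and anything 1000 units or more is rangeFar.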
{-
- Use the targets.
-
- The global "activator" should be set to the entity that initiated the
- firing.
-
- If self.delay is set, a DelayedUse entity will be created that will
- actually do the SUB_UseTargets after that many seconds have passed.
-
- Centerprints any self.message to the activator.
-
- Search for (string)targetname in all entities that match
- (string)self.target and call their .use function
-}
useTargets :: Ref EdictT -> Maybe (Ref EdictT) -> Quake ()
useTargets edictRef activatorRef = do
edict <- readRef edictRef
gameImport <- use $ gameBaseGlobals.gbGameImport
let dprintf = gameImport^.giDprintf
sound = gameImport^.giSound
soundIndex = gameImport^.giSoundIndex
centerPrintf = gameImport^.giCenterPrintf
-- check for a delay
if (edict^.eDelay) /= 0
then do
-- create a temp object to fire at a later time
tmpRef <- spawn
levelTime <- use $ gameBaseGlobals.gbLevel.llTime
when (isNothing activatorRef) $ do
dprintf "Think_Delay with no activator\n"
modifyRef tmpRef (\v -> v & eClassName .~ "DelayedUse"
& eNextThink .~ levelTime + (edict^.eDelay)
& eThink .~ Just thinkDelay
& eActivator .~ activatorRef
& eMessage .~ (edict^.eMessage)
& eTarget .~ (edict^.eTarget)
& eKillTarget .~ (edict^.eKillTarget))
else do
let actRef = fromJust activatorRef
activator <- readRef actRef
-- print the message
when (isJust (edict^.eMessage) && ((activator^.eSvFlags) .&. Constants.svfMonster) == 0) $ do
centerPrintf actRef (fromJust (edict^.eMessage))
if (edict^.eNoiseIndex) /= 0
then
sound (Just actRef) Constants.chanAuto (edict^.eNoiseIndex) 1 Constants.attnNorm 0
else do
talkIdx <- soundIndex (Just "misc/talk1.wav")
sound (Just actRef) Constants.chanAuto talkIdx 1 Constants.attnNorm 0
-- kill killtargets
done <- if (isJust (edict^.eKillTarget))
then killKillTargets Nothing (fromJust $ edict^.eKillTarget)
else return False
unless done $ do
-- fire targets
when (isJust $ edict^.eTarget) $ do
fireTargets (BC.map toLower (edict^.eClassName)) Nothing GameBase.findByTarget (fromJust $ edict^.eTarget)
where killKillTargets :: Maybe (Ref EdictT) -> B.ByteString -> Quake Bool
killKillTargets entRef killTarget = do
nextRef <- GameBase.gFind entRef GameBase.findByTarget killTarget
case nextRef of
Just newRef -> do
freeEdict newRef
edict <- readRef edictRef
if (edict^.eInUse)
then
killKillTargets nextRef killTarget
else do
dprintf <- use $ gameBaseGlobals.gbGameImport.giDprintf
dprintf "entity was removed while using killtargets\n"
return True
Nothing ->
return False
fireTargets :: B.ByteString -> Maybe (Ref EdictT) -> (EdictT -> B.ByteString -> Bool) -> B.ByteString -> Quake ()
fireTargets edictClassName ref findBy targetName = do
foundRef <- GameBase.gFind ref findBy targetName
when (isJust foundRef) $ do
let Just foundEdictRef = foundRef
foundEdict <- readRef foundEdictRef
-- doors fire area portals in a specific way
let foundEdictClassName = BC.map toLower (foundEdict^.eClassName)
if foundEdictClassName == "func_areaportal" && (any (== edictClassName) ["func_door", "func_door_rotating"])
then
fireTargets edictClassName foundRef findBy targetName
else do
dprintf <- use $ gameBaseGlobals.gbGameImport.giDprintf
if foundEdictRef == edictRef
then
dprintf "WARNING: Entity used itself.\n"
else
when (isJust $ foundEdict^.eUse) $
entUse (fromJust $ foundEdict^.eUse) foundEdictRef (Just edictRef) activatorRef
edict <- readRef edictRef
if not (edict^.eInUse)
then dprintf "entity was removed while using targets\n"
else fireTargets edictClassName foundRef findBy targetName
thinkDelay :: EntThink
thinkDelay =
GenericEntThink "Think_Delay" $ \edictRef -> do
edict <- readRef edictRef
useTargets edictRef (edict^.eActivator)
freeEdict edictRef
return True
freeEdictA :: EntThink
freeEdictA =
GenericEntThink "G_FreeEdictA" $ \edictRef -> do
freeEdict edictRef
return False
monsterUse :: EntUse
monsterUse =
GenericEntUse "monster_use" $ \selfRef _ (Just activatorRef) -> do
self <- readRef selfRef
activator <- readRef activatorRef
let done = isJust (self^.eEnemy) || (self^.eHealth) <= 0 || ((activator^.eFlags) .&. Constants.flNoTarget) /= 0 || (isNothing (activator^.eClient) && ((activator^.eMonsterInfo.miAIFlags) .&. Constants.aiGoodGuy) == 0)
unless done $ do
modifyRef selfRef (\v -> v & eEnemy .~ Just activatorRef)
foundTarget selfRef
mCheckAttack :: EntThink
mCheckAttack =
GenericEntThink "M_CheckAttack" $ \selfRef -> do
self <- readRef selfRef
let Just enemyRef = self^.eEnemy
checkEnemyHealth selfRef enemyRef
where checkEnemyHealth :: Ref EdictT -> Ref EdictT -> Quake Bool
checkEnemyHealth selfRef enemyRef = do
self <- readRef selfRef
enemy <- readRef enemyRef
if (enemy^.eHealth) > 0
then do
let spot1 = (self^.eEntityState.esOrigin) & _z +~ fromIntegral (self^.eViewHeight)
spot2 = (enemy^.eEntityState.esOrigin) & _z +~ fromIntegral (enemy^.eViewHeight)
trace <- use $ gameBaseGlobals.gbGameImport.giTrace
traceT <- trace spot1 Nothing Nothing spot2 (Just selfRef) (Constants.contentsSolid .|. Constants.contentsMonster .|. Constants.contentsSlime .|. Constants.contentsLava .|. Constants.contentsWindow)
-- do we have a clear shot?
if (traceT^.tEnt) /= (self^.eEnemy)
then return False
else meleeAttack selfRef
else
meleeAttack selfRef
meleeAttack :: Ref EdictT -> Quake Bool
meleeAttack selfRef = do
enemyRange <- use $ gameBaseGlobals.gbEnemyRange
self <- readRef selfRef
if enemyRange == Constants.rangeMelee
then do
-- don't always melee in easy mode
skillValue <- liftM (^.cvValue) skillCVar
r <- Lib.rand
if skillValue == 0 && (r .&. 3) /= 0
then
return False
else do
let attackState = case self^.eMonsterInfo.miMelee of
Just _ -> Constants.asMelee
Nothing -> Constants.asMissile
modifyRef selfRef (\v -> v & eMonsterInfo.miAttackState .~ attackState)
return True
else
missileAttack selfRef
missileAttack :: Ref EdictT -> Quake Bool
missileAttack selfRef = do
self <- readRef selfRef
levelTime <- use $ gameBaseGlobals.gbLevel.llTime
enemyRange <- use $ gameBaseGlobals.gbEnemyRange
if | isNothing (self^.eMonsterInfo.miAttack) -> return False
| levelTime < (self^.eMonsterInfo.miAttackFinished) -> return False
| enemyRange == Constants.rangeFar -> return False
| otherwise -> do
let maybeChance = if | (self^.eMonsterInfo.miAIFlags) .&. Constants.aiStandGround /= 0 -> Just 0.4
| enemyRange == Constants.rangeMelee -> Just 0.2
| enemyRange == Constants.rangeNear -> Just 0.1
| enemyRange == Constants.rangeMid -> Just 0.02
| otherwise -> Nothing
case maybeChance of
Nothing ->
return False
Just chance -> do
skillValue <- liftM (^.cvValue) skillCVar
let chance' = if | skillValue == 0 -> chance * 0.5
| skillValue >= 2 -> chance * 2
| otherwise -> chance
r <- Lib.randomF
if r < chance'
then do
r' <- Lib.randomF
modifyRef selfRef (\v -> v & eMonsterInfo.miAttackState .~ Constants.asMissile
& eMonsterInfo.miAttackFinished .~ levelTime + 2 * r')
return True
else do
when ((self^.eFlags) .&. Constants.flFly /= 0) $ do
r' <- Lib.randomF
let attackState = if r' < 0.3
then Constants.asSliding
else Constants.asStraight
modifyRef selfRef (\v -> v & eMonsterInfo.miAttackState .~ attackState)
return False
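-- A quick reading of the attack-chance logic above (explanatory comment
-- added here): the base chance is 0.4 when standing ground, 0.2 at melee
-- range, 0.1 at near range and 0.02 at mid range; skill 0 halves it and
-- skill 2 or higher doubles it, so e.g. a near-range check fires with
-- probability 0.05 on easy and 0.2 on hard.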
{-
- Kills all entities that would touch the proposed new positioning of ent.
- Ent should be unlinked before calling this!
-}
killBox :: Ref EdictT -> Quake Bool
killBox edictRef = do
edict <- readRef edictRef
trace <- use $ gameBaseGlobals.gbGameImport.giTrace
traceT <- trace (edict^.eEntityState.esOrigin)
(Just $ edict^.eMins)
(Just $ edict^.eMaxs)
(edict^.eEntityState.esOrigin)
Nothing
Constants.maskPlayerSolid
if isNothing (traceT^.tEnt) || (traceT^.tEnt) == Just worldRef
then
return True
else do
-- nail it
v3o <- use $ globals.gVec3Origin
let Just traceEntRef = traceT^.tEnt
GameCombat.damage traceEntRef
edictRef
edictRef
v3o
(edict^.eEntityState.esOrigin)
v3o
100000
0
Constants.damageNoProtection
Constants.modTelefrag
-- if we didn't kill it, fail
traceEnt <- readRef traceEntRef
if (traceEnt^.eSolid) /= 0
then return False
else killBox edictRef
-- Returns 1 if the entity is visible to self, even if not infront()
visible :: Ref EdictT -> Ref EdictT -> Quake Bool
visible selfRef otherRef = do
self <- readRef selfRef
other <- readRef otherRef
let spot1 = (self^.eEntityState.esOrigin) & _z +~ fromIntegral (self^.eViewHeight)
spot2 = (other^.eEntityState.esOrigin) & _z +~ fromIntegral (other^.eViewHeight)
v3o <- use $ globals.gVec3Origin
trace <- use $ gameBaseGlobals.gbGameImport.giTrace
traceT <- trace spot1 (Just v3o) (Just v3o) spot2 (Just selfRef) Constants.maskOpaque
return $ (traceT^.tFraction) == 1
{-
- Finds a target.
-
- Self is currently not attacking anything, so try to find a target
-
- Returns TRUE if an enemy was sighted
-
- When a player fires a missile, the point of impact becomes a fakeplayer
- so that monsters that see the impact will respond as if they had seen the
- player.
-
- To avoid spending too much time, only a single client (or fakeclient) is
- checked each frame. This means multi player games will have slightly
- slower noticing monsters.
-}
findTarget :: Ref EdictT -> Quake Bool
findTarget selfRef = do
self <- readRef selfRef
if (self^.eMonsterInfo.miAIFlags) .&. Constants.aiGoodGuy /= 0
then
-- we skip this chunk of code here because we always return False
{-
if (self.goalentity != null && self.goalentity.inuse
&& self.goalentity.classname != null) {
if (self.goalentity.classname.equals("target_actor"))
return false;
}
-}
-- FIXME: look for monsters?
return False
else
checkCombatPoint self
where checkCombatPoint :: EdictT -> Quake Bool
checkCombatPoint self = do
-- if we're going to a combat point, just proceed
if (self^.eMonsterInfo.miAIFlags) .&. Constants.aiCombatPoint /= 0
then return False
else checkHearNoise self
checkHearNoise :: EdictT -> Quake Bool
checkHearNoise self = do
-- if the first spawnflag bit is set, the monster will only wake up on
-- really seeing the player, not another monster getting angry or
-- hearing something
-- revised behavior so they will wake up if they "see" a player make a
-- noise but not weapon impact/explosion noises
level <- use $ gameBaseGlobals.gbLevel
if | (level^.llSightEntityFrameNum) >= ((level^.llFrameNum) - 1) && (self^.eSpawnFlags) .&. 1 == 0 -> do
let Just clientRef = level^.llSightEntity
client <- readRef clientRef
if (client^.eEnemy) == (self^.eEnemy)
then return False
else checkClientInUse clientRef False
| (level^.llSoundEntityFrameNum) >= ((level^.llFrameNum) - 1) ->
checkClientInUse (fromJust $ level^.llSoundEntity) True
| isJust (self^.eEnemy) && (level^.llSound2EntityFrameNum) >= ((level^.llFrameNum) - 1) && (self^.eSpawnFlags) .&. 1 /= 0 ->
checkClientInUse (fromJust $ level^.llSound2Entity) True
| otherwise -> do
case level^.llSightClient of
Nothing -> return False -- no clients to get mad at
Just clientRef -> checkClientInUse clientRef False
checkClientInUse :: Ref EdictT -> Bool -> Quake Bool
checkClientInUse clientRef heardIt = do
-- if the entity went away, forget it
client <- readRef clientRef
if not (client^.eInUse)
then return False
else checkClientFlags clientRef client heardIt
checkClientFlags :: Ref EdictT -> EdictT -> Bool -> Quake Bool
checkClientFlags clientRef client heardIt = do
if | isJust (client^.eClient) ->
if (client^.eFlags) .&. Constants.flNoTarget /= 0
then return False
else actBasedOnHeardIt clientRef heardIt
| (client^.eSvFlags) .&. Constants.svfMonster /= 0 ->
case client^.eEnemy of
Nothing -> return False
Just enemyRef -> do
enemy <- readRef enemyRef
if (enemy^.eFlags) .&. Constants.flNoTarget /= 0
then return False
else actBasedOnHeardIt clientRef heardIt
| heardIt -> do
let Just ownerRef = client^.eOwner
owner <- readRef ownerRef
if (owner^.eFlags) .&. Constants.flNoTarget /= 0
then return False
else actBasedOnHeardIt clientRef heardIt
| otherwise -> return False
actBasedOnHeardIt :: Ref EdictT -> Bool -> Quake Bool
actBasedOnHeardIt clientRef heardIt = do
self <- readRef selfRef
client <- readRef clientRef
if not heardIt
then do
let r = range self client
if | r == Constants.rangeFar ->
return False
| client^.eLightLevel <= 5 ->
return False
| otherwise -> do
vis <- visible selfRef clientRef
levelTime <- use $ gameBaseGlobals.gbLevel.llTime
if | not vis ->
return False
| r == Constants.rangeNear && fromIntegral (client^.eShowHostile) < levelTime && not (inFront self client) ->
return False
| r == Constants.rangeMid && not (inFront self client) ->
return False
| Just clientRef == (self^.eEnemy) ->
return True -- JDC false
| otherwise -> do
modifyRef selfRef (\v -> v & eEnemy .~ Just clientRef)
if (client^.eClassName) /= "player_noise"
then do
modifyRef selfRef (\v -> v & eMonsterInfo.miAIFlags %~ (.&. (complement Constants.aiSoundTarget)))
case client^.eClient of
Nothing -> do
modifyRef selfRef (\v -> v & eEnemy .~ (client^.eEnemy))
let Just enemyRef = client^.eEnemy
enemy <- readRef enemyRef
case enemy^.eClient of
Nothing -> do
modifyRef selfRef (\v -> v & eEnemy .~ Nothing)
return False
_ -> finishFindTarget
_ -> finishFindTarget
else
finishFindTarget
else do
-- heard it
vis <- visible selfRef clientRef
if (self^.eSpawnFlags) .&. 1 /= 0 && not vis
then
return False
else do
inPHS <- use $ gameBaseGlobals.gbGameImport.giInPHS
v <- inPHS (self^.eEntityState.esOrigin) (client^.eEntityState.esOrigin)
if not v
then
return False
else do
let temp = (client^.eEntityState.esOrigin) - (self^.eEntityState.esOrigin)
if norm temp > 1000 -- too far to hear
then
return False
else do
-- check area portals - if they are different and
-- not connected then we can't hear it
done <- if (client^.eAreaNum) /= (self^.eAreaNum)
then do
areasConnected <- use $ gameBaseGlobals.gbGameImport.giAreasConnected
connected <- areasConnected (self^.eAreaNum) (client^.eAreaNum)
if not connected
then return True
else return False
else
return False
if done
then
return False
else do
modifyRef selfRef (\v' -> v' & eIdealYaw .~ Math3D.vectorYaw temp)
M.changeYaw selfRef
-- hunt the sound for a bit; hopefully find
-- the real player
modifyRef selfRef (\v' -> v' & eMonsterInfo.miAIFlags %~ (.|. Constants.aiSoundTarget))
if Just clientRef == (self^.eEnemy)
then
return True
else do
modifyRef selfRef (\v' -> v' & eEnemy .~ Just clientRef)
finishFindTarget
finishFindTarget :: Quake Bool
finishFindTarget = do
-- got one
foundTarget selfRef
self <- readRef selfRef
when ((self^.eMonsterInfo.miAIFlags) .&. Constants.aiSoundTarget == 0 && isJust (self^.eMonsterInfo.miSight)) $
void $ entInteract (fromJust $ self^.eMonsterInfo.miSight) selfRef (fromJust $ self^.eEnemy) -- RESEARCH: are we sure eEnemy is Just ?
return True
-- Returns true if the entity is in front (in sight) of self
inFront :: EdictT -> EdictT -> Bool
inFront self other =
let (Just forward, _, _) = Math3D.angleVectors (self^.eEntityState.esAngles) True False False
vec = normalize ((other^.eEntityState.esOrigin) - (self^.eEntityState.esOrigin))
dot' = vec `dot` forward
in dot' > 0.3
foundTarget :: Ref EdictT -> Quake ()
foundTarget selfRef = do
self <- readRef selfRef
let Just enemyRef = self^.eEnemy
enemy <- readRef enemyRef
-- let other monsters see this monster for a while
when (isJust (enemy^.eClient)) $ do
frameNum <- use $ gameBaseGlobals.gbLevel.llFrameNum
zoom (gameBaseGlobals.gbLevel) $ do
llSightEntity .= Just selfRef
llSightEntityFrameNum .= frameNum
modifyRef selfRef (\v -> v & eLightLevel .~ 128)
levelTime <- use $ gameBaseGlobals.gbLevel.llTime
modifyRef selfRef (\v -> v & eShowHostile .~ truncate levelTime + 1 -- wake up other monsters
& eMonsterInfo.miLastSighting .~ (enemy^.eEntityState.esOrigin)
& eMonsterInfo.miTrailTime .~ levelTime)
case self^.eCombatTarget of
Nothing ->
GameAI.huntTarget selfRef
Just combatTarget -> do
target <- GameBase.pickTarget (self^.eCombatTarget)
case target of
Nothing -> do
modifyRef selfRef (\v -> v & eGoalEntity .~ (self^.eEnemy)
& eMoveTarget .~ (self^.eEnemy))
GameAI.huntTarget selfRef
dprintf <- use $ gameBaseGlobals.gbGameImport.giDprintf
dprintf ((self^.eClassName) `B.append`
" at " `B.append`
Lib.vtos (self^.eEntityState.esOrigin) `B.append`
", combattarget " `B.append`
combatTarget `B.append`
" not found\n")
Just _ -> do
modifyRef selfRef (\v -> v & eGoalEntity .~ target
& eMoveTarget .~ target
-- clear out our combattarget, these are a one shot deal
& eCombatTarget .~ Nothing
& eMonsterInfo.miAIFlags %~ (.|. Constants.aiCombatPoint)
& eMonsterInfo.miPauseTime .~ 0)
-- clear the targetname, that point is ours!
let Just moveTargetRef = target
modifyRef moveTargetRef (\v -> v & eTargetName .~ Nothing)
-- run for it
void $ think (fromJust $ self^.eMonsterInfo.miRun) selfRef
attackFinished :: Ref EdictT -> Float -> Quake ()
attackFinished selfRef time = do
levelTime <- use $ gameBaseGlobals.gbLevel.llTime
modifyRef selfRef (\v -> v & eMonsterInfo.miAttackFinished .~ levelTime + time)
-- Returns true, if two edicts are on the same team.
onSameTeam :: Ref EdictT -> Ref EdictT -> Quake Bool
onSameTeam edictRef otherRef = do
dmFlagsValue <- liftM (truncate . (^.cvValue)) dmFlagsCVar
if dmFlagsValue .&. (Constants.dfModelTeams .|. Constants.dfSkinTeams) == 0
then return False
else do
edictTeam <- clientTeam edictRef
otherTeam <- clientTeam otherRef
return (edictTeam == otherTeam)
-- Returns the team string of an entity
-- with respect to team_by_model and team_by_skin.
clientTeam :: Ref EdictT -> Quake B.ByteString
clientTeam edictRef = do
edict <- readRef edictRef
case edict^.eClient of
Nothing ->
return ""
Just (Ref gClientIdx) -> do
Just gClient <- preuse $ gameBaseGlobals.gbGame.glClients.ix gClientIdx
value <- Info.valueForKey (gClient^.gcPers.cpUserInfo) "skin"
case '/' `BC.elemIndex` value of
Nothing ->
return value
Just idx -> do
dmFlagsValue <- liftM (truncate . (^.cvValue)) dmFlagsCVar
return $ if dmFlagsValue .&. Constants.dfModelTeams /= 0
then B.take idx value
else B.drop (idx + 1) value
megaHealthThink :: EntThink
megaHealthThink =
GenericEntThink "MegaHealth_think" $ \selfRef -> do
self <- readRef selfRef
let Just ownerRef = self^.eOwner
owner <- readRef ownerRef
if (owner^.eHealth) > (owner^.eMaxHealth)
then do
levelTime <- use $ gameBaseGlobals.gbLevel.llTime
modifyRef selfRef (\v -> v & eNextThink .~ levelTime + 1)
modifyRef ownerRef (\v -> v & eHealth -~ 1)
return False
else do
deathmatchValue <- liftM (^.cvValue) deathmatchCVar
if not ((self^.eSpawnFlags) .&. Constants.droppedItem /= 0) && deathmatchValue /= 0
then
GameItems.setRespawn selfRef 20
else
freeEdict selfRef
return False
validateSelectedItem :: Ref EdictT -> Quake ()
validateSelectedItem edictRef = do
edict <- readRef edictRef
let Just (Ref gClientIdx) = edict^.eClient
Just gClient <- preuse $ gameBaseGlobals.gbGame.glClients.ix gClientIdx
if (gClient^.gcPers.cpInventory) UV.! (gClient^.gcPers.cpSelectedItem) /= 0
then return () -- valid
else GameItems.selectNextItem edictRef (-1)
|
ksaveljev/hake-2
|
src/Game/GameUtil.hs
|
bsd-3-clause
| 32,215 | 179 | 31 | 11,885 | 7,474 | 3,789 | 3,685 | -1 | -1 |
{-
(c) The University of Glasgow 2006
(c) The GRASP/AQUA Project, Glasgow University, 1992-1998
The Desugarer: turning HsSyn into Core.
-}
{-# LANGUAGE CPP #-}
module Desugar ( deSugar, deSugarExpr ) where
import DynFlags
import HscTypes
import HsSyn
import TcRnTypes
import TcRnMonad ( finalSafeMode, fixSafeInstances )
import MkIface
import Id
import Name
import Type
import FamInstEnv
import Coercion
import InstEnv
import Class
import Avail
import CoreSyn
import CoreSubst
import PprCore
import DsMonad
import DsExpr
import DsBinds
import DsForeign
import Module
import NameSet
import NameEnv
import Rules
import TysPrim (eqReprPrimTyCon)
import TysWiredIn (coercibleTyCon )
import BasicTypes ( Activation(.. ) )
import CoreMonad ( CoreToDo(..) )
import CoreLint ( endPassIO )
import MkCore
import FastString
import ErrUtils
import Outputable
import SrcLoc
import Coverage
import Util
import MonadUtils
import OrdList
import StaticPtrTable
import Data.List
import Data.IORef
import Control.Monad( when )
{-
************************************************************************
* *
* The main function: deSugar
* *
************************************************************************
-}
-- | Main entry point to the desugarer.
deSugar :: HscEnv -> ModLocation -> TcGblEnv -> IO (Messages, Maybe ModGuts)
-- Can modify PCS by faulting in more declarations
deSugar hsc_env
mod_loc
tcg_env@(TcGblEnv { tcg_mod = mod,
tcg_src = hsc_src,
tcg_type_env = type_env,
tcg_imports = imports,
tcg_exports = exports,
tcg_keep = keep_var,
tcg_th_splice_used = tc_splice_used,
tcg_rdr_env = rdr_env,
tcg_fix_env = fix_env,
tcg_inst_env = inst_env,
tcg_fam_inst_env = fam_inst_env,
tcg_warns = warns,
tcg_anns = anns,
tcg_binds = binds,
tcg_imp_specs = imp_specs,
tcg_dependent_files = dependent_files,
tcg_ev_binds = ev_binds,
tcg_fords = fords,
tcg_rules = rules,
tcg_vects = vects,
tcg_patsyns = patsyns,
tcg_tcs = tcs,
tcg_insts = insts,
tcg_fam_insts = fam_insts,
tcg_hpc = other_hpc_info})
= do { let dflags = hsc_dflags hsc_env
print_unqual = mkPrintUnqualified dflags rdr_env
; showPass dflags "Desugar"
-- Desugar the program
; let export_set = availsToNameSet exports
target = hscTarget dflags
hpcInfo = emptyHpcInfo other_hpc_info
; (binds_cvr, ds_hpc_info, modBreaks)
<- if not (isHsBootOrSig hsc_src)
then addTicksToBinds dflags mod mod_loc export_set
(typeEnvTyCons type_env) binds
else return (binds, hpcInfo, emptyModBreaks)
; (msgs, mb_res) <- initDs hsc_env mod rdr_env type_env fam_inst_env $
do { ds_ev_binds <- dsEvBinds ev_binds
; core_prs <- dsTopLHsBinds binds_cvr
; (spec_prs, spec_rules) <- dsImpSpecs imp_specs
; (ds_fords, foreign_prs) <- dsForeigns fords
; ds_rules <- mapMaybeM dsRule rules
; ds_vects <- mapM dsVect vects
; stBinds <- dsGetStaticBindsVar >>=
liftIO . readIORef
; let hpc_init
| gopt Opt_Hpc dflags = hpcInitCode mod ds_hpc_info
| otherwise = empty
-- Stub to insert the static entries of the
-- module into the static pointer table
spt_init = sptInitCode mod stBinds
; return ( ds_ev_binds
, foreign_prs `appOL` core_prs `appOL` spec_prs
`appOL` toOL (map snd stBinds)
, spec_rules ++ ds_rules, ds_vects
, ds_fords `appendStubC` hpc_init
`appendStubC` spt_init) }
; case mb_res of {
Nothing -> return (msgs, Nothing) ;
Just (ds_ev_binds, all_prs, all_rules, vects0, ds_fords) -> do
do { -- Add export flags to bindings
keep_alive <- readIORef keep_var
; let (rules_for_locals, rules_for_imps) = partition isLocalRule all_rules
final_prs = addExportFlagsAndRules target export_set keep_alive
rules_for_locals (fromOL all_prs)
final_pgm = combineEvBinds ds_ev_binds final_prs
-- Notice that we put the whole lot in a big Rec, even the foreign binds
-- When compiling PrelFloat, which defines data Float = F# Float#
-- we want F# to be in scope in the foreign marshalling code!
-- You might think it doesn't matter, but the simplifier brings all top-level
-- things into the in-scope set before simplifying; so we get no unfolding for F#!
#ifdef DEBUG
-- Debug only as pre-simple-optimisation program may be really big
; endPassIO hsc_env print_unqual CoreDesugar final_pgm rules_for_imps
#endif
; (ds_binds, ds_rules_for_imps, ds_vects)
<- simpleOptPgm dflags mod final_pgm rules_for_imps vects0
-- The simpleOptPgm gets rid of type
-- bindings plus any stupid dead code
; endPassIO hsc_env print_unqual CoreDesugarOpt ds_binds ds_rules_for_imps
; let used_names = mkUsedNames tcg_env
; deps <- mkDependencies tcg_env
; used_th <- readIORef tc_splice_used
; dep_files <- readIORef dependent_files
; safe_mode <- finalSafeMode dflags tcg_env
; let mod_guts = ModGuts {
mg_module = mod,
mg_boot = hsc_src == HsBootFile,
mg_exports = exports,
mg_deps = deps,
mg_used_names = used_names,
mg_used_th = used_th,
mg_dir_imps = imp_mods imports,
mg_rdr_env = rdr_env,
mg_fix_env = fix_env,
mg_warns = warns,
mg_anns = anns,
mg_tcs = tcs,
mg_insts = fixSafeInstances safe_mode insts,
mg_fam_insts = fam_insts,
mg_inst_env = inst_env,
mg_fam_inst_env = fam_inst_env,
mg_patsyns = patsyns,
mg_rules = ds_rules_for_imps,
mg_binds = ds_binds,
mg_foreign = ds_fords,
mg_hpc_info = ds_hpc_info,
mg_modBreaks = modBreaks,
mg_vect_decls = ds_vects,
mg_vect_info = noVectInfo,
mg_safe_haskell = safe_mode,
mg_trust_pkg = imp_trust_own_pkg imports,
mg_dependent_files = dep_files
}
; return (msgs, Just mod_guts)
}}}
dsImpSpecs :: [LTcSpecPrag] -> DsM (OrdList (Id,CoreExpr), [CoreRule])
dsImpSpecs imp_specs
= do { spec_prs <- mapMaybeM (dsSpec Nothing) imp_specs
; let (spec_binds, spec_rules) = unzip spec_prs
; return (concatOL spec_binds, spec_rules) }
combineEvBinds :: [CoreBind] -> [(Id,CoreExpr)] -> [CoreBind]
-- Top-level bindings can include coercion bindings, but not via superclasses
-- See Note [Top-level evidence]
combineEvBinds [] val_prs
= [Rec val_prs]
combineEvBinds (NonRec b r : bs) val_prs
| isId b = combineEvBinds bs ((b,r):val_prs)
| otherwise = NonRec b r : combineEvBinds bs val_prs
combineEvBinds (Rec prs : bs) val_prs
= combineEvBinds bs (prs ++ val_prs)
{-
Note [Top-level evidence]
~~~~~~~~~~~~~~~~~~~~~~~~~
Top-level evidence bindings may be mutually recursive with the top-level value
bindings, so we must put those in a Rec. But we can't put them *all* in a Rec
because the occurrence analyser doesn't teke account of type/coercion variables
when computing dependencies.
So we pull out the type/coercion variables (which are in dependency order),
and Rec the rest.
-}
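{-
A small worked case of combineEvBinds (illustration added here, not part of
the original source):
    combineEvBinds [NonRec co co_rhs, Rec [(d, e_d)]] [(f, e_f)]
      = NonRec co co_rhs : [Rec [(d, e_d), (f, e_f)]]
assuming co is a coercion variable (so isId co is False): it stays outside
the final Rec, in dependency order, while the evidence Id d joins the value
bindings in the Rec.
-}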
deSugarExpr :: HscEnv -> LHsExpr Id -> IO (Messages, Maybe CoreExpr)
deSugarExpr hsc_env tc_expr
= do { let dflags = hsc_dflags hsc_env
icntxt = hsc_IC hsc_env
rdr_env = ic_rn_gbl_env icntxt
type_env = mkTypeEnvWithImplicits (ic_tythings icntxt)
fam_insts = snd (ic_instances icntxt)
fam_inst_env = extendFamInstEnvList emptyFamInstEnv fam_insts
-- This stuff is a half baked version of TcRnDriver.setInteractiveContext
; showPass dflags "Desugar"
-- Do desugaring
; (msgs, mb_core_expr) <- initDs hsc_env (icInteractiveModule icntxt) rdr_env
type_env fam_inst_env $
dsLExpr tc_expr
; case mb_core_expr of
Nothing -> return ()
Just expr -> dumpIfSet_dyn dflags Opt_D_dump_ds "Desugared" (pprCoreExpr expr)
; return (msgs, mb_core_expr) }
{-
************************************************************************
* *
* Add rules and export flags to binders
* *
************************************************************************
-}
addExportFlagsAndRules
:: HscTarget -> NameSet -> NameSet -> [CoreRule]
-> [(Id, t)] -> [(Id, t)]
addExportFlagsAndRules target exports keep_alive rules prs
= mapFst add_one prs
where
add_one bndr = add_rules name (add_export name bndr)
where
name = idName bndr
---------- Rules --------
-- See Note [Attach rules to local ids]
-- NB: the binder might have some existing rules,
-- arising from specialisation pragmas
add_rules name bndr
| Just rules <- lookupNameEnv rule_base name
= bndr `addIdSpecialisations` rules
| otherwise
= bndr
rule_base = extendRuleBaseList emptyRuleBase rules
---------- Export flag --------
-- See Note [Adding export flags]
add_export name bndr
| dont_discard name = setIdExported bndr
| otherwise = bndr
dont_discard :: Name -> Bool
dont_discard name = is_exported name
|| name `elemNameSet` keep_alive
-- In interactive mode, we don't want to discard any top-level
-- entities at all (eg. do not inline them away during
-- simplification), and retain them all in the TypeEnv so they are
-- available from the command line.
--
-- isExternalName separates the user-defined top-level names from those
-- introduced by the type checker.
is_exported :: Name -> Bool
is_exported | targetRetainsAllBindings target = isExternalName
| otherwise = (`elemNameSet` exports)
{-
Note [Adding export flags]
~~~~~~~~~~~~~~~~~~~~~~~~~~
Set the no-discard flag if either
a) the Id is exported
b) it's mentioned in the RHS of an orphan rule
c) it's in the keep-alive set
It means that the binding won't be discarded EVEN if the binding
ends up being trivial (v = w) -- the simplifier would usually just
substitute w for v throughout, but we don't apply the substitution to
the rules (maybe we should?), so this substitution would make the rule
bogus.
You might wonder why exported Ids aren't already marked as such;
it's just because the type checker is rather busy already and
I didn't want to pass in yet another mapping.
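As a small illustration of (b) (names are made up):
    v = w
    {-# RULES "r"  forall x.  f x = v #-}
If the trivial binding v = w were discarded, the rule's RHS would still
mention v, which is exactly the bogosity described above; setting the
export flag on v keeps the binding alive.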
Note [Attach rules to local ids]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Find the rules for locally-defined Ids; then we can attach them
to the binders in the top-level bindings
Reason
- It makes the rules easier to look up
- It means that transformation rules and specialisations for
locally defined Ids are handled uniformly
- It keeps alive things that are referred to only from a rule
(the occurrence analyser knows about rules attached to Ids)
- It makes sure that, when we apply a rule, the free vars
of the RHS are more likely to be in scope
- The imported rules are carried in the in-scope set
which is extended on each iteration by the new wave of
local binders; any rules which aren't on the binding will
thereby get dropped
************************************************************************
* *
* Desugaring transformation rules
* *
************************************************************************
-}
dsRule :: LRuleDecl Id -> DsM (Maybe CoreRule)
dsRule (L loc (HsRule name act vars lhs _tv_lhs rhs _fv_rhs))
= putSrcSpanDs loc $
do { let bndrs' = [var | L _ (RuleBndr (L _ var)) <- vars]
; lhs' <- unsetGOptM Opt_EnableRewriteRules $
unsetWOptM Opt_WarnIdentities $
dsLExpr lhs -- Note [Desugaring RULE left hand sides]
; rhs' <- dsLExpr rhs
; dflags <- getDynFlags
; this_mod <- getModule
; (bndrs'', lhs'', rhs'') <- unfold_coerce bndrs' lhs' rhs'
-- Substitute the dict bindings eagerly,
-- and take the body apart into a (f args) form
; case decomposeRuleLhs bndrs'' lhs'' of {
Left msg -> do { warnDs msg; return Nothing } ;
Right (final_bndrs, fn_id, args) -> do
{ let is_local = isLocalId fn_id
-- NB: isLocalId is False of implicit Ids. This is good because
-- we don't want to attach rules to the bindings of implicit Ids,
-- because they don't show up in the bindings until just before code gen
fn_name = idName fn_id
final_rhs = simpleOptExpr rhs'' -- De-crap it
rule = mkRule this_mod False {- Not auto -} is_local
(snd $ unLoc name) act fn_name final_bndrs args
final_rhs
inline_shadows_rule -- Function can be inlined before rule fires
| wopt Opt_WarnInlineRuleShadowing dflags
, isLocalId fn_id || hasSomeUnfolding (idUnfolding fn_id)
-- If imported with no unfolding, no worries
= case (idInlineActivation fn_id, act) of
(NeverActive, _) -> False
(AlwaysActive, _) -> True
(ActiveBefore {}, _) -> True
(ActiveAfter {}, NeverActive) -> True
(ActiveAfter n, ActiveAfter r) -> r < n -- Rule active strictly first
(ActiveAfter {}, AlwaysActive) -> False
(ActiveAfter {}, ActiveBefore {}) -> False
| otherwise = False
; when inline_shadows_rule $
warnDs (vcat [ hang (ptext (sLit "Rule")
<+> doubleQuotes (ftext $ snd $ unLoc name)
<+> ptext (sLit "may never fire"))
2 (ptext (sLit "because") <+> quotes (ppr fn_id)
<+> ptext (sLit "might inline first"))
, ptext (sLit "Probable fix: add an INLINE[n] or NOINLINE[n] pragma on")
<+> quotes (ppr fn_id) ])
; return (Just rule)
} } }
-- See Note [Desugaring coerce as cast]
unfold_coerce :: [Id] -> CoreExpr -> CoreExpr -> DsM ([Var], CoreExpr, CoreExpr)
unfold_coerce bndrs lhs rhs = do
(bndrs', wrap) <- go bndrs
return (bndrs', wrap lhs, wrap rhs)
where
go :: [Id] -> DsM ([Id], CoreExpr -> CoreExpr)
go [] = return ([], id)
go (v:vs)
| Just (tc, args) <- splitTyConApp_maybe (idType v)
, tc == coercibleTyCon = do
let ty' = mkTyConApp eqReprPrimTyCon args
v' <- mkDerivedLocalM mkRepEqOcc v ty'
(bndrs, wrap) <- go vs
return (v':bndrs, mkCoreLet (NonRec v (mkEqBox (mkCoVarCo v'))) . wrap)
| otherwise = do
(bndrs,wrap) <- go vs
return (v:bndrs, wrap)
{-
Note [Desugaring RULE left hand sides]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
For the LHS of a RULE we do *not* want to desugar
[x] to build (\cn. x `c` n)
We want to leave explicit lists simply as chains
of cons's. We can achieve that slightly indirectly by
switching off EnableRewriteRules. See DsExpr.dsExplicitList.
That keeps the desugaring of list comprehensions simple too.
Nor do we want to warn of conversion identities on the LHS;
the rule is precisely to optimise them:
{-# RULES "fromRational/id" fromRational = id :: Rational -> Rational #-}
Note [Desugaring coerce as cast]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
We want the user to express a rule saying roughly “mapping a coercion over a
list can be replaced by a coercion”. But the cast operator of Core (▷) cannot
be written in Haskell. So we use `coerce` for that (#2110). The user writes
map coerce = coerce
as a RULE, and this optimizes any kind of mapped casts away, including
`map MkNewtype`.
For that we replace any forall'ed `c :: Coercible a b` value in a RULE by
corresponding `co :: a ~#R b` and wrap the LHS and the RHS in
`let c = MkCoercible co in ...`. This is later simplified to the desired form
by simpleOptExpr (for the LHS) and by the simplifiers (for the RHS), respectively.
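A small sketch of the rewrite (binder names are illustrative only): a rule
quantified as
    forall (c :: Coercible a b).  lhs = rhs
gets a fresh co :: a ~#R b in place of c, and both sides become
    let c = MkCoercible co in lhs    and    let c = MkCoercible co in rhs
which later collapse to plain casts as described above.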
************************************************************************
* *
* Desugaring vectorisation declarations
* *
************************************************************************
-}
dsVect :: LVectDecl Id -> DsM CoreVect
dsVect (L loc (HsVect _ (L _ v) rhs))
= putSrcSpanDs loc $
do { rhs' <- dsLExpr rhs
; return $ Vect v rhs'
}
dsVect (L _loc (HsNoVect _ (L _ v)))
= return $ NoVect v
dsVect (L _loc (HsVectTypeOut isScalar tycon rhs_tycon))
= return $ VectType isScalar tycon' rhs_tycon
where
tycon' | Just ty <- coreView $ mkTyConTy tycon
, (tycon', []) <- splitTyConApp ty = tycon'
| otherwise = tycon
dsVect vd@(L _ (HsVectTypeIn _ _ _ _))
= pprPanic "Desugar.dsVect: unexpected 'HsVectTypeIn'" (ppr vd)
dsVect (L _loc (HsVectClassOut cls))
= return $ VectClass (classTyCon cls)
dsVect vc@(L _ (HsVectClassIn _ _))
= pprPanic "Desugar.dsVect: unexpected 'HsVectClassIn'" (ppr vc)
dsVect (L _loc (HsVectInstOut inst))
= return $ VectInst (instanceDFunId inst)
dsVect vi@(L _ (HsVectInstIn _))
= pprPanic "Desugar.dsVect: unexpected 'HsVectInstIn'" (ppr vi)
|
urbanslug/ghc
|
compiler/deSugar/Desugar.hs
|
bsd-3-clause
| 20,043 | 27 | 28 | 7,148 | 3,258 | 1,748 | 1,510 | 279 | 8 |
-- |
-- Module: CGroup
-- Copyright: (c) 2014 Tomislav Viljetić
-- License: BSD3
-- Maintainer: Tomislav Viljetić <[email protected]>
--
-- Basic cgroup virtual filesystem operations.
--
module CGroup
( module CGroup.Types
, createCGroup
, classifyTask
, listTasks
) where
import CGroup.Types
import Control.Applicative
import Data.Attoparsec.ByteString.Char8
import Data.Set (Set)
import qualified Data.Set as Set
import System.Directory (createDirectory)
import System.FilePath ((</>))
import System.IO.Streams.Attoparsec (parseFromStream)
import System.IO.Streams.File (withFileAsInput)
-- | Create a new cgroup.
createCGroup :: CGroup -> IO ()
createCGroup =
createDirectory . cgroupPath
-- | Places a task into a cgroup.
classifyTask :: ProcessID -> CGroup -> IO ()
classifyTask pid g =
writeFile (tasksFile g) (show pid)
-- | Retrieve the tasks of a cgroup.
listTasks :: CGroup -> IO (Set ProcessID)
listTasks g =
withFileAsInput (tasksFile g) $ parseFromStream tasksParser
tasksFile :: CGroup -> FilePath
tasksFile =
(</> "tasks") . cgroupPath
tasksParser :: Parser (Set ProcessID)
tasksParser =
Set.fromList <$> many' (decimal <* endOfLine) <* endOfInput <?> "tasks"
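-- Usage sketch (the path and PID are made up; the CGroup value and
-- 'cgroupPath' come from CGroup.Types):
--
-- > -- assuming cgroupPath g == "/sys/fs/cgroup/cpu/mygroup"
-- > createCGroup g          -- mkdir /sys/fs/cgroup/cpu/mygroup
-- > classifyTask 1234 g     -- write "1234" to .../mygroup/tasks
-- > pids <- listTasks g     -- parse .../mygroup/tasks into a Set ProcessID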
|
4z3/cgserver
|
src/CGroup.hs
|
bsd-3-clause
| 1,244 | 0 | 10 | 224 | 288 | 167 | 121 | 29 | 1 |
{-# LANGUAGE CPP #-}
{-# LANGUAGE FlexibleContexts #-}
#ifdef TRUSTWORTHY
{-# LANGUAGE Trustworthy #-}
#endif
-----------------------------------------------------------------------------
-- |
-- Module : Data.Text.Strict.Lens
-- Copyright : (C) 2012-2014 Edward Kmett
-- License : BSD-style (see the file LICENSE)
-- Maintainer : Edward Kmett <[email protected]>
-- Stability : experimental
-- Portability : non-portable
--
----------------------------------------------------------------------------
module Data.Text.Strict.Lens
( packed, unpacked
, builder
, text
, utf8
) where
import Control.Lens
import Data.ByteString (ByteString)
import Data.Text
import Data.Text.Encoding
import Data.Text.Lazy (toStrict)
import Data.Text.Lazy.Builder
-- $setup
-- >>> :set -XOverloadedStrings
-- | This isomorphism can be used to 'pack' (or 'unpack') strict 'Text'.
--
--
-- >>> "hello"^.packed -- :: Text
-- "hello"
--
-- @
-- 'pack' x ≡ x '^.' 'packed'
-- 'unpack' x ≡ x '^.' 'from' 'packed'
-- 'packed' ≡ 'from' 'unpacked'
-- 'packed' ≡ 'iso' 'pack' 'unpack'
-- @
packed :: Iso' String Text
packed = iso pack unpack
{-# INLINE packed #-}
-- | This isomorphism can be used to 'unpack' (or 'pack') lazy 'Text'.
--
-- >>> "hello"^.unpacked -- :: String
-- "hello"
--
-- This 'Iso' is provided for notational convenience rather than out of great need, since
--
-- @
-- 'unpacked' ≡ 'from' 'packed'
-- @
--
-- @
-- 'pack' x ≡ x '^.' 'from' 'unpacked'
-- 'unpack' x ≡ x '^.' 'packed'
-- 'unpacked' ≡ 'iso' 'unpack' 'pack'
-- @
unpacked :: Iso' Text String
unpacked = iso unpack pack
-- | Convert between strict 'Text' and 'Builder' .
--
-- @
-- 'fromText' x ≡ x '^.' 'builder'
-- 'toStrict' ('toLazyText' x) ≡ x '^.' 'from' 'builder'
-- @
builder :: Iso' Text Builder
builder = iso fromText (toStrict . toLazyText)
{-# INLINE builder #-}
-- | Traverse the individual characters in strict 'Text'.
--
-- >>> anyOf text (=='o') "hello"
-- True
--
-- When the type is unambiguous, you can also use the more general 'each'.
--
-- @
-- 'text' ≡ 'unpacked' . 'traversed'
-- 'text' ≡ 'each'
-- @
--
-- Note that when just using this as a 'Setter', @'setting' 'Data.Text.map'@ can
-- be more efficient.
text :: IndexedTraversal' Int Text Char
text = unpacked . traversed
{-# INLINE text #-}
-- | Encode/Decode a strict 'Text' to/from strict 'ByteString', via UTF-8.
--
-- >>> utf8 # "☃"
-- "\226\152\131"
utf8 :: Prism' ByteString Text
utf8 = prism' encodeUtf8 (preview _Right . decodeUtf8')
{-# INLINE utf8 #-}
|
hvr/lens
|
src/Data/Text/Strict/Lens.hs
|
bsd-3-clause
| 2,555 | 0 | 8 | 438 | 260 | 178 | 82 | 27 | 1 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE QuasiQuotes #-}
{-# LANGUAGE FlexibleInstances #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
-- |
module Network.Libtorrent.Vectors where
import qualified Language.C.Inline as C
import qualified Language.C.Inline.Cpp as C
import Network.Libtorrent.Inline
import Network.Libtorrent.String (StdString)
import Network.Libtorrent.TH (defineStdVector)
C.context libtorrentCtx
C.include "<string>"
C.include "<vector>"
C.verbatim "typedef std::vector<std::string> VectorString;"
C.using "namespace std"
$(defineStdVector "string" "VectorString" ''C'String ''C'VectorString ''StdString)
|
eryx67/haskell-libtorrent
|
src/Network/Libtorrent/Vectors.hs
|
bsd-3-clause
| 749 | 0 | 8 | 128 | 125 | 71 | 54 | 18 | 0 |
-- | Hw02
module Hw02 where
import Log
parseMessage :: String -> LogMessage
parseMessage s = makeMessage (words s)
where
makeMessage ("I":timestamp:ss) = LogMessage Info (toI timestamp) (unwords ss)
makeMessage ("W":timestamp:ss) = LogMessage Warning (toI timestamp) (unwords ss)
makeMessage ("E":severity:timestamp:ss) = LogMessage (Error (toI severity)) (toI timestamp) (unwords ss)
makeMessage _ = Unknown s
toI str = read str :: Int
parse :: String -> [LogMessage]
parse s = map parseMessage (lines s)
insert :: LogMessage -> MessageTree -> MessageTree
insert (Unknown _) t = t
insert m@(LogMessage _ ms _) t =
case t of
(Node tl v@(LogMessage _ ts _) tr) -> if ts > ms
then Node (insert m tl) v tr
else Node tl v (insert m tr)
(Node _ (Unknown _) _) -> error "what?"
Leaf -> Node Leaf m Leaf
build :: [LogMessage] -> MessageTree
build xs = foldl doInsert Leaf xs
where
doInsert t m = insert m t
inOrder :: MessageTree -> [LogMessage]
inOrder Leaf = []
inOrder (Node l v r) = (inOrder l) ++ [v] ++ (inOrder r)
whatWentWrong :: [LogMessage] -> [String]
whatWentWrong [] = []
whatWentWrong ((LogMessage (Error c) _ s):xs) = if c > 50
then s : whatWentWrong xs
else whatWentWrong xs
whatWentWrong (_:xs) = whatWentWrong xs
|
codingiam/sandbox-hs
|
src/Hw02.hs
|
bsd-3-clause
| 1,456 | 0 | 12 | 442 | 592 | 302 | 290 | 32 | 4 |
{-# LANGUAGE OverloadedStrings, GeneralizedNewtypeDeriving #-}
-- An example of embedding a custom monad into
-- Scotty's transformer stack, using ReaderT to provide access
-- to a TVar containing global state.
--
-- Note: this example is somewhat simple, as our top level
-- is IO itself. The types of 'scottyT' and 'scottyAppT' are
-- general enough to allow a Scotty application to be
-- embedded into any MonadIO monad.
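-- A rough sketch of the embedding below (only ServerData comes from this
-- project; the shape is an assumption inferred from 'runM'): the custom monad
-- is essentially
--
-- >   ReaderT ServerData IO
--
-- and 'runM' = flip runReaderT sync collapses that reader layer back to IO,
-- which is all scottyT needs at the top level.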
module UHF.TeeVee.Application (startServer) where
import Control.Concurrent.STM
import Control.Monad.Reader
import Data.Text.Lazy (pack)
import Network.Wai.Middleware.Static
import UHF.Types (ServerData, ServerApplication)
import UHF.Actions
import Web.Scotty.Trans
startServer :: ServerData -> IO ()
startServer sync = do
-- Note that 'runM' is only called once, at startup.
let runM = flip runReaderT sync
-- 'runActionToIO' is called once per action.
runActionToIO = runM
scottyT 3000 runM runActionToIO $ do
middleware $ staticPolicy (noDots >-> addBase "static")
get "/" $ do
-- c <- tvM $ gets tickCount
text $ "waka"
|
hansonkd/UHF
|
UHF/TeeVee/Application.hs
|
bsd-3-clause
| 1,138 | 0 | 14 | 243 | 170 | 97 | 73 | 17 | 1 |
-- | Report page controller.
module HL.C.Report where
import HL.C
import HL.V.Report
import HL.M.Report
-- | Report controller.
getReportR :: Int -> FilePath -> C Html
getReportR year page =
do content <- io (getReportPage year page)
blaze (reportV year page content)
-- | Default page to go to for the given year.
getReportHomeR :: Int -> C Html
getReportHomeR year =
redirect (ReportR year "haskell.html")
|
chrisdone/hl
|
src/HL/C/Report.hs
|
bsd-3-clause
| 421 | 0 | 10 | 79 | 118 | 62 | 56 | 11 | 1 |
--------------------------------------------------------------------------------
-- |
-- Module : Graphics.Rendering.OpenGL.Raw.NV.MultisampleFilterHint
-- Copyright : (c) Sven Panne 2015
-- License : BSD3
--
-- Maintainer : Sven Panne <[email protected]>
-- Stability : stable
-- Portability : portable
--
-- The <https://www.opengl.org/registry/specs/NV/multisample_filter_hint.txt NV_multisample_filter_hint> extension.
--
--------------------------------------------------------------------------------
module Graphics.Rendering.OpenGL.Raw.NV.MultisampleFilterHint (
-- * Enums
gl_MULTISAMPLE_FILTER_HINT_NV
) where
import Graphics.Rendering.OpenGL.Raw.Tokens
|
phaazon/OpenGLRaw
|
src/Graphics/Rendering/OpenGL/Raw/NV/MultisampleFilterHint.hs
|
bsd-3-clause
| 692 | 0 | 4 | 78 | 37 | 31 | 6 | 3 | 0 |
----------------------------------------------------------------
-- Application module
-- GUI scripts (HScript)
-- Actions
----------------------------------------------------------------
module WebUI.Scripts.HScript
( module ConMonRWS
, HSL (..)
, TypeHLang (..)
, HLanguage (..)
, BuilderHSL (..)
, HBConfig (..), defaultHBConfig
, HBLog (..)
, HLFinish (..)
, runScript
) where
-- Module imports
import Prelude as PRL
import Control.Monad.RWS as ConMonRWS
import System.IO.Unsafe (unsafePerformIO)
-- | Monad transformer type for HScript
-- RWST monad format: RWST r w s m a
type HSL l a = RWST HBConfig [HBLog] l IO a
-- | Language type
data TypeHLang = THLScript
| THLBinary
| THLOther String
deriving (Show, Eq)
-- | Script builder configuration data
data HBConfig = HBConfig { hbc_titleComment :: String -- ^ Comment placed in the header of the generated script code
                         , hbc_entryLine :: String -- ^ Line separator
                         , hbc_tabSpace :: String -- ^ Indentation string (if empty, the code is emitted without indentation)
                         , hbc_empty :: String -- ^ Empty symbol
}
-- | Default script builder configuration data
defaultHBConfig :: HBConfig
defaultHBConfig = HBConfig { hbc_titleComment = "Generate by HScript from the HFitUI library"
, hbc_entryLine = ""
, hbc_tabSpace = " "
, hbc_empty = ""
}
-- | Script builder log data
data HBLog = HBLog String
-- | Scripting language class
class HLanguage a where
nameHLang :: a -> String
typeHLang :: a -> TypeHLang
initHLang :: a
emptyHLang :: a
beginH :: a
endH :: a
-- | Script builder class
class BuilderHSL a where
buildHSL :: HBConfig -> a -> String
buildHSL hbConf _ = hbc_empty hbConf
buildHSL_L :: HBConfig -> a -> String
buildHSL_L _ _ = error "HScript Error! There was a call method \'buildHSL_L\' from class \'BuilderHSL\' for unintended type."
buildHSL_R :: HBConfig -> a -> String
buildHSL_R _ _ = error "HScript Error! There was a call method \'buildHSL_R\' from class \'BuilderHSL\' for unintended type."
-- | Script completion data
data HLFinish = HLFinish
instance BuilderHSL HLFinish where
buildHSL _ _ = ""
buildHSL_L _ _ = ""
buildHSL_R _ _ = ""
-- | Run a script to generate its output
runScript :: (HLanguage l, BuilderHSL l, BuilderHSL a)
=> HBConfig
-> HSL l a
-> String
runScript hbConf srcHSL =
unsafePerformIO $ do
(s, log) <- execRWST srcHSL hbConf initHLang
return $ buildHSL hbConf s
|
iqsf/HFitUI
|
src/WebUI/Scripts/HScript.hs
|
bsd-3-clause
| 3,439 | 0 | 9 | 1,062 | 502 | 296 | 206 | 55 | 1 |
module Biolab.Interfaces.MySql (
DbReadable(..),
ExpDesc(..),
PlateDesc(..),
DbMeasurement(..),
WellDesc(..),
SelectCriteria(..),
SampleQuery (..),
readTable,
dbConnectInfo,
mesFromDB,
loadExpDataDB,
fromNullString,
loadMes,
)
where
import Database.HDBC.MySQL
import Database.HDBC
import Data.ByteString.UTF8 (toString)
import Data.Function (on)
import Data.Maybe (fromMaybe)
import Data.DateTime (fromSeconds, DateTime)
import Data.Char (ord)
import Biolab.Types
import Biolab.Interfaces.Fluorophores
import Data.List (find, nub, sort, intercalate, group)
import Control.Monad.Error (runErrorT)
import Control.Monad (join)
import Control.Monad.IO.Class (liftIO)
import Data.Either.Unwrap (fromRight)
import Data.ConfigFile (emptyCP, readfile, get)
import qualified Data.Vector as V
import qualified Data.Map as M
type ExpId = String
data SampleQuery = SampleQuery {sqExpId :: [String], sqPlate :: [Int], sqWell :: [Well] } deriving (Eq, Ord, Show, Read)
sampleQueryToSC :: SampleQuery -> SelectCriteria
sampleQueryToSC (SampleQuery {sqExpId =seid, sqPlate =sp, sqWell =sw})
| null seid && null sp && null sw = SelectCriteria "" []
| otherwise = SelectCriteria wc vals
where
vals = map toSql seid ++ map toSql sp ++ concatMap wellToSql sw
wc = "WHERE ( " ++ (intercalate " ) AND ( " . filter (not . null) $ [eid,ps,ws]) ++ " ) "
eid = intercalate " OR " (replicate (length seid) " exp_id = ? ")
ps = intercalate " OR " (replicate (length sp) " plate = ? ")
ws = intercalate " OR " (replicate (length sw) " ( col = ? AND row = ? ) ")
wellFromInts :: Int -> Int -> Well
wellFromInts r c = Well { wRow = ['a'..'h'] !! r, wColumn = c + 1 }
wellToSql :: Well -> [SqlValue]
wellToSql w = [toSql $ wColumn w - 1, toSql $ ((-) `on` ord) (wRow w) 'a']
-- consider adding table names to configuration file as well.
dbConnectInfo :: FilePath -> IO MySQLConnectInfo
dbConnectInfo cf = do
rv <- runErrorT $
do
cp <- join $ liftIO $ readfile emptyCP cf
let sect = "MYSQL"
host <- get cp sect "host"
user <- get cp sect "user"
passwd <- get cp sect "password"
dbname <- get cp sect "dbname"
port <- get cp sect "port"
unixsock <- get cp sect "unixsocket"
return $ MySQLConnectInfo {
mysqlHost = host,
mysqlUser = user,
mysqlPassword = passwd,
mysqlDatabase = dbname,
mysqlPort = port,
mysqlUnixSocket = unixsock,
mysqlGroup = Nothing
}
return $ fromRight rv
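-- An example of the configuration file section read above (all values are
-- hypothetical):
--
-- > [MYSQL]
-- > host = localhost
-- > user = biolab
-- > password = secret
-- > dbname = tecan
-- > port = 3306
-- > unixsocket = /var/run/mysqld/mysqld.sock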
maxVal = 70000 -- this hack will need resolving in the future...
data SelectCriteria = SelectCriteria {scWhere :: String, scVals :: [SqlValue]}
deriving (Show)
class DbReadable a where
dbRead :: [SqlValue] -> a
data ExpDesc = ExpDesc {edExp :: ExpId, edDesc :: String} deriving (Show)
instance DbReadable ExpDesc where
dbRead [SqlByteString exp_id, SqlByteString desc] =
ExpDesc {
edExp = toString exp_id,
edDesc = toString desc
}
data WellDesc = WellDesc {wdExp :: ExpId, wdPlate :: Int, wdWell :: Well, wdDesc :: String} deriving (Show, Eq)
instance DbReadable WellDesc where
dbRead [SqlByteString exp_id, SqlInt32 p, SqlInt32 row, SqlInt32 col, SqlByteString desc] =
WellDesc {
wdExp = toString exp_id,
wdPlate = fromIntegral p,
wdWell = (wellFromInts `on` fromIntegral) row col,
wdDesc = toString desc
}
data PlateDesc = PlateDesc {pdExp :: ExpId, pdPlate :: Int, pdDesc :: String, pdOwner :: Maybe String, pdProject :: Maybe String} deriving (Show)
fromNullString :: SqlValue -> Maybe String
fromNullString SqlNull = Nothing
fromNullString (SqlByteString s) = Just . toString $ s
instance DbReadable PlateDesc where
dbRead [SqlByteString exp_id, SqlInt32 p, SqlByteString desc, owner, project] =
PlateDesc {
pdExp = toString exp_id,
pdPlate = fromIntegral p,
pdDesc = toString desc,
pdOwner = fromNullString owner,
pdProject = fromNullString project
}
data DbMeasurement = DbMeasurement { dbmExpDesc :: ExpId, dbmPlate :: Int, dbmTime :: DateTime, dbmType :: String, dbmWell :: Well, dbmVal :: Double } deriving (Eq, Show)
instance DbReadable DbMeasurement where
dbRead [SqlByteString exp_id, SqlInt32 plate_num, SqlByteString mt, SqlInt32 row, SqlInt32 col, SqlInt32 timestamp, v] =
DbMeasurement {
dbmExpDesc = toString exp_id,
dbmPlate = fromIntegral plate_num,
dbmTime = fromSeconds . fromIntegral $ timestamp,
dbmType = toString mt,
dbmWell = well,
dbmVal = val v
}
where
val (SqlDouble x) = if x == 0 then maxVal else x
val SqlNull = maxVal
well = (wellFromInts `on` fromIntegral) row col
readTable :: (DbReadable a) => MySQLConnectInfo -> String -> Maybe SelectCriteria -> IO [a]
readTable db_conf t_name msc = do
conn <- connectMySQL db_conf
let where_clause = fromMaybe "" . fmap scWhere $ msc
let where_params = fromMaybe [] . fmap scVals $ msc
entries <- quickQuery' conn ("SELECT * FROM " ++ t_name ++ " " ++ where_clause) where_params
return . map dbRead $ entries
loadExpDataDB :: FilePath -> ExpId -> Int -> IO ExpData
loadExpDataDB cf exp_id p = do
db_conf <- dbConnectInfo cf
readings <- readTable db_conf "tecan_readings" (Just $ SelectCriteria "where exp_id = ? AND plate = ?" [toSql exp_id, toSql p])
well_labels <- readTable db_conf "tecan_labels" (Just $ SelectCriteria "where exp_id = ? AND plate = ?" [toSql exp_id, toSql p])
return . makeExpData well_labels $ mesFromDB readings
makeExpData :: [WellDesc] -> [(SampleId,[ColonyMeasurements RawMeasurement])] -> ExpData
makeExpData ws ss = M.fromList [ (l, l_samples l) | l <- labels]
where
labels = nub . map wdDesc $ ws
l_samples l = M.fromList [ (s,fromMaybe (not_found s) . lookup s $ ss) | s <- l_ids l]
l_ids l = map wdTosid . filter ((l ==) . wdDesc) $ ws
not_found s = error $ "couldn't find id:" ++ show s ++ " in measurements"
wdTosid :: WellDesc -> SampleId
wdTosid wd = SampleId {sidExpId = wdExp wd, sidPlate = wdPlate wd, sidWell = wdWell wd}
dbMesSampleId :: DbMeasurement -> SampleId
dbMesSampleId m = SampleId {sidExpId = dbmExpDesc m, sidPlate = dbmPlate m, sidWell = dbmWell m}
dbMesType :: DbMeasurement -> MesType
dbMesType (DbMeasurement {dbmType = mt}) = flVals mt
-- assumes all measurements are of the same ColonyId
samples :: [DbMeasurement] -> [ColonyMeasurements RawMeasurement]
samples dbms = zipWith ($) (map mes $ mts dbms) (repeat dbms)
where
mts = nub . map dbMesType
mes mt = binDbMes mt . filter ((mt ==) . dbMesType)
-- assumes all measurements are of the same ColonyId and have the same type (checked below).
binDbMes :: MesType -> [DbMeasurement] -> ColonyMeasurements RawMeasurement
binDbMes (Absorbance a) dbm = AbsorbanceMeasurement (AbsorbanceSample {asWaveLength = a, asMes = rawMes dbm})
binDbMes (Fluorescence a b) dbm = FluorescenseMeasurement (FluorescenseSample {flExcitation = a, flEmission = b, flMes = rawMes dbm})
binDbMes (Luminesense a) dbm = LuminesenseMeasurement (LuminescenseSample {lsWaveLength = a, lsMes = rawMes dbm})
rawMes :: [DbMeasurement] -> RawColonyMeasurements
rawMes dbm
| single_type dbm && single_colony dbm = V.fromList . sort . map (\x -> (dbmTime x, RawMeasurement . dbmVal $ x)) $ dbm
| otherwise = error $ "mesurements of multiple types/colonies: " ++ show dbm
where
single_type = (1 ==) . length . group . map dbmType
single_colony = (1 ==) . length . group . map dbMesSampleId
colonySamples :: SampleId -> [DbMeasurement] -> [DbMeasurement]
colonySamples sid = filter ((sid ==) . dbMesSampleId)
mesFromDB :: [DbMeasurement] -> [(SampleId,([ColonyMeasurements RawMeasurement]))]
mesFromDB dbms = [ (sid, samples . colonySamples sid $ dbms) | sid <- sids dbms]
where
sids = nub . map dbMesSampleId
loadMes :: MySQLConnectInfo -> SampleQuery -> IO [(SampleId,[ColonyMeasurements RawMeasurement])]
loadMes db_conf sq = fmap mesFromDB $ readTable db_conf "tecan_readings" (Just $ sampleQueryToSC sq)
|
uriba/biolab-interfaces
|
Biolab/Interfaces/MySql.hs
|
bsd-3-clause
| 8,489 | 0 | 14 | 2,073 | 2,722 | 1,466 | 1,256 | 157 | 1 |
{-- snippet all --}
module Main where
import qualified PodMainGUI
main = PodMainGUI.main "podresources.glade"
{-- /snippet all --}
|
binesiyu/ifl
|
examples/ch23/PodLocalMain.hs
|
mit
| 133 | 0 | 6 | 20 | 20 | 13 | 7 | 3 | 1 |
{-- snippet import --}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
import Control.Concurrent.STM
import Control.Monad
data Item = Scroll
| Wand
| Banjo
deriving (Eq, Ord, Show)
newtype Gold = Gold Int
deriving (Eq, Ord, Show, Num)
newtype HitPoint = HitPoint Int
deriving (Eq, Ord, Show, Num)
type Inventory = TVar [Item]
type Health = TVar HitPoint
type Balance = TVar Gold
data Player = Player {
balance :: Balance,
health :: Health,
inventory :: Inventory
}
{-- /snippet import --}
{-- snippet removeInv --}
removeInv :: Eq a => a -> [a] -> Maybe [a]
removeInv x xs =
case takeWhile (/= x) xs of
(_:ys) -> Just ys
[] -> Nothing
{-- /snippet removeInv --}
{-- snippet basicTransfer --}
basicTransfer qty fromBal toBal = do
fromQty <- readTVar fromBal
toQty <- readTVar toBal
writeTVar fromBal (fromQty - qty)
writeTVar toBal (toQty + qty)
{-- /snippet basicTransfer --}
{-- snippet transfer --}
transfer :: Gold -> Balance -> Balance -> STM ()
transfer qty fromBal toBal = do
fromQty <- readTVar fromBal
when (qty > fromQty) $
retry
writeTVar fromBal (fromQty - qty)
readTVar toBal >>= writeTVar toBal . (qty +)
{-- /snippet transfer --}
transferTest :: STM (Gold, Gold)
{-- snippet transferTest --}
transferTest = do
alice <- newTVar (12 :: Gold)
bob <- newTVar 4
basicTransfer 3 alice bob
liftM2 (,) (readTVar alice) (readTVar bob)
{-- /snippet transferTest --}
{-- snippet types --}
basicTransfer :: Gold -> Balance -> Balance -> STM ()
maybeGiveItem :: Item -> Inventory -> Inventory -> STM Bool
{-- /snippet types --}
{-- snippet maybeGiveItem --}
maybeGiveItem item fromInv toInv = do
fromList <- readTVar fromInv
case removeInv item fromList of
Nothing -> return False
Just newList -> do
writeTVar fromInv newList
destItems <- readTVar toInv
writeTVar toInv (item : destItems)
return True
{-- /snippet maybeGiveItem --}
{-- snippet giveItem --}
giveItem :: Item -> Inventory -> Inventory -> STM ()
giveItem item fromInv toInv = do
fromList <- readTVar fromInv
case removeInv item fromList of
Nothing -> retry
Just newList -> do
writeTVar fromInv newList
readTVar toInv >>= writeTVar toInv . (item :)
{-- /snippet giveItem --}
{-- snippet maybeSellItem --}
maybeSellItem :: Item -> Gold -> Player -> Player -> STM Bool
maybeSellItem item price buyer seller = do
given <- maybeGiveItem item (inventory seller) (inventory buyer)
if given
then do
basicTransfer price (balance buyer) (balance seller)
return True
else return False
{-- /snippet maybeSellItem --}
{-- snippet sellItem --}
sellItem :: Item -> Gold -> Player -> Player -> STM ()
sellItem item price buyer seller = do
giveItem item (inventory seller) (inventory buyer)
transfer price (balance buyer) (balance seller)
{-- /snippet sellItem --}
{-- snippet trySellItem --}
trySellItem :: Item -> Gold -> Player -> Player -> STM Bool
trySellItem item price buyer seller =
sellItem item price buyer seller >> return True
`orElse`
return False
{-- /snippet trySellItem --}
{-- snippet crummyList --}
crummyList :: [(Item, Gold)] -> Player -> Player
-> STM (Maybe (Item, Gold))
crummyList list buyer seller = go list
where go [] = return Nothing
go (this@(item,price) : rest) = do
sellItem item price buyer seller
return (Just this)
`orElse`
go rest
{-- /snippet crummyList --}
{-- snippet shoppingList --}
shoppingList :: [(Item, Gold)] -> Player -> Player
-> STM (Maybe (Item, Gold))
shoppingList list buyer seller = maybeSTM . msum $ map sellOne list
where sellOne this@(item,price) = do
sellItem item price buyer seller
return this
{-- /snippet shoppingList --}
{-- snippet maybeSTM --}
maybeSTM :: STM a -> STM (Maybe a)
maybeSTM m = (Just `liftM` m) `orElse` return Nothing
{-- /snippet maybeSTM --}
{-- snippet maybeM --}
maybeM :: MonadPlus m => m a -> m (Maybe a)
maybeM m = (Just `liftM` m) `mplus` return Nothing
{-- /snippet maybeM --}
{-- snippet bogusSale --}
bogusTransfer qty fromBal toBal = do
fromQty <- atomically $ readTVar fromBal
-- window of inconsistency
toQty <- atomically $ readTVar toBal
atomically $ writeTVar fromBal (fromQty - qty)
-- window of inconsistency
atomically $ writeTVar toBal (toQty + qty)
bogusSale :: Item -> Gold -> Player -> Player -> IO ()
bogusSale item price buyer seller = do
atomically $ giveItem item (inventory seller) (inventory buyer)
bogusTransfer price (balance buyer) (balance seller)
{-- /snippet bogusSale --}
{-- snippet newPlayer --}
newPlayer :: Gold -> HitPoint -> [Item] -> STM Player
newPlayer balance health inventory =
Player `liftM` newTVar balance
`ap` newTVar health
`ap` newTVar inventory
populateWorld :: STM [Player]
populateWorld = sequence [ newPlayer 20 20 [Wand, Banjo],
newPlayer 10 12 [Scroll] ]
{-- /snippet newPlayer --}
{-- snippet consistentBalance --}
consistentBalance :: [Player] -> STM (STM ())
consistentBalance players = do
initialTotal <- totalBalance
return $ do
curTotal <- totalBalance
when (curTotal /= initialTotal) $
error "inconsistent global balance"
where totalBalance = foldM addBalance 0 players
addBalance a b = (a+) `liftM` readTVar (balance b)
{-- /snippet consistentBalance --}
{-- snippet tryBogusSale --}
tryBogusSale = do
players@(alice:bob:_) <- atomically populateWorld
atomically $ alwaysSucceeds =<< consistentBalance players
bogusSale Wand 5 alice bob
{-- /snippet tryBogusSale --}
|
binesiyu/ifl
|
examples/ch28/GameInventory.hs
|
mit
| 5,777 | 9 | 14 | 1,359 | 1,817 | 907 | 910 | 126 | 2 |
--
--
--
-----------------
-- Exercise 4.31.
-----------------
--
--
--
module E'4'31 where
import Prelude hiding ( gcd )
gcd :: Integer -> Integer -> Integer
gcd a b
-- Euclid's algorithm:
| b == 0 = a
| otherwise = gcd b (a `mod` b)
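-- A worked trace: gcd 12 18 = gcd 18 12 = gcd 12 6 = gcd 6 0 = 6.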
|
pascal-knodel/haskell-craft
|
_/links/E'4'31.hs
|
mit
| 265 | 0 | 8 | 77 | 79 | 47 | 32 | 6 | 1 |
module Llvm.Asm.Syntax
(module Llvm.Asm.Syntax.Parser.Module
,module Llvm.Asm.Syntax.Printer.LlvmPrint
) where
import Llvm.Asm.Syntax.Parser.Module
import Llvm.Asm.Syntax.Printer.LlvmPrint
|
mlite/hLLVM
|
src/Llvm/Asm/Syntax.hs
|
bsd-3-clause
| 210 | 0 | 5 | 33 | 43 | 32 | 11 | 5 | 0 |
module HEP.Util (module M, module P) where
import HEP.Util.Matrix as M
import HEP.Util.Polynomial as P
|
cbpark/hep-kinematics
|
src/HEP/Util.hs
|
bsd-3-clause
| 108 | 0 | 4 | 20 | 32 | 23 | 9 | 3 | 0 |
{-# LANGUAGE TypeOperators, TypeFamilies, TupleSections #-}
{-# LANGUAGE FlexibleInstances, MultiParamTypeClasses #-}
{-# OPTIONS_GHC -Wall #-}
----------------------------------------------------------------------
-- |
-- Module : LambdaCCC.StateTrie
-- Copyright : (c) 2014 Tabula, Inc.
--
-- Maintainer : [email protected]
-- Stability : experimental
--
-- Memoizing State monad
----------------------------------------------------------------------
module LambdaCCC.StateTrie
( StateTrieX, StateTrie(..)
, toState, fromState
, get, put, runStateTrie, evalStateTrie, execStateTrie
) where
import Control.Arrow (first)
import Control.Applicative (Applicative(..))
import Control.Monad.State -- mtl
import Data.MemoTrie (HasTrie(..),(:->:))
import Circat.Rep (Rep,HasRep(..))
-- | 'StateTrie' inner representation
type StateTrieX s a = s :->: (a,s)
-- | Memoizing state monad
newtype StateTrie s a = StateTrie { unStateTrie :: StateTrieX s a }
-- | Operate inside a 'StateTrie'.
inStateTrie :: (StateTrieX s a -> StateTrieX t b)
-> (StateTrie s a -> StateTrie t b)
inStateTrie = StateTrie <~ unStateTrie
{-# INLINE inStateTrie #-}
{- unused
inStateTrie2 :: (StateTrieX s a -> StateTrieX t b -> StateTrieX u c)
-> (StateTrie s a -> StateTrie t b -> StateTrie u c)
inStateTrie2 = inStateTrie <~ unStateTrie
-}
-- | Run a memoized stateful computation
runStateTrie :: HasTrie s => StateTrie s a -> s -> (a,s)
runStateTrie (StateTrie t) = untrie t
{-# INLINE runStateTrie #-}
-- | Run a memoized stateful computation and return just value
evalStateTrie :: HasTrie s => StateTrie s a -> s -> a
evalStateTrie = (result.result) fst runStateTrie
{-# INLINE evalStateTrie #-}
-- | Run a memoized stateful computation and return just state
execStateTrie :: HasTrie s => StateTrie s a -> s -> s
execStateTrie = (result.result) snd runStateTrie
{-# INLINE execStateTrie #-}
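-- Usage sketch (example only; relies on the HasTrie Bool instance from
-- Data.MemoTrie):
--
-- > toggle :: StateTrie Bool Int
-- > toggle = do { s <- get; put (not s); return (if s then 1 else 0) }
-- >
-- > runStateTrie toggle False  -- == (0, True), and the step is memoised
-- >                            --    per starting state via the trie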
instance HasTrie s => Functor (StateTrie s) where
fmap = inStateTrie . fmap . first
{-# INLINE fmap #-}
instance HasTrie s => Applicative (StateTrie s) where
pure a = StateTrie (trie (a,))
(<*>) = inState2 (<*>)
{-# INLINE pure #-}
{-# INLINE (<*>) #-}
-- | 'State'-to-'StateTrie' adapter
fromState :: HasTrie s => State s a -> StateTrie s a
fromState = StateTrie . trie . runState
{-# INLINE fromState #-}
-- | 'StateTrie'-to-'State' adapter
toState :: HasTrie s => StateTrie s a -> State s a
toState = state . untrie . unStateTrie
{-# INLINE toState #-}
-- | Transform using 'State' view
inState :: (HasTrie s, HasTrie t) =>
(State s a -> State t b)
-> (StateTrie s a -> StateTrie t b)
inState = fromState <~ toState
{-# INLINE inState #-}
-- | Transform using 'State' view
inState2 :: (HasTrie s, HasTrie t, HasTrie u) =>
(State s a -> State t b -> State u c)
-> (StateTrie s a -> StateTrie t b -> StateTrie u c)
inState2 = inState <~ toState
{-# INLINE inState2 #-}
instance HasTrie s => Monad (StateTrie s) where
return = pure
m >>= f = joinST (fmap f m)
{-# INLINE return #-}
{-# INLINE (>>=) #-}
joinST :: HasTrie s => StateTrie s (StateTrie s a) -> StateTrie s a
joinST = fromState . join . fmap toState . toState
{-# INLINE joinST #-}
-- joinST = inState (join . fmap toState)
-- = inState ((=<<) toState)
instance HasTrie s => MonadState s (StateTrie s) where
state = StateTrie . trie
-- TODO: Perhaps use 'state' in the definitions of pure and fromState.
type instance Rep (StateTrie s a) = StateTrieX s a
instance HasRep (StateTrie s a) where
repr (StateTrie t) = t
abst = StateTrie
{--------------------------------------------------------------------
Misc
--------------------------------------------------------------------}
-- | Add post- & pre-processing
(<~) :: (b -> b') -> (a' -> a) -> ((a -> b) -> (a' -> b'))
(h <~ f) g = h . g . f
-- | Add post-processing
result :: (b -> b') -> ((a -> b) -> (a -> b'))
result = (.)
-- result = (<~ id)
|
capn-freako/lambda-ccc
|
src/LambdaCCC/StateTrie.hs
|
bsd-3-clause
| 4,006 | 0 | 12 | 812 | 1,004 | 555 | 449 | 69 | 1 |
module Skell.Frontend.MsgPack where
-- With this approach a third application can be created that only implements small pieces of functionality
--
import Control.Monad (forever)
import Control.Monad.State.Strict (evalStateT)
import Data.MessagePack
import Data.Binary
import Data.Default
import Pipes
import Pipes.Safe
import Pipes.Lift
import Pipes.Network.TCP.Safe
import qualified Pipes.Binary as P
import Skell.Types
-- Request
-- _1 Int -> 0 -> request type, msgid = used to correlate messages, method name and params
-- Response
-- _1 Int -> 1 -> response, msgid, Object -> the error (otherwise nil), Object -> the result (otherwise nil)
-- Notification
-- _1 Int -> 2 -> notification, Method, Params; this one does not expect a reply from the client
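-- Worked example (informal; msgid 42 is made up): a request calling method
-- "ping" with no params travels as (0, 42, "ping", []), a successful reply
-- comes back as (1, 42, nil, result), and a notification carries no msgid at
-- all: (2, "ping", []).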
data RPCMsg = Request (Int, Int, String, [Object])
| Response (Int, Int, Object, Object)
| Notification (Int, String, [Object])
deriving (Eq, Show)
-- TODO: the `Int` type varies with the platform; use Word32|8...
instance Binary RPCMsg where
put (Request x) = put x
put (Response x) = put x
put (Notification x) = put x
get = do t <- get :: Get Int
case t of
0 -> do msgid <- get -- :: Int
method <- get -- :: String
params <- get -- :: [Object]
return $ Request (0, msgid, method, params)
1 -> do msgid <- get -- :: Int
err <- get -- :: Object
result <- get -- :: Object
return $ Response (1, msgid, err, result)
2 -> do method <- get -- :: String
params <- get -- :: [Object]
return $ Notification (2, method, params)
x -> error $ "Fail to parse RPCMsg in Binary: " ++ show x
msgPackFrontend :: PSkell -> IO ()
msgPackFrontend = server "127.0.0.1" "4000"
-- TODO: it is not clear what fromServer and toServer actually end up doing; implement this with sockets
server :: HostName -> ServiceName -> PSkell -> IO ()
-- serve host port model = P.withSocketsDo $ runEffect $ evalStateP def $ do
-- (sock, _sockAddr) <- liftIO $ P.bindSock (P.Host host) port
-- liftIO $ print "HEllo"
-- P.fromSocket sock 4096 >-> getRequest >-> wrapModel model >-> sendResponse >-> P.toSocket sock
-- liftIO $ P.closeSock sock
server host port model = runSafeT . runEffect . evalStateP def $ forever $
fromServe 4096 (HostAny) port
>-> getRequest
>-> wrapModel model
>-> sendResponse
>-> toServe (Host host) port
where
wrapModel :: Pipe ISkell OSkell IOSkell () -> Pipe (Int, ISkell) (Int, OSkell) IOSkell ()
wrapModel m = do
(msgid, iSkell) <- await
((lift $ return iSkell) >~ m) >-> helper msgid
helper :: Int -> Pipe OSkell (Int, OSkell) IOSkell ()
helper i = do x <- await
yield (i, x)
-- The msgid that the Output has to send must be taken into account. There needs to be more communication between
-- getRequest <-> sendResponse
getRequest :: Pipe P.ByteString (Int, ISkell) IOSkell ()
getRequest = do
liftIO $ print "GetRequest"
bs <- await
liftIO $ print "GetRequest"
request <- evalStateT P.decode (yield bs)
case request of
Left _ -> yield (-1,undefined) -- Throw an error to IOSkell to debug
Right x -> yield (1,processMsg x)
-- The dictionary of available methods could be pulled from IOSkell so that plugins can add extra functionality
processMsg :: RPCMsg -> ISkell
processMsg = undefined
sendResponse :: Pipe (Int, OSkell) P.ByteString IOSkell ()
sendResponse = do
liftIO $ print "SendResponse"
(i, _) <- await
liftIO $ print "send"
P.encode $ Response (1, i, toObject (), toObject ("Hello i expect that this string arrive to its destiny"::String))
processResponse :: OSkell -> RPCMsg
processResponse _ = undefined
|
damianfral/Skell
|
src/Skell/UI/MsgPack.hs
|
bsd-3-clause
| 4,117 | 0 | 15 | 1,238 | 929 | 494 | 435 | 69 | 2 |
{-# LANGUAGE DeriveFoldable #-}
{-# LANGUAGE DeriveFunctor #-}
{-# LANGUAGE DeriveTraversable #-}
{-# OPTIONS_GHC -fno-warn-incomplete-patterns #-} -- uniplate patterns
{-# OPTIONS_HADDOCK show-extensions #-}
-- |
-- Module : Yi.Syntax.OnlineTree
-- License : GPL-2
-- Maintainer : [email protected]
-- Stability : experimental
-- Portability : portable
--
-- Module defining the 'Tree' used as part of many 'Mode's.
module Yi.Syntax.OnlineTree (Tree(..), manyToks,
tokAtOrBefore) where
import Control.Applicative (Applicative ((<*>), pure), (<$>))
import Data.Foldable (Foldable)
import Data.Traversable (Traversable)
import Yi.IncrementalParse (P, Parser (Look), symbol)
import Yi.Lexer.Alex (Tok)
import Yi.Syntax.Tree (IsTree (emptyNode, uniplate), tokAtOrBefore)
data Tree a = Bin (Tree a) (Tree a)
| Leaf a
| Tip
deriving (Show, Functor, Foldable, Traversable)
instance IsTree Tree where
emptyNode = Tip
uniplate (Bin l r) = ([l,r],\[l',r'] -> Bin l' r')
uniplate t = ([],const t)
manyToks :: P (Tok t) (Tree (Tok t))
manyToks = manyToks' 1
manyToks' :: Int -> P a (Tree a)
manyToks' n = Look (pure Tip) (\_ -> Bin <$> subTree n <*> manyToks' (n * 2))
subTree :: Int -> P a (Tree a)
subTree n = Look (pure Tip) . const $ case n of
0 -> pure Tip
1 -> Leaf <$> symbol (const True)
_ -> let m = n `div` 2 in Bin <$> subTree m <*> subTree m
|
TOSPIO/yi
|
src/library/Yi/Syntax/OnlineTree.hs
|
gpl-2.0
| 1,488 | 0 | 13 | 352 | 475 | 269 | 206 | 30 | 3 |
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<helpset version="2.0" xml:lang="it-IT">
<title>Online Menu | ZAP Extension</title>
<maps>
<homeID>top</homeID>
<mapref location="map.jhm"/>
</maps>
<view>
<name>TOC</name>
<label>Contents</label>
<type>org.zaproxy.zap.extension.help.ZapTocView</type>
<data>toc.xml</data>
</view>
<view>
<name>Index</name>
<label>Index</label>
<type>javax.help.IndexView</type>
<data>index.xml</data>
</view>
<view>
<name>Search</name>
<label>Ricerca</label>
<type>javax.help.SearchView</type>
<data engine="com.sun.java.help.search.DefaultSearchEngine">
JavaHelpSearch
</data>
</view>
<view>
<name>Favorites</name>
<label>Favorites</label>
<type>javax.help.FavoritesView</type>
</view>
</helpset>
|
msrader/zap-extensions
|
src/org/zaproxy/zap/extension/onlineMenu/resources/help_it_IT/helpset_it_IT.hs
|
apache-2.0
| 974 | 92 | 29 | 160 | 402 | 213 | 189 | -1 | -1 |
module Network.Wai.Middleware.AcceptOverride
( acceptOverride
) where
import Network.Wai
import Control.Monad (join)
import Data.ByteString (ByteString)
acceptOverride :: Middleware
acceptOverride app req =
app req'
where
req' =
case join $ lookup "_accept" $ queryString req of
Nothing -> req
Just a -> req { requestHeaders = changeVal "Accept" a $ requestHeaders req}
changeVal :: Eq a
=> a
-> ByteString
-> [(a, ByteString)]
-> [(a, ByteString)]
changeVal key val old = (key, val)
: filter (\(k, _) -> k /= key) old
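-- Example (informal): a request for "/foo?_accept=application/json" reaches
-- the inner application as though the client had sent
-- "Accept: application/json"; without the "_accept" query parameter the
-- request is passed through unchanged.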
|
erikd/wai
|
wai-extra/Network/Wai/Middleware/AcceptOverride.hs
|
mit
| 634 | 0 | 13 | 196 | 203 | 111 | 92 | 19 | 2 |
main :: IO ()
main = do
print $ r "1E100000"
print $ r "1E100000000"
print $ r "1E100000000000"
print $ r "1E100000000000000"
print $ r "1E100000000000000000"
print $ r "1E100000000000000000000"
r :: String -> Double
r = read
|
seereason/ghcjs
|
test/pkg/base/t7034.hs
|
mit
| 239 | 0 | 8 | 52 | 101 | 42 | 59 | 10 | 1 |
{-# LANGUAGE GeneralizedNewtypeDeriving, OverloadedStrings #-}
import Control.Concurrent (forkIO,threadDelay)
import Control.Concurrent.STM
import Control.Exception (finally)
import Control.Monad.Error
import Control.Applicative
import Text.Printf (printf)
data LogJob = LogMsg String | LogDone
data Result = Res String | Ready
data Task =
Page {name :: String,
links :: [Task]}
| Done
deriving (Eq,Show)
p = Page "one" [
Page ".two" [
Page "..four" [],
Page "..five" []
],
Page ".three" [
Page "..six" [],
Page "..seven" []
]]
main = do
results <- newTChanIO
-- for sending jobs to workers
jobQueue <- newTChanIO
logChannel <- newTChanIO
activeLogging <- newTVarIO 1
_ <- forkIO $
(logService logChannel)
`finally`
(atomically $ modifyTVar activeLogging (subtract 1))
let logSync s = atomically $ writeTChan logChannel (LogMsg s)
let k = 4
-- the number of workers currently running
activeWorkers <- newTVarIO k
logSync (printf "start %d threads...\n" k)
-- start worker threads
forM_ [1..k] $ \n -> forkIO $
(worker logSync results jobQueue n)
`finally`
(atomically $ modifyTVar activeWorkers (subtract 1))
-- add element to queue
atomically $ writeTChan jobQueue p
let loop = do
(r, jobsDone) <- atomically $ (,) <$> readTChan results <*> isEmptyTChan jobQueue
case r of
Res m -> logSync ("result was " ++ m ++ ", loop...") >> loop
Ready -> do
logSync "ready"
if jobsDone then do
logSync "finishing workers..."
atomically $ replicateM_ k (writeTChan jobQueue Done)
waitFor activeWorkers
atomically $ (writeTChan logChannel LogDone)
waitFor activeLogging
else (logSync $ "more mainloop") >> loop
loop
logService :: TChan LogJob -> IO ()
logService c = loop
where loop = do
x <- atomically $ readTChan c
case x of
LogMsg m -> print m >> loop
LogDone -> print "done logging!" >> return ()
waitFor :: TVar Int -> IO ()
waitFor alive = atomically $ do
count <- readTVar alive
check (count == 0)
worker :: (String -> IO ()) ->
TChan Result ->
TChan Task ->
Int ->
IO ()
worker logSync results jobQueue i = loop
where
-- Consume jobs until we are told to exit.
loop = do
job <- atomically $ readTChan jobQueue
logSync $ printf "[worker %d] consuming another job" i
threadDelay $ 100*1000
case job of
Done -> return ()
(Page n ls) -> do
logSync (printf "received page %s" n)
if (not . null) ls
then atomically $ mapM_ (writeTChan jobQueue) ls >> writeTChan results (Res $ "checked " ++ n)
else atomically $ writeTChan results (Ready)
loop
|
marcmo/linkchecker
|
haskell/attic/simple.hs
|
mit
| 3,046 | 9 | 23 | 1,014 | 955 | 454 | 501 | 82 | 3 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE MultiWayIf #-}
module Demo (main) where
import Control.Concurrent (threadDelay)
import Control.Applicative
import Control.Monad
import Foreign.C.Types
import Linear
import Linear.Affine ( Point(P) )
import Data.Word
import Data.StateVar
import Data.Monoid
import Data.Maybe
import qualified SDL
white, red, green, blue :: V4 Word8
white = V4 maxBound maxBound maxBound maxBound
red = V4 maxBound 0 0 maxBound
green = V4 0 maxBound 0 maxBound
blue = V4 0 0 maxBound maxBound
colors = [red, green, blue]
main :: IO ()
main = do
SDL.initializeAll
window <- SDL.createWindow "Demo" SDL.defaultWindow { SDL.windowInitialSize = V2 800 600 }
SDL.showWindow window
renderer <- SDL.createRenderer window (-1) SDL.defaultRenderer
let loop x = do
events <- SDL.pollEvents
if events /= [] then putStrLn $ show events else return ()
let (Any quit, Last newX) = foldMap (\case
SDL.QuitEvent -> (Any True, mempty)
SDL.KeyboardEvent e ->
if | SDL.keyboardEventKeyMotion e == SDL.Pressed ->
case SDL.keysymScancode (SDL.keyboardEventKeysym e) of
SDL.Scancode1 -> (Any False, Last (Just 0))
SDL.Scancode2 -> (Any False, Last (Just 1))
SDL.Scancode3 -> (Any False, Last (Just 2))
SDL.ScancodeQ -> (Any True, mempty)
_ -> mempty
| otherwise -> mempty
_ -> mempty) $
map SDL.eventPayload events
x' = newX <|> x
SDL.rendererDrawColor renderer $= white
SDL.clear renderer
SDL.rendererDrawColor renderer $= colors!!(fromJust x)
SDL.fillRect renderer (Just (SDL.Rectangle (P (V2 100 100)) (V2 100 200)))
SDL.present renderer
unless quit $ loop x'
loop $ Just 0
SDL.destroyWindow window
SDL.quit
|
PseudoPower/AFSM
|
draft/demo.hs
|
mit
| 2,033 | 0 | 29 | 627 | 660 | 331 | 329 | -1 | -1 |
{-# LANGUAGE PatternSynonyms, ForeignFunctionInterface, JavaScriptFFI #-}
module GHCJS.DOM.JSFFI.Generated.SpeechSynthesisUtterance
(js_newSpeechSynthesisUtterance, newSpeechSynthesisUtterance,
js_setText, setText, js_getText, getText, js_setLang, setLang,
js_getLang, getLang, js_setVoice, setVoice, js_getVoice, getVoice,
js_setVolume, setVolume, js_getVolume, getVolume, js_setRate,
setRate, js_getRate, getRate, js_setPitch, setPitch, js_getPitch,
getPitch, start, end, error, pause, resume, mark, boundary,
SpeechSynthesisUtterance, castToSpeechSynthesisUtterance,
gTypeSpeechSynthesisUtterance)
where
import Prelude ((.), (==), (>>=), return, IO, Int, Float, Double, Bool(..), Maybe, maybe, fromIntegral, round, fmap, Show, Read, Eq, Ord)
import Data.Typeable (Typeable)
import GHCJS.Types (JSVal(..), JSString)
import GHCJS.Foreign (jsNull)
import GHCJS.Foreign.Callback (syncCallback, asyncCallback, syncCallback1, asyncCallback1, syncCallback2, asyncCallback2, OnBlocked(..))
import GHCJS.Marshal (ToJSVal(..), FromJSVal(..))
import GHCJS.Marshal.Pure (PToJSVal(..), PFromJSVal(..))
import Control.Monad.IO.Class (MonadIO(..))
import Data.Int (Int64)
import Data.Word (Word, Word64)
import GHCJS.DOM.Types
import Control.Applicative ((<$>))
import GHCJS.DOM.EventTargetClosures (EventName, unsafeEventName)
import GHCJS.DOM.JSFFI.Generated.Enums
foreign import javascript unsafe
"new window[\"SpeechSynthesisUtterance\"]($1)"
js_newSpeechSynthesisUtterance ::
JSString -> IO SpeechSynthesisUtterance
-- | <https://developer.mozilla.org/en-US/docs/Web/API/SpeechSynthesisUtterance Mozilla SpeechSynthesisUtterance documentation>
newSpeechSynthesisUtterance ::
(MonadIO m, ToJSString text) => text -> m SpeechSynthesisUtterance
newSpeechSynthesisUtterance text
= liftIO (js_newSpeechSynthesisUtterance (toJSString text))
foreign import javascript unsafe "$1[\"text\"] = $2;" js_setText ::
SpeechSynthesisUtterance -> JSString -> IO ()
-- | <https://developer.mozilla.org/en-US/docs/Web/API/SpeechSynthesisUtterance.text Mozilla SpeechSynthesisUtterance.text documentation>
setText ::
(MonadIO m, ToJSString val) =>
SpeechSynthesisUtterance -> val -> m ()
setText self val = liftIO (js_setText (self) (toJSString val))
foreign import javascript unsafe "$1[\"text\"]" js_getText ::
SpeechSynthesisUtterance -> IO JSString
-- | <https://developer.mozilla.org/en-US/docs/Web/API/SpeechSynthesisUtterance.text Mozilla SpeechSynthesisUtterance.text documentation>
getText ::
(MonadIO m, FromJSString result) =>
SpeechSynthesisUtterance -> m result
getText self = liftIO (fromJSString <$> (js_getText (self)))
foreign import javascript unsafe "$1[\"lang\"] = $2;" js_setLang ::
SpeechSynthesisUtterance -> JSString -> IO ()
-- | <https://developer.mozilla.org/en-US/docs/Web/API/SpeechSynthesisUtterance.lang Mozilla SpeechSynthesisUtterance.lang documentation>
setLang ::
(MonadIO m, ToJSString val) =>
SpeechSynthesisUtterance -> val -> m ()
setLang self val = liftIO (js_setLang (self) (toJSString val))
foreign import javascript unsafe "$1[\"lang\"]" js_getLang ::
SpeechSynthesisUtterance -> IO JSString
-- | <https://developer.mozilla.org/en-US/docs/Web/API/SpeechSynthesisUtterance.lang Mozilla SpeechSynthesisUtterance.lang documentation>
getLang ::
(MonadIO m, FromJSString result) =>
SpeechSynthesisUtterance -> m result
getLang self = liftIO (fromJSString <$> (js_getLang (self)))
foreign import javascript unsafe "$1[\"voice\"] = $2;" js_setVoice
::
SpeechSynthesisUtterance -> Nullable SpeechSynthesisVoice -> IO ()
-- | <https://developer.mozilla.org/en-US/docs/Web/API/SpeechSynthesisUtterance.voice Mozilla SpeechSynthesisUtterance.voice documentation>
setVoice ::
(MonadIO m) =>
SpeechSynthesisUtterance -> Maybe SpeechSynthesisVoice -> m ()
setVoice self val
= liftIO (js_setVoice (self) (maybeToNullable val))
foreign import javascript unsafe "$1[\"voice\"]" js_getVoice ::
SpeechSynthesisUtterance -> IO (Nullable SpeechSynthesisVoice)
-- | <https://developer.mozilla.org/en-US/docs/Web/API/SpeechSynthesisUtterance.voice Mozilla SpeechSynthesisUtterance.voice documentation>
getVoice ::
(MonadIO m) =>
SpeechSynthesisUtterance -> m (Maybe SpeechSynthesisVoice)
getVoice self = liftIO (nullableToMaybe <$> (js_getVoice (self)))
foreign import javascript unsafe "$1[\"volume\"] = $2;"
js_setVolume :: SpeechSynthesisUtterance -> Float -> IO ()
-- | <https://developer.mozilla.org/en-US/docs/Web/API/SpeechSynthesisUtterance.volume Mozilla SpeechSynthesisUtterance.volume documentation>
setVolume ::
(MonadIO m) => SpeechSynthesisUtterance -> Float -> m ()
setVolume self val = liftIO (js_setVolume (self) val)
foreign import javascript unsafe "$1[\"volume\"]" js_getVolume ::
SpeechSynthesisUtterance -> IO Float
-- | <https://developer.mozilla.org/en-US/docs/Web/API/SpeechSynthesisUtterance.volume Mozilla SpeechSynthesisUtterance.volume documentation>
getVolume :: (MonadIO m) => SpeechSynthesisUtterance -> m Float
getVolume self = liftIO (js_getVolume (self))
foreign import javascript unsafe "$1[\"rate\"] = $2;" js_setRate ::
SpeechSynthesisUtterance -> Float -> IO ()
-- | <https://developer.mozilla.org/en-US/docs/Web/API/SpeechSynthesisUtterance.rate Mozilla SpeechSynthesisUtterance.rate documentation>
setRate :: (MonadIO m) => SpeechSynthesisUtterance -> Float -> m ()
setRate self val = liftIO (js_setRate (self) val)
foreign import javascript unsafe "$1[\"rate\"]" js_getRate ::
SpeechSynthesisUtterance -> IO Float
-- | <https://developer.mozilla.org/en-US/docs/Web/API/SpeechSynthesisUtterance.rate Mozilla SpeechSynthesisUtterance.rate documentation>
getRate :: (MonadIO m) => SpeechSynthesisUtterance -> m Float
getRate self = liftIO (js_getRate (self))
foreign import javascript unsafe "$1[\"pitch\"] = $2;" js_setPitch
:: SpeechSynthesisUtterance -> Float -> IO ()
-- | <https://developer.mozilla.org/en-US/docs/Web/API/SpeechSynthesisUtterance.pitch Mozilla SpeechSynthesisUtterance.pitch documentation>
setPitch ::
(MonadIO m) => SpeechSynthesisUtterance -> Float -> m ()
setPitch self val = liftIO (js_setPitch (self) val)
foreign import javascript unsafe "$1[\"pitch\"]" js_getPitch ::
SpeechSynthesisUtterance -> IO Float
-- | <https://developer.mozilla.org/en-US/docs/Web/API/SpeechSynthesisUtterance.pitch Mozilla SpeechSynthesisUtterance.pitch documentation>
getPitch :: (MonadIO m) => SpeechSynthesisUtterance -> m Float
getPitch self = liftIO (js_getPitch (self))
-- | <https://developer.mozilla.org/en-US/docs/Web/API/SpeechSynthesisUtterance.onstart Mozilla SpeechSynthesisUtterance.onstart documentation>
start :: EventName SpeechSynthesisUtterance Event
start = unsafeEventName (toJSString "start")
-- | <https://developer.mozilla.org/en-US/docs/Web/API/SpeechSynthesisUtterance.onend Mozilla SpeechSynthesisUtterance.onend documentation>
end :: EventName SpeechSynthesisUtterance Event
end = unsafeEventName (toJSString "end")
-- | <https://developer.mozilla.org/en-US/docs/Web/API/SpeechSynthesisUtterance.onerror Mozilla SpeechSynthesisUtterance.onerror documentation>
error :: EventName SpeechSynthesisUtterance UIEvent
error = unsafeEventName (toJSString "error")
-- | <https://developer.mozilla.org/en-US/docs/Web/API/SpeechSynthesisUtterance.onpause Mozilla SpeechSynthesisUtterance.onpause documentation>
pause :: EventName SpeechSynthesisUtterance Event
pause = unsafeEventName (toJSString "pause")
-- | <https://developer.mozilla.org/en-US/docs/Web/API/SpeechSynthesisUtterance.onresume Mozilla SpeechSynthesisUtterance.onresume documentation>
resume :: EventName SpeechSynthesisUtterance Event
resume = unsafeEventName (toJSString "resume")
-- | <https://developer.mozilla.org/en-US/docs/Web/API/SpeechSynthesisUtterance.onmark Mozilla SpeechSynthesisUtterance.onmark documentation>
mark :: EventName SpeechSynthesisUtterance Event
mark = unsafeEventName (toJSString "mark")
-- | <https://developer.mozilla.org/en-US/docs/Web/API/SpeechSynthesisUtterance.onboundary Mozilla SpeechSynthesisUtterance.onboundary documentation>
boundary :: EventName SpeechSynthesisUtterance Event
boundary = unsafeEventName (toJSString "boundary")
|
manyoo/ghcjs-dom
|
ghcjs-dom-jsffi/src/GHCJS/DOM/JSFFI/Generated/SpeechSynthesisUtterance.hs
|
mit
| 8,520 | 90 | 10 | 1,140 | 1,572 | 868 | 704 | 108 | 1 |
-- | A few simple linear algebra functions, basically just algorithms
-- needed for this project, and so not terribly general. The emphasis is
-- on taming division.
{-# LANGUAGE ParallelListComp #-}
module Math.Linear (
Matrix, mkMatrix, det, polyDet, subres, charPoly, orthoBasis, solve
) where
import Prelude ()
import Math.Base
import Data.Array
import Data.Ratio
import Math.Poly
-- Need instances.
import Math.Poly.Extra ()
-- | This library uses the Haskell '98 @Array@ type. Another could
-- be substituted, but for the algorithms here the immutable array
-- operations are not noticeably slower or more memory-inefficient,
-- as the arithmetical operations dwarf any differences.
type Matrix a = Array (Int, Int) a
-- | Division-free algorithm for computing the characteristic polynomial
-- in O(n^4) time, based on Rote (2001). It can be used to find the
-- determinant in the absence of a @Divisible@ instance.
charPoly :: (Eq a, Num a) => Matrix a -> Poly a
charPoly mx = fromCoeffs $ fst $ layer n where
n = (fst $ snd $ bounds mx) + 1 :: Int
shel pn ar = (ar!(n,n) : pn, ar)
layer 0 = shel [] $ mkMatrix (n+1) [ if r==c then 1 else 0 | r <- [0..n], c <- [0..n] ]
layer l = let (pn, pl) = layer (l-1) in shel pn $ array ((0,0), (n,n)) $
[ ((r, c), sum [ mx!(x, r)*pl!(x, c) | x <- [c..n-1] ])
| r <- [1..n-1], c <- [0..r-1] ] ++
[ ((c1, c1), -tot) | c1 <- [0..n]
| tot <- scanl (+) 0 [ sum [ mx!(r,c)*pl!(r,c) | r <- [c..n-1] ] | c <- [0..n-1] ] ]
-- | Compute determinant using only exact divisions.
det :: (Eq a, Divisible a) => Matrix a -> a
det mx0 = if sz<0 then 1 else loop sz 1 mx0 where
sz = fst $ snd $ bounds mx0
loop n lpv mx = case [ r | r <- [0..n], mx!(r, n) /= 0 ] of
[] -> 0
k:_ -> let pv = mx!(k, n) in
if n==0 then pv
else loop (n-1) pv $ mkMatrix n
[ lpv `divout_` (mx!(r, c)*pv-mx!(k, c)*v) |
r <- [0..n], r/= k, let v = mx!(r, n), c <- [0..n-1] ]
-- | Compute the polynomial determinant, using only exact divisions.
-- This uses the interpretation that the matrix is a list of
-- polynomials.
polyDet :: (Eq a, Divisible a) => [Poly a] -> Poly a
polyDet ps = det $ mkMatrix (m+1) $ concat rows where
m = length ps - 1; n = maximum (map degree ps)
rows = [ replicate (m - length t) 0 ++ map unit t ++ [p]
| p <- ps, let t = reverse (drop (n-m+1) (coeffs p)) ]
-- | Compute the kth subresultant of two polynomials using the
-- polynomial determinant.
subres :: (Eq a, Divisible a) => Int -> Poly a -> Poly a -> Poly a
subres k p q = polyDet $ up p q ++ up q p where
up a b = let d = degree b in [ nud i a | i <- [0..d-k-1] ]
nud i r = fromCoeffs (replicate i 0 ++ coeffs r)
-- | Simple function to solve a linear system with `Integral` elements,
-- by converting to a Ratio type.
solve :: (Integral a) => Matrix a -> [a] -> [a]
solve mx0 vec = loop sz vmx where
sz = fst $ snd $ bounds mx0
vmx = listArray ((0,-1), (sz,sz))
[ fromIntegral $ if -1==c then vec!!r else mx0!(r, c) | r <- [0..sz], c <- [-1..sz] ]
loop n mx = if n==0 then backsolve nmx else loop (n-1) nmx where
k = head [ r | r <- [0..n], mx!(r,n) /= 0 ]; pv = mx!(k, n)
nmx = dorows n $ mx // do c <- [-1..n]; [ ((k, c), mx!(n, c)), ((n, c), mx!(k, c)/pv) ]
dorows n mx =
mx // [ ((r, c), mx!(r, c)-mx!(n, c)*v) | r <- [0..n-1], let v = mx!(r, n), c <- [-1..n-1] ]
backsolve nmx = map numerator x where
x = [ nmx!(r, -1) - sum [ nmx!(r, i)*x!!i | i <- [0..r-1] ] | r <- [0..sz] ]
-- | Compute a basis for the subspace orthogonal to a vector. The first component
-- is assumed to be nonzero; it would be simple to remove this restriction, but I do not need to.
orthoBasis :: Num a => [a] -> [[a]]
orthoBasis (vh:vt) = zipWith vone [1..] vt where
vone k e = -e : [ if k==i then vh else 0 | i <- [1..length vt] ]
-- n = length vt
-- | Build a square matrix of the given size @n@ from a list of length @n^2@.
-- It will be @(0,0)@-based.
mkMatrix :: Int -> [a] -> Matrix a
mkMatrix n = listArray ((0, 0), (n-1, n-1))
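-- Illustrative note (added, not part of the original module): 'mkMatrix'
-- fills the array in index order, so @mkMatrix 2 [a, b, c, d]@ yields the
-- 2x2 matrix with (0,0)=a, (0,1)=b, (1,0)=c and (1,1)=d.  Such a matrix can
-- then be passed to 'charPoly', or to 'det' and 'solve' given suitable
-- @Divisible@ or @Integral@ element types (an assumption about instances
-- defined elsewhere in this project).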
|
galenhuntington/etr
|
Math/Linear.hs
|
mit
| 4,079 | 1 | 23 | 1,010 | 1,900 | 1,037 | 863 | 56 | 4 |
module GHCJS.DOM.CSSKeyframesRule (
) where
|
manyoo/ghcjs-dom
|
ghcjs-dom-webkit/src/GHCJS/DOM/CSSKeyframesRule.hs
|
mit
| 46 | 0 | 3 | 7 | 10 | 7 | 3 | 1 | 0 |
{-# LANGUAGE FlexibleInstances, MultiParamTypeClasses, FlexibleContexts #-}
{- |
The HList library
(C) 2004, Oleg Kiselyov, Ralf Laemmel, Keean Schupke
Generic implementations of type equality and disequality
-}
module Data.HList.TypeEqBoolGeneric where
import Data.HList.FakePrelude
instance TypeEqTrue x x
instance Fail () => TypeEqFalse x x
instance TypeEqFalse x y
|
bjornbm/HList-classic
|
Data/HList/TypeEqBoolGeneric.hs
|
mit
| 406 | 0 | 7 | 84 | 51 | 27 | 24 | 6 | 0 |
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE RecordWildCards #-}
module Yage.Core.GLFW.Base where
import Yage.Prelude
import Control.Monad.Exception
import Yage.Core.Application.Exception
import qualified Graphics.UI.GLFW as GLFW
import Yage.Core.Application.Types
import Data.Version
import System.IO.Unsafe (unsafePerformIO)
--------------------------------------------------------------------------------
{-# INLINE glfw #-}
glfw :: (Throws InternalException l) => IO a -> Application l a
glfw = wrapException IOException . io
{-# INLINE initGlfw #-}
initGlfw :: (Throws InternalException l) => Application l ()
initGlfw = do
inited <- glfw $ GLFW.init
unless inited $ throw $ IOException . toException $ InitException
{-# INLINE terminateGlfw #-}
terminateGlfw :: (Throws InternalException l) => Application l ()
terminateGlfw = glfw $ GLFW.terminate
glfwVersion :: Version
glfwVersion = toVersion (unsafePerformIO GLFW.getVersion) where
toVersion GLFW.Version{..} = Version [versionMajor, versionMinor, versionRevision] []
|
MaxDaten/yage-core
|
src/Yage/Core/GLFW/Base.hs
|
mit
| 1,135 | 0 | 11 | 223 | 256 | 144 | 112 | 24 | 1 |
module Problem3 ( getElemAtIndex ) where
getElemAtIndex :: [a] -> Int -> a
getElemAtIndex list index = last (take index list)
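-- A small usage sketch (added for illustration; 'exampleElem' is not part of
-- the original file).  The index is 1-based, as in the 99-problems statement,
-- so an index of 0 or one past the end of the list makes 'last' fail.
exampleElem :: Char
exampleElem = getElemAtIndex "abcde" 3  -- 'c'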
|
chanind/haskell-99-problems
|
Problem3.hs
|
mit
| 126 | 0 | 7 | 21 | 46 | 25 | 21 | 3 | 1 |
-- Copyright 2015 Mitchell Kember. Subject to the MIT License.
-- Project Euler: Problem 23
-- Non-abundant sums
module Problem23 where
import Common (memoize, properDivisors)
limit :: Int
limit = 28123
isAbundant :: Int -> Bool
isAbundant = memoize (1, limit) go where go n = sum (properDivisors n) > n
isAbundantSum :: Int -> Bool
isAbundantSum n = any works [1..half]
where
half = n `div` 2
works m = isAbundant m && isAbundant (n - m)
solve :: Int
solve = sum . filter (not . isAbundantSum) $ [1..limit]
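-- Sanity-check notes (added; they assume 'properDivisors' from Common returns
-- the proper divisors of its argument): 12 is the smallest abundant number
-- (1+2+3+4+6 = 16 > 12) and 24 = 12+12 is the smallest sum of two abundant
-- numbers, so both 'isAbundant 12' and 'isAbundantSum 24' should be True.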
|
mk12/euler
|
haskell/Problem23.hs
|
mit
| 524 | 0 | 10 | 107 | 174 | 96 | 78 | 12 | 1 |
{-# LANGUAGE CPP #-}
import Control.Monad.State.Lazy (execStateT)
import Data.List (intersperse)
import Lens.Micro.Platform ((.=))
import Data.Maybe (fromMaybe)
import Data.Monoid ((<>))
import Options.Applicative
import Yi hiding (option)
import Yi.Config.Simple.Types
import Yi.Buffer.Misc (lineMoveRel)
import Yi.Config.Default.HaskellMode (configureHaskellMode)
import Yi.Config.Default.JavaScriptMode (configureJavaScriptMode)
import Yi.Config.Default.MiscModes (configureMiscModes)
#ifdef VIM
import Yi.Config.Default.Vim (configureVim)
#endif
#ifdef VTY
import Yi.Config.Default.Vty (configureVty)
#endif
#ifdef EMACS
import Yi.Config.Default.Emacs (configureEmacs)
#endif
#ifdef PANGO
import Yi.Config.Default.Pango (configurePango)
#endif
frontends :: [(String, ConfigM ())]
frontends = [
#ifdef PANGO
("pango", configurePango),
#endif
#ifdef VTY
("vty", configureVty),
#endif
("", return ())
]
keymaps :: [(String, ConfigM ())]
keymaps = [
#ifdef EMACS
("emacs", configureEmacs),
#endif
#ifdef VIM
("vim", configureVim),
#endif
("", return ())
]
data CommandLineOptions = CommandLineOptions {
frontend :: Maybe String
, keymap :: Maybe String
, startOnLine :: Maybe Int
, files :: [String]
}
commandLineOptions :: Parser (Maybe CommandLineOptions)
commandLineOptions = flag' Nothing
( long "version"
<> short 'v'
<> help "Show the version number")
<|> (Just <$> (CommandLineOptions
<$> optional (strOption
( long "frontend"
<> short 'f'
<> metavar "FRONTEND"
<> help "The frontend to use (default is pango)"))
<*> optional (strOption
( long "keymap"
<> short 'k'
<> metavar "KEYMAP"
<> help "The keymap to use (default is emacs)"))
<*> optional (option auto
( long "line"
<> short 'l'
<> metavar "NUM"
<> help "Open the (last) file on line NUM"))
<*> many (argument str (metavar "FILES..."))
))
main :: IO ()
main = do
mayClo <- execParser opts
case mayClo of
Nothing -> putStrLn "Yi 0.16.0"
Just clo -> do
let openFileActions = intersperse (EditorA newTabE) (map (YiA . openNewFile) (files clo))
moveLineAction = YiA $ withCurrentBuffer (lineMoveRel (fromMaybe 0 (startOnLine clo)))
cfg <- execStateT
(runConfigM (myConfig (frontend clo) (keymap clo) >> (startActionsA .= (openFileActions ++ [moveLineAction]))))
defaultConfig
startEditor cfg Nothing
where
opts = info (helper <*> commandLineOptions)
( fullDesc
<> progDesc "Edit files"
<> header "Yi - a flexible and extensible text editor written in haskell")
myConfig :: Maybe String -> Maybe String -> ConfigM ()
myConfig f k = do
  -- If f is Nothing, fall back to the first entry in the frontends list; if f is
  -- given, look it up in the frontends list, doing nothing when it is not found.
case f of
Nothing -> snd (head frontends)
Just f' -> fromMaybe (return ()) (lookup f' frontends)
  -- Same as above, but with k and the keymaps list.
case k of
Nothing -> snd (head keymaps)
Just k' -> fromMaybe (return ()) (lookup k' keymaps)
configureHaskellMode
configureJavaScriptMode
configureMiscModes
|
noughtmare/yi
|
yi/Main.hs
|
gpl-2.0
| 3,351 | 0 | 22 | 811 | 925 | 499 | 426 | 73 | 3 |
{-# LANGUAGE CPP, TypeFamilies, DeriveDataTypeable #-}
module PGIP.GraphQL.Result.StringReference where
import Data.Data
newtype StringReference = StringReference { id :: String
} deriving (Show, Typeable, Data)
|
spechub/Hets
|
PGIP/GraphQL/Result/StringReference.hs
|
gpl-2.0
| 257 | 0 | 6 | 69 | 42 | 27 | 15 | 5 | 0 |
solveRPN :: String -> Float
solveRPN = head . foldl foldingFunction [] . words
where
foldingFunction (x:y:ys) "*" = (y * x):ys
foldingFunction (x:y:ys) "+" = (y + x):ys
foldingFunction (x:y:ys) "-" = (y - x):ys
foldingFunction (x:y:ys) "/" = (y / x):ys
foldingFunction (x:y:ys) "^" = (y ** x):ys
foldingFunction (x:xs) "ln" = log x:xs
foldingFunction xs "sum" = [sum xs]
foldingFunction xs numberString = read numberString:xs
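-- A usage sketch (added; 'exampleRPN' is not in the original file): operands
-- are pushed onto the stack and each operator pops two, so the classic
-- "10 4 3 + 2 * -" evaluates to 10 - ((4 + 3) * 2) = -4.
exampleRPN :: Float
exampleRPN = solveRPN "10 4 3 + 2 * -"  -- -4.0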
|
lamontu/learning_haskell
|
reverse_polish_notation.hs
|
gpl-3.0
| 503 | 0 | 10 | 145 | 249 | 129 | 120 | 10 | 8 |
import System.Random
randoms' :: (RandomGen g, Random a) => g -> [a]
randoms' gen = let (value, newGen) = random gen in value:randoms' newGen
finiteRandoms :: (RandomGen g, Random a, Num n, Eq n) => n -> g -> ([a], g)
finiteRandoms 0 gen = ([], gen)
finiteRandoms n gen = let (value, newGen) = random gen
(restOfList, finalGen) = finiteRandoms (n-1) newGen
in (value:restOfList, finalGen)
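-- A usage sketch (added; 'fiveCoins' is not in the original file): a fixed
-- seed via 'mkStdGen' makes the drawn values reproducible across runs.
fiveCoins :: [Bool]
fiveCoins = fst (finiteRandoms (5 :: Int) (mkStdGen 42))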
|
medik/lang-hack
|
Haskell/LearnYouAHaskell/c09/random.hs
|
gpl-3.0
| 455 | 0 | 11 | 132 | 201 | 106 | 95 | 8 | 1 |
-- grid is a game written in Haskell
-- Copyright (C) 2018 [email protected]
--
-- This file is part of grid.
--
-- grid is free software: you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation, either version 3 of the License, or
-- (at your option) any later version.
--
-- grid is distributed in the hope that it will be useful,
-- but WITHOUT ANY WARRANTY; without even the implied warranty of
-- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
--
-- You should have received a copy of the GNU General Public License
-- along with grid. If not, see <http://www.gnu.org/licenses/>.
--
module Game.Grid.Helpers
(
gridModifyTick,
gridModifyCamera,
gridClearEvents,
gridIsEmpty,
gridCameraCmdsIsComplete,
gridCameraCmdsCount,
gridPushCameraCmds,
gridPushCameraCmd,
gridSetCameraCmds,
gridClearCameraCmds,
turnFromDir,
dirNodeNode,
turnMultNode,
module Game.Grid.Helpers.Path,
module Game.Grid.Helpers.Segment,
module Game.Grid.Helpers.Camera,
) where
import MyPrelude
import Game
import Game.Grid.GridWorld
import Game.Grid.Helpers.Path
import Game.Grid.Helpers.Segment
import Game.Grid.Helpers.Camera
--------------------------------------------------------------------------------
--
gridClearEvents :: GridWorld -> GridWorld
gridClearEvents grid =
grid
{
gridPath = pathClearEvents (gridPath grid),
gridEvents = []
}
gridIsEmpty :: GridWorld -> Bool
gridIsEmpty grid =
pathArraySize (gridPath grid) == 0
--------------------------------------------------------------------------------
--
gridModifyCamera :: GridWorld -> (Camera -> Camera) -> GridWorld
gridModifyCamera grid f =
grid { gridCamera = f (gridCamera grid) }
gridModifyTick :: GridWorld -> (Tick -> Tick) -> GridWorld
gridModifyTick grid f =
grid { gridTick = f (gridTick grid) }
--------------------------------------------------------------------------------
-- CameraCommand
-- fixme: also GridEvent?
gridCameraCmdsIsComplete :: GridWorld -> Bool
gridCameraCmdsIsComplete grid =
(null $ gridCameraCommands grid) && (gridCameraCommandTick grid <= worldTick grid)
gridCameraCmdsCount :: GridWorld -> UInt
gridCameraCmdsCount grid =
gridCameraCommandCount grid
gridPushCameraCmd :: GridWorld -> CameraCommand -> GridWorld
gridPushCameraCmd grid cmd =
gridPushCameraCmds grid [cmd]
gridPushCameraCmds :: GridWorld -> [CameraCommand] -> GridWorld
gridPushCameraCmds grid cmds =
gridSetCameraCmds grid $ gridCameraCommands grid ++ cmds
gridSetCameraCmds :: GridWorld -> [CameraCommand] -> GridWorld
gridSetCameraCmds grid cmds =
grid
{
gridCameraCommands = cmds,
gridCameraCommandCount = 0
}
gridClearCameraCmds :: GridWorld -> GridWorld
gridClearCameraCmds grid =
gridSetCameraCmds grid []
--------------------------------------------------------------------------------
--
dirNodeNode :: Node -> Node -> Dir
dirNodeNode node node' =
case nodeDiff node node' of
Node x y z -> Dir (fI $ signum x) (fI $ signum y) (fI $ signum z)
turnMultNode :: Turn -> Node -> Node
turnMultNode (Turn a0 a1 a2 b0 b1 b2 c0 c1 c2) (Node n0 n1 n2) =
Node (n0 * fI a0 + n1 * fI b0 + n2 * fI c0)
(n0 * fI a1 + n1 * fI b1 + n2 * fI c1)
(n0 * fI a2 + n1 * fI b2 + n2 * fI c2)
turnFromDir :: Turn -> Dir -> Turn
turnFromDir turn dir =
let diff = case helper (turnInverse turn) dir of
Dir 1 0 0 -> straightTurn
Dir 0 1 0 -> upTurn
Dir 0 0 1 -> rightTurn
Dir (-1) 0 0 -> backTurn
Dir 0 (-1) 0 -> downTurn
Dir 0 0 (-1) -> leftTurn
Dir 0 0 0 -> straightTurn
_ -> error "turnFromDir: no such direction"
in diff `mappend` turn
where
helper (Turn x0 x1 x2
y0 y1 y2
z0 z1 z2)
(Dir d0 d1 d2) =
Dir (x0 * d0 + y0 * d1 + z0 * d2)
(x1 * d0 + y1 * d1 + z1 * d2)
(x2 * d0 + y2 * d1 + z2 * d2)
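-- Illustrative note (added, assuming 'nodeDiff' is the componentwise
-- difference of two nodes): 'dirNodeNode' of two equal nodes is Dir 0 0 0,
-- and 'turnMultNode' with the identity turn (Turn 1 0 0  0 1 0  0 0 1)
-- leaves any node unchanged, since it is just a matrix-vector product.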
|
karamellpelle/grid
|
source/Game/Grid/Helpers.hs
|
gpl-3.0
| 4,302 | 0 | 14 | 1,098 | 1,028 | 552 | 476 | 86 | 8 |
{-# OPTIONS_GHC -fno-warn-unused-binds -fno-warn-name-shadowing #-}
module QFeldspar.QDSL
(Qt,FO,Type,Num,Eq,Ord,C,String,Rep,
Word32,Float,
(*),(+),(-),(==),(<),div,(/),mod,i2f,round,
sqrt,sin,cos,atan2,cis,ilog2,
Bool(..),
-- ifThenElse
Complex(..),
realPart,imagPart,
-- abstraction and application
-- ((,)),
fst,snd,
Maybe(..),
maybe,
Array,
Ary,
mkArr,lnArr,ixArr,
Vec(..),
(.&.),(.|.),xor,shfRgt,shfLft,complement,
return,(>>=),(.),
hashTable,
while, save,
qdsl,evaluate,translate,translateF,compile,compileF,
dbg1,dbg15,dbg2,dbg3,dbg4,dbg45,dbg5,dbg6,
dbgw1,dbgw15,dbgw2,dbgw3,dbgw4,dbgw45,dbgw5,dbgw6,
testQt,testNrmQt,testNrmSmpQt,testDpF,toDp,wrp,
ghoF{-,nghoF-}{-,ngho-},trmEql,CDSL.makeIP,CDSL.makeIPAt,
compileFunction,
translateWith,translateFWith) where
import QFeldspar.MyPrelude
import qualified QFeldspar.MyPrelude as MP
import QFeldspar.Expression.Utils.Show.GADTFirstOrder()
import QFeldspar.Expression.Utils.Show.GADTHigherOrder()
import QFeldspar.Expression.Utils.Show.MiniFeldspar()
import QFeldspar.CDSL (Dp)
import qualified QFeldspar.CDSL as CDSL
import qualified QFeldspar.Singleton as S
import QFeldspar.Expression.Utils.TemplateHaskell
(trmEql,stripNameSpace)
import QFeldspar.Conversion
import QFeldspar.Expression.Conversion ()
import QFeldspar.Expression.Conversions.Evaluation.MiniFeldspar ()
import QFeldspar.Expression.Conversions.Lifting(cnvFOHO)
import qualified QFeldspar.Expression.ADTUntypedNamed as AUN
import qualified QFeldspar.Expression.ADTUntypedDebruijn as AUD
import qualified QFeldspar.Expression.GADTTyped as GTD
import qualified QFeldspar.Expression.GADTFirstOrder as GFO
import qualified QFeldspar.Expression.GADTHigherOrder as GHO
import qualified QFeldspar.Expression.MiniFeldspar as MWS
import qualified Language.Haskell.TH.Syntax as TH
import qualified QFeldspar.Expression.Utils.ADTUntypedNamed as AUN
import qualified QFeldspar.Type.ADT as TA
import qualified QFeldspar.Type.GADT as TG
import qualified QFeldspar.Environment.Scoped as ES
import qualified QFeldspar.Environment.Typed as ET
import qualified QFeldspar.Nat.ADT as NA
import qualified QFeldspar.Normalisation as GFO
import QFeldspar.Prelude.Haskell hiding (save,while)
import qualified QFeldspar.Prelude.Haskell as PH
import QFeldspar.Prelude.Environment (etTG)
import qualified QFeldspar.Prelude.HaskellEnvironment as PHE
import QFeldspar.Expression.Conversions.EtaPrims(etaPrms)
type Data a = TH.Q (TH.TExp a)
type Qt a = Data a
type C = String
type Type a = S.HasSin TG.Typ a
type Rep a = (Type a , FO a)
class FO a where {}
instance FO MP.Bool where {}
instance FO MP.Word32 where {}
instance FO MP.Float where {}
instance (FO a , FO b) => FO (a , b) where {}
instance FO a => FO (MP.Ary a) where {}
instance FO (MP.Complex MP.Float) where {}
while :: FO a => (a -> MP.Bool) -> (a -> a) -> a -> a
while = PH.while
save :: FO a => a -> a
save = PH.save
dn :: TH.Name
dn = (TH.Name (TH.OccName "dummyy") TH.NameS)
dummy :: Data a
dummy = MP.return (TH.TExp (TH.VarE dn))
wrp :: Type a => Data a -> AUN.Exp TH.Name
wrp = expand
['(>>=) := [|| \m -> \k ->
case m of
{Nothing -> Nothing ; Just x -> k x} ||],
'maybe := [|| \x -> \g -> \m ->
case m of
{Nothing -> x ; Just y -> g y} ||],
'return := [|| \x -> Just x ||],
'(.) := [|| \f -> \g -> \x -> f (g x) ||],
'realPart := [|| \x -> PHE.realPart x ||],
'imagPart := [|| \x -> PHE.imagPart x ||],
'div := [|| \x -> \y -> PHE.divWrd x y ||],
'(/) := [|| \x -> \y -> PHE.divFlt x y ||],
'(.&.) := [|| \x -> \y -> PHE.andWrd x y ||],
'(.|.) := [|| \x -> \y -> PHE.orWrd x y ||],
'xor := [|| \x -> \y -> PHE.xorWrd x y ||],
'shfRgt := [|| \x -> \y -> PHE.shrWrd x y ||],
'shfLft := [|| \x -> \y -> PHE.shlWrd x y ||],
'complement := [|| \x -> PHE.cmpWrd x ||],
'i2f := [|| \x -> PHE.i2f x ||],
'cis := [|| \x -> PHE.cis x ||],
'ilog2 := [|| \x -> PHE.ilog2 x ||],
'sqrt := [|| \x -> PHE.sqrtFlt x ||],
'hashTable := [|| PHE.hshTbl ||]]
. wrpTyp
wrpTyp :: forall a. Type a => Data a -> Data a
wrpTyp ee = do e <- ee
return (TH.TExp (TH.SigE (TH.unType e)
(frmRgtZro (cnv (S.sin :: TG.Typ a , ())))))
translate :: forall a.
(Type a , FO a) =>
Qt a -> Dp a
translate f = frmRgtZro (cnv (wrp f , etTG , PHE.esTH))
translateF :: forall a b.
(Type a , Type b) =>
Qt (a -> b) -> Dp a -> Dp b
translateF f = let e :: GFO.Exp PHE.Prelude '[] (a -> b) =
frmRgtZro (cnv (wrp f , etTG , PHE.esTH))
e' :: GHO.Exp PHE.Prelude (a -> b) =
cnvFOHO (GFO.nrm e)
in frmRgtZro (cnv (e' , ()))
evaluate :: forall a.
(Type a , FO a) =>
Qt a -> a
evaluate = CDSL.evaluate . translate
compileFunction :: (FO a , Type a , Type b) => Qt (a -> b) -> C
compileFunction = CDSL.compileF' False True True . translateF
compile :: forall a.
(Type a, FO a) =>
Bool -> Bool -> Qt a -> C
compile b1 b2 = CDSL.compile b1 b2 . translate
compileF :: forall a b.
(Type a , Type b , FO a) =>
Bool -> Bool -> Qt (a -> b) -> C
compileF b1 b2 = CDSL.compileF b1 b2 . translateF
dbg1 :: Type a => Qt a -> AUN.Exp TH.Name
dbg1 e = frmRgtZro (cnv (e,etTG , PHE.esTH))
dbg15 :: Type a => Qt a -> AUN.Exp TH.Name
dbg15 e = let e' = frmRgtZro (cnv (e,etTG , PHE.esTH))
in frmRgtZro (etaPrms etTG PHE.esTH e')
dbg2 :: Type a => Qt a -> AUD.Exp
dbg2 e = frmRgtZro (cnv(e,etTG , PHE.esTH))
dbg3 :: Type a => Qt a -> GTD.Exp (S.Len PHE.Prelude) 'NA.Zro TA.Typ
dbg3 e = frmRgtZro (cnv(e,etTG , PHE.esTH))
dbg4 :: Type a => Qt a -> GFO.Exp PHE.Prelude '[] a
dbg4 e = frmRgtZro (cnv(e,etTG , PHE.esTH))
dbg45 :: Type a => Qt a -> GFO.Exp PHE.Prelude '[] a
dbg45 e = let e' = frmRgtZro (cnv(e,etTG , PHE.esTH))
in GFO.nrm e'
dbg5 :: Type a => Qt a -> GHO.Exp PHE.Prelude a
dbg5 e = frmRgtZro (cnv(e,etTG , PHE.esTH))
dbg6 :: Type a => Qt a -> Dp a
dbg6 e = frmRgtZro (cnv(e,etTG , PHE.esTH))
dbgw1 :: Type a => Qt a -> AUN.Exp TH.Name
dbgw1 e = frmRgtZro (cnv (wrp e,etTG , PHE.esTH))
dbgw15 :: Type a => Qt a -> AUN.Exp TH.Name
dbgw15 e = let e' = frmRgtZro (cnv (wrp e,etTG , PHE.esTH))
in frmRgtZro (etaPrms etTG PHE.esTH e')
dbgw2 :: Type a => Qt a -> AUD.Exp
dbgw2 e = frmRgtZro (cnv(wrp e,etTG , PHE.esTH))
dbgw3 :: Type a => Qt a -> GTD.Exp (S.Len PHE.Prelude) 'NA.Zro TA.Typ
dbgw3 e = frmRgtZro (cnv(wrp e,etTG , PHE.esTH))
dbgw4 :: Type a => Qt a -> GFO.Exp PHE.Prelude '[] a
dbgw4 e = frmRgtZro (cnv(wrp e,etTG , PHE.esTH))
dbgw45 :: Type a => Qt a -> GFO.Exp PHE.Prelude '[] a
dbgw45 e = let e' = frmRgtZro (cnv(wrp e,etTG , PHE.esTH))
in GFO.nrm e'
dbgw5 :: Type a => Qt a -> GHO.Exp PHE.Prelude a
dbgw5 e = frmRgtZro (cnv(wrp e,etTG , PHE.esTH))
dbgw6 :: Type a => Qt a -> Dp a
dbgw6 e = frmRgtZro (cnv(wrp e,etTG , PHE.esTH))
ghoF :: (Type a , Type b) =>
Qt (a -> b) -> GHO.Exp PHE.Prelude (a -> b)
ghoF e = frmRgtZro (cnv(wrp e,etTG , PHE.esTH))
-- nghoF :: (Type a , Type b) => Qt (a -> b) -> GHO.Exp Prelude (a -> b)
-- nghoF e = nrm (ghoF e)
-- ngho :: Type a => Qt a -> GHO.Exp Prelude a
-- ngho e = nrm (gho e)
qdsl :: (FO a , Type a , Type b) => Qt (a -> b) -> C
qdsl = compileF True True
-- For paper
testQt :: Qt a -> Qt a -> Bool
testQt = trmEql
toDp :: (Type a , Type b) => Qt (a -> b) -> Dp a -> Dp b
toDp = translateF
testNrmQt :: (Type a , Type b) => Qt (a -> b) -> Qt (a -> b) -> Bool
testNrmQt x y = testDpF (toDp x) (toDp y)
testNrmSmpQt :: (Type a , Type b) => Qt (a -> b) -> Qt (a -> b) -> Bool
testNrmSmpQt x y = testDpF (CDSL.simplifyF (toDp x)) (CDSL.simplifyF (toDp y))
testDpF :: (Type a , Type b) => (Dp a -> Dp b) -> (Dp a -> Dp b) -> Bool
testDpF = CDSL.trmEqlF
toAUN :: Qt a -> MP.ErrM (AUN.Exp TH.Name)
toAUN ee = MP.evalStateT
(cnv (ee,etTG , PHE.esTH)) 0
data Sbs where
(:=) :: TH.Name -> Qt a -> Sbs
expand :: [Sbs] -> Qt a -> AUN.Exp TH.Name
expand sbs ee = MP.frmRgt
(do ee' <- toAUN ee
MP.foldM
(\ e (n := es) -> do es' <- toAUN es
MP.return (AUN.sbs (stripNameSpace n) es' e))
ee' sbs)
translateWith :: (Type a , FO a) => ET.Env TG.Typ s -> ES.Env (S.Len s) TH.Name -> TH.Q (TH.TExp a) -> MWS.Exp s a
translateWith et es e = frmRgtZro (cnv (e , et , es))
translateFWith :: forall a b s. (Type a , FO a , Type b , FO b) =>
ET.Env TG.Typ s -> ES.Env (S.Len s) TH.Name ->
TH.Q (TH.TExp (a -> b)) -> (MWS.Exp s a -> MWS.Exp s b)
translateFWith et es f = let e :: GFO.Exp s '[] (a -> b) = frmRgtZro (cnv (f , et , es))
e' :: GHO.Exp s (a -> b) = cnvFOHO (GFO.nrm e)
in frmRgtZro (cnv (e' , ()))
|
shayan-najd/QFeldspar
|
QFeldspar/QDSL.hs
|
gpl-3.0
| 9,465 | 56 | 59 | 2,674 | 4,194 | 2,303 | 1,891 | -1 | -1 |
module Handler.PlaybackGrant where
import Import
import Model.ConsumedGrant (consumePlaybackGrant)
getPlaybackGrantR :: Text -> PlaybackGrantId -> Handler Value
getPlaybackGrantR ruid pgid = do
now <- liftIO getCurrentTime
mr <- getMessageRender
grant' <- fromMaybeM
(sendResponseStatus status404 $ object ["message" .= mr MsgPlaybackGrantNotFound]) $
runDB $ selectFirst
[ PlaybackGrantId ==. pgid
, PlaybackGrantRecordingUID ==. ruid
, PlaybackGrantExpires >. now ]
[ Asc PlaybackGrantExpires ]
grant <- consumePlaybackGrant grant'
sendResponseStatus status200 $ object ["playbackGrant" .= grant]
|
rumuki/rumuki-server
|
src/Handler/PlaybackGrant.hs
|
gpl-3.0
| 719 | 0 | 16 | 191 | 165 | 81 | 84 | 16 | 1 |
-- instance Monoid Ordering where
-- mempty = EQ
-- LT `mappend` _ = LT
-- EQ `mappend` y = y
-- GT `mappend` _ = GT
lengthCompare :: String -> String -> Ordering
lengthCompare x y = (length x `compare` length y) `mappend`
(vowels x `compare` vowels y) `mappend`
(x `compare` y)
where vowels = length . filter (`elem` "aeiou")
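-- Worked examples (added; these bindings are not in the original file): the
-- comparisons are tried left to right and the first non-EQ result wins,
-- thanks to the Monoid instance for Ordering sketched above.
exampleByLength :: Ordering
exampleByLength = lengthCompare "zen" "ants"  -- LT, decided by length alone
exampleByAlpha :: Ordering
exampleByAlpha = lengthCompare "zen" "ant"    -- GT, lengths and vowel counts tie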
|
lamontu/learning_haskell
|
monoid_ordering.hs
|
gpl-3.0
| 388 | 0 | 9 | 122 | 100 | 59 | 41 | 5 | 1 |
{-# LANGUAGE NoMonomorphismRestriction #-}
module Main where
import Diagrams.Prelude
import Diagrams.Backend.Drawille
import Diagrams.Backend.Rasterific
-- Taken from diagrams manual
theSq = square 1 # lwN 0.01
example =
hcat' (with & sep .~ 0.2)
(map (\s -> theSq # scale s) [0.5, 0.8, 1, 1.5, 2])
# scale 20
main = putStrLn $ renderToString (RasterificOptions (Width 250)) example
|
zudov/diagrams-drawille
|
examples/Example.hs
|
gpl-3.0
| 395 | 0 | 12 | 69 | 130 | 72 | 58 | 11 | 1 |
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE IncoherentInstances #-}
{-# LANGUAGE DeriveGeneric #-}
module Test.QuickFuzz.Gen.Code.Js where
import Data.Default
import Test.QuickCheck
import Control.Monad
import Control.Monad.Trans
import Control.Monad.Trans.State
import Data.List
import Language.ECMAScript3.PrettyPrint
import Language.ECMAScript3.Syntax
import Test.QuickFuzz.Derive.Arbitrary
import Test.QuickFuzz.Derive.Fixable
import Test.QuickFuzz.Derive.Show
import Test.QuickFuzz.Gen.FormatInfo
import Test.QuickFuzz.Gen.Base.ByteString
import Test.QuickFuzz.Gen.Base.String
import qualified Data.ByteString.Lazy.Char8 as L8
type Js = JavaScript ()
devArbitrary ''Js
-- devShow ''
jsInfo :: FormatInfo Js NoActions
jsInfo = def
{ encode = L8.pack . show . prettyPrint
, random = arbitrary
, value = show
, ext = "js"
}
|
elopez/QuickFuzz
|
src/Test/QuickFuzz/Gen/Code/Js.hs
|
gpl-3.0
| 895 | 0 | 9 | 123 | 183 | 120 | 63 | 28 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.Analytics.Management.WebPropertyAdWordsLinks.Get
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Returns a web property-AdWords link to which the user has access.
--
-- /See:/ <https://developers.google.com/analytics/ Google Analytics API Reference> for @analytics.management.webPropertyAdWordsLinks.get@.
module Network.Google.Resource.Analytics.Management.WebPropertyAdWordsLinks.Get
(
-- * REST Resource
ManagementWebPropertyAdWordsLinksGetResource
-- * Creating a Request
, managementWebPropertyAdWordsLinksGet
, ManagementWebPropertyAdWordsLinksGet
-- * Request Lenses
, mwpawlgWebPropertyId
, mwpawlgAccountId
, mwpawlgWebPropertyAdWordsLinkId
) where
import Network.Google.Analytics.Types
import Network.Google.Prelude
-- | A resource alias for @analytics.management.webPropertyAdWordsLinks.get@ method which the
-- 'ManagementWebPropertyAdWordsLinksGet' request conforms to.
type ManagementWebPropertyAdWordsLinksGetResource =
"analytics" :>
"v3" :>
"management" :>
"accounts" :>
Capture "accountId" Text :>
"webproperties" :>
Capture "webPropertyId" Text :>
"entityAdWordsLinks" :>
Capture "webPropertyAdWordsLinkId" Text :>
QueryParam "alt" AltJSON :>
Get '[JSON] EntityAdWordsLink
-- | Returns a web property-AdWords link to which the user has access.
--
-- /See:/ 'managementWebPropertyAdWordsLinksGet' smart constructor.
data ManagementWebPropertyAdWordsLinksGet = ManagementWebPropertyAdWordsLinksGet'
{ _mwpawlgWebPropertyId :: !Text
, _mwpawlgAccountId :: !Text
, _mwpawlgWebPropertyAdWordsLinkId :: !Text
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'ManagementWebPropertyAdWordsLinksGet' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'mwpawlgWebPropertyId'
--
-- * 'mwpawlgAccountId'
--
-- * 'mwpawlgWebPropertyAdWordsLinkId'
managementWebPropertyAdWordsLinksGet
:: Text -- ^ 'mwpawlgWebPropertyId'
-> Text -- ^ 'mwpawlgAccountId'
-> Text -- ^ 'mwpawlgWebPropertyAdWordsLinkId'
-> ManagementWebPropertyAdWordsLinksGet
managementWebPropertyAdWordsLinksGet pMwpawlgWebPropertyId_ pMwpawlgAccountId_ pMwpawlgWebPropertyAdWordsLinkId_ =
ManagementWebPropertyAdWordsLinksGet'
{ _mwpawlgWebPropertyId = pMwpawlgWebPropertyId_
, _mwpawlgAccountId = pMwpawlgAccountId_
, _mwpawlgWebPropertyAdWordsLinkId = pMwpawlgWebPropertyAdWordsLinkId_
}
-- | Web property ID to retrieve the AdWords link for.
mwpawlgWebPropertyId :: Lens' ManagementWebPropertyAdWordsLinksGet Text
mwpawlgWebPropertyId
= lens _mwpawlgWebPropertyId
(\ s a -> s{_mwpawlgWebPropertyId = a})
-- | ID of the account which the given web property belongs to.
mwpawlgAccountId :: Lens' ManagementWebPropertyAdWordsLinksGet Text
mwpawlgAccountId
= lens _mwpawlgAccountId
(\ s a -> s{_mwpawlgAccountId = a})
-- | Web property-AdWords link ID.
mwpawlgWebPropertyAdWordsLinkId :: Lens' ManagementWebPropertyAdWordsLinksGet Text
mwpawlgWebPropertyAdWordsLinkId
= lens _mwpawlgWebPropertyAdWordsLinkId
(\ s a -> s{_mwpawlgWebPropertyAdWordsLinkId = a})
instance GoogleRequest
ManagementWebPropertyAdWordsLinksGet where
type Rs ManagementWebPropertyAdWordsLinksGet =
EntityAdWordsLink
type Scopes ManagementWebPropertyAdWordsLinksGet =
'["https://www.googleapis.com/auth/analytics.edit",
"https://www.googleapis.com/auth/analytics.readonly"]
requestClient
ManagementWebPropertyAdWordsLinksGet'{..}
= go _mwpawlgAccountId _mwpawlgWebPropertyId
_mwpawlgWebPropertyAdWordsLinkId
(Just AltJSON)
analyticsService
where go
= buildClient
(Proxy ::
Proxy ManagementWebPropertyAdWordsLinksGetResource)
mempty
|
rueshyna/gogol
|
gogol-analytics/gen/Network/Google/Resource/Analytics/Management/WebPropertyAdWordsLinks/Get.hs
|
mpl-2.0
| 4,851 | 0 | 17 | 1,050 | 468 | 279 | 189 | 85 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- Module : Network.AWS.CodeDeploy.GetDeploymentGroup
-- Copyright : (c) 2013-2014 Brendan Hay <[email protected]>
-- License : This Source Code Form is subject to the terms of
-- the Mozilla Public License, v. 2.0.
-- A copy of the MPL can be found in the LICENSE file or
-- you can obtain it at http://mozilla.org/MPL/2.0/.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : experimental
-- Portability : non-portable (GHC extensions)
--
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- | Gets information about a deployment group.
--
-- <http://docs.aws.amazon.com/codedeploy/latest/APIReference/API_GetDeploymentGroup.html>
module Network.AWS.CodeDeploy.GetDeploymentGroup
(
-- * Request
GetDeploymentGroup
-- ** Request constructor
, getDeploymentGroup
-- ** Request lenses
, gdgApplicationName
, gdgDeploymentGroupName
-- * Response
, GetDeploymentGroupResponse
-- ** Response constructor
, getDeploymentGroupResponse
-- ** Response lenses
, gdgrDeploymentGroupInfo
) where
import Network.AWS.Prelude
import Network.AWS.Request.JSON
import Network.AWS.CodeDeploy.Types
import qualified GHC.Exts
data GetDeploymentGroup = GetDeploymentGroup
{ _gdgApplicationName :: Text
, _gdgDeploymentGroupName :: Text
} deriving (Eq, Ord, Read, Show)
-- | 'GetDeploymentGroup' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'gdgApplicationName' @::@ 'Text'
--
-- * 'gdgDeploymentGroupName' @::@ 'Text'
--
getDeploymentGroup :: Text -- ^ 'gdgApplicationName'
-> Text -- ^ 'gdgDeploymentGroupName'
-> GetDeploymentGroup
getDeploymentGroup p1 p2 = GetDeploymentGroup
{ _gdgApplicationName = p1
, _gdgDeploymentGroupName = p2
}
-- | The name of an existing AWS CodeDeploy application within the AWS user
-- account.
gdgApplicationName :: Lens' GetDeploymentGroup Text
gdgApplicationName =
lens _gdgApplicationName (\s a -> s { _gdgApplicationName = a })
-- | The name of an existing deployment group for the specified application.
gdgDeploymentGroupName :: Lens' GetDeploymentGroup Text
gdgDeploymentGroupName =
lens _gdgDeploymentGroupName (\s a -> s { _gdgDeploymentGroupName = a })
newtype GetDeploymentGroupResponse = GetDeploymentGroupResponse
{ _gdgrDeploymentGroupInfo :: Maybe DeploymentGroupInfo
} deriving (Eq, Read, Show)
-- | 'GetDeploymentGroupResponse' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'gdgrDeploymentGroupInfo' @::@ 'Maybe' 'DeploymentGroupInfo'
--
getDeploymentGroupResponse :: GetDeploymentGroupResponse
getDeploymentGroupResponse = GetDeploymentGroupResponse
{ _gdgrDeploymentGroupInfo = Nothing
}
-- | Information about the deployment group.
gdgrDeploymentGroupInfo :: Lens' GetDeploymentGroupResponse (Maybe DeploymentGroupInfo)
gdgrDeploymentGroupInfo =
lens _gdgrDeploymentGroupInfo (\s a -> s { _gdgrDeploymentGroupInfo = a })
instance ToPath GetDeploymentGroup where
toPath = const "/"
instance ToQuery GetDeploymentGroup where
toQuery = const mempty
instance ToHeaders GetDeploymentGroup
instance ToJSON GetDeploymentGroup where
toJSON GetDeploymentGroup{..} = object
[ "applicationName" .= _gdgApplicationName
, "deploymentGroupName" .= _gdgDeploymentGroupName
]
instance AWSRequest GetDeploymentGroup where
type Sv GetDeploymentGroup = CodeDeploy
type Rs GetDeploymentGroup = GetDeploymentGroupResponse
request = post "GetDeploymentGroup"
response = jsonResponse
instance FromJSON GetDeploymentGroupResponse where
parseJSON = withObject "GetDeploymentGroupResponse" $ \o -> GetDeploymentGroupResponse
<$> o .:? "deploymentGroupInfo"
|
dysinger/amazonka
|
amazonka-codedeploy/gen/Network/AWS/CodeDeploy/GetDeploymentGroup.hs
|
mpl-2.0
| 4,335 | 0 | 9 | 872 | 503 | 305 | 198 | 65 | 1 |
func = do
abc <- foo
--abc
return ()
|
lspitzner/brittany
|
data/Test264.hs
|
agpl-3.0
| 39 | 0 | 7 | 11 | 22 | 10 | 12 | -1 | -1 |
-- Example of an drawing graphics onto a canvas.
import Graphics.UI.Gtk
import Graphics.Rendering.Cairo
import Graphics.UI.Gtk.Gdk.EventM
main = do
initGUI
dia <- dialogNew
dialogAddButton dia stockOk ResponseOk
contain <- dialogGetUpper dia
canvas <- drawingAreaNew
canvas `on` sizeRequest $ return (Requisition 40 40)
ctxt <- cairoCreateContext Nothing
text <- layoutEmpty ctxt
text `layoutSetText` "Hello World."
canvas `on` exposeEvent $ updateCanvas text
boxPackStartDefaults contain canvas
widgetShow canvas
dialogRun dia
return ()
updateCanvas :: PangoLayout -> EventM EExpose Bool
updateCanvas text = do
win <- eventWindow
liftIO $ do
(width',height') <- drawableGetSize win
let width = realToFrac width'
height = realToFrac height'
-- Draw using the cairo api
renderWithDrawable win $ do
setSourceRGB 1 0 0
setLineWidth 20
setLineCap LineCapRound
setLineJoin LineJoinRound
moveTo 30 30
lineTo (width-30) (height-30)
lineTo (width-30) 30
lineTo 30 (height-30)
stroke
setSourceRGB 1 1 0
setLineWidth 4
save
translate (width / 2) (height / 2)
scale (width / 2) (height / 2)
arc 0 0 1 (135 * pi/180) (225 * pi/180)
restore
stroke
setSourceRGB 0 0 0
moveTo 30 (realToFrac height / 4)
rotate (pi/4)
showLayout text
return True
|
thiagoarrais/gtk2hs
|
demo/cairo/Drawing.hs
|
lgpl-2.1
| 1,376 | 0 | 16 | 326 | 493 | 225 | 268 | -1 | -1 |
{-# LANGUAGE GADTs #-}
module HN.Optimizer.Utils where
import Compiler.Hoopl
import qualified Data.Map as M
import Data.Maybe
import HN.Optimizer.ClassyLattice
import HN.Optimizer.Node
rewriteExitF :: (DefinitionNode -> f -> Maybe DefinitionNode) -> Node e x -> f -> Maybe (Graph Node e x)
rewriteExitF _ (Entry _) _ = Nothing
rewriteExitF rewriteDefinition (Exit n) f = mkLast . Exit <$> rewriteDefinition n f
rewriteExitB :: (DefinitionNode -> FactBase f -> Maybe DefinitionNode) -> Node e x -> Fact x f -> Maybe (Graph Node e x)
rewriteExitB _ (Entry _) _ = Nothing
rewriteExitB rf (Exit dn) f = mkLast . Exit <$> rf dn f
transferExitF :: (DefinitionNode -> f -> FactBase f) -> Node e x -> f -> Fact x f
transferExitF _ (Entry _) f = f
transferExitF tf (Exit n) f = tf n f
transferExitB :: (DefinitionNode -> FactBase f -> f) -> Node e x -> Fact x f -> f
transferExitB _ (Entry _) f = f
transferExitB tf (Exit n) f = tf n f
mergeFact label current base = let
update fact = (fact, M.insert label fact base)
in case M.lookup label base of
Nothing -> update current
Just baseFact -> case join (OldFact baseFact) (NewFact current) of
Nothing -> (baseFact, base)
Just newFact -> update newFact
transferMapExitB :: Lattice f => (DefinitionNode -> FactBase f -> f) -> Node e x -> Fact x (MapFact f) -> MapFact f
transferMapExitB _ (Entry l) (curFact, factBase) = mergeFact l curFact factBase
transferMapExitB tf (Exit dn) f = (tf dn (mapMap fst $ convertFactBase f), bot)
type MapFact f = (f, M.Map Label f)
transferMapExitF :: Lattice f => (DefinitionNode -> f -> [(Label, f)]) -> Node e x -> MapFact f -> Fact x (MapFact f)
transferMapExitF _ (Entry l) (curFact, factBase) = mergeFact l curFact factBase
transferMapExitF tf nn @ (Exit n) (f, m) = distributeFact nn $ (,) bot $ foldr (uncurry $ M.insertWith mereJoin) m $ tf n f
noTransferMapF :: Lattice f => FwdTransfer Node (MapFact f)
noTransferMapF = mkFTransfer $ transferMapExitF (\_ _ -> [])
noTransferMapB :: Lattice f => BwdTransfer Node (MapFact f)
noTransferMapB = mkBTransfer $ transferMapExitB (\_ _ -> bot)
convertFactBase :: Lattice f => FactBase (MapFact f) -> FactBase (MapFact f)
convertFactBase f = mapSquare $ foldr foo M.empty $ concatMap ff $ mapToList f where
ff (l, (f, m)) = (l, f) : M.toList m
foo (l, f) = M.insertWith mereJoin l f
mapSquare1 :: Lattice f => M.Map Label f -> Label -> MapFact f
mapSquare1 m l = (fromMaybe bot $ M.lookup l m, m)
mapSquare :: Lattice f => M.Map Label f -> FactBase (MapFact f)
mapSquare m = mapFromList $ map ff $ M.keys m where
ff l = (l, mapSquare1 m l)
instance Functor LabelMap where
fmap = mapMap
|
ingvar-lynn/HNC
|
HN/Optimizer/Utils.hs
|
lgpl-3.0
| 2,644 | 1 | 13 | 505 | 1,206 | 603 | 603 | 48 | 3 |
module SPrintIncAccum where
import Control.Monad.Trans.State
import Data.Monoid
sPrintIncAccum :: (Num a, Show a) => StateT a IO String
sPrintIncAccum =
StateT $
\a -> putStrLn ("Hi: " <> show a) >> return (show a, a + 1)
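-- A usage sketch (added; 'demo' is not in the original file): running the
-- transformer with an initial state of 10 prints "Hi: 10" and yields
-- ("10", 11).
demo :: IO (String, Int)
demo = runStateT sPrintIncAccum 10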
|
thewoolleyman/haskellbook
|
26/14/maor/SPrintIncAccum.hs
|
unlicense
| 234 | 0 | 11 | 49 | 90 | 49 | 41 | 7 | 1 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TupleSections #-}
-- Copyright 2014 (c) Diego Souza <[email protected]>
--
-- Licensed under the Apache License, Version 2.0 (the "License");
-- you may not use this file except in compliance with the License.
-- You may obtain a copy of the License at
--
-- http://www.apache.org/licenses/LICENSE-2.0
--
-- Unless required by applicable law or agreed to in writing, software
-- distributed under the License is distributed on an "AS IS" BASIS,
-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-- See the License for the specific language governing permissions and
-- limitations under the License.
module Leela.Network.WarpServer
( WarpServer (logger)
, warpServer
, stopRouter
, newWarpServer
) where
import Data.IORef
import Data.Monoid ((<>))
import System.ZMQ4
import Leela.Logger
import Data.Foldable (toList)
import Control.Monad
import qualified Data.Sequence as Sq
import Leela.Data.LQL
import Leela.Data.Time
import Leela.HZMQ.Pipe
import Leela.Data.Graph
import Leela.Data.Types
import qualified Leela.Data.L2Map as M
import System.IO.Unsafe
import Control.Exception
import Leela.HZMQ.Router
import Control.Concurrent
import Leela.Data.Counter
import Leela.Data.QDevice
import Leela.Data.Timeout
import Leela.Data.Endpoint
import Leela.Storage.Graph
import Leela.Data.LQL.Read
import Leela.Data.LQL.Show
import qualified Leela.Storage.Passwd as P
import qualified Data.ByteString.Lazy as L
import Leela.Storage.Passwd
import Data.ByteString.Char8 (unpack)
import Leela.Data.TimeSeries
import Control.Concurrent.STM
import Leela.Network.Protocol
import Data.ByteString.Builder
import Leela.Network.GrepProtocol
import Data.Double.Conversion.ByteString
data WarpServer = WarpServer { logger :: Logger
, stat :: IORef [(String, [Endpoint])]
, passwd :: IORef P.Passwd
, warpGrep :: Pipe Push
, fdseq :: Counter FH
, tManager :: TimeoutManager
, fdlist :: M.L2Map L.ByteString FH (TVar (Handle, Time, QDevice Reply))
, checkpoint :: IORef (Time, FH)
}
data Stream a = Chunk a
| Error SomeException
| EOF
showDouble :: Double -> String
showDouble = unpack . toShortest
serverLimit :: Int
serverLimit = 128 * (unsafePerformIO getNumCapabilities)
useTimeoutInMs :: Num a => a
useTimeoutInMs = 90 * 1000
readPasswd :: WarpServer -> IO P.Passwd
readPasswd = readIORef . passwd
dumpStat :: WarpServer -> IO [(L.ByteString, L.ByteString)]
dumpStat core =
liftM (concatMap dumpEntry) $ readIORef (stat core)
where
dumpEntry (k, []) = [(showEndpoint k, "")]
dumpEntry (k, [e]) = [(showEndpoint k, dumpEndpoint e)]
dumpEntry (k, (e:es)) = (showEndpoint k, dumpEndpoint e) : dumpEntry (k, es)
showEndpoint k = toLazyByteString $ string7 "endpoint/" <> string7 k
newWarpServer :: Logger -> Pipe Push -> IORef [(String, [Endpoint])] -> IORef P.Passwd -> IO WarpServer
newWarpServer syslog pipe statdb secretdb = makeState
where
makeState = do
time <- now
liftM4 (WarpServer syslog statdb secretdb pipe)
newCounter timeoutManager M.empty (newIORef (time, 0))
makeCheckpoint :: WarpServer -> Time -> FH -> IO ()
makeCheckpoint srv t1 fh1 = do
new <- atomicModifyIORef' (checkpoint srv) (uncurry modify)
when new (notice (logger srv) (printf "creating checkpoint: %f:%d" (seconds t1) fh1))
where
elapsed t1 t0 = milliseconds $ diff t1 t0
modify t0 fh0 =
if (elapsed t1 t0 > useTimeoutInMs)
then ((t1, fh1), True)
else ((t0, fh0), False)
makeFD :: WarpServer -> User -> (Time -> Handle -> FH -> QDevice Reply -> IO ()) -> IO ()
makeFD srv (User u) cc = do
fd <- next (fdseq srv)
(active, th) <- open (tManager srv) (useTimeoutInMs * 1000) (closeFDTimeout srv (User u, fd))
if (active > serverLimit)
then do
purge th
warning (logger srv) (printf "REJECT %d" active)
else do
notice (logger srv) (printf "ACCEPT %d : %d" fd active)
dev <- qnew 16
time <- snapshot
val <- newTVarIO (th, time, dev)
M.insert u fd val (fdlist srv)
makeCheckpoint srv time fd
cc time th fd dev
withFD :: WarpServer -> (User, FH) -> (Maybe (QDevice Reply) -> IO ()) -> IO ()
withFD srv ((User u), fh) action = do
mvalue <- M.lookup u fh (fdlist srv)
case mvalue of
Nothing -> do
(_, fh0) <- readIORef (checkpoint srv)
when (fh >= fh0) (action Nothing)
Just tvar -> do
(th, _, dev) <- readTVarIO tvar
touch th
action $ Just dev
closeFDTimeout :: WarpServer -> (User, FH) -> IO ()
closeFDTimeout srv (u, fh) = do
warning (logger srv) (printf "TIMEOUT %d" fh)
closeFD srv (u, fh)
closeFD :: WarpServer -> (User, FH) -> IO ()
closeFD srv (User u, fh) = do
mvalue <- M.delete u fh (fdlist srv)
case mvalue of
Nothing -> return ()
Just value -> do
(th, t0, dev) <- readTVarIO value
t1 <- snapshot
purge th
qclose dev
notice (logger srv) (printf "CLOSE %d [%s ms]" fh (showDouble $ milliseconds (diff t1 t0)))
touchQWrite :: Handle -> QDevice a -> a -> IO ()
touchQWrite th q a = touch th >> qwrite q a
navigate :: (GraphBackend m) => m -> Handle -> QDevice Reply -> (Matcher, [(Bool, GUID -> Matcher)]) -> IO ()
navigate db thandle queue (source, pipeline) = do
srcpipe <- qlink 16 queue
forkSource srcpipe
dstpipe <- foldM forkFilter srcpipe pipeline
copy asReply dstpipe queue
where
two (_, b, c) = (c, b)
asReply (Chunk (feed, path))
| null feed && null path = (True, Nothing)
| null feed = (True, Just $ Item (Path path))
| otherwise = (True, Just $ Item (List $ map (Path . (: path) . two) feed))
asReply EOF = (False, Nothing)
asReply (Error e) = throw e
selectIf nilOk io xs
| nilOk || (not $ null xs) = io xs
| otherwise = return ()
runFilter nilOk srcpipe f dstpipe = do
mg <- qread srcpipe
case mg of
Nothing -> touch thandle
Just (Chunk ([], path)) -> do
unless (null path) (touchQWrite thandle queue $ Item $ Path path)
runFilter nilOk srcpipe f dstpipe
Just (Chunk (feed, path)) -> do
forM_ feed (\(_, b, c) ->
query db (selectIf nilOk $ touchQWrite thandle dstpipe . Chunk . (, (c, b) : path)) (f c))
runFilter nilOk srcpipe f dstpipe
Just chunk -> touchQWrite thandle dstpipe chunk
forkSource dstpipe =
forkFinally
(query db (selectIf False $ touchQWrite thandle dstpipe . Chunk . (, [])) source)
(onTerm dstpipe)
where
onTerm dstpipe (Left e) = qTryWrite dstpipe (Error e)
onTerm dstpipe _ = touchQWrite thandle dstpipe EOF
forkFilter srcpipe (nilOk, f) = do
dstpipe <- qlink 16 queue
forkFinally
(runFilter nilOk srcpipe f dstpipe)
(onTerm dstpipe)
return dstpipe
where
onTerm dstpipe (Left e) = qTryWrite dstpipe (Error e)
onTerm _ _ = return ()
evalLQL :: (GraphBackend m, AttrBackend m) => Context -> m -> WarpServer -> Handle -> QDevice Reply -> [LQL] -> IO ()
evalLQL _ _ _ thandle queue [] = touchQWrite thandle queue Last
evalLQL ctx db core thandle queue (x:xs) = do
case x of
PathStmt q ->
navigate db thandle queue q
TAttrListStmt g a ->
enumTAttrs db (touchQWrite thandle queue . Item . NAttrs g) g a
KAttrListStmt g a ->
enumKAttrs db (touchQWrite thandle queue . Item . NAttrs g) g a
KAttrGetStmt g a _ ->
getAttr db g a >>= touchQWrite thandle queue . Item . KAttr g a
TAttrGetStmt g a (Range t0 t1) pipeline -> do
loadTAttr db pipeline (onTimeseries (touchQWrite thandle queue . (Item . TAttr g a))) g a t0 t1
touchQWrite thandle queue (Item $ TAttr g a [])
TAttrLastStmt guid attr -> do
let toTAttr (g, a, t, v) = TAttr g a [(t, v)]
scanLast db guid attr (\value ->
case value of
Just [] -> return ()
Just xs -> touchQWrite thandle queue (Item $ List (map toTAttr xs))
Nothing -> touchQWrite thandle queue (Fail 500 (Just "error reading from storage")))
StatStmt -> do
state <- dumpStat core
touchQWrite thandle queue (Item $ Stat state)
AlterStmt journal
| Sq.length journal > 1000 ->
touchQWrite thandle queue (Fail 413 (Just $ printf "too many write requests [%d > 1000]" (Sq.length journal)))
| otherwise -> do
names <- exec db (toList journal)
mapM_ (\(u, t, k, n, g) -> touchQWrite thandle queue (Item $ Name u t k n g)) names
NameStmt _ guids -> do
names <- getName db (toList guids)
forM_ names (\(u, t, k, n, g) ->
touchQWrite thandle queue (Item $ Name u t k n g))
GUIDStmt user names -> do
guids <- getGUID db [(targetUser user, uTree user, k, n) | (k, n) <- toList names]
forM_ guids (\(u, t, k, n, g) ->
touchQWrite thandle queue (Item $ Name u t k n g))
GrepStmt u query -> do
t <- now
broadcast ctx 1000 (warpGrep core) [encodeEventMessage $ ControlMsg t query]
touchQWrite thandle queue (Item $ Name (uUser u) (uTree u) (Kind "lql/grep") (Node $ renderGrep query) (grepID query))
evalLQL ctx db core thandle queue xs
evalFinalizer :: Logger -> Time -> FH -> QDevice Reply -> Either SomeException () -> IO ()
evalFinalizer syslog t0 chan dev (Left e) = do
t <- liftM (`diff` t0) snapshot
warning syslog $ printf "FAILURE: %s %s [%s ms]" (show chan) (show e) (showDouble $ milliseconds t)
qTryWrite dev (encodeE e)
qclose dev
evalFinalizer syslog t0 chan dev (Right _) = do
t <- liftM (`diff` t0) snapshot
notice syslog $ printf "SUCCESS: %s [%s ms]" (show chan) (showDouble $ milliseconds t)
qclose dev
process :: (GraphBackend m, AttrBackend m) => Context -> m -> WarpServer -> Query -> (Reply -> IO ()) -> IO ()
process ctx storage srv (Begin sig msg) flush = void $ forkIO $ yield >>
case (chkloads (parseLQL $ sigUser sig) msg) of
Left _ -> flush $ Fail 400 (Just "syntax error")
Right stmts -> makeFD srv (sigUser sig) (\time thandle fh dev -> do
if (level (logger srv) >= NOTICE)
then notice (logger srv) (printf "BEGIN %s %d" (lqlDescr stmts) fh)
else info (logger srv) (printf "BEGIN %s %d" (show msg) fh)
flush $ Done fh
result <- try $ evalLQL ctx storage srv thandle dev stmts
evalFinalizer (logger srv) time fh dev result)
process _ _ srv (Fetch sig fh) flush = void $ forkIO $ do
let channel = (sigUser sig, fh)
notice (logger srv) (printf "FETCH %d" fh)
withFD srv channel $ \mdev ->
case mdev of
Nothing -> flush $ Fail 404 $ Just "no such channel"
Just dev -> qread dev >>= \msg ->
case msg of
Just r -> do
when (isEOF r) (closeFD srv (sigUser sig, fh))
flush r
Nothing -> do
closeFD srv (sigUser sig, fh)
flush Last
process _ _ srv (Close _ sig fh) flush = do
closeFD srv (sigUser sig, fh)
flush Last
warpServer :: (GraphBackend m, AttrBackend m) => WarpServer -> NominalDiffTime -> TimeCache -> Endpoint -> Context -> m -> IO RouterFH
warpServer core ttl tcache addr ctx storage =
startRouter (logger core) addr ctx (worker (logBackend storage handleGraphEvent handleAttrEvent) core ttl)
where
worker db core ttl = Worker f (return . encode . encodeE)
where
f msg flush = do
time <- readCache tcache
secretdb <- readPasswd core
case (decode (time, ttl) (readSecret secretdb) msg) of
Left e@(Fail c m) -> do
notice (logger core) (printf "FAIL %d %s" c (maybe "" id m))
flush (encode e)
Left e -> flush (encode e)
Right q -> process ctx db core q (flush . encode)
handleAttrEvent :: [AttrEvent] -> IO ()
handleAttrEvent e = do
t <- readCache tcache
ok <- push (warpGrep core) [encodeEventMessage $ AttrDataMsg t e]
unless ok $ debug (logger core) (printf "warpserver: dropping attr event [queue full]")
handleGraphEvent :: [GraphEvent] -> IO ()
handleGraphEvent e = do
t <- readCache tcache
ok <- push (warpGrep core) [encodeEventMessage $ GraphDataMsg t e]
unless ok $ debug (logger core) (printf "warpserver: dropping graph event [queue full]")
|
locaweb/leela
|
src/warpdrive/src/Leela/Network/WarpServer.hs
|
apache-2.0
| 13,327 | 0 | 24 | 4,072 | 4,804 | 2,414 | 2,390 | 276 | 13 |
{- | module: Network.Riak.CRDT
copyright: (c) 2016 Sentenai
author: Antonio Nikishaev <[email protected]>
license: Apache
CRDT operations
* Haskell-side
* Haskell values: 'Counter', 'Set' etc
* ADT for operations: 'CounterOp', 'SetOp' etc
* 'modify' to locally modify a value (matching riak-side behaviour)
* Riak-side
* 'get' to get a current value
* 'sendModify' to ask Riak to apply modifications
TL;DR example
>>> let c = Counter 41
>>> let op = CounterInc 1
>>> modify op c
Counter 42
>>> get conn "counters" "bucket" "key"
Just (DTCounter (Counter 41))
>>> sendModify conn "counters" "bucket" "key" [op] >> get conn "counters" "bucket" "key"
Just (DTCounter (Counter 42))
-}
{-# LANGUAGE TypeFamilies, OverloadedStrings, ScopedTypeVariables, PatternGuards #-}
{-# LANGUAGE MultiParamTypeClasses, FunctionalDependencies #-}
module Network.Riak.CRDT (module Network.Riak.CRDT.Types,
get,
CRDT(..))
where
import Data.Default.Class
import qualified Data.Map as M
import Data.Proxy
import Data.Semigroup
import qualified Data.Set as S
import Network.Riak.CRDT.Ops
import Network.Riak.CRDT.Riak
import Network.Riak.CRDT.Types
import Network.Riak.Types
-- | Modify a counter by applying operations ops
modifyCounter :: CounterOp -> Counter -> Counter
modifyCounter op c = c <> Counter i
where CounterInc i = op
-- | Modify a set by applying operations ops
modifySet :: SetOp -> Set -> Set
modifySet op (Set c) = Set (c `S.union` adds S.\\ rems)
where SetOpsComb adds rems = toOpsComb op
modifyMap :: MapOp -> Map -> Map
modifyMap (MapRemove field) (Map mc) = Map $ M.delete field mc
modifyMap (MapUpdate path op) m = modifyMap1 path op m
modifyMap1 :: MapPath -> MapValueOp -> Map -> Map
modifyMap1 (MapPath (e :| [])) op m = modMap mf op m
where mf = MapField (mapEntryTag op) e
modifyMap1 (MapPath (e :| (r:rs))) op (Map m')
= Map $ M.alter (Just . f) (MapField MapMapTag e) m'
where f :: Maybe MapEntry -> MapEntry
f Nothing = f (Just $ MapMap def)
f (Just (MapMap m)) = MapMap . modifyMap1 (MapPath (r :| rs)) op $ m
f (Just z) = z
modMap :: MapField -> MapValueOp -> Map -> Map
modMap ix op (Map m) = Map $ M.alter (Just . modifyMapValue op) ix m
modifyMapValue :: MapValueOp -> Maybe MapEntry -> MapEntry
modifyMapValue (MapSetOp op) = modifyEntry (Proxy :: Proxy Set) op
modifyMapValue (MapCounterOp op) = modifyEntry (Proxy :: Proxy Counter) op
modifyMapValue (MapMapOp op) = modifyEntry (Proxy :: Proxy Map) op
modifyMapValue (MapFlagOp op) = modifyEntry (Proxy :: Proxy Flag) op
modifyMapValue (MapRegisterOp op) = modifyEntry (Proxy :: Proxy Register) op
modifyFlag :: FlagOp -> Flag -> Flag
modifyFlag (FlagSet x) = const (Flag x)
modifyRegister :: RegisterOp -> Register -> Register
modifyRegister (RegisterSet x) = const (Register x)
-- | Types that can be held inside 'Map'
class Default a => MapCRDT a where
type MapOperation_ a :: *
mapModify :: MapOperation_ a -> a -> a
-- | modify a maybe-absent 'MapEntry'
modifyEntry :: Proxy a -> MapOperation_ a -> Maybe MapEntry -> MapEntry
modifyEntry _ op Nothing = toEntry . mapModify op $ (def :: a)
modifyEntry _ op (Just e) | Just v <- fromEntry e = toEntry . mapModify op $ (v :: a)
| otherwise = e
toEntry :: a -> MapEntry
fromEntry :: MapEntry -> Maybe a
instance MapCRDT Flag where
type MapOperation_ Flag = FlagOp
mapModify = modifyFlag
fromEntry (MapFlag f) = Just f
fromEntry _ = Nothing
toEntry = MapFlag
instance MapCRDT Set where
type MapOperation_ Set = SetOp
mapModify = modify
fromEntry (MapSet s) = Just s
fromEntry _ = Nothing
toEntry = MapSet
instance MapCRDT Counter where
type MapOperation_ Counter = CounterOp
mapModify = modify
fromEntry (MapCounter s) = Just s
fromEntry _ = Nothing
toEntry = MapCounter
instance MapCRDT Register where
type MapOperation_ Register = RegisterOp
mapModify = modifyRegister
fromEntry (MapRegister s) = Just s
fromEntry _ = Nothing
toEntry = MapRegister
instance MapCRDT Map where
type MapOperation_ Map = MapOp
mapModify = modify
fromEntry (MapMap s) = Just s
fromEntry _ = Nothing
toEntry = MapMap
-- | CRDT types
class MapCRDT a => CRDT a op | a -> op, op -> a where
-- | Modify a value by applying an operation
modify :: op -> a -> a
-- | Request riak a modification
sendModify :: Connection
-> BucketType -> Bucket -> Key
-> [op] -> IO ()
instance CRDT Counter CounterOp where
modify = modifyCounter
sendModify = counterSendUpdate
instance CRDT Set SetOp where
modify = modifySet
sendModify = setSendUpdate
instance CRDT Map MapOp where
modify = modifyMap
sendModify = mapSendUpdate
|
k-bx/riak-haskell-client
|
src/Network/Riak/CRDT.hs
|
apache-2.0
| 4,959 | 0 | 13 | 1,205 | 1,359 | 711 | 648 | 97 | 3 |
module BloomFilter where
import Data.Array.ST (STUArray, runSTUArray)
import Data.Array.Unboxed (UArray)
import Data.Word
import Control.Monad (liftM)
import Control.Monad.ST (ST, runST)
import qualified Data.Array.Base as ST
import Data.Array.MArray (getBounds, newArray, readArray, writeArray)
import Prelude hiding (elem, length, notElem)
import Murmurhash
data Bloom a = B {
blmHash :: a -> [Word32]
, blmArray :: UArray Word32 Bool
}
data MutBloom s a = MB {
mutHash :: a -> [Word32]
, mutArray :: STUArray s Word32 Bool
}
create :: (a -> [Word32]) -- ^ hash functions
-> Int -- ^ num bits
-> Bloom a
-- Build an empty filter by freezing a fresh mutable filter with no elements.
create hash numBits = fromList hash (fromIntegral numBits) []
new :: (a -> [Word32]) -> Word32 -> ST s (MutBloom s a)
new hash numBits = MB hash `liftM` newArray (0, numBits - 1) False
length :: MutBloom s a -> ST s Word32
length bf = (succ . snd) `liftM` getBounds (mutArray bf)
insert :: MutBloom s a -> a -> ST s ()
insert bf elt = indicies bf elt >>= mapM_ (\bit -> writeArray (mutArray bf) bit True)
indicies :: MutBloom s a -> a -> ST s [Word32]
indicies bf elt = do
modulus <- length bf
return $ map (`mod` modulus) (mutHash bf elt)
elem, notElem :: a -> MutBloom s a -> ST s Bool
elem elt bf = indicies bf elt >>= allM (readArray (mutArray bf))
notElem elt bf = not `liftM` elem elt bf
allM :: Monad m => (a -> m Bool) -> [a] -> m Bool
allM p (x : xs) = do
ok <- p x
if ok then allM p xs else return False
allM _ [] = return True
fromList :: (a -> [Word32]) -> Word32 -> [a] -> Bloom a
fromList hash numBits values = runST $ do
mb <- new hash numBits
mapM_ (insert mb) values
B hash `liftM` ST.unsafeFreeze (mutArray mb)
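-- A toy usage sketch (added; 'toyHash' and 'toyFilter' are placeholders, not
-- the Murmurhash-based hashes this module is presumably meant to pair with):
-- it only shows how 'fromList' is parameterised by a hash family and a bit
-- count.
toyHash :: String -> [Word32]
toyHash s = [ fromIntegral (sum (map fromEnum s))
            , fromIntegral (foldr (\_ n -> n + 1) (0 :: Int) s) ]

toyFilter :: Bloom String
toyFilter = fromList toyHash 1024 ["foo", "bar", "baz"]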
|
songpp/my-haskell-playground
|
src/BloomFilter.hs
|
apache-2.0
| 1,703 | 0 | 11 | 416 | 743 | 396 | 347 | 43 | 2 |
module Propellor.CmdLine (
defaultMain,
processCmdLine,
) where
import System.Environment (getArgs)
import Data.List
import System.Exit
import System.PosixCompat
import Network.Socket
import Propellor.Base
import Propellor.Gpg
import Propellor.Git
import Propellor.Git.VerifiedBranch
import Propellor.Bootstrap
import Propellor.Spin
import Propellor.Types.CmdLine
import qualified Propellor.Property.Docker as Docker
import qualified Propellor.Property.Chroot as Chroot
import qualified Propellor.Shim as Shim
import Utility.FileSystemEncoding
usage :: Handle -> IO ()
usage h = hPutStrLn h $ unlines
[ "Usage:"
, " with no arguments, provision the current host"
, ""
, " --init"
, " initialize ~/.propellor"
, " hostname"
, " provision the current host as if it had the specified hostname"
, " --spin targethost [--via relayhost]"
, " provision the specified host"
, " --build"
, " recompile using your current config"
, " --add-key keyid"
, " add an additional signing key to the private data"
, " --rm-key keyid"
, " remove a signing key from the private data"
, " --list-fields"
, " list private data fields"
, " --set field context"
, " set a private data field"
, " --unset field context"
, " clear a private data field"
, " --unset-unused"
, " clear unused fields from the private data"
, " --dump field context"
, " show the content of a private data field"
, " --edit field context"
, " edit the content of a private data field"
, " --merge"
, " combine multiple spins into a single git commit"
, " --check"
, " double-check that propellor can actually run here"]
usageError :: [String] -> IO a
usageError ps = do
usage stderr
error ("(Unexpected: " ++ show ps)
processCmdLine :: IO CmdLine
processCmdLine = go =<< getArgs
where
go ("--check":_) = return Check
go ("--spin":ps) = case reverse ps of
(r:"--via":hs) -> Spin
<$> mapM hostname (reverse hs)
<*> pure (Just r)
_ -> Spin <$> mapM hostname ps <*> pure Nothing
go ("--build":[]) = return Build
go ("--add-key":k:[]) = return $ AddKey k
go ("--rm-key":k:[]) = return $ RmKey k
go ("--set":f:c:[]) = withprivfield f c Set
go ("--unset":f:c:[]) = withprivfield f c Unset
go ("--unset-unused":[]) = return UnsetUnused
go ("--dump":f:c:[]) = withprivfield f c Dump
go ("--edit":f:c:[]) = withprivfield f c Edit
go ("--list-fields":[]) = return ListFields
go ("--merge":[]) = return Merge
go ("--help":_) = do
usage stdout
exitFailure
go ("--boot":_:[]) = return $ Update Nothing -- for back-compat
go ("--serialized":s:[]) = serialized Serialized s
go ("--continue":s:[]) = serialized Continue s
go ("--gitpush":fin:fout:_) = return $ GitPush (Prelude.read fin) (Prelude.read fout)
go ("--run":h:[]) = go [h]
go (h:[])
| "--" `isPrefixOf` h = usageError [h]
| otherwise = Run <$> hostname h
go [] = do
s <- takeWhile (/= '\n') <$> readProcess "hostname" ["-f"]
if null s
then errorMessage "Cannot determine hostname! Pass it on the command line."
else return $ Run s
go v = usageError v
withprivfield s c f = case readish s of
Just pf -> return $ f pf (Context c)
Nothing -> errorMessage $ "Unknown privdata field " ++ s
serialized mk s = case readish s of
Just cmdline -> return $ mk cmdline
Nothing -> errorMessage $ "serialization failure (" ++ s ++ ")"
data CanRebuild = CanRebuild | NoRebuild
-- | Runs propellor on hosts, as controlled by command-line options.
defaultMain :: [Host] -> IO ()
defaultMain hostlist = withConcurrentOutput $ do
useFileSystemEncoding
Shim.cleanEnv
checkDebugMode
cmdline <- processCmdLine
debug ["command line: ", show cmdline]
go CanRebuild cmdline
where
go cr (Serialized cmdline) = go cr cmdline
go _ Check = return ()
go cr Build = buildFirst Nothing cr Build $ return ()
go _ (Set field context) = setPrivData field context
go _ (Unset field context) = unsetPrivData field context
go _ (UnsetUnused) = unsetPrivDataUnused hostlist
go _ (Dump field context) = dumpPrivData field context
go _ (Edit field context) = editPrivData field context
go _ ListFields = listPrivDataFields hostlist
go _ (AddKey keyid) = addKey keyid
go _ (RmKey keyid) = rmKey keyid
go _ c@(ChrootChain _ _ _ _) = Chroot.chain hostlist c
go _ (DockerChain hn cid) = Docker.chain hostlist hn cid
go _ (DockerInit hn) = Docker.init hn
go _ (GitPush fin fout) = gitPushHelper fin fout
go cr (Relay h) = forceConsole >>
updateFirst Nothing cr (Update (Just h)) (update (Just h))
go _ (Update Nothing) = forceConsole >>
fetchFirst (onlyprocess (update Nothing))
go _ (Update (Just h)) = update (Just h)
go _ Merge = mergeSpin
go cr cmdline@(Spin hs mrelay) = buildFirst Nothing cr cmdline $ do
unless (isJust mrelay) commitSpin
forM_ hs $ \hn -> withhost hn $ spin mrelay hn
go cr cmdline@(Run hn) = ifM ((==) 0 <$> getRealUserID)
( updateFirst (findHost hostlist hn) cr cmdline $ runhost hn
, fetchFirst $ go cr (Spin [hn] Nothing)
)
go cr cmdline@(SimpleRun hn) = forceConsole >>
fetchFirst (buildFirst (findHost hostlist hn) cr cmdline (runhost hn))
-- When continuing after a rebuild, we don't want to rebuild again.
go _ (Continue cmdline) = go NoRebuild cmdline
withhost :: HostName -> (Host -> IO ()) -> IO ()
withhost hn a = maybe (unknownhost hn hostlist) a (findHost hostlist hn)
runhost hn = onlyprocess $ withhost hn mainProperties
onlyprocess = onlyProcess (localdir </> ".lock")
unknownhost :: HostName -> [Host] -> IO a
unknownhost h hosts = errorMessage $ unlines
[ "Propellor does not know about host: " ++ h
, "(Perhaps you should specify the real hostname on the command line?)"
, "(Or, edit propellor's config.hs to configure this host)"
, "Known hosts: " ++ unwords (map hostName hosts)
]
-- Builds propellor (when allowed) and if it looks like a new binary,
-- re-execs it to continue.
-- Otherwise, runs the IO action to continue.
--
-- The Host should only be provided when dependencies should be installed
-- as needed to build propellor.
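--
-- Both call shapes appear in 'defaultMain' above: @buildFirst Nothing cr Build ...@
-- when no particular host is involved, and
-- @buildFirst (findHost hostlist hn) cr cmdline ...@ (as for 'SimpleRun') when that
-- host's dependencies may need to be installed first.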
buildFirst :: Maybe Host -> CanRebuild -> CmdLine -> IO () -> IO ()
buildFirst h CanRebuild cmdline next = do
oldtime <- getmtime
buildPropellor h
newtime <- getmtime
if newtime == oldtime
then next
else continueAfterBuild cmdline
where
getmtime = catchMaybeIO $ getModificationTime "propellor"
buildFirst _ NoRebuild _ next = next
continueAfterBuild :: CmdLine -> IO a
continueAfterBuild cmdline = go =<< boolSystem "./propellor"
[ Param "--continue"
, Param (show cmdline)
]
where
go True = exitSuccess
go False = exitWith (ExitFailure 1)
fetchFirst :: IO () -> IO ()
fetchFirst next = do
whenM hasOrigin $
void fetchOrigin
next
updateFirst :: Maybe Host -> CanRebuild -> CmdLine -> IO () -> IO ()
updateFirst h canrebuild cmdline next = ifM hasOrigin
( updateFirst' h canrebuild cmdline next
, next
)
-- If changes can be fetched from origin, builds propellor (when allowed)
-- and re-execs the updated propellor binary to continue.
-- Otherwise, runs the IO action to continue.
updateFirst' :: Maybe Host -> CanRebuild -> CmdLine -> IO () -> IO ()
updateFirst' h CanRebuild cmdline next = ifM fetchOrigin
( do
buildPropellor h
continueAfterBuild cmdline
, next
)
updateFirst' _ NoRebuild _ next = next
-- Gets the fully qualified domain name, given a string that might be
-- a short name to look up in the DNS.
hostname :: String -> IO HostName
hostname s = go =<< catchDefaultIO [] dnslookup
where
dnslookup = getAddrInfo (Just canonname) (Just s) Nothing
canonname = defaultHints { addrFlags = [AI_CANONNAME] }
go (AddrInfo { addrCanonName = Just v } : _) = pure v
go _
| "." `isInfixOf` s = pure s -- assume it's a fqdn
| otherwise =
error $ "cannot find host " ++ s ++ " in the DNS"
|
ArchiveTeam/glowing-computing-machine
|
src/Propellor/CmdLine.hs
|
bsd-2-clause
| 7,824 | 44 | 14 | 1,562 | 2,599 | 1,310 | 1,289 | 187 | 25 |
module Spire.Canonical.Embedder where
import Control.Applicative
import Data.Monoid (mempty)
import Unbound.LocallyNameless hiding ( Spine )
import Spire.Canonical.Types
import Spire.Expression.Types
import qualified Spire.Canonical.Builtins as B
----------------------------------------------------------------------
embedV :: Value -> FreshM Check
embedV VTT = return $ cVar B.tt
embedV VTrue = return $ cVar B.true
embedV VFalse = return $ cVar B.false
embedV VNil = return $ cVar B.nil
embedV VEmp = return $ cVar B._Emp
embedV VUnit = return $ cVar B._Unit
embedV VBool = return $ cVar B._Bool
embedV VString = return $ cVar B._String
embedV VEnum = return $ cVar B._Enum
embedV VTel = return $ cVar B._Tel
embedV VType = return $ cVar B._Type
embedV VRefl = return $ CRefl
embedV VHere = return $ CHere
embedV (VQuotes s) = return $ Infer (IQuotes s)
embedV (VThere t) = CThere <$> embedV t
embedV (VEnd i) = CEnd <$> embedV i
embedV (VTag _E) = Infer <$> (IApp (iVar B._Tag) <$> embedV _E)
embedV (VDesc _I) = Infer <$> (IApp (iVar B._Desc) <$> embedV _I)
embedV (VSg _A _B) = Infer <$> (ISg <$> embedV _A <*> embedVB _B)
embedV (VPi _A _B) = Infer <$> (IPi <$> embedV _A <*> embedVB _B)
embedV (VEq _A a _B b) = Infer <$>
(IEq <$> (IAnn <$> embedV a <*> embedV _A) <*> (IAnn <$> embedV b <*> embedV _B))
embedV (VFix l _P _I _D p i) = Infer <$>
iApps (iVar B._Fix) <$> sequence
[ embedV l
, embedV _P
, embedV _I
, embedV _D
, embedV p
, embedV i
]
embedV (VRec i _D) = CRec <$> embedV i <*> embedV _D
embedV (VInit xs) = CInit <$> embedV xs
embedV (VArg _A _B) = CArg <$> embedV _A <*> embedVB _B
embedV (VCons x xs) = Infer <$>
iApps (iVar B.cons) <$> sequence [embedV x , embedV xs]
embedV (VExt _A _B) = Infer <$>
iApps (iVar B._Ext) <$> sequence [ embedV _A , embedVF _B ]
embedV (VPair a b) = CPair <$> embedV a <*> embedV b
embedV (VLam b) = CLam <$> embedVB b
embedV (VNeut nm fs) = Infer <$> embedN nm fs
----------------------------------------------------------------------
embedN :: Nom -> Spine -> FreshM Infer
embedN nm Id = return $ IVar nm
embedN nm (Pipe fs (EApp a)) = IApp <$> embedN nm fs <*> embedV a
embedN nm (Pipe fs (EFunc _I _X i)) =
iApps (iVar B._Func) <$> sequence
[ embedV _I
, Infer <$> embedN nm fs
, embedVF _X
, embedV i
]
embedN nm (Pipe fs (EHyps _I _X _M i xs)) =
iApps (iVar B._Hyps) <$> sequence
[ embedV _I
, Infer <$> embedN nm fs
, embedVF _X
, embedVF2 _M
, embedV i
, embedV xs
]
embedN nm (Pipe fs (EProve _I _X _M m i xs)) =
iApps (iVar B.prove) <$> sequence
[ embedV _I
, Infer <$> embedN nm fs
, embedVF _X
, embedVF2 _M
, embedVF2 m
, embedV i
, embedV xs
]
embedN nm (Pipe fs (EInd l _P _I _D p _M m i)) =
iApps (iVar B.ind) <$> sequence
[ embedV l
, embedV _P
, embedV _I
, embedV _D
, embedV p
, embedVF2 _M
, embedVF3 m
, embedV i
, Infer <$> embedN nm fs
]
embedN nm (Pipe fs (EElimUnit _P ptt)) =
iApps (iVar B.elimUnit) <$> sequence
[ embedVF _P
, embedV ptt
, Infer <$> embedN nm fs
]
embedN nm (Pipe fs (EElimBool _P pt pf)) =
iApps (iVar B.elimBool) <$> sequence
[ embedVF _P
, embedV pt
, embedV pf
, Infer <$> embedN nm fs
]
embedN nm (Pipe fs (EElimPair _A _B _P ppair)) =
iApps (iVar B.elimPair) <$> sequence
[ embedV _A
, embedVF _B
, embedVF _P
, embedVF2 ppair
, Infer <$> embedN nm fs
]
embedN nm (Pipe fs (EElimEq _A x _P prefl y)) =
iApps (iVar B.elimEq) <$> sequence
[ embedV _A
, embedV x
, embedVF2 _P
, embedV prefl
, embedV y
, Infer <$> embedN nm fs
]
embedN nm (Pipe fs (EElimEnum _P pn pc)) =
iApps (iVar B.elimEnum) <$> sequence
[ embedVF _P
, embedV pn
, embedVF3 pc
, Infer <$> embedN nm fs
]
embedN nm (Pipe fs (EElimTel _P pemp pext)) =
iApps (iVar B.elimTel) <$> sequence
[ embedVF _P
, embedV pemp
, embedVF3 pext
, Infer <$> embedN nm fs
]
embedN nm (Pipe fs (EElimDesc _I _P pend prec parg)) =
iApps (iVar B.elimDesc) <$> sequence
[ embedV _I
, embedVF _P
, embedVF pend
, embedVF3 prec
, embedVF3 parg
, Infer <$> embedN nm fs
]
embedN nm (Pipe fs (EBranches _P)) =
iApps (iVar B._Branches) <$> sequence
[ Infer <$> embedN nm fs
, embedVF _P
]
embedN nm (Pipe fs (ECase _E _P cs)) =
iApps (iVar B._case) <$> sequence
[ embedV _E
, embedVF _P
, embedV cs
, Infer <$> embedN nm fs
]
----------------------------------------------------------------------
embedVF :: Bind Nom Value -> FreshM Check
embedVF bnd = CLam <$> embedVB bnd
embedVF2 :: Bind Nom2 Value -> FreshM Check
embedVF2 bnd_a = do
((nm_x , nm_y) , a) <- unbind bnd_a
a' <- embedV a
return $
CLam $ bind nm_x $
CLam $ bind nm_y $
a'
embedVF3 :: Bind Nom3 Value -> FreshM Check
embedVF3 bnd_a = do
((nm_x , nm_y , nm_z) , a) <- unbind bnd_a
a' <- embedV a
return $
CLam $ bind nm_x $
CLam $ bind nm_y $
CLam $ bind nm_z $
a'
embedVB :: Alpha a => Bind a Value -> FreshM (Bind a Check)
embedVB bnd_a = do
(nm , a) <- unbind bnd_a
a' <- embedV a
return $ bind nm a'
embedVDef :: VDef -> FreshM CDef
embedVDef (VDef nm a _A) = CDef nm <$> embedV a <*> embedV _A
----------------------------------------------------------------------
|
spire/spire
|
src/Spire/Canonical/Embedder.hs
|
bsd-3-clause
| 5,829 | 0 | 13 | 1,819 | 2,410 | 1,175 | 1,235 | 167 | 1 |
-- Copyright (c) 2016-present, Facebook, Inc.
-- All rights reserved.
--
-- This source code is licensed under the BSD-style license found in the
-- LICENSE file in the root directory of this source tree.
{-# LANGUAGE OverloadedStrings #-}
module Duckling.Numeral.FR.Corpus
( corpus
) where
import Data.String
import Prelude
import Duckling.Locale
import Duckling.Numeral.Types
import Duckling.Resolve
import Duckling.Testing.Types
corpus :: Corpus
corpus = (testContext {locale = makeLocale FR Nothing}, testOptions, allExamples)
allExamples :: [Example]
allExamples = concat
[ examples (NumeralValue 0)
[ "0"
, "zero"
, "zéro"
]
, examples (NumeralValue 1)
[ "1"
, "un"
, "une"
]
, examples (NumeralValue 11)
[ "onze"
]
, examples (NumeralValue 17)
[ "dix sept"
, "dix-sept"
]
, examples (NumeralValue 21)
[ "vingt et un"
, "vingt-et-un"
]
, examples (NumeralValue 23)
[ "vingt trois"
, "vingt-trois"
]
, examples (NumeralValue 70)
[ "soixante dix"
]
, examples (NumeralValue 71)
[ "soixante onze"
]
, examples (NumeralValue 78)
[ "soixante dix huit"
]
, examples (NumeralValue 73)
[ "soixante treize"
]
, examples (NumeralValue 80)
[ "quatre vingt"
]
, examples (NumeralValue 81)
[ "quatre vingt un"
]
, examples (NumeralValue 82)
[ "quatre vingt deux"
]
, examples (NumeralValue 90)
[ "quatre vingt dix"
]
, examples (NumeralValue 91)
[ "quatre vingt onze"
]
, examples (NumeralValue 92)
[ "quatre vingt douze"
]
, examples (NumeralValue 99)
[ "quatre vingt dix neuf"
]
, examples (NumeralValue 33)
[ "33"
, "trente trois"
, "trente-trois"
, "trente 3"
]
, examples (NumeralValue 118)
[ "cent dix-huit"
]
, examples (NumeralValue 4020)
[ "quatre mille vingt"
]
, examples (NumeralValue 100000)
[ "100.000"
, "100000"
, "100K"
, "100k"
, "cent mille"
, "100 000"
]
, examples (NumeralValue 3000000)
[ "3M"
, "3000K"
, "3000000"
, "3.000.000"
, "trois millions"
]
, examples (NumeralValue 1200000)
[ "1.200.000"
, "1200000"
, "1,2M"
, "1200K"
, ",0012G"
, "un million deux cent mille"
]
, examples (NumeralValue (-1200000))
[ "- 1.200.000"
, "-1200000"
, "moins 1200000"
, "-1,2M"
, "-1200K"
, "-,0012G"
]
, examples (NumeralValue 6.7)
[ "6,7"
]
, examples (NumeralValue 6700.54)
[ "6.700,54"
, "6 700,54"
]
]
|
facebookincubator/duckling
|
Duckling/Numeral/FR/Corpus.hs
|
bsd-3-clause
| 3,344 | 0 | 11 | 1,488 | 634 | 359 | 275 | 95 | 1 |
{-# LANGUAGE PatternSynonyms #-}
--------------------------------------------------------------------------------
-- |
-- Module : Graphics.GL.SGIX.VertexPreclip
-- Copyright : (c) Sven Panne 2019
-- License : BSD3
--
-- Maintainer : Sven Panne <[email protected]>
-- Stability : stable
-- Portability : portable
--
--------------------------------------------------------------------------------
module Graphics.GL.SGIX.VertexPreclip (
-- * Extension Support
glGetSGIXVertexPreclip,
gl_SGIX_vertex_preclip,
-- * Enums
pattern GL_VERTEX_PRECLIP_HINT_SGIX,
pattern GL_VERTEX_PRECLIP_SGIX
) where
import Graphics.GL.ExtensionPredicates
import Graphics.GL.Tokens
|
haskell-opengl/OpenGLRaw
|
src/Graphics/GL/SGIX/VertexPreclip.hs
|
bsd-3-clause
| 696 | 0 | 5 | 95 | 52 | 39 | 13 | 8 | 0 |
{- Binary-experiment test -}
{-# LANGUAGE PackageImports ,FlexibleInstances ,MultiParamTypeClasses#-}
module PkgBE(BEPkg(..)) where
import Data.Word
import Control.Exception
import Data.Monoid
import Control.Applicative
import qualified "binary-experiment" Data.Binary.Serialize as BE (serialize,deserializeOrFail)
import "binary-experiment" Data.Binary.Serialize
-- import "binary-experiment-0.1.0.0" Data.Binary.Serialize.Class -- as BE -- hiding (encode,decode)
import "binary-experiment" Data.Binary.Serialize.Encode
import "binary-experiment" Data.Binary.Serialize.Decode
import Types
import Test.Data
-- t = let Encoding e = BE.encode (33::Int32) in e OutStreamEnd
data BEPkg a = BEPkg a deriving (Eq,Show)
instance Arbitrary a => Arbitrary (BEPkg a) where arbitrary = fmap BEPkg arbitrary
{-
instance Binary a => Serialize (BEPkg a) where
serialize (BEPkg a) = BE.serialize $ a
deserialize = either (Left . toException) (Right . BEPkg) . BE.deserializeOrFail
-}
instance Binary a => Serialize BEPkg a where
serialize (BEPkg a) =BE.serialize a
deserialize = either (Left . toException) (Right . BEPkg) . BE.deserializeOrFail
pkg = BEPkg
unpkg (BEPkg a) = a
instance Binary a => Binary (List a) where
encode (N) = encodeCtr0 1
encode (C v l) = encodeCtr2 2 v l
decode = do
(t,l) <- decodeCtrTag
case t of
1 -> decodeCtrBody0 l N
2 -> decodeCtrBody2 l C
instance Binary a => Binary (Tree a) where
encode (Leaf a) = encodeCtr1 1 a
encode (Node n1 n2) = encodeCtr2 2 n1 n2
decode = do
(t,l) <- decodeCtrTag
case t of
1 -> decodeCtrBody1 l Leaf
2 -> decodeCtrBody2 l Node
{-# INLINE encodeCtr0 #-}
{-# INLINE encodeCtr1 #-}
{-# INLINE encodeCtr2 #-}
{-# INLINE encodeCtr3 #-}
{-# INLINE encodeCtr4 #-}
{-# INLINE encodeCtr6 #-}
{-# INLINE encodeCtr7 #-}
instance Binary N where
-- 71 us
encode One = word 0
encode Two = word 1
encode Three = word 2
encode Four = word 3
encode Five = word 4
-- 77 us
-- encode n = int (fromEnum n)
-- 87 us
-- encode = int . fromEnum
decode = toEnum <$> expectInt
-- correct?
instance Binary () where
encode _ = word 0
decode = const () <$> expectInt
encodeCtr0 :: Word -> Encoding
encodeCtr1 :: Binary a => Word -> a -> Encoding
encodeCtr2 :: (Binary a, Binary b) => Word -> a -> b -> Encoding
encodeCtr0 n = beginListLen 1 <> encode (n :: Word)
encodeCtr1 n a = beginListLen 2 <> encode (n :: Word) <> encode a
encodeCtr2 n a b = beginListLen 3 <> encode (n :: Word) <> encode a <> encode b
encodeCtr3 n a b c
= beginListLen 4 <> encode (n :: Word) <> encode a <> encode b
<> encode c
encodeCtr4 n a b c d
= beginListLen 5 <> encode (n :: Word) <> encode a <> encode b
<> encode c <> encode d
encodeCtr6 n a b c d e f
= beginListLen 7 <> encode (n :: Word) <> encode a <> encode b
<> encode c <> encode d <> encode e <> encode f
encodeCtr7 n a b c d e f g
= beginListLen 8 <> encode (n :: Word) <> encode a <> encode b
<> encode c <> encode d <> encode e <> encode f
<> encode g
{-# INLINE decodeCtrTag #-}
{-# INLINE decodeCtrBody0 #-}
{-# INLINE decodeCtrBody1 #-}
{-# INLINE decodeCtrBody2 #-}
{-# INLINE decodeCtrBody3 #-}
decodeCtrTag = (\len tag -> (tag, len)) <$> expectListLen <*> expectTag
decodeCtrBody0 1 f = pure f
decodeCtrBody1 2 f = do x1 <- decode
return (f x1)
decodeCtrBody2 3 f = do x1 <- decode
x2 <- decode
return (f x1 x2)
decodeCtrBody3 4 f = do x1 <- decode
x2 <- decode
x3 <- decode
return (f x1 x2 x3)
|
tittoassini/flat
|
benchmarks/PkgBE.hs
|
bsd-3-clause
| 3,840 | 0 | 14 | 1,077 | 1,156 | 586 | 570 | 87 | 1 |
{-# LANGUAGE OverloadedStrings #-}
module Data.OpenRTB where
import Control.Applicative
import Data.Aeson
import Data.Text as T
import Data.Word
import Data.Int
data BidRequestTopLevel = BRTL {
     _bidID :: (T.Text) -- always <= 40 chars long
,_bidAuctionType ::( Maybe Word8 )
,_bidTimeOutMax :: (Maybe Word32)
,_bidImpressionLIst :: [BidImpressionObject]
}
data BidImpressionObject = BIMPO {
_bimpID :: T.Text
    ,_bimpSeat :: () --- stub, don't look up this field for now
,_bimpHeight :: Int32
,_bimpWidth :: Int32
,_bimpPosn :: Int32
,_bimpIsInterSitch :: Maybe Bool --- encoded as 0,1 values, not always there
}
deriving (Eq,Show,Read)
instance FromJSON BidImpressionObject where
parseJSON (Object kv)= BIMPO <$>
kv .: "impid" <*>
pure () <*> -- fixme later
kv .: "h" <*>
kv .: "w" <*>
kv .: "pos" <*>
( sanitizeStitial <$> kv .:? "instl")
where
sanitizeStitial:: Maybe Int -> Maybe Bool
sanitizeStitial Nothing = Nothing
sanitizeStitial (Just i) | i == 1 = Just True
| i== 0 = Just False
| otherwise = Nothing
|
cartazio/hopenRTB
|
src/Data/OpenRTB.hs
|
bsd-3-clause
| 1,421 | 0 | 15 | 570 | 313 | 172 | 141 | 33 | 0 |
module Tonque.Util where
import Codec.Text.IConv (Fuzzy (..), convertFuzzy)
import Data.ByteString.Lazy (ByteString)
import qualified Data.ByteString.Lazy as BSL
import Data.Convertible (safeConvert)
import Data.Monoid ((<>))
import Data.Text.Lazy (Text)
import qualified Data.Text.Lazy as TL
import Data.Text.Lazy.Encoding (decodeUtf8, encodeUtf8)
import Data.Time (UTCTime)
import Data.Time.Format (formatTime)
import qualified Network.HTTP as H
import Network.URI (parseURI)
import System.Locale (defaultTimeLocale)
import Tonque.Type
textShow :: Show a => a -> Text
textShow = TL.pack . show
epochToUTC :: EpochTime -> UTCTime
epochToUTC t = case safeConvert t of
Left err -> error $ show err
Right res -> res
timeFormat :: UTCTime -> Text
timeFormat = TL.pack . formatTime defaultTimeLocale "%F %T"
request :: Text -> IO Text
request url = do
flip (maybe $ error $ TL.unpack $ "Invalid URL: " <> url) uriM $ \uri -> do
res <- H.simpleHTTP $ H.Request uri H.GET [] BSL.empty
H.getResponseBody res >>= return . toUTF8
where
uriM = parseURI $ TL.unpack url
toUTF8 :: ByteString -> Text
toUTF8 = decodeUtf8 . convertFuzzy Discard "SJIS" "UTF-8"
toSJIS :: Text -> ByteString
toSJIS = convertFuzzy Discard "UTF-8" "SJIS" . encodeUtf8
|
daimatz/Tonque
|
src/Tonque/Util.hs
|
bsd-3-clause
| 1,492 | 0 | 14 | 436 | 426 | 236 | 190 | 33 | 2 |
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE TemplateHaskell #-}
module Main where
import Control.Monad (filterM, when)
import Data.Char (isAscii, isPrint)
import Data.FileEmbed (embedStringFile)
import Data.List.Split (splitOn)
import qualified Data.Text as T
import Data.Word (Word8)
import System.Console.CmdArgs (Data, Typeable, args, cmdArgs, def,
details, help, name, program, summary,
typ, (&=))
import System.Directory (doesFileExist)
import System.Random (randomRIO)
import Text.Wrap (WrapSettings (..), wrapText)
import UI (run)
data Config =
Config
{ fg_empty :: Word8
, fg_error :: Word8
, files :: [FilePath]
, height :: Int
, max_paragraph_len :: Int
, min_paragraph_len :: Int
, nonsense_len :: Int
, paragraph :: Bool
, reflow_ :: Bool
, tab :: Int
, width :: Int
}
deriving (Show, Data, Typeable)
toAscii :: Int -> String -> String
toAscii tabWidth = concatMap toAscii'
where
toAscii' c
| c == '\t' = replicate tabWidth ' '
| c == '‘' || c == '’' = "'"
| c == '“' || c == '”' = "\""
| c == '–' || c == '—' = "-"
| c == '…' = "..."
| isAscii c && (isPrint c || c == '\n') = [c]
| otherwise = ""
trimEmptyLines :: String -> String
trimEmptyLines = (++ "\n") . f . f
where
f = reverse . dropWhile (== '\n')
config :: Config
config =
Config
{ fg_empty =
8 &= typ "COLOUR" &=
help "The ANSI colour code for empty (not yet typed) text"
, fg_error = 1 &= typ "COLOUR" &= help "The ANSI colour code for errors"
, height =
20 &= typ "LINES" &=
help "The maximum number of lines to sample (default: 20)"
, max_paragraph_len =
750 &= typ "WORDS" &=
help "The maximum length of a sampled paragraph (default: 750)"
, min_paragraph_len =
250 &= typ "WORDS" &=
help "The minimum length of a sampled paragraph (default: 250)"
, nonsense_len =
500 &= name "l" &= typ "WORDS" &=
help "The length of nonsense to generate (default: 500)"
, paragraph = def &= help "Sample a paragraph from the input files"
, reflow_ = def &= help "Reflow paragraph to the target width"
, tab = 4 &= typ "SIZE" &= help "The size of a tab in spaces (default: 4)"
, width =
80 &= typ "CHARS" &=
help "The width at which to wrap lines (default: 80)"
, files = def &= args &= typ "FILES"
} &=
summary "Gotta Go Fast 0.3.0.6" &=
help "Practice typing and measure your WPM and accuracy." &=
program "gotta-go-fast" &=
details (lines $(embedStringFile "details.txt"))
wrap :: Int -> String -> String
wrap width = T.unpack . wrapText wrapSettings width . T.pack
wrapSettings = WrapSettings {preserveIndentation = True, breakLongWords = True}
-- wordWeights.txt is taken from
-- https://en.wiktionary.org/wiki/Wiktionary:Frequency_lists#TV_and_movie_scripts
-- (and cleaned up a little with some throwaway sed)
wordWeights :: [(String, Int)]
wordWeights =
map ((\[w, f] -> (w, read f)) . words) . lines $
$(embedStringFile "wordWeights.txt")
totalWeight :: Int
totalWeight = sum . map snd $ wordWeights
weightedRandomWord :: IO String
weightedRandomWord = do
r <- randomRIO (0, totalWeight - 1)
return $ go r wordWeights
where
go r ((w, f):rest)
| r < f = w
| otherwise = go (r - f) rest
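-- Repeat a monadic action until its result no longer satisfies the predicate,
-- returning the first result that fails it.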
loopWhile :: Monad m => (a -> Bool) -> m a -> m a
loopWhile p mx = do
x <- mx
if p x
then loopWhile p mx
else return x
-- Generates nonsense which is superficially similar to English. Similar in the
-- sense that the frequency of words in the generated text is approximately the
-- same as the frequency of words in actual usage.
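--
-- The proportional-frequency selection comes from 'weightedRandomWord' above. As
-- an illustration with made-up weights: if wordWeights were [("the",3),("a",1)],
-- then totalWeight is 4, r is drawn from [0,3], and the walk in 'go' returns
-- "the" for r in {0,1,2} and "a" for r == 3, so each word is picked in
-- proportion to its weight.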
nonsense :: Config -> IO String
nonsense c = do
words <- go (nonsense_len c) Nothing
return $ (wrap (width c) . unwords $ words) ++ "\n"
where
go n lastWord
| n <= 0 = return []
| otherwise = do
word <- loopWhile ((== lastWord) . Just) weightedRandomWord
rest <- go (n - length word - 1) (Just word) -- extra 1 to count the space
return $ word : rest
sample :: Config -> String -> IO String
sample c file =
if paragraph c && not (null paragraphs)
then sampleParagraph
else sampleLines
where
sampleParagraph = do
r <- randomRIO (0, length paragraphs - 1)
return $
(if reflow_ c
then reflow
else wrap (width c))
(paragraphs !! r) ++
"\n"
sampleLines = do
r <- randomRIO (0, max 0 $ length (lines ascii) - height c)
return . trimEmptyLines . chop . wrap (width c) . chop . unlines . drop r $
lines ascii
paragraphs =
filter
((\l -> l >= min_paragraph_len c && l <= max_paragraph_len c) . length) .
map unlines . splitOn [""] . lines $
ascii
reflow =
wrap (width c) .
map
(\case
'\n' -> ' '
c -> c)
ascii = toAscii (tab c) file
chop = unlines . take (height c) . lines
main :: IO ()
main = do
c <- cmdArgs config
fs <- filterM doesFileExist $ files c
target <-
case fs of
[] -> nonsense c
_ -> do
r <- randomRIO (0, length fs - 1)
file <- readFile $ fs !! r
sample c file
loop <- run (fg_empty c) (fg_error c) target
when loop main
|
hot-leaf-juice/gotta-go-fast
|
src/Main.hs
|
bsd-3-clause
| 5,750 | 0 | 19 | 1,866 | 1,732 | 895 | 837 | 150 | 4 |
module Data.Iteratee
( module Data.Iteratee.Base
, module Data.Iteratee.ByteString
, module Data.Iteratee.Exception
, module Data.Iteratee.IO
) where
------------------------------------------------------------------------
-- Imports
------------------------------------------------------------------------
import Data.Iteratee.Base
import Data.Iteratee.ByteString
import Data.Iteratee.Exception
import Data.Iteratee.IO
|
tanimoto/iteratee-bytestring
|
src/Data/Iteratee.hs
|
bsd-3-clause
| 431 | 0 | 5 | 39 | 63 | 44 | 19 | 9 | 0 |
-----------------------------------------------------------------------------
-- |
-- Module : XMonad.Actions.LinkWorkspaces
-- Description : Bindings to add and delete links between workspaces.
-- Copyright : (c) Jan-David Quesel <[email protected]>
-- License : BSD3-style (see LICENSE)
--
-- Maintainer : none
-- Stability : unstable
-- Portability : unportable
--
-- Provides bindings to add and delete links between workspaces. It is aimed
-- at providing useful links between workspaces in a multihead setup. Linked
-- workspaces are view at the same time.
--
-----------------------------------------------------------------------------
module XMonad.Actions.LinkWorkspaces (
-- * Usage
-- $usage
switchWS,
removeAllMatchings,
unMatch,
toggleLinkWorkspaces,
defaultMessageConf,
MessageConfig(..)
) where
import XMonad
import XMonad.Prelude (for_)
import qualified XMonad.StackSet as W
import XMonad.Layout.IndependentScreens(countScreens)
import qualified XMonad.Util.ExtensibleState as XS (get, put)
import XMonad.Actions.OnScreen(Focus(FocusCurrent), onScreen')
import qualified Data.Map as M
( insert, delete, Map, lookup, empty, filter )
-- $usage
-- You can use this module with the following in your @~\/.xmonad\/xmonad.hs@ file:
--
-- > import XMonad.Actions.LinkWorkspaces
--
-- and add a function to print messages like
--
-- > message_command (S screen) = " dzen2 -p 1 -w 300 -xs " ++ show (screen + 1)
-- > message_color_func c1 c2 msg = dzenColor c1 c2 msg
-- > message screen c1 c2 msg = spawn $ "echo '" ++ (message_color_func c1 c2 msg) ++ "' | " ++ message_command screen
--
-- alternatively you can use the noMessages function as the argument
--
-- Then add keybindings like the following:
--
-- > ,((modm, xK_p), toggleLinkWorkspaces message)
-- > ,((modm .|. shiftMask, xK_p), removeAllMatchings message)
--
-- > [ ((modm .|. m, k), a i)
-- > | (a, m) <- [(switchWS (\y -> windows $ view y) message, 0),(switchWS (\x -> windows $ shift x . view x) message, shiftMask)]
-- > , (i, k) <- zip (XMonad.workspaces conf) [xK_1 .. xK_9]]
--
-- For detailed instructions on editing your key bindings, see
-- "XMonad.Doc.Extending#Editing_key_bindings".
data MessageConfig = MessageConfig { messageFunction :: ScreenId -> [Char] -> [Char] -> [Char] -> X()
, foreground :: [Char]
, alertedForeground :: [Char]
, background :: [Char]
}
defaultMessageConf :: MessageConfig
defaultMessageConf = MessageConfig { messageFunction = noMessageFn
, background = "#000000"
, alertedForeground = "#ff7701"
, foreground = "#00ff00" }
noMessageFn :: ScreenId -> [Char] -> [Char] -> [Char] -> X()
noMessageFn _ _ _ _ = return () :: X ()
-- | Stuff for linking workspaces
newtype WorkspaceMap = WorkspaceMap (M.Map WorkspaceId WorkspaceId) deriving (Read, Show)
instance ExtensionClass WorkspaceMap
where initialValue = WorkspaceMap M.empty
extensionType = PersistentExtension
switchWS :: (WorkspaceId -> X ()) -> MessageConfig -> WorkspaceId -> X ()
switchWS f m ws = switchWS' f m ws Nothing
-- | Switch to the given workspace in a non-greedy way, stopping once we reach the first
-- | screen we already switched on.
switchWS' :: (WorkspaceId -> X ()) -> MessageConfig -> WorkspaceId -> Maybe ScreenId -> X ()
switchWS' switchFn message workspace stopAtScreen = do
ws <- gets windowset
nScreens <- countScreens
let now = W.screen (W.current ws)
let next = (now + 1) `mod` nScreens
switchFn workspace
case stopAtScreen of
Nothing -> sTM now next (Just now)
Just sId -> if sId == next then return () else sTM now next (Just sId)
where sTM = switchToMatching (switchWS' switchFn message) message workspace
-- | Switch to the workspace that matches the current one, executing switches for that workspace as well.
-- | The function switchWS' will take care of stopping if we reach the first workspace again.
switchToMatching :: (WorkspaceId -> Maybe ScreenId -> X ()) -> MessageConfig -> WorkspaceId -> ScreenId
-> ScreenId -> Maybe ScreenId -> X ()
switchToMatching f message t now next stopAtScreen = do
WorkspaceMap matchings <- XS.get :: X WorkspaceMap
case M.lookup t matchings of
Nothing -> return () :: X()
Just newWorkspace -> do
onScreen' (f newWorkspace stopAtScreen) FocusCurrent next
messageFunction message now (foreground message) (background message) ("Switching to: " ++ (t ++ " and " ++ newWorkspace))
-- | Insert a mapping between t1 and t2, or remove it if it was already present
toggleMatching :: MessageConfig -> WorkspaceId -> WorkspaceId -> X ()
toggleMatching message t1 t2 = do
WorkspaceMap matchings <- XS.get :: X WorkspaceMap
case M.lookup t1 matchings of
Nothing -> setMatching message t1 t2 matchings
Just t -> if t == t2 then removeMatching' message t1 t2 matchings else setMatching message t1 t2 matchings
return ()
-- | Insert a mapping between t1 and t2 and display a message
setMatching :: MessageConfig -> WorkspaceId -> WorkspaceId -> M.Map WorkspaceId WorkspaceId -> X ()
setMatching message t1 t2 matchings = do
ws <- gets windowset
let now = W.screen (W.current ws)
XS.put $ WorkspaceMap $ M.insert t1 t2 matchings
messageFunction message now (foreground message) (background message) ("Linked: " ++ (t1 ++ " " ++ t2))
-- Currently this function is called manually, which means that if workspaces
-- were deleted, some links stay in RAM even though they are no longer used.
-- Because those links take only a small amount of memory, there is no
-- special cleanup so far.
removeMatching' :: MessageConfig -> WorkspaceId -> WorkspaceId -> M.Map WorkspaceId WorkspaceId -> X ()
removeMatching' message t1 t2 matchings = do
ws <- gets windowset
let now = W.screen (W.current ws)
XS.put $ WorkspaceMap $ M.delete t1 matchings
messageFunction message now (alertedForeground message) (background message) ("Unlinked: " ++ t1 ++ " " ++ t2)
-- | Remove all maps between workspaces
removeAllMatchings :: MessageConfig -> X ()
removeAllMatchings message = do
ws <- gets windowset
let now = W.screen (W.current ws)
XS.put $ WorkspaceMap M.empty
messageFunction message now (alertedForeground message) (background message) "All links removed!"
-- | Remove all matchings involving a given workspace
unMatch :: WorkspaceId -> X ()
unMatch workspace = do
WorkspaceMap matchings <- XS.get :: X WorkspaceMap
XS.put $ WorkspaceMap $ M.delete workspace (M.filter (/= workspace) matchings)
-- | Toggle the currently displayed workspaces as matching. Starting from the one with focus,
-- | a linked list of workspaces is created that will later be iterated by switchToMatching.
toggleLinkWorkspaces :: MessageConfig -> X ()
toggleLinkWorkspaces message = withWindowSet $ \ws -> toggleLinkWorkspaces' (W.screen (W.current ws)) message
toggleLinkWorkspaces' :: ScreenId -> MessageConfig -> X ()
toggleLinkWorkspaces' first message = do
ws <- gets windowset
nScreens <- countScreens
let now = W.screen (W.current ws)
let next = (now + 1) `mod` nScreens
if next == first then return () else do -- this is also the case if there is only one screen
for_ (W.lookupWorkspace next ws)
(toggleMatching message (W.currentTag ws))
onScreen' (toggleLinkWorkspaces' first message) FocusCurrent next
|
xmonad/xmonad-contrib
|
XMonad/Actions/LinkWorkspaces.hs
|
bsd-3-clause
| 7,876 | 0 | 17 | 1,859 | 1,660 | 866 | 794 | 93 | 3 |
{-# LANGUAGE ScopedTypeVariables, PatternGuards #-}
{-# OPTIONS -fno-warn-orphans #-}
-- | A simple ''cron'' loop. Used for running commands according to a given schedule.
module HCron
( module HCron.Schedule
, cronLoop )
where
import HCron.Schedule
import Control.Concurrent
import Data.Time
-- | Given a schedule of commands, run them when their time is due.
-- Only one command is run at a time. If several commands could be started at a specific
-- moment, then we take the one with the earliest potential start time. If any command throws
-- an error then the whole loop does.
--
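-- A minimal usage sketch (@mySchedule :: Schedule (IO ())@ is assumed to have
-- been built with the combinators from "HCron.Schedule"; the name is
-- illustrative only):
--
-- > main :: IO ()
-- > main = cronLoop mySchedule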
cronLoop :: Schedule (IO ()) -> IO ()
cronLoop schedule = do
startTime <- getCurrentTime
case earliestEventToStartAt startTime $ eventsOfSchedule schedule of
Nothing -> do
sleep 1
cronLoop schedule
Just event -> do
let Just cmd = lookupCommandOfSchedule (eventName event) schedule
cmd
endTime <- getCurrentTime
let event' = event
{ eventLastStarted = Just startTime
, eventLastEnded = Just endTime }
let schedule' = adjustEventOfSchedule event' schedule
cronLoop schedule'
-- | Sleep for a given number of seconds.
sleep :: Int -> IO ()
sleep secs
= threadDelay $ secs * 1000000
|
tbk303/hcron
|
HCron.hs
|
bsd-3-clause
| 1,219 | 8 | 18 | 243 | 211 | 109 | 102 | 27 | 2 |
{-# LANGUAGE UndecidableInstances #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE NoMonomorphismRestriction #-}
{-# LANGUAGE ConstraintKinds #-}
--
-- Copyright (c) 2009-2011, ERICSSON AB
-- All rights reserved.
--
-- Redistribution and use in source and binary forms, with or without
-- modification, are permitted provided that the following conditions are met:
--
-- * Redistributions of source code must retain the above copyright notice,
-- this list of conditions and the following disclaimer.
-- * Redistributions in binary form must reproduce the above copyright
-- notice, this list of conditions and the following disclaimer in the
-- documentation and/or other materials provided with the distribution.
-- * Neither the name of the ERICSSON AB nor the names of its contributors
-- may be used to endorse or promote products derived from this software
-- without specific prior written permission.
--
-- THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
-- AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
-- IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
-- DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
-- FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
-- DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
-- SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
-- CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
-- OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-- OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
--
-- | Bounded integer ranges
module Feldspar.Range.Test where
import Feldspar.Range
import System.Random -- Should maybe be exported from QuickCheck
import Test.QuickCheck hiding ((.&.))
import qualified Test.QuickCheck as QC
import Test.Tasty
import Test.Tasty.QuickCheck hiding ((.&.))
import Control.Applicative
import Data.Bits
import Data.Int
import Data.Word
import Data.Typeable
import qualified Control.Foldl as L
import Feldspar.Lattice
import Debug.Trace
tests = [ testGroup "Range Int" $ typedTestsSigned "Int" (undefined :: Int)
, testGroup "Range Int8" $ typedTestsSigned "Int8" (undefined :: Int8)
, testGroup "Range Word8" $ typedTestsUnsigned "Word8" (undefined :: Word8)
, testGroup "Range Word32" $ typedTestsUnsigned "Word32" (undefined :: Word32)
, testGroup "Range Int8, Range Int8" $ typedTestsTwo "Int8, Int8" (undefined :: Int8) (undefined :: Int8)
, testGroup "Range Word8, Range Word8" $ typedTestsTwo "Word8, Word8" (undefined :: Word8) (undefined :: Word8)
, testGroup "Expensive" [ testProperty "prop_rangeBitCount Word8" $ prop_rangeBitCountBruteForce (undefined :: Word8)
, testProperty "prop_rangeBitCount Word16" $ prop_rangeBitCountBruteForce (undefined :: Word16)
]
]
typedTests name typ =
[ testProperty (unwords ["prop_empty" , name]) (prop_empty typ)
, testProperty (unwords ["prop_full" , name]) (prop_full typ)
, testProperty (unwords ["prop_isEmpty" , name]) (prop_isEmpty typ)
, testProperty (unwords ["prop_singletonRange" , name]) (prop_singletonRange typ)
, testProperty (unwords ["prop_singletonSize" , name]) (prop_singletonSize typ)
, testProperty (unwords ["prop_emptySubRange1" , name]) (prop_emptySubRange1 typ)
, testProperty (unwords ["prop_emptySubRange2" , name]) (prop_emptySubRange2 typ)
, testProperty (unwords ["prop_rangeGap" , name]) (prop_rangeGap typ)
, testProperty (unwords ["prop_union1" , name]) (prop_union1 typ)
, testProperty (unwords ["prop_union2" , name]) (prop_union2 typ)
, testProperty (unwords ["prop_union3" , name]) (prop_union3 typ)
, testProperty (unwords ["prop_union4" , name]) (prop_union4 typ)
, testProperty (unwords ["prop_intersect1" , name]) (prop_intersect1 typ)
, testProperty (unwords ["prop_intersect2" , name]) (prop_intersect2 typ)
, testProperty (unwords ["prop_intersect3" , name]) (prop_intersect3 typ)
, testProperty (unwords ["prop_intersect4" , name]) (prop_intersect4 typ)
, testProperty (unwords ["prop_intersect5" , name]) (prop_intersect5 typ)
, testProperty (unwords ["prop_disjoint" , name]) (prop_disjoint typ)
, testProperty (unwords ["prop_rangeLess1" , name]) (prop_rangeLess1 typ)
, testProperty (unwords ["prop_rangeLess2" , name]) (prop_rangeLess2 typ)
, testProperty (unwords ["prop_rangeLessEq" , name]) (prop_rangeLessEq typ)
, testProperty (unwords ["prop_rangeByRange1" , name]) (prop_rangeByRange1 typ)
, testProperty (unwords ["prop_rangeByRange2" , name]) (prop_rangeByRange2 typ)
, testProperty (unwords ["prop_fromInteger" , name]) (prop_fromInteger typ)
, testProperty (unwords ["prop_abs" , name]) (prop_abs typ)
, testProperty (unwords ["prop_sign" , name]) (prop_sign typ)
, testProperty (unwords ["prop_neg" , name]) (prop_neg typ)
, testProperty (unwords ["prop_add" , name]) (prop_add typ)
, testProperty (unwords ["prop_sub" , name]) (prop_sub typ)
, testProperty (unwords ["prop_mul" , name]) (prop_mul typ)
, testProperty (unwords ["prop_exp" , name]) (prop_exp typ)
, testProperty (unwords ["prop_abs2" , name]) (prop_abs2 typ)
, testProperty (unwords ["prop_or" , name]) (prop_or typ)
, testProperty (unwords ["prop_and" , name]) (prop_and typ)
, testProperty (unwords ["prop_xor" , name]) (prop_xor typ)
, testProperty (unwords ["prop_rangeMax1" , name]) (prop_rangeMax1 typ)
, testProperty (unwords ["prop_rangeMax2" , name]) (prop_rangeMax2 typ)
, testProperty (unwords ["prop_rangeMax3" , name]) (prop_rangeMax3 typ)
, testProperty (unwords ["prop_rangeMax4" , name]) (prop_rangeMax4 typ)
, testProperty (unwords ["prop_rangeMax5_1" , name]) (prop_rangeMax5_1 typ)
, testProperty (unwords ["prop_rangeMax5_2" , name]) (prop_rangeMax5_2 typ)
, testProperty (unwords ["prop_rangeMax6" , name]) (prop_rangeMax6 typ)
, testProperty (unwords ["prop_rangeMax7" , name]) (prop_rangeMax7 typ)
, testProperty (unwords ["prop_rangeMin1" , name]) (prop_rangeMin1 typ)
, testProperty (unwords ["prop_rangeMin2" , name]) (prop_rangeMin2 typ)
, testProperty (unwords ["prop_rangeMin3" , name]) (prop_rangeMin3 typ)
, testProperty (unwords ["prop_rangeMin4" , name]) (prop_rangeMin4 typ)
, testProperty (unwords ["prop_rangeMin5_1" , name]) (prop_rangeMin5_1 typ)
, testProperty (unwords ["prop_rangeMin5_2" , name]) (prop_rangeMin5_2 typ)
, testProperty (unwords ["prop_rangeMin6" , name]) (prop_rangeMin6 typ)
, testProperty (unwords ["prop_rangeMin7" , name]) (prop_rangeMin7 typ)
, testProperty (unwords ["prop_rangeMod1" , name]) (prop_rangeMod1 typ)
, testProperty (unwords ["prop_rangeMod2" , name]) (prop_rangeMod2 typ)
, testProperty (unwords ["prop_rangeRem" , name]) (prop_rangeRem typ)
, testProperty (unwords ["prop_rangeQuot" , name]) (prop_rangeQuot typ)
]
typedTestsUnsigned name typ = typedTests name typ ++
[ testProperty (unwords ["prop_mulU" , name]) (prop_mulU typ)
, testProperty (unwords ["prop_subSat" , name]) (prop_subSat typ)
, testProperty (unwords ["prop_rangeBitCount" , name]) (prop_rangeBitCount typ)
]
typedTestsSigned name typ = typedTests name typ ++
[ testProperty (unwords ["prop_isNegative" , name]) (prop_isNegative typ)
, testProperty (unwords ["prop_rangeMod3" , name]) (prop_rangeMod3 typ)
, testProperty (unwords ["prop_rangeRem1" , name]) (prop_rangeRem1 typ)
, testProperty (unwords ["prop_rangeQuot1" , name]) (prop_rangeQuot1 typ)
]
typedTestsTwo name t1 t2 =
[ testProperty (unwords ["prop_shiftLU" , name]) (prop_shiftLU t1 t2)
, testProperty (unwords ["prop_shiftRU" , name]) (prop_shiftRU t1 t2)
]
--------------------------------------------------------------------------------
-- * Testing
--------------------------------------------------------------------------------
instance (BoundedInt a, Arbitrary a) => Arbitrary (Range a)
where
arbitrary = do
[bound1,bound2] <- vectorOf 2 $ oneof
[ arbitrary
, elements [minBound,-1,0,1,maxBound]]
frequency
[ (10, return $
Range (min bound1 bound2) (max bound1 bound2))
, (1 , return $
Range (max bound1 bound2) (min bound1 bound2)) -- Empty
, (1 , return $
Range bound1 bound1) -- Singleton
]
shrink (Range x y) =
[ Range x' y | x' <- shrink x ] ++
[ Range x y' | y' <- shrink y ]
newtype EmptyRange a = EmptyRange {getEmpty :: Range a}
deriving (Eq, Show)
instance (Arbitrary a, Random a, Ord a, Bounded a) => Arbitrary (EmptyRange a) where
arbitrary = do
l <- arbitrary `suchThat` (>(minBound :: a))
return $ EmptyRange $ Range l minBound
newtype NonEmptyRange a = NonEmptyRange {getNonEmpty :: Range a}
deriving (Eq, Show)
instance (Arbitrary a, Random a, Ord a, Bounded a) => Arbitrary (NonEmptyRange a) where
arbitrary = do
l <- arbitrary `suchThat` (<(maxBound :: a))
u <- choose (l,maxBound)
return $ NonEmptyRange $ Range l u
-- | Generate a range guaranteed to include the element
aroundRange :: (Bounded a, Random a) => a -> Gen (Range a)
aroundRange x = do
l <- choose (minBound,x)
u <- choose (x,maxBound)
return $ Range l u
disjointRanges :: (Arbitrary a, Num a, Ord a, Bounded a, Random a) => Gen (Range a, Range a)
disjointRanges = do
NonEmptyRange r <- arbitrary
u1 <- choose (minBound,lowerBound r)
l1 <- choose (minBound,u1)
l2 <- choose (upperBound r,maxBound)
u2 <- choose (l2,maxBound)
return (Range l1 u1, Range l2 u2)
prop_disjointGen t = forAll disjointRanges $ \(r1,r2) -> disjoint r1 (r2 `rangeTy`t)
fromRange :: (BoundedInt a, Random a) => Range a -> Gen a
fromRange r
| isEmpty r = error "fromRange: empty range"
| otherwise = frequency [(1,return (lowerBound r))
,(1,return (upperBound r))
,(1,choose (lowerBound r, upperBound r))
]
rangeTy :: Range t -> t -> Range t
rangeTy r _ = r
-- | Applies a (monadic) function to all the types we are interested in testing
-- with, for Feldspar.
--
-- Example usage: 'atAllTypes (quickCheck . prop_mul)'
atAllTypes :: (Monad m) =>
(forall t . (Show t, BoundedInt t, Random t, Arbitrary t, Typeable t) =>
t -> m a)
-> m ()
atAllTypes test = sequence_ [test (undefined :: Int)
,test (undefined :: Int8)
,test (undefined :: Word32)
,test (undefined :: Word8)
]
-- | Test if an operation is "strict" wrt. empty ranges
prop_isStrict1 t op (EmptyRange ra) = isEmpty (op ra)
where _ = ra `rangeTy` t
-- | Test if an operation is "strict" wrt. empty ranges
prop_isStrict2 t op ra rb =
isEmpty ra || isEmpty rb ==> isEmpty (op ra rb)
where _ = ra `rangeTy` t
-- TODO Think about strictness of range operations (in the sense of `isStrict1`
-- and `isStrict2`). Probably all range propagation operations should be strict,
-- but many of them are currently not:
--
-- *Feldspar.Range> quickCheck (prop_isStrict2 (undefined :: Int) (+))
-- *** Failed! Falsifiable (after 1 test and 1 shrink):
-- Range {lowerBound = 0, upperBound = 1}
-- Range {lowerBound = 1, upperBound = 0}
--------------------------------------------------------------------------------
-- ** Lattice operations
--------------------------------------------------------------------------------
prop_empty t = isEmpty (emptyRange `rangeTy` t)
prop_full t = isFull (fullRange `rangeTy` t)
prop_isEmpty t (EmptyRange r) = isEmpty (r `rangeTy` t)
prop_singletonRange t a = isSingleton (singletonRange (a `asTypeOf` t))
prop_singletonSize t r = isSingleton (r `rangeTy` t) ==> (rangeSize r == 1)
prop_emptySubRange1 t (EmptyRange r1) (NonEmptyRange r2) =
not (r2 `isSubRangeOf` (r1 `rangeTy` t))
prop_emptySubRange2 t (EmptyRange r1) (NonEmptyRange r2) =
r1 `isSubRangeOf` (r2 `rangeTy` t)
prop_rangeGap t r1 r2 =
(isEmpty gap1 && isEmpty gap2) || (gap1 == gap2)
where
gap1 = rangeGap r1 r2
gap2 = rangeGap r2 r1
_ = r1 `rangeTy` t
prop_union1 t x r1 r2 =
((x `inRange` r1) || (x `inRange` r2)) ==> (x `inRange` (r1\/r2))
where _ = x `asTypeOf` t
prop_union2 t x r1 r2 =
(x `inRange` (r1\/r2)) ==>
((x `inRange` r1) || (x `inRange` r2) || (x `inRange` rangeGap r1 r2))
where _ = x `asTypeOf` t
prop_union3 t r1 r2 = (r1 `rangeTy` t) `isSubRangeOf` (r1\/r2)
prop_union4 t r1 r2 = (r2 `rangeTy` t) `isSubRangeOf` (r1\/r2)
prop_intersect1 t x = forAll (aroundRange x) $ \r1 ->
forAll (aroundRange x) $ \r2 ->
x `inRange` (r1/\r2)
where _ = x `asTypeOf` t
prop_intersect2 t x =
forAll (aroundRange x) $ \r1 ->
forAll (aroundRange x) $ \r2 ->
(x `inRange` (r1/\r2)) ==> ((x `inRange` r1) && (x `inRange` r2))
where _ = x `asTypeOf` t
prop_intersect3 t r1 r2 = (r1/\r2) `isSubRangeOf` (r1 `rangeTy` t)
prop_intersect4 t r1 r2 = (r1/\r2) `isSubRangeOf` (r2 `rangeTy` t)
prop_intersect5 t r1 r2 =
isEmpty r1 || isEmpty r2 ==> isEmpty (r1/\r2)
where _ = r1 `rangeTy` t
prop_disjoint t = forAll disjointRanges $ \(r1,r2) ->
forAll (fromRange r1) $ \x ->
not (x `inRange` (r2 `rangeTy` t))
prop_rangeLess1 t r1 r2 =
rangeLess r1 r2 ==> disjoint r1 (r2 `rangeTy` t)
prop_rangeLess2 t r1 r2 =
not (isEmpty r1) && not (isEmpty r2) ==>
forAll (fromRange r1) $ \x ->
forAll (fromRange r2) $ \y ->
rangeLess r1 r2 ==> x < y
where _ = r1 `rangeTy` t
prop_rangeLessEq t r1 r2 =
not (isEmpty r1) && not (isEmpty r2) ==>
forAll (fromRange r1) $ \x ->
forAll (fromRange r2) $ \y ->
rangeLessEq r1 r2 ==> x <= y
where _ = r1 `rangeTy` t
--------------------------------------------------------------------------------
-- ** Propagation
--------------------------------------------------------------------------------
prop_propagation1 :: (Show t, BoundedInt t, Random t) =>
t -> (forall a . Num a => a -> a) -> Range t -> Property
prop_propagation1 _ op r =
not (isEmpty r) ==>
forAll (fromRange r) $ \x ->
op x `inRange` op r
-- | This function is useful for range propagation functions like
-- 'rangeMax', 'rangeMod' etc.
-- It takes two ranges, picks an element from each range and
-- checks that applying the operation to the individual elements yields
-- a result within the range obtained by range propagation.
--
-- The third argument is a precondition that is satisfied before the test is
-- run. A good example is to make sure that the second argument is non-zero
-- when testing division.
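--
-- A typical instantiation (this mirrors 'prop_rangeMod2' below; 'divPre' rules
-- out division by zero and overflow):
--
-- > prop_rangeMod2 t = rangePropagationSafetyPre t mod rangeMod divPre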
rangePropagationSafetyPre :: (Show t, Random t, BoundedInt t, BoundedInt a) =>
t ->
(t -> t -> a) -> (Range t -> Range t -> Range a) ->
(t -> t -> Bool) ->
Range t -> Range t -> Property
rangePropagationSafetyPre _ op rop pre r1 r2 =
not (isEmpty r1) && not (isEmpty r2) ==>
forAll (fromRange r1) $ \v1 ->
forAll (fromRange r2) $ \v2 ->
pre v1 v2 ==>
op v1 v2 `inRange` rop r1 r2
rangePropagationSafetyPre2 ::
(Show t, Show t2, Random t, BoundedInt t, Random t2, BoundedInt t2, BoundedInt a) =>
t -> t2 ->
(t -> t2 -> a) -> (Range t -> Range t2 -> Range a) ->
(t -> t2 -> Bool) ->
Range t -> Range t2 -> Property
rangePropagationSafetyPre2 _ _ op rop pre r1 r2 =
not (isEmpty r1) && not (isEmpty r2) ==>
forAll (fromRange r1) $ \v1 ->
forAll (fromRange r2) $ \v2 ->
pre v1 v2 ==>
op v1 v2 `inRange` rop r1 r2
rangePropagationSafety t op rop = rangePropagationSafetyPre t op rop noPre
where
noPre _ _ = True
rangePropSafety1 t op rop ran =
not (isEmpty ran) ==>
forAll (fromRange ran) $ \val ->
op val `inRange` rop ran
where _ = ran `rangeTy` t
prop_propagation2
:: (Show t, BoundedInt t, Random t) => t -> (forall a . Num a => a -> a -> a)
-> Range t -> Range t -> Property
prop_propagation2 t op = rangePropagationSafety t op op
prop_rangeByRange1 t ra rb =
forAll (fromRange ra) $ \a ->
forAll (fromRange rb) $ \b ->
forAll (fromRange (Range a b)) $ \x ->
not (isEmpty ra) && not (isEmpty rb) && not (isEmpty (Range a b)) ==>
inRange x (rangeByRange ra rb)
where _ = ra `rangeTy` t
prop_rangeByRange2 t = prop_isStrict2 t rangeByRange
prop_fromInteger t a = isSingleton (fromInteger a `rangeTy` t)
prop_abs t = prop_propagation1 t abs
prop_sign t = prop_propagation1 t signum
prop_neg t = prop_propagation1 t negate
prop_add t = prop_propagation2 t (+)
prop_sub t = prop_propagation2 t (-)
prop_mul t = prop_propagation2 t (*)
prop_exp t = rangePropagationSafetyPre t (^) rangeExp (\_ e -> e >= 0)
prop_mulU t = rangePropagationSafety t (*) rangeMulUnsigned
prop_subSat t = rangePropagationSafety t subSat rangeSubSat
prop_isNegative t r =
not (isEmpty r) && (r /= Range minBound minBound) ==>
isNegative r ==> not (isNegative $ negate r)
where _ = rangeTy r t
prop_abs2 t r =
lowerBound r /= (minBound `asTypeOf` t) ==> isNatural (abs r)
prop_or t = rangePropagationSafety t (.|.) rangeOr
prop_and t = rangePropagationSafety t (.&.) rangeAnd
prop_xor t = rangePropagationSafety t xor rangeXor
prop_shiftLU t1 t2
= rangePropagationSafetyPre2 t1 t2 fixShiftL rangeShiftLU (\_ _ -> True)
where fixShiftL a b = shiftL a (fromIntegral b)
prop_shiftRU t1 t2
= rangePropagationSafetyPre2 t1 t2 fixShiftR rangeShiftRU (\_ _ -> True)
where fixShiftR = correctShiftRU
prop_rangeMax1 t r1 = rangeMax r1 r1 == (r1 `rangeTy` t)
prop_rangeMax2 t r1 r2 =
not (isEmpty r1) && not (isEmpty r2) ==>
upperBound r1 <= upperBound max && upperBound r2 <= upperBound max
where
max = rangeMax r1 (r2 `rangeTy` t)
prop_rangeMax3 t r1 r2 =
not (isEmpty r1) && not (isEmpty r2) ==>
lowerBound (rangeMax r1 r2) == max (lowerBound r1) (lowerBound r2)
where _ = r1 `rangeTy` t
prop_rangeMax4 t r1 r2 =
not (isEmpty r1) && not (isEmpty r2) ==>
rangeMax r1 r2 == rangeMax r2 r1
where _ = r1 `rangeTy` t
prop_rangeMax5_1 t (EmptyRange r1) (NonEmptyRange r2) =
rangeMax r1 r2 == (r2 `rangeTy` t)
prop_rangeMax5_2 t (NonEmptyRange r1) (EmptyRange r2) =
rangeMax r1 r2 == (r1 `rangeTy` t)
prop_rangeMax6 t v1 v2 =
max v1 v2 `inRange` rangeMax (singletonRange v1) (singletonRange v2)
where _ = v1 `asTypeOf` t
prop_rangeMax7 a = rangePropagationSafety a max rangeMax
prop_rangeMin1 t r1 = rangeMin r1 r1 == (r1 `rangeTy` t)
prop_rangeMin2 t r1 r2 =
not (isEmpty r1) && not (isEmpty r2) ==>
lowerBound min <= lowerBound r1 && lowerBound min <= lowerBound r2
where
min = rangeMin r1 (r2 `rangeTy` t)
prop_rangeMin3 t r1 r2 =
not (isEmpty r1) && not (isEmpty r2) ==>
upperBound (rangeMin r1 r2) == min (upperBound r1) (upperBound r2)
where _ = r1 `rangeTy` t
prop_rangeMin4 t r1 r2 =
not (isEmpty r1) && not (isEmpty r2) ==>
rangeMin r1 r2 == rangeMin r2 r1
where _ = r1 `rangeTy` t
prop_rangeMin5 t r1 r2 =
(isEmpty r1 && not (isEmpty r2) ==>
rangeMin r1 r2 == r2)
QC..&.
(isEmpty r2 && not (isEmpty r1) ==>
rangeMin r1 r2 == r1)
where _ = r1 `rangeTy` t
prop_rangeMin5_1 t (EmptyRange r1) (NonEmptyRange r2) =
rangeMin r1 r2 == (r2 `rangeTy` t)
prop_rangeMin5_2 t (NonEmptyRange r1) (EmptyRange r2) =
rangeMin r1 r2 == (r1 `rangeTy` t)
prop_rangeMin6 t v1 v2 =
min v1 v2 `inRange` rangeMin (singletonRange v1) (singletonRange v2)
where _ = v1 `asTypeOf` t
prop_rangeMin7 t = rangePropagationSafety t min rangeMin
prop_rangeMod1 t v1 v2 =
v2 /= 0 ==>
mod v1 v2 `inRange` rangeMod (singletonRange v1) (singletonRange v2)
where _ = v1 `asTypeOf` t
prop_rangeMod2 t =
rangePropagationSafetyPre t mod rangeMod divPre
prop_rangeMod3 t =
isFull $ rangeMod (singletonRange (minBound `asTypeOf` t))
(singletonRange (-1))
prop_rangeRem t =
rangePropagationSafetyPre t rem rangeRem divPre
prop_rangeRem1 t =
isFull $ rangeRem (singletonRange (minBound `asTypeOf` t))
(singletonRange (-1))
prop_rangeQuot t =
rangePropagationSafetyPre t quot rangeQuot divPre
prop_rangeQuot1 t =
isFull $ rangeQuot (singletonRange (minBound `asTypeOf` t))
(singletonRange (-1))
-- | Precondition for division like operators.
-- Avoids division by zero and arithmetic overflow.
divPre v1 v2 = v2 /= 0 && not (v1 == minBound && v2 == (-1))
prop_rangeBitCount :: (Show t, BoundedInt t) => t -> NonEmptyRange t -> Bool
prop_rangeBitCount t (NonEmptyRange (r@(Range l u))) = and
[ r' `isSubRangeOf` Range 0 (fromIntegral (finiteBitSize (undefined `asTypeOf` t)))
, l' <= fromIntegral (popCount l) || l' <= fromIntegral (popCount u)
, u' >= fromIntegral (popCount l) || u' >= fromIntegral (popCount u)
]
where r'@(Range l' u') = rangeBitCount r `asTypeOf` r
-- This property enumerates all values in the range, so it is expensive for large types
prop_rangeBitCountBruteForce :: (Show t, BoundedInt t) => t -> Range t -> Bool
prop_rangeBitCountBruteForce t r@(Range l u) = and
[ fromIntegral l' == x
, fromIntegral u' == y
]
where
Range x y = rangeBitCount r `asTypeOf` r
Range l' u' = rangeBitCountBruteForce r `asTypeOf` r
rangeBitCountBruteForce :: (BoundedInt a, BoundedInt b) => Range a -> Range b
rangeBitCountBruteForce r | isEmpty r = emptyRange
rangeBitCountBruteForce r@(Range l u) = range (fromIntegral l') (fromIntegral u')
where
(Just l',Just u') = L.fold ((,) <$> L.minimum <*> L.maximum)
$ map popCount $ enumFromTo (min l u) (max l u)
|
emwap/feldspar-language
|
tests/Feldspar/Range/Test.hs
|
bsd-3-clause
| 22,775 | 0 | 17 | 5,394 | 7,358 | 3,874 | 3,484 | 375 | 1 |
module Problem57 where
main :: IO ()
-- sqrt(2) = [1;2,2,2,...]
-- p(0) = 1, q(0)=1;
-- We know that p(n) = a(n)*p(n-1)+p(n-2) and q(n) = a(n)*q(n-1)+q(n-2)
-- [Theorem 12.9 Rosen Elementary Number Theory 5th edition]
-- here a(n)=2 for n>=1
-- (p(n), q(n)) = (1,1), (3,2), (7,5), (17,12), ..
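-- Worked check of the recurrence: p(3) = 2*7 + 3 = 17 and q(3) = 2*5 + 2 = 12,
-- matching the pair (17,12) above.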
main =
print . length . filter (\(a, b) -> digits a > digits b) . map convergent $ [0 .. 999]
where
digits = length . show
convergent = (map convergent' [0 ..] !!)
where
convergent' 0 = (1, 1)
convergent' 1 = (3, 2)
convergent' n = (2 * p + p', 2 * q + q')
where
(p , q ) = convergent (n - 1)
(p', q') = convergent (n - 2)
|
adityagupta1089/Project-Euler-Haskell
|
src/problems/Problem57.hs
|
bsd-3-clause
| 699 | 0 | 13 | 209 | 212 | 119 | 93 | 11 | 3 |
{-# LANGUAGE CPP #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE BangPatterns #-}
{-# LANGUAGE GADTs #-}
-----------------------------------------------------------------------------
-- |
-- Module : Distribution.Client.IndexUtils
-- Copyright : (c) Duncan Coutts 2008
-- License : BSD-like
--
-- Maintainer : [email protected]
-- Stability : provisional
-- Portability : portable
--
-- Extra utils related to the package indexes.
-----------------------------------------------------------------------------
module Distribution.Client.IndexUtils (
getIndexFileAge,
getInstalledPackages,
Configure.getInstalledPackagesMonitorFiles,
getSourcePackages,
getSourcePackagesMonitorFiles,
Index(..),
PackageEntry(..),
parsePackageIndex,
updateRepoIndexCache,
updatePackageIndexCacheFile,
readCacheStrict,
BuildTreeRefType(..), refTypeFromTypeCode, typeCodeFromRefType
) where
import qualified Codec.Archive.Tar as Tar
import qualified Codec.Archive.Tar.Entry as Tar
import qualified Codec.Archive.Tar.Index as Tar
import qualified Distribution.Client.Tar as Tar
import Distribution.Client.Types
import Distribution.Package
( PackageId, PackageIdentifier(..), PackageName(..)
, Package(..), packageVersion, packageName
, Dependency(Dependency) )
import Distribution.Simple.PackageIndex (InstalledPackageIndex)
import qualified Distribution.PackageDescription.Parse as PackageDesc.Parse
import Distribution.PackageDescription
( GenericPackageDescription )
import Distribution.PackageDescription.Parse
( parsePackageDescription )
import Distribution.Simple.Compiler
( Compiler, PackageDBStack )
import Distribution.Simple.Program
( ProgramDb )
import qualified Distribution.Simple.Configure as Configure
( getInstalledPackages, getInstalledPackagesMonitorFiles )
import Distribution.ParseUtils
( ParseResult(..) )
import Distribution.Version
( Version(Version), intersectVersionRanges )
import Distribution.Text
( display, simpleParse )
import Distribution.Verbosity
( Verbosity, normal, lessVerbose )
import Distribution.Simple.Utils
( die, warn, info, fromUTF8, ignoreBOM )
import Distribution.Client.Setup
( RepoContext(..) )
import Distribution.Solver.Types.PackageIndex (PackageIndex)
import qualified Distribution.Solver.Types.PackageIndex as PackageIndex
import Distribution.Solver.Types.SourcePackage
import Data.Char (isAlphaNum)
import Data.Maybe (mapMaybe, catMaybes, maybeToList)
import Data.List (isPrefixOf)
#if !MIN_VERSION_base(4,8,0)
import Data.Monoid (Monoid(..))
#endif
import qualified Data.Map as Map
import Control.Monad (when, liftM)
import Control.Exception (evaluate)
import qualified Data.ByteString.Lazy as BS
import qualified Data.ByteString.Lazy.Char8 as BS.Char8
import qualified Data.ByteString.Char8 as BSS
import Data.ByteString.Lazy (ByteString)
import Distribution.Client.GZipUtils (maybeDecompress)
import Distribution.Client.Utils ( byteStringToFilePath
, tryFindAddSourcePackageDesc )
import Distribution.Compat.Exception (catchIO)
import Distribution.Compat.Time (getFileAge, getModTime)
import System.Directory (doesFileExist, doesDirectoryExist)
import System.FilePath
( (</>), (<.>), takeExtension, replaceExtension, splitDirectories, normalise )
import System.FilePath.Posix as FilePath.Posix
( takeFileName )
import System.IO
import System.IO.Unsafe (unsafeInterleaveIO)
import System.IO.Error (isDoesNotExistError)
import qualified Hackage.Security.Client as Sec
import qualified Hackage.Security.Util.Some as Sec
-- | Reduced-verbosity version of 'Configure.getInstalledPackages'
getInstalledPackages :: Verbosity -> Compiler
-> PackageDBStack -> ProgramDb
-> IO InstalledPackageIndex
getInstalledPackages verbosity comp packageDbs progdb =
Configure.getInstalledPackages verbosity' comp packageDbs progdb
where
verbosity' = lessVerbose verbosity
-- | Get filename base (i.e. without file extension) for index-related files
--
-- /Secure/ cabal repositories use a new extended & incremental
-- @01-index.tar@. In order to avoid issues resulting from clobbering
-- new/old-style index data, we save them locally to different names.
--
-- Example: Use @indexBaseName repo <.> "tar.gz"@ to compute the 'FilePath' of the
-- @00-index.tar.gz@/@01-index.tar.gz@ file.
indexBaseName :: Repo -> FilePath
indexBaseName repo = repoLocalDir repo </> fn
where
fn = case repo of
RepoSecure {} -> "01-index"
RepoRemote {} -> "00-index"
RepoLocal {} -> "00-index"
------------------------------------------------------------------------
-- Reading the source package index
--
-- | Read a repository index from disk, from the local files specified by
-- a list of 'Repo's.
--
-- All the 'SourcePackage's are marked as having come from the appropriate
-- 'Repo'.
--
-- This is a higher level wrapper used internally in cabal-install.
getSourcePackages :: Verbosity -> RepoContext -> IO SourcePackageDb
getSourcePackages verbosity repoCtxt | null (repoContextRepos repoCtxt) = do
warn verbosity $ "No remote package servers have been specified. Usually "
++ "you would have one specified in the config file."
return SourcePackageDb {
packageIndex = mempty,
packagePreferences = mempty
}
getSourcePackages verbosity repoCtxt = do
info verbosity "Reading available packages..."
pkgss <- mapM (\r -> readRepoIndex verbosity repoCtxt r) (repoContextRepos repoCtxt)
let (pkgs, prefs) = mconcat pkgss
prefs' = Map.fromListWith intersectVersionRanges
[ (name, range) | Dependency name range <- prefs ]
_ <- evaluate pkgs
_ <- evaluate prefs'
return SourcePackageDb {
packageIndex = pkgs,
packagePreferences = prefs'
}
readCacheStrict :: Verbosity -> Index -> (PackageEntry -> pkg) -> IO ([pkg], [Dependency])
readCacheStrict verbosity index mkPkg = do
updateRepoIndexCache verbosity index
cache <- liftM readIndexCache $ BSS.readFile (cacheFile index)
withFile (indexFile index) ReadMode $ \indexHnd ->
packageListFromCache mkPkg indexHnd cache ReadPackageIndexStrict
-- | Read a repository index from disk, from the local file specified by
-- the 'Repo'.
--
-- All the 'SourcePackage's are marked as having come from the given 'Repo'.
--
-- This is a higher level wrapper used internally in cabal-install.
--
readRepoIndex :: Verbosity -> RepoContext -> Repo
-> IO (PackageIndex UnresolvedSourcePackage, [Dependency])
readRepoIndex verbosity repoCtxt repo =
handleNotFound $ do
warnIfIndexIsOld =<< getIndexFileAge repo
updateRepoIndexCache verbosity (RepoIndex repoCtxt repo)
readPackageIndexCacheFile mkAvailablePackage (RepoIndex repoCtxt repo)
where
mkAvailablePackage pkgEntry =
SourcePackage {
packageInfoId = pkgid,
packageDescription = packageDesc pkgEntry,
packageSource = case pkgEntry of
NormalPackage _ _ _ _ -> RepoTarballPackage repo pkgid Nothing
BuildTreeRef _ _ _ path _ -> LocalUnpackedPackage path,
packageDescrOverride = case pkgEntry of
NormalPackage _ _ pkgtxt _ -> Just pkgtxt
_ -> Nothing
}
where
pkgid = packageId pkgEntry
handleNotFound action = catchIO action $ \e -> if isDoesNotExistError e
then do
case repo of
RepoRemote{..} -> warn verbosity $ errMissingPackageList repoRemote
RepoSecure{..} -> warn verbosity $ errMissingPackageList repoRemote
RepoLocal{..} -> warn verbosity $
"The package list for the local repo '" ++ repoLocalDir
++ "' is missing. The repo is invalid."
return mempty
else ioError e
isOldThreshold = 15 --days
warnIfIndexIsOld dt = do
when (dt >= isOldThreshold) $ case repo of
RepoRemote{..} -> warn verbosity $ errOutdatedPackageList repoRemote dt
RepoSecure{..} -> warn verbosity $ errOutdatedPackageList repoRemote dt
RepoLocal{..} -> return ()
errMissingPackageList repoRemote =
"The package list for '" ++ remoteRepoName repoRemote
++ "' does not exist. Run 'cabal update' to download it."
errOutdatedPackageList repoRemote dt =
"The package list for '" ++ remoteRepoName repoRemote
++ "' is " ++ shows (floor dt :: Int) " days old.\nRun "
++ "'cabal update' to get the latest list of available packages."
-- | Return the age of the index file in days (as a Double).
getIndexFileAge :: Repo -> IO Double
getIndexFileAge repo = getFileAge $ indexBaseName repo <.> "tar"
-- | A set of files (or directories) that can be monitored to detect when
-- there might have been a change in the source packages.
--
getSourcePackagesMonitorFiles :: [Repo] -> [FilePath]
getSourcePackagesMonitorFiles repos =
[ indexBaseName repo <.> "cache" | repo <- repos ]
-- | It is not necessary to call this, as the cache will be updated when the
-- index is read normally. However, you can do the work earlier if you like.
--
updateRepoIndexCache :: Verbosity -> Index -> IO ()
updateRepoIndexCache verbosity index =
whenCacheOutOfDate index $ do
updatePackageIndexCacheFile verbosity index
whenCacheOutOfDate :: Index -> IO () -> IO ()
whenCacheOutOfDate index action = do
exists <- doesFileExist $ cacheFile index
if not exists
then action
else do
indexTime <- getModTime $ indexFile index
cacheTime <- getModTime $ cacheFile index
when (indexTime > cacheTime) action
------------------------------------------------------------------------
-- Reading the index file
--
-- | An index entry is either a normal package, or a local build tree reference.
data PackageEntry =
NormalPackage PackageId GenericPackageDescription ByteString BlockNo
| BuildTreeRef BuildTreeRefType
PackageId GenericPackageDescription FilePath BlockNo
-- | A build tree reference is either a link or a snapshot.
data BuildTreeRefType = SnapshotRef | LinkRef
deriving Eq
refTypeFromTypeCode :: Tar.TypeCode -> BuildTreeRefType
refTypeFromTypeCode t
| t == Tar.buildTreeRefTypeCode = LinkRef
| t == Tar.buildTreeSnapshotTypeCode = SnapshotRef
| otherwise =
error "Distribution.Client.IndexUtils.refTypeFromTypeCode: unknown type code"
typeCodeFromRefType :: BuildTreeRefType -> Tar.TypeCode
typeCodeFromRefType LinkRef = Tar.buildTreeRefTypeCode
typeCodeFromRefType SnapshotRef = Tar.buildTreeSnapshotTypeCode
instance Package PackageEntry where
packageId (NormalPackage pkgid _ _ _) = pkgid
packageId (BuildTreeRef _ pkgid _ _ _) = pkgid
packageDesc :: PackageEntry -> GenericPackageDescription
packageDesc (NormalPackage _ descr _ _) = descr
packageDesc (BuildTreeRef _ _ descr _ _) = descr
-- | Parse an uncompressed \"00-index.tar\" repository index file represented
-- as a 'ByteString'.
--
data PackageOrDep = Pkg PackageEntry | Dep Dependency
-- | Read @00-index.tar.gz@ and extract @.cabal@ and @preferred-versions@ files
--
-- We read the index using 'Tar.read', which gives us a lazily constructed
-- 'TarEntries'. We translate it to a list of entries using 'tarEntriesList',
-- which preserves the lazy nature of 'TarEntries', and finally 'concatMap' a
-- function over this to translate it to a list of IO actions returning
-- 'PackageOrDep's. We can use 'lazySequence' to turn this into a list of
-- 'PackageOrDep's, still maintaining the lazy nature of the original tar read.
parsePackageIndex :: ByteString -> [IO (Maybe PackageOrDep)]
parsePackageIndex = concatMap (uncurry extract) . tarEntriesList . Tar.read
where
extract :: BlockNo -> Tar.Entry -> [IO (Maybe PackageOrDep)]
extract blockNo entry = tryExtractPkg ++ tryExtractPrefs
where
tryExtractPkg = do
mkPkgEntry <- maybeToList $ extractPkg entry blockNo
return $ fmap (fmap Pkg) mkPkgEntry
tryExtractPrefs = do
prefs' <- maybeToList $ extractPrefs entry
fmap (return . Just . Dep) prefs'
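-- Illustrative use (this mirrors what 'withIndexEntries' further below does):
--
--   bs <- maybeDecompress `fmap` BS.hGetContents h
--   pkgsOrPrefs <- lazySequence (parsePackageIndex bs)
--   ... process (catMaybes pkgsOrPrefs) ...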
-- | Turn the 'Entries' data structure from the @tar@ package into a list,
-- and pair each entry with its block number.
--
-- NOTE: This preserves the lazy nature of 'Entries': the tar file is only read
-- as far as the list is evaluated.
tarEntriesList :: Show e => Tar.Entries e -> [(BlockNo, Tar.Entry)]
tarEntriesList = go 0
where
go !_ Tar.Done = []
go !_ (Tar.Fail e) = error ("tarEntriesList: " ++ show e)
go !n (Tar.Next e es') = (n, e) : go (Tar.nextEntryOffset e n) es'
extractPkg :: Tar.Entry -> BlockNo -> Maybe (IO (Maybe PackageEntry))
extractPkg entry blockNo = case Tar.entryContent entry of
Tar.NormalFile content _
| takeExtension fileName == ".cabal"
-> case splitDirectories (normalise fileName) of
[pkgname,vers,_] -> case simpleParse vers of
Just ver -> Just . return $ Just (NormalPackage pkgid descr content blockNo)
where
pkgid = PackageIdentifier (PackageName pkgname) ver
parsed = parsePackageDescription . ignoreBOM . fromUTF8 . BS.Char8.unpack
$ content
descr = case parsed of
ParseOk _ d -> d
_ -> error $ "Couldn't read cabal file "
++ show fileName
_ -> Nothing
_ -> Nothing
Tar.OtherEntryType typeCode content _
| Tar.isBuildTreeRefTypeCode typeCode ->
Just $ do
let path = byteStringToFilePath content
dirExists <- doesDirectoryExist path
result <- if not dirExists then return Nothing
else do
cabalFile <- tryFindAddSourcePackageDesc path "Error reading package index."
descr <- PackageDesc.Parse.readPackageDescription normal cabalFile
return . Just $ BuildTreeRef (refTypeFromTypeCode typeCode) (packageId descr)
descr path blockNo
return result
_ -> Nothing
where
fileName = Tar.entryPath entry
extractPrefs :: Tar.Entry -> Maybe [Dependency]
extractPrefs entry = case Tar.entryContent entry of
Tar.NormalFile content _
| takeFileName entrypath == "preferred-versions"
-> Just prefs
where
entrypath = Tar.entryPath entry
prefs = parsePreferredVersions content
_ -> Nothing
parsePreferredVersions :: ByteString -> [Dependency]
parsePreferredVersions = mapMaybe simpleParse
. filter (not . isPrefixOf "--")
. lines
. BS.Char8.unpack -- TODO: Are we sure no unicode?
------------------------------------------------------------------------
-- Reading and updating the index cache
--
-- | Variation on 'sequence' which evaluates the actions lazily
--
-- Pattern matching on the result list will execute just the first action;
-- more generally pattern matching on the first @n@ '(:)' nodes will execute
-- the first @n@ actions.
lazySequence :: [IO a] -> IO [a]
lazySequence = unsafeInterleaveIO . go
where
go [] = return []
go (x:xs) = do x' <- x
xs' <- lazySequence xs
return (x' : xs')
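-- Illustrative behaviour (hypothetical actions): given
--
--   xs <- lazySequence [putStrLn "a" >> return 1, putStrLn "b" >> return 2]
--
-- nothing is printed yet; forcing the first (:) node, e.g. with
-- @case xs of (x:_) -> print x@, runs only the first action ("a" is printed),
-- and the second action runs only once the tail of the list is demanded.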
-- | Which index do we mean?
data Index =
-- | The main index for the specified repository
RepoIndex RepoContext Repo
-- | A sandbox-local repository
-- Argument is the location of the index file
| SandboxIndex FilePath
indexFile :: Index -> FilePath
indexFile (RepoIndex _ctxt repo) = indexBaseName repo <.> "tar"
indexFile (SandboxIndex index) = index
cacheFile :: Index -> FilePath
cacheFile (RepoIndex _ctxt repo) = indexBaseName repo <.> "cache"
cacheFile (SandboxIndex index) = index `replaceExtension` "cache"
updatePackageIndexCacheFile :: Verbosity -> Index -> IO ()
updatePackageIndexCacheFile verbosity index = do
info verbosity ("Updating index cache file " ++ cacheFile index)
withIndexEntries index $ \entries -> do
let cache = Cache { cacheEntries = entries }
writeFile (cacheFile index) (showIndexCache cache)
-- | Read the index (for the purpose of building a cache)
--
-- The callback is provided with list of cache entries, which is guaranteed to
-- be lazily constructed. This list must ONLY be used in the scope of the
-- callback; when the callback is terminated the file handle to the index will
-- be closed and further attempts to read from the list will result in (pure)
-- I/O exceptions.
--
-- In the construction of the index for a secure repo we take advantage of the
-- index built by the @hackage-security@ library to avoid reading the @.tar@
-- file as much as possible (we need to read it only to extract preferred
-- versions). This helps performance, but is also required for correctness:
-- the new @01-index.tar.gz@ may have multiple versions of preferred-versions
-- files, and 'parsePackageIndex' does not correctly deal with that (see #2956);
-- by reading the already-built cache from the security library we will be sure
-- to only read the latest versions of all files.
--
-- TODO: It would be nicer if we actually incrementally updated @cabal@'s
-- cache, rather than reconstruct it from zero on each update. However, this
-- would require a change in the cache format.
withIndexEntries :: Index -> ([IndexCacheEntry] -> IO a) -> IO a
withIndexEntries (RepoIndex repoCtxt repo@RepoSecure{..}) callback =
repoContextWithSecureRepo repoCtxt repo $ \repoSecure ->
Sec.withIndex repoSecure $ \Sec.IndexCallbacks{..} -> do
let mk :: (Sec.DirectoryEntry, fp, Maybe (Sec.Some Sec.IndexFile))
-> IO [IndexCacheEntry]
mk (_, _fp, Nothing) =
return [] -- skip unrecognized file
mk (_, _fp, Just (Sec.Some (Sec.IndexPkgMetadata _pkgId))) =
return [] -- skip metadata
mk (dirEntry, _fp, Just (Sec.Some (Sec.IndexPkgCabal pkgId))) = do
let blockNo = fromIntegral (Sec.directoryEntryBlockNo dirEntry)
return [CachePackageId pkgId blockNo]
mk (dirEntry, _fp, Just (Sec.Some file@(Sec.IndexPkgPrefs _pkgName))) = do
content <- Sec.indexEntryContent `fmap` indexLookupFileEntry dirEntry file
return $ map CachePreference (parsePreferredVersions content)
entriess <- lazySequence $ map mk (Sec.directoryEntries indexDirectory)
callback $ concat entriess
withIndexEntries index callback = do
withFile (indexFile index) ReadMode $ \h -> do
bs <- maybeDecompress `fmap` BS.hGetContents h
pkgsOrPrefs <- lazySequence $ parsePackageIndex bs
callback $ map toCache (catMaybes pkgsOrPrefs)
where
toCache :: PackageOrDep -> IndexCacheEntry
toCache (Pkg (NormalPackage pkgid _ _ blockNo)) = CachePackageId pkgid blockNo
toCache (Pkg (BuildTreeRef refType _ _ _ blockNo)) = CacheBuildTreeRef refType blockNo
toCache (Dep d) = CachePreference d
data ReadPackageIndexMode = ReadPackageIndexStrict
| ReadPackageIndexLazyIO
readPackageIndexCacheFile :: Package pkg
=> (PackageEntry -> pkg)
-> Index
-> IO (PackageIndex pkg, [Dependency])
readPackageIndexCacheFile mkPkg index = do
cache <- liftM readIndexCache $ BSS.readFile (cacheFile index)
indexHnd <- openFile (indexFile index) ReadMode
packageIndexFromCache mkPkg indexHnd cache ReadPackageIndexLazyIO
packageIndexFromCache :: Package pkg
=> (PackageEntry -> pkg)
-> Handle
-> Cache
-> ReadPackageIndexMode
-> IO (PackageIndex pkg, [Dependency])
packageIndexFromCache mkPkg hnd cache mode = do
(pkgs, prefs) <- packageListFromCache mkPkg hnd cache mode
pkgIndex <- evaluate $ PackageIndex.fromList pkgs
return (pkgIndex, prefs)
-- | Read package list
--
-- The result package releases and preference entries are guaranteed
-- to be unique.
--
-- Note: 01-index.tar is an append-only index and therefore contains
-- all .cabal edits and preference-updates. The masking happens
-- here, i.e. the rule that later entries in a tar file mask
-- earlier ones is applied in this function.
packageListFromCache :: (PackageEntry -> pkg)
-> Handle
-> Cache
-> ReadPackageIndexMode
-> IO ([pkg], [Dependency])
packageListFromCache mkPkg hnd Cache{..} mode = accum mempty [] mempty cacheEntries
where
accum !srcpkgs btrs !prefs [] = return (Map.elems srcpkgs ++ btrs, Map.elems prefs)
accum srcpkgs btrs prefs (CachePackageId pkgid blockno : entries) = do
-- Given the cache entry, make a package index entry.
-- The magic here is that we use lazy IO to read the .cabal file
-- from the index tarball if it turns out that we need it.
-- Most of the time we only need the package id.
~(pkg, pkgtxt) <- unsafeInterleaveIO $ do
pkgtxt <- getEntryContent blockno
pkg <- readPackageDescription pkgtxt
return (pkg, pkgtxt)
let srcpkg = case mode of
ReadPackageIndexLazyIO ->
mkPkg (NormalPackage pkgid pkg pkgtxt blockno)
ReadPackageIndexStrict ->
pkg `seq` pkgtxt `seq` mkPkg (NormalPackage pkgid pkg
pkgtxt blockno)
accum (Map.insert pkgid srcpkg srcpkgs) btrs prefs entries
accum srcpkgs btrs prefs (CacheBuildTreeRef refType blockno : entries) = do
-- We have to read the .cabal file eagerly here because we can't cache the
-- package id for build tree references - the user might edit the .cabal
-- file after the reference was added to the index.
path <- liftM byteStringToFilePath . getEntryContent $ blockno
pkg <- do let err = "Error reading package index from cache."
file <- tryFindAddSourcePackageDesc path err
PackageDesc.Parse.readPackageDescription normal file
let srcpkg = mkPkg (BuildTreeRef refType (packageId pkg) pkg path blockno)
accum srcpkgs (srcpkg:btrs) prefs entries
accum srcpkgs btrs prefs (CachePreference pref@(Dependency pn _) : entries) =
accum srcpkgs btrs (Map.insert pn pref prefs) entries
getEntryContent :: BlockNo -> IO ByteString
getEntryContent blockno = do
entry <- Tar.hReadEntry hnd blockno
case Tar.entryContent entry of
Tar.NormalFile content _size -> return content
Tar.OtherEntryType typecode content _size
| Tar.isBuildTreeRefTypeCode typecode
-> return content
_ -> interror "unexpected tar entry type"
readPackageDescription :: ByteString -> IO GenericPackageDescription
readPackageDescription content =
case parsePackageDescription . ignoreBOM . fromUTF8 . BS.Char8.unpack $ content of
ParseOk _ d -> return d
_ -> interror "failed to parse .cabal file"
interror msg = die $ "internal error when reading package index: " ++ msg
++ "The package index or index cache is probably "
++ "corrupt. Running cabal update might fix it."
------------------------------------------------------------------------
-- Index cache data structure
--
-- | Tar files are block structured with 512 byte blocks. Every header and file
-- content starts on a block boundary.
--
type BlockNo = Tar.TarEntryOffset
data IndexCacheEntry = CachePackageId PackageId BlockNo
| CacheBuildTreeRef BuildTreeRefType BlockNo
| CachePreference Dependency
deriving (Eq)
packageKey, blocknoKey, buildTreeRefKey, preferredVersionKey :: String
packageKey = "pkg:"
blocknoKey = "b#"
buildTreeRefKey = "build-tree-ref:"
preferredVersionKey = "pref-ver:"
readIndexCacheEntry :: BSS.ByteString -> Maybe IndexCacheEntry
readIndexCacheEntry = \line ->
case BSS.words line of
[key, pkgnamestr, pkgverstr, sep, blocknostr]
| key == BSS.pack packageKey && sep == BSS.pack blocknoKey ->
case (parseName pkgnamestr, parseVer pkgverstr [],
parseBlockNo blocknostr) of
(Just pkgname, Just pkgver, Just blockno)
-> Just (CachePackageId (PackageIdentifier pkgname pkgver) blockno)
_ -> Nothing
[key, typecodestr, blocknostr] | key == BSS.pack buildTreeRefKey ->
case (parseRefType typecodestr, parseBlockNo blocknostr) of
(Just refType, Just blockno)
-> Just (CacheBuildTreeRef refType blockno)
_ -> Nothing
(key: remainder) | key == BSS.pack preferredVersionKey ->
fmap CachePreference (simpleParse (BSS.unpack (BSS.unwords remainder)))
_ -> Nothing
where
parseName str
| BSS.all (\c -> isAlphaNum c || c == '-') str
= Just (PackageName (BSS.unpack str))
| otherwise = Nothing
parseVer str vs =
case BSS.readInt str of
Nothing -> Nothing
Just (v, str') -> case BSS.uncons str' of
Just ('.', str'') -> parseVer str'' (v:vs)
Just _ -> Nothing
Nothing -> Just (Version (reverse (v:vs)) [])
parseBlockNo str =
case BSS.readInt str of
Just (blockno, remainder)
| BSS.null remainder -> Just (fromIntegral blockno)
_ -> Nothing
parseRefType str =
case BSS.uncons str of
Just (typeCode, remainder)
| BSS.null remainder && Tar.isBuildTreeRefTypeCode typeCode
-> Just (refTypeFromTypeCode typeCode)
_ -> Nothing
showIndexCacheEntry :: IndexCacheEntry -> String
showIndexCacheEntry entry = unwords $ case entry of
CachePackageId pkgid b -> [ packageKey
, display (packageName pkgid)
, display (packageVersion pkgid)
, blocknoKey
, show b
]
CacheBuildTreeRef t b -> [ buildTreeRefKey
, [typeCodeFromRefType t]
, show b
]
CachePreference dep -> [ preferredVersionKey
, display dep
]
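-- Example cache lines as produced by 'showIndexCacheEntry' and accepted by
-- 'readIndexCacheEntry' (package name, version and block number are made up):
--
--   pkg: example-package 1.2.3 b# 4096
--   pref-ver: example-package >=1.2 && <1.3
--
-- i.e. a 'CachePackageId' line records the package id together with the tar
-- block number of its .cabal entry, and a 'CachePreference' line stores the
-- textual form of a 'Dependency'.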
-- | Cabal caches various information about the Hackage index
data Cache = Cache {
cacheEntries :: [IndexCacheEntry]
}
readIndexCache :: BSS.ByteString -> Cache
readIndexCache bs = Cache {
cacheEntries = mapMaybe readIndexCacheEntry $ BSS.lines bs
}
showIndexCache :: Cache -> String
showIndexCache Cache{..} = unlines $ map showIndexCacheEntry cacheEntries
|
sopvop/cabal
|
cabal-install/Distribution/Client/IndexUtils.hs
|
bsd-3-clause
| 27,093 | 0 | 22 | 6,605 | 5,655 | 2,959 | 2,696 | 436 | 11 |
{-# LANGUAGE OverloadedStrings #-}
module ReadXLSX2
where
import Codec.Xlsx
import Control.Lens ((^?))
import Data.Aeson (Value, encode)
import Data.ByteString.Lazy (ByteString)
import qualified Data.ByteString.Lazy as L
import qualified Data.Map as DM
import Data.Maybe (fromJust)
import Data.Text (Text)
import qualified Data.Text as T
import ReadXLSX.AllSheetsToJSON
import ReadXLSX.Internal (cellToCommentValue, filterCellMap,
getXlsxAndStyleSheet,
isNonEmptyWorksheet)
import ReadXLSX.SheetToDataframe
-- import Data.Text.Lazy.Encoding (encodeUtf8)
-- import qualified Data.Text.Lazy as TL
readFromFile :: FilePath -> (Cell -> Value) -> Text -> Bool -> Maybe Int -> Maybe Int -> IO ByteString
readFromFile file cellToValue sheetname header firstRow lastRow =
do
bs <- L.readFile file
let xlsx = toXlsx bs
let mapSheets = DM.filter isNonEmptyWorksheet (DM.fromList $ _xlSheets xlsx)
if DM.member sheetname mapSheets
then
return $ sheetToDataframe (filterCellMap firstRow lastRow . _wsCells $ fromJust $ xlsx ^? ixSheet sheetname) cellToValue header
else
return $ let sheets = DM.keys mapSheets in
encode $
T.concat [T.pack ("Available sheet" ++ (if length sheets > 1 then "s: " else ": ")),
T.intercalate ", " sheets]
-- return $ let sheets = DM.keys mapSheets in
-- encodeUtf8 $
-- TL.concat [TL.pack ("Available sheet" ++ (if length sheets > 1 then "s: " else ": ")),
-- TL.fromStrict $ T.intercalate ", " sheets]
readFromXlsx :: Xlsx -> (Cell -> Value) -> Text -> Bool -> Maybe Int -> Maybe Int -> ByteString
readFromXlsx xlsx cellToValue sheetname header firstRow lastRow =
if DM.member sheetname mapSheets
then
sheetToDataframe (filterCellMap firstRow lastRow . _wsCells $ fromJust $ xlsx ^? ixSheet sheetname) cellToValue header
else
encode $
T.concat [T.pack ("Available sheet" ++ (if length sheets > 1 then "s: " else ": ")),
T.intercalate ", " sheets]
where mapSheets = DM.filter isNonEmptyWorksheet (DM.fromList $ _xlSheets xlsx)
sheets = DM.keys mapSheets
read1 :: FilePath -> Text -> Bool -> Maybe Int -> Maybe Int -> IO ByteString
read1 file sheetname header firstRow lastRow = do
(xlsx, stylesheet) <- getXlsxAndStyleSheet file
return $ readFromXlsx xlsx (cellFormatter stylesheet) sheetname header firstRow lastRow
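-- Illustrative use (file and sheet names are made up; the Bool is the header
-- flag used throughout this module):
--
--   bs <- read1 "report.xlsx" "Sheet1" True Nothing Nothing
--
-- This yields the sheet encoded as a JSON dataframe, or a JSON string listing
-- the available sheets when "Sheet1" is not present.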
readComments :: FilePath -> Text -> Bool -> Maybe Int -> Maybe Int -> IO ByteString
readComments file = readFromFile file cellToCommentValue
readTypes :: FilePath -> Text -> Bool -> Maybe Int -> Maybe Int -> IO ByteString
readTypes file sheetname header firstRow lastRow = do
(xlsx, stylesheet) <- getXlsxAndStyleSheet file
return $ readFromXlsx xlsx (cellType stylesheet) sheetname header firstRow lastRow
readDataAndComments :: FilePath -> Text -> Bool -> Maybe Int -> Maybe Int -> IO ByteString
readDataAndComments file sheetname header firstRow lastRow = do
(xlsx, stylesheet) <- getXlsxAndStyleSheet file
let mapSheets = DM.filter isNonEmptyWorksheet (DM.fromList $ _xlSheets xlsx)
let sheets = DM.keys mapSheets
if DM.member sheetname mapSheets
then
return $ sheetToTwoDataframes
(filterCellMap firstRow lastRow . _wsCells $ fromJust $ xlsx ^? ixSheet sheetname)
"data" (cellFormatter stylesheet)
"comments" cellToCommentValue header
True
else
return $ encode $
T.concat [T.pack ("Available sheet" ++ (if length sheets > 1 then "s: " else ": ")),
T.intercalate ", " sheets]
-- TODO: check that cleanCellMap is handled by allSheetsToDataframe
readAll :: FilePath -> Bool -> IO ByteString
readAll file header =
do
(xlsx, stylesheet) <- getXlsxAndStyleSheet file
return $ allSheetsToDataframe xlsx (cellFormatter stylesheet) header
readAllWithComments :: FilePath -> Bool -> IO ByteString
readAllWithComments file header =
do
(xlsx, stylesheet) <- getXlsxAndStyleSheet file
  return $ allSheetsToTwoDataframes xlsx "data" (cellFormatter stylesheet) "comments" cellToCommentValue header True
|
stla/jsonxlsx
|
src/ReadXLSX2.hs
|
bsd-3-clause
| 4,560 | 0 | 20 | 1,271 | 1,164 | 590 | 574 | 71 | 3 |
{-# LANGUAGE RecordWildCards, OverloadedStrings #-}
module Network.Wai.Application.Monitoring
( monitorGC
, monitorGCResponse
, encodeGCStats
) where
import Control.Monad.IO.Class (liftIO)
import Control.Monad.Trans.Resource (ResourceT)
import qualified Data.HashMap.Strict as M
import qualified GHC.Stats as Stats
import Network.HTTP.Types (status200)
import Network.Wai (Application, Response(ResponseBuilder))
import Blaze.ByteString.Builder (fromLazyByteString)
import Data.Aeson (toJSON, Value(Object))
import Data.Aeson.Encode (encode)
monitorGCResponse :: ResourceT IO Response
monitorGCResponse = do
stats <- liftIO Stats.getGCStats
return $ ResponseBuilder
status200
[("Content-Type", "application/json")]
$ fromLazyByteString $ encode $ encodeGCStats stats
monitorGC :: Application
monitorGC = const monitorGCResponse
encodeGCStats :: Stats.GCStats
-> Value
encodeGCStats (Stats.GCStats {..}) =
Object $ M.fromList [("counters", counters), ("gauges", gauges)]
where
counters = Object $ M.fromList
[ ("bytes_allocated" , toJSON bytesAllocated)
, ("num_gcs" , toJSON numGcs)
, ("num_bytes_usage_samples" , toJSON numByteUsageSamples)
, ("cumulative_bytes_used" , toJSON cumulativeBytesUsed)
, ("bytes_copied" , toJSON bytesCopied)
, ("mutator_cpu_seconds" , toJSON mutatorCpuSeconds)
, ("mutator_wall_seconds" , toJSON mutatorWallSeconds)
, ("gc_cpu_seconds" , toJSON gcCpuSeconds)
, ("gc_wall_seconds" , toJSON gcWallSeconds)
, ("cpu_seconds" , toJSON cpuSeconds)
, ("wall_seconds" , toJSON wallSeconds)
]
gauges = Object $ M.fromList
[ ("max_bytes_used" , toJSON maxBytesUsed)
, ("current_bytes_used" , toJSON currentBytesUsed)
, ("current_bytes_slop" , toJSON currentBytesSlop)
, ("max_bytes_slop" , toJSON maxBytesSlop)
, ("peak_megabytes_allocated" , toJSON $ peakMegabytesAllocated*1024*1024)
, ("par_avg_bytes_copied" , toJSON parAvgBytesCopied)
, ("par_max_bytes_copied" , toJSON parMaxBytesCopied)
]
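-- Note: 'peakMegabytesAllocated' is multiplied by 1024*1024 above, so despite
-- its key name the "peak_megabytes_allocated" gauge is reported in bytes,
-- presumably to keep it comparable with the other byte-valued gauges.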
|
yihuang/gc-monitoring-wai
|
Network/Wai/Application/Monitoring.hs
|
bsd-3-clause
| 2,309 | 0 | 13 | 592 | 516 | 302 | 214 | 47 | 1 |
{-# LANGUAGE DeriveGeneric #-}
-- | Basic operations on 2D points represented as linear offsets.
module Game.LambdaHack.Common.Point
( Point(..), PointI
, chessDist, euclidDistSq, adjacent, bresenhamsLineAlgorithm, fromTo
, originPoint, insideP
, speedupHackXSize
#ifdef EXPOSE_INTERNAL
-- * Internal operations
, bresenhamsLineAlgorithmBegin, balancedWord
#endif
) where
import Prelude ()
import Game.LambdaHack.Core.Prelude
import Data.Binary
import Data.Int (Int32)
import qualified Data.Primitive.PrimArray as PA
import GHC.Generics (Generic)
import Test.QuickCheck
import Game.LambdaHack.Definition.Defs
-- | This is a hack to pass the X size of the dungeon, defined
-- in game content, to the @Enum@ instances of @Point@ and @Vector@.
-- This is already slower and has higher allocation than
-- hardcoding the value, so passing the value explicitly to
-- a generalization of the @Enum@ conversions is out of the question.
-- Perhaps this can be done cleanly and efficiently at link-time
-- via Backpack, but it's probably not supported yet by GHCJS (not verified).
-- For now, we need to be careful never to modify this array,
-- except for setting it at program start before it's used for the first time.
-- Which is easy, because @Point@ is never mentioned in content definitions.
-- The @PrimArray@ has much smaller overhead than @IORef@
-- and reading from it looks cleaner, hence its use.
speedupHackXSize :: PA.PrimArray X
{-# NOINLINE speedupHackXSize #-}
speedupHackXSize = PA.primArrayFromList [80] -- updated at program startup
-- | 2D points in cartesian representation. Coordinates grow to the right
-- and down, so that the (0, 0) point is in the top-left corner
-- of the screen. Coordinates are never negative
-- (unlike for 'Game.LambdaHack.Common.Vector.Vector')
-- and the @X@ coordinate never reaches the screen width as read
-- from 'speedupHackXSize'.
data Point = Point
{ px :: X
, py :: Y
}
deriving (Eq, Ord, Generic)
instance Show Point where
show (Point x y) = show (x, y)
instance Binary Point where
put = put . (toIntegralCrash :: Int -> Int32) . fromEnum
get = fmap (toEnum . (fromIntegralWrap :: Int32 -> Int)) get
-- `fromIntegralWrap` is fine here, because we converted the integer
-- in the opposite direction first, so it fits even in 31 bit `Int`
-- Note that @Ord@ on @Int@ is not monotonic wrt @Ord@ on @Point@.
-- We need to keep it that way, because we want close xs to have close indexes,
-- e.g., adjacent points in line to have adjacent enumerations,
-- because some of the screen layout and most of processing is line-by-line.
-- Consequently, one can use EM.fromDistinctAscList
-- on @(1, 8)..(10, 8)@, but not on @(1, 7)..(10, 9)@.
instance Enum Point where
fromEnum Point{..} =
let !xsize = PA.indexPrimArray speedupHackXSize 0
in
#ifdef WITH_EXPENSIVE_ASSERTIONS
assert (px >= 0 && py >= 0 && px < xsize
`blame` "invalid point coordinates"
`swith` (px, py))
#endif
(px + py * xsize)
toEnum n = let !xsize = PA.indexPrimArray speedupHackXSize 0
(py, px) = n `quotRem` xsize
in Point{..}
instance Arbitrary Point where
arbitrary = do
let xsize = PA.indexPrimArray speedupHackXSize 0
n <- getSize
Point <$> choose (0, min n (xsize - 1))
<*> choose (0, n)
-- | Enumeration representation of @Point@.
type PointI = Int
-- This is hidden from Haddock, but run by doctest:
-- $
-- prop> (toEnum :: PointI -> Point) (fromEnum p) == p
-- prop> \ (NonNegative i) -> (fromEnum :: Point -> PointI) (toEnum i) == i
-- | The distance between two points in the chessboard metric.
--
-- >>> chessDist (Point 0 0) (Point 0 0)
-- 0
-- >>> chessDist (Point (-1) 0) (Point 0 0)
-- 1
-- >>> chessDist (Point (-1) 0) (Point (-1) 1)
-- 1
-- >>> chessDist (Point (-1) 0) (Point 0 1)
-- 1
-- >>> chessDist (Point (-1) 0) (Point 1 1)
-- 2
--
-- prop> chessDist p1 p2 >= 0
-- prop> chessDist p1 p2 ^ (2 :: Int) <= euclidDistSq p1 p2
chessDist :: Point -> Point -> Int
chessDist (Point x0 y0) (Point x1 y1) = max (abs (x1 - x0)) (abs (y1 - y0))
-- | Squared euclidean distance between two points.
euclidDistSq :: Point -> Point -> Int
euclidDistSq (Point x0 y0) (Point x1 y1) =
(x1 - x0) ^ (2 :: Int) + (y1 - y0) ^ (2 :: Int)
-- | Checks whether two points are adjacent on the map
-- (horizontally, vertically or diagonally).
adjacent :: Point -> Point -> Bool
{-# INLINE adjacent #-}
adjacent s t = chessDist s t == 1
-- | Bresenham's line algorithm generalized to arbitrary starting @eps@
-- (@eps@ value of 0 gives the standard BLA).
-- Skips the source point and goes through the second point to infinity.
-- Gives @Nothing@ if the points are equal. The target is given as @Point@,
-- not @PointI@, to permit aiming out of the level, e.g., to get
-- uniform distributions of directions for explosions close to the edge
-- of the level.
--
-- >>> bresenhamsLineAlgorithm 0 (Point 0 0) (Point 0 0)
-- Nothing
-- >>> take 3 $ fromJust $ bresenhamsLineAlgorithm 0 (Point 0 0) (Point 1 0)
-- [(1,0),(2,0),(3,0)]
-- >>> take 3 $ fromJust $ bresenhamsLineAlgorithm 0 (Point 0 0) (Point 0 1)
-- [(0,1),(0,2),(0,3)]
-- >>> take 3 $ fromJust $ bresenhamsLineAlgorithm 0 (Point 0 0) (Point 1 1)
-- [(1,1),(2,2),(3,3)]
bresenhamsLineAlgorithm :: Int -> Point -> Point -> Maybe [Point]
bresenhamsLineAlgorithm eps source target =
if source == target then Nothing
else Just $ tail $ bresenhamsLineAlgorithmBegin eps source target
-- | Bresenham's line algorithm generalized to arbitrary starting @eps@
-- (@eps@ value of 0 gives the standard BLA). Includes the source point
-- and goes through the target point to infinity.
--
-- >>> take 4 $ bresenhamsLineAlgorithmBegin 0 (Point 0 0) (Point 2 0)
-- [(0,0),(1,0),(2,0),(3,0)]
bresenhamsLineAlgorithmBegin :: Int -> Point -> Point -> [Point]
bresenhamsLineAlgorithmBegin eps (Point x0 y0) (Point x1 y1) =
let (dx, dy) = (x1 - x0, y1 - y0)
xyStep b (x, y) = (x + signum dx, y + signum dy * b)
yxStep b (x, y) = (x + signum dx * b, y + signum dy)
(p, q, step) | abs dx > abs dy = (abs dy, abs dx, xyStep)
| otherwise = (abs dx, abs dy, yxStep)
bw = balancedWord p q (eps `mod` max 1 q)
walk w xy = xy : walk (tail w) (step (head w) xy)
in map (uncurry Point) $ walk bw (x0, y0)
-- | See <http://roguebasin.roguelikedevelopment.org/index.php/index.php?title=Digital_lines>.
balancedWord :: Int -> Int -> Int -> [Int]
balancedWord p q eps | eps + p < q = 0 : balancedWord p q (eps + p)
balancedWord p q eps = 1 : balancedWord p q (eps + p - q)
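-- Illustrative expansion (not a doctest): @take 5 (balancedWord 2 5 0)@
-- evaluates to @[0,0,1,0,1]@, i.e. the digital line of slope 2/5, which
-- 'bresenhamsLineAlgorithmBegin' uses to decide when to step in the minor
-- direction.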
-- | A list of all points on a straight vertical or straight horizontal line
-- between two points. Fails if no such line exists.
--
-- >>> fromTo (Point 0 0) (Point 2 0)
-- [(0,0),(1,0),(2,0)]
fromTo :: Point -> Point -> [Point]
fromTo (Point x0 y0) (Point x1 y1) =
let fromTo1 :: Int -> Int -> [Int]
fromTo1 z0 z1
| z0 <= z1 = [z0..z1]
| otherwise = [z0,z0-1..z1]
result
| x0 == x1 = map (Point x0) (fromTo1 y0 y1)
| y0 == y1 = map (`Point` y0) (fromTo1 x0 x1)
| otherwise = error $ "diagonal fromTo"
`showFailure` ((x0, y0), (x1, y1))
in result
originPoint :: Point
originPoint = Point 0 0
-- | Checks that a point belongs to an area.
insideP :: (X, Y, X, Y) -> Point -> Bool
{-# INLINE insideP #-}
insideP (x0, y0, x1, y1) (Point x y) = x1 >= x && x >= x0 && y1 >= y && y >= y0
|
LambdaHack/LambdaHack
|
engine-src/Game/LambdaHack/Common/Point.hs
|
bsd-3-clause
| 7,557 | 0 | 16 | 1,659 | 1,550 | 867 | 683 | -1 | -1 |
-- p_adicanalysis.hs
module Math.P_adicAnalysis where
import Math.MathsPrimitives (FunctionRep (..), partialProducts, ($+), ($.) )
import Math.QQ
import Math.UPoly
import Math.CombinatoricsCounting
import Math.NonArchimedean
import Math.PowerSeries
import Math.UmbralCalculus
-- Sources:
-- Robert, A Course in p-adic Analysis
-- MAHLER SERIES
-- Robert, A Course in p-adic Analysis, p161
binomialPoly n = (1 / fromInteger (factorial n)) */ fallingFactorial x n
-- then for x <- NN, evalUP (binomialPoly n) x == x `choose` n
-- the binomial polynomials form a basis for the space of integer-valued functions on NN
dGrad f n = f (n+1) - f n
-- discrete gradient operator, returns f(x+1) - f(x)
dGradUP f = composeUP f (x+1) - f -- translateUP 1 f - f
-- (also called the finite difference operator)
dGradUP' f = applyCompOp (expPS - 1) f
-- Robert p162
-- express the polynomial as a sum of binomialPolys
-- use the discrete gradient operator to find the coefficients
mahlerSeriesUP' 0 = []
mahlerSeriesUP' f = evalUP f 0 : mahlerSeriesUP (dGradUP f)
-- Robert p214 - the Mahler series of x^n is [S2(n,k)*k! | k <- [0..] ], where S2(n,k) are the Stirling numbers of the second kind
mahlerSeriesUP (UP as) = foldl ($+) [] summands
where
summands = zipWith (\a cs -> map (a*) cs) as (map (map fromInteger) powers)
powers = zipWith (zipWith (*)) stirlingSecondTriangle (repeat (partialProducts (1:[1..])))
-- (faster)
polyFromMahlerSeries ms = sum (zipWith (*/) ms (map binomialPoly [0..]))
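-- Worked example (illustrative): x^2 = C(x,1) + 2*C(x,2), i.e.
-- x^2 == binomialPoly 1 + 2 */ binomialPoly 2, so the Mahler coefficients of
-- x^2 are 0, 1, 2; this matches S2(2,1)*1! = 1 and S2(2,2)*2! = 2 from the
-- comment above, and polyFromMahlerSeries [0,1,2] recovers x^2.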
-- indefinite sum operator, Robert p167
iSum f 0 = 0
iSum f n = sum [f i | i <- [0..n-1] ]
iSumUP'' f = polyFromMahlerSeries (0 : (mahlerSeriesUP f))
-- This works because iSum of the kth binomial poly is the (k+1)th binomial poly
-- ie iSum (evalUP (binomialPoly k) . fromInteger) n == (evalUP (binomialPoly (k+1)) . fromInteger) n
-- So for example we can check that iSum (evalUP (x^2) . fromInteger) 5 == evalUP (iSumUP (x^2)) 5 == 1^2 + 2^2 + 3^2 + 4^2 == 30
iSumUP' (UP as) = sum (zipWith (*/) as bernoulliSums)
iSumUP f = inverseDeltaOp (expPS - 1) f
-- iSum is inverse of dGrad
-- dGrad (iSum f) n == iSum f (n+1) - iSum f n = f n
bernoulliSums = zipWith (\m p -> (1 /(fromInteger m+1)) */ p) [0..] (tail (zipWith (\bp bn -> bp - UP [bn]) bernoulliPolys bernoulliNumbers))
-- Ireland, Rosen, p231
-- S_m(n) = sum [i^m | i <- [0..n-1] ] = 1/(m+1) * (bp_m+1(n) - bn_m+1)
-- VOLKENBORN INTEGRAL
volkenbornIntegralPoly1 p f = untilConvergence (map (volkenbornSumPoly p f) [1..])
volkenbornSumPoly p f n =
let Qp _ d as = evalUP (uptoQp p (iSumUP f)) (toQp' p (p^n))
in Qp p d (drop n as)
-- !! this doesn't work for n>16 because the normalQp call fails. Not quite sure why
volkenbornSumPoly' p f n =
let Just s = normalQp (Qp p (-n) (1 : repeat 0) * (evalUP (uptoQp p (iSumUP f)) (toQp' p (p^n))))
in s
-- Robert p265
-- Calculate the Volkenborn integral of a polynomial via its Mahler series (as a sum of binomial polys)
volkenbornIntegralPoly2 :: UPoly QQ -> QQ
volkenbornIntegralPoly2 f = volkenbornIntegralMahlerSeries (mahlerSeriesUP f)
-- let cs = toBinomialBasis f
-- in alternatingSum (zipWith (/) cs (map fromInteger [1..])) -- sum (-1)^k c_k / (k+1)
volkenbornIntegralMahlerSeries ms = alternatingSum (zipWith (/) ms (map fromInteger [1..])) -- sum (-1)^k m_k / (k+1)
alternatingSum xs = foldr (-) 0 xs
-- Robert p270
-- We can work out the Volkenborn integral of polynomials directly, as the Volkenborn integral of a power x^n is the nth Bernoulli number
volkenbornIntegralPoly :: UPoly QQ -> QQ
volkenbornIntegralPoly (UP as) = as $. bernoulliNumbers
|
nfjinjing/bench-euler
|
src/Math/P_adicAnalysis.hs
|
bsd-3-clause
| 3,691 | 4 | 17 | 749 | 906 | 483 | 423 | 37 | 1 |
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE OverloadedStrings #-}
-- |
-- Module: Aws.Sns.Commands.GetSubscriptionAttributes
-- Copyright: Copyright © 2014 AlephCloud Systems, Inc.
-- License: MIT
-- Maintainer: Lars Kuhtz <[email protected]>
-- Stability: experimental
--
-- /API Version: 2010-03-31/
--
-- Returns all of the properties of a subscription.
--
-- <http://docs.aws.amazon.com/sns/2010-03-31/api/API_GetSubscriptionAttributes.html>
--
module Aws.Sns.Commands.GetSubscriptionAttributes
( GetSubscriptionAttributes(..)
, GetSubscriptionAttributesResponse(..)
, GetSubscriptionAttributesErrors(..)
) where
import Aws.Core
import Aws.General
import Aws.Sns.Internal
import Aws.Sns.Core
import Aws.Sns.Types
import Control.Applicative
import Control.Exception
import Control.Monad.Trans.Resource (throwM)
import Data.Aeson
import qualified Data.Text.Encoding as T
import qualified Data.Traversable as TR
import Data.Typeable
import Text.XML.Cursor (($//), (&/))
import qualified Text.XML.Cursor as CU
-- -------------------------------------------------------------------------- --
-- GetSubscriptionAttributes
getSubscriptionAttributesAction :: SnsAction
getSubscriptionAttributesAction = SnsActionGetSubscriptionAttributes
data GetSubscriptionAttributes = GetSubscriptionAttributes
{ getSubscriptionAttributesSubscriptionArn :: !Arn
-- ^ The ARN of the subscription whose properties you want to get.
}
deriving (Show, Eq, Typeable)
data GetSubscriptionAttributesResponse = GetSubscriptionAttributesResponse
{ getSubscriptionAttributesResAttributes :: !SubscriptionAttributes
}
deriving (Show, Eq, Typeable)
instance ResponseConsumer r GetSubscriptionAttributesResponse where
type ResponseMetadata GetSubscriptionAttributesResponse = SnsMetadata
responseConsumer _ = snsXmlResponseConsumer p
where
p :: CU.Cursor -> Response (ResponseMetadata GetSubscriptionAttributesResponse) GetSubscriptionAttributesResponse
p el = either throwM return $ do
entries <- fmap parseXmlEntryMap $ force "Missing Attributes" $ el
$// CU.laxElement "GetSubscriptionAttributesResult"
&/ CU.laxElement "Attributes"
fmapL (toException . XmlException) . fmap GetSubscriptionAttributesResponse $ SubscriptionAttributes
<$> TR.mapM fromText (lookup "SubscriptionArn" entries)
<*> TR.mapM fromText (lookup "TopicArn" entries)
<*> pure (fmap AccountId (lookup "Owner" entries))
<*> pure (maybe False (== "true") (lookup "ConfirmationWasAuthenticated" entries))
<*> TR.mapM eitherDecodeText (lookup "DeliveryPolicy" entries)
<*> TR.mapM eitherDecodeText (lookup "EffectiveDeliveryPolicy" entries)
eitherDecodeText = eitherDecodeStrict . T.encodeUtf8
instance SignQuery GetSubscriptionAttributes where
type ServiceConfiguration GetSubscriptionAttributes = SnsConfiguration
signQuery GetSubscriptionAttributes{..} = snsSignQuery SnsQuery
{ snsQueryMethod = Get
, snsQueryAction = getSubscriptionAttributesAction
, snsQueryParameters = [("SubscriptionArn", Just $ toText getSubscriptionAttributesSubscriptionArn)]
, snsQueryBody = Nothing
}
instance Transaction GetSubscriptionAttributes GetSubscriptionAttributesResponse
instance AsMemoryResponse GetSubscriptionAttributesResponse where
type MemoryResponse GetSubscriptionAttributesResponse = GetSubscriptionAttributesResponse
loadToMemory = return
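-- Illustrative use (the ARN value and the runner are assumptions, not part of
-- this module): given a subscription 'Arn' value @subArn@, something along
-- the lines of
--
--   Aws.simpleAws cfg snsCfg (GetSubscriptionAttributes subArn)
--
-- should yield a 'GetSubscriptionAttributesResponse' whose
-- 'getSubscriptionAttributesResAttributes' field carries the parsed
-- 'SubscriptionAttributes'.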
-- -------------------------------------------------------------------------- --
-- Errors
--
-- Currently not used for requests. It's included for future use
-- and as a reference.
data GetSubscriptionAttributesErrors
= GetSubscriptionAttributesAuthorizationError
-- ^ Indicates that the user has been denied access to the requested resource.
--
-- /Code 403/
| GetSubscriptionAttributesInternalError
-- ^ Indicates an internal service error.
--
-- /Code 500/
| GetSubscriptionAttributesInvalidParameter
-- ^ Indicates that a request parameter does not comply with the associated constraints.
--
-- /Code 400/
| GetSubscriptionAttributesNotFound
-- ^ Indicates that the requested resource does not exist.
--
-- /Code 404/
deriving (Show, Read, Eq, Ord, Enum, Bounded, Typeable)
|
alephcloud/hs-aws-sns
|
src/Aws/Sns/Commands/GetSubscriptionAttributes.hs
|
mit
| 4,565 | 0 | 21 | 782 | 673 | 388 | 285 | 69 | 1 |
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE DefaultSignatures #-}
{-# LANGUAGE Rank2Types #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
module Database.Persist.Sql.Orphan.PersistStore
( withRawQuery
, BackendKey(..)
, toSqlKey
, fromSqlKey
, getFieldName
, getTableName
, tableDBName
, fieldDBName
) where
import Database.Persist
import Database.Persist.Sql.Types
import Database.Persist.Sql.Raw
import qualified Data.Conduit as C
import qualified Data.Conduit.List as CL
import qualified Data.Text as T
import Data.Text (Text, unpack)
import Data.Monoid (mappend, (<>))
import Control.Monad.IO.Class
import Data.ByteString.Char8 (readInteger)
import Data.Maybe (isJust)
import Data.List (find)
import Control.Monad.Trans.Reader (ReaderT, ask)
import Data.Acquire (with)
import Data.Int (Int64)
import Web.PathPieces (PathPiece)
import Database.Persist.Sql.Class (PersistFieldSql)
import qualified Data.Aeson as A
import Control.Exception.Lifted (throwIO)
withRawQuery :: MonadIO m
=> Text
-> [PersistValue]
-> C.Sink [PersistValue] IO a
-> ReaderT SqlBackend m a
withRawQuery sql vals sink = do
srcRes <- rawQueryRes sql vals
liftIO $ with srcRes (C.$$ sink)
toSqlKey :: ToBackendKey SqlBackend record => Int64 -> Key record
toSqlKey = fromBackendKey . SqlBackendKey
fromSqlKey :: ToBackendKey SqlBackend record => Key record -> Int64
fromSqlKey = unSqlBackendKey . toBackendKey
whereStmtForKey :: PersistEntity record => SqlBackend -> Key record -> Text
whereStmtForKey conn k =
case entityPrimary t of
Just pdef -> T.intercalate " AND " $ map (\fld -> connEscapeName conn (fieldDB fld) <> "=? ") $ compositeFields pdef
Nothing -> connEscapeName conn (fieldDB (entityId t)) <> "=?"
where t = entityDef $ dummyFromKey k
-- | get the SQL string for the table that a PersistEntity represents
-- Useful for raw SQL queries
--
-- Your backend may provide a more convenient tableName function
-- which does not operate in a Monad
getTableName :: forall record m.
( PersistEntity record
, PersistEntityBackend record ~ SqlBackend
, Monad m
) => record -> ReaderT SqlBackend m Text
getTableName rec = do
conn <- ask
return $ connEscapeName conn $ tableDBName rec
-- | useful for a backend to implement tableName by adding escaping
tableDBName :: forall record.
( PersistEntity record
, PersistEntityBackend record ~ SqlBackend
) => record -> DBName
tableDBName rec = entityDB $ entityDef (Just rec)
-- | get the SQL string for the field that an EntityField represents
-- Useful for raw SQL queries
--
-- Your backend may provide a more convenient fieldName function
-- which does not operate in a Monad
getFieldName :: forall record typ m.
( PersistEntity record
, PersistEntityBackend record ~ SqlBackend
, Monad m
)
=> EntityField record typ -> ReaderT SqlBackend m Text
getFieldName rec = do
conn <- ask
return $ connEscapeName conn $ fieldDBName rec
-- | useful for a backend to implement fieldName by adding escaping
fieldDBName :: forall record typ. (PersistEntity record) => EntityField record typ -> DBName
fieldDBName = fieldDB . persistFieldDef
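-- Illustrative use in a raw query (the entity and field are hypothetical):
--
--   escTable <- getTableName (undefined :: Person)
--   escField <- getFieldName PersonName
--   rawSql ("SELECT " <> escField <> " FROM " <> escTable) []
--
-- Both helpers return names already escaped for the active backend.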
instance PersistStore SqlBackend where
newtype BackendKey SqlBackend = SqlBackendKey { unSqlBackendKey :: Int64 }
deriving (Show, Read, Eq, Ord, Num, Integral, PersistField, PersistFieldSql, PathPiece, Real, Enum, Bounded, A.ToJSON, A.FromJSON)
update _ [] = return ()
update k upds = do
conn <- ask
let go'' n Assign = n <> "=?"
go'' n Add = T.concat [n, "=", n, "+?"]
go'' n Subtract = T.concat [n, "=", n, "-?"]
go'' n Multiply = T.concat [n, "=", n, "*?"]
go'' n Divide = T.concat [n, "=", n, "/?"]
go'' _ (BackendSpecificUpdate up) = error $ T.unpack $ "BackendSpecificUpdate" `mappend` up `mappend` "not supported"
let go' (x, pu) = go'' (connEscapeName conn x) pu
let wher = whereStmtForKey conn k
let sql = T.concat
[ "UPDATE "
, connEscapeName conn $ tableDBName $ recordTypeFromKey k
, " SET "
, T.intercalate "," $ map (go' . go) upds
, " WHERE "
, wher
]
rawExecute sql $
map updatePersistValue upds `mappend` keyToValues k
where
go x = (fieldDB $ updateFieldDef x, updateUpdate x)
insert val = do
conn <- ask
let esql = connInsertSql conn t vals
key <-
case esql of
ISRSingle sql -> withRawQuery sql vals $ do
x <- CL.head
case x of
Just [PersistInt64 i] -> case keyFromValues [PersistInt64 i] of
Left err -> error $ "SQL insert: keyFromValues: PersistInt64 " `mappend` show i `mappend` " " `mappend` unpack err
Right k -> return k
Nothing -> error $ "SQL insert did not return a result giving the generated ID"
Just vals' -> case keyFromValues vals' of
Left _ -> error $ "Invalid result from a SQL insert, got: " ++ show vals'
Right k -> return k
ISRInsertGet sql1 sql2 -> do
rawExecute sql1 vals
withRawQuery sql2 [] $ do
mm <- CL.head
let m = maybe
(Left $ "No results from ISRInsertGet: " `mappend` tshow (sql1, sql2))
Right mm
-- TODO: figure out something better for MySQL
let convert x =
case x of
[PersistByteString i] -> case readInteger i of -- mssql
Just (ret,"") -> [PersistInt64 $ fromIntegral ret]
_ -> x
_ -> x
-- Yes, it's just <|>. Older bases don't have the
-- instance for Either.
onLeft Left{} x = x
onLeft x _ = x
case m >>= (\x -> keyFromValues x `onLeft` keyFromValues (convert x)) of
Right k -> return k
Left err -> throw $ "ISRInsertGet: keyFromValues failed: " `mappend` err
ISRManyKeys sql fs -> do
rawExecute sql vals
case entityPrimary t of
Nothing -> error $ "ISRManyKeys is used when Primary is defined " ++ show sql
Just pdef ->
let pks = map fieldHaskell $ compositeFields pdef
                        keyvals = map snd $ filter (\(a, _) -> isJust (find (== a) pks)) $ zip (map fieldHaskell $ entityFields t) fs
in case keyFromValues keyvals of
Right k -> return k
Left e -> error $ "ISRManyKeys: unexpected keyvals result: " `mappend` unpack e
return key
where
tshow :: Show a => a -> Text
tshow = T.pack . show
throw = liftIO . throwIO . userError . T.unpack
t = entityDef $ Just val
vals = map toPersistValue $ toPersistFields val
insertMany_ [] = return ()
insertMany_ vals = do
conn <- ask
let sql = T.concat
[ "INSERT INTO "
, connEscapeName conn (entityDB t)
, "("
, T.intercalate "," $ map (connEscapeName conn . fieldDB) $ entityFields t
, ") VALUES ("
, T.intercalate "),(" $ replicate (length valss) $ T.intercalate "," $ map (const "?") (entityFields t)
, ")"
]
-- SQLite support is only in later versions
if connRDBMS conn == "sqlite"
then mapM_ insert vals
else rawExecute sql (concat valss)
where
t = entityDef vals
valss = map (map toPersistValue . toPersistFields) vals
replace k val = do
conn <- ask
let t = entityDef $ Just val
let wher = whereStmtForKey conn k
let sql = T.concat
[ "UPDATE "
, connEscapeName conn (entityDB t)
, " SET "
, T.intercalate "," (map (go conn . fieldDB) $ entityFields t)
, " WHERE "
, wher
]
vals = map toPersistValue (toPersistFields val) `mappend` keyToValues k
rawExecute sql vals
where
go conn x = connEscapeName conn x `T.append` "=?"
insertKey = insrepHelper "INSERT"
repsert key value = do
mExisting <- get key
case mExisting of
Nothing -> insertKey key value
Just _ -> replace key value
get k = do
conn <- ask
let t = entityDef $ dummyFromKey k
let cols = T.intercalate ","
$ map (connEscapeName conn . fieldDB) $ entityFields t
noColumns :: Bool
noColumns = null $ entityFields t
let wher = whereStmtForKey conn k
let sql = T.concat
[ "SELECT "
, if noColumns then "*" else cols
, " FROM "
, connEscapeName conn $ entityDB t
, " WHERE "
, wher
]
withRawQuery sql (keyToValues k) $ do
res <- CL.head
case res of
Nothing -> return Nothing
Just vals ->
case fromPersistValues $ if noColumns then [] else vals of
Left e -> error $ "get " ++ show k ++ ": " ++ unpack e
Right v -> return $ Just v
delete k = do
conn <- ask
rawExecute (sql conn) (keyToValues k)
where
wher conn = whereStmtForKey conn k
sql conn = T.concat
[ "DELETE FROM "
, connEscapeName conn $ tableDBName $ recordTypeFromKey k
, " WHERE "
, wher conn
]
dummyFromKey :: Key record -> Maybe record
dummyFromKey = Just . recordTypeFromKey
recordTypeFromKey :: Key record -> record
recordTypeFromKey _ = error "dummyFromKey"
insrepHelper :: (MonadIO m, PersistEntity val)
=> Text
-> Key val
-> val
-> ReaderT SqlBackend m ()
insrepHelper command k val = do
conn <- ask
rawExecute (sql conn) vals
where
t = entityDef $ Just val
sql conn = T.concat
[ command
, " INTO "
, connEscapeName conn (entityDB t)
, "("
, T.intercalate ","
$ map (connEscapeName conn)
$ fieldDB (entityId t) : map fieldDB (entityFields t)
, ") VALUES("
, T.intercalate "," ("?" : map (const "?") (entityFields t))
, ")"
]
vals = keyToValues k ++ map toPersistValue (toPersistFields val)
updateFieldDef :: PersistEntity v => Update v -> FieldDef
updateFieldDef (Update f _ _) = persistFieldDef f
updateFieldDef (BackendUpdate {}) = error "updateFieldDef did not expect BackendUpdate"
updatePersistValue :: Update v -> PersistValue
updatePersistValue (Update _ v _) = toPersistValue v
updatePersistValue (BackendUpdate {}) = error "updatePersistValue did not expect BackendUpdate"
|
jcristovao/persistent
|
persistent/Database/Persist/Sql/Orphan/PersistStore.hs
|
mit
| 11,931 | 75 | 26 | 4,360 | 3,135 | 1,602 | 1,533 | 249 | 2 |
module Semantics.Value
( Value(..), Eval
, Level(..), level
, ICon(..), ID, Sort(..)
, POrd(..), DOrd(..), lessOrEqual
) where
import Syntax.Term
data Value t
= Lam
| Pi Sort Sort
| DCon ID Int Int (Eval t)
| PCon
| ICon ICon
| CCon
| FunCall ID (Eval t)
| Universe Sort
| DataType ID Int
| Interval
| Path Sort
| At
| Coe
| Iso
| Squeeze
| Case [Term Int String]
| Conds Int (Eval t)
| FieldAcc Int Int Int (Eval t)
data ICon = ILeft | IRight deriving (Eq, Show)
type Eval t = [([Term Int String], t)]
type ID = Int
data Level = Level Int | NoLevel
data Sort = TypeK Level | Set Level | Prop | Contr deriving Eq
instance Eq (Value t) where
Lam == Lam = True
Pi{} == Pi{} = True
DCon dt i _ _ == DCon dt' i' _ _ = dt == dt' && i == i'
PCon == PCon = True
ICon c == ICon c' = c == c'
CCon == CCon = True
FunCall n _ == FunCall n' _ = n == n'
Universe k == Universe k' = k == k'
DataType n _ == DataType n' _ = n == n'
Interval == Interval = True
Path{} == Path{} = True
At == At = True
Coe == Coe = True
Iso == Iso = True
Squeeze == Squeeze = True
Case pats == Case pats' = and (zipWith cmpPats pats pats')
where
cmpPats :: Term Int u -> Term Int u' -> Bool
cmpPats Var{} Var{} = True
cmpPats (Apply c pats) (Apply c' pats') = c == c' && and (zipWith cmpPats pats pats')
cmpPats _ _ = False
Conds{} == Conds{} = True
FieldAcc i _ _ _ == FieldAcc i' _ _ _ = i == i'
_ == _ = False
instance Show (Value t) where
show Lam = "Lam"
show (Pi s1 s2) = "Pi " ++ show s1 ++ " " ++ show s2
show (DCon n k m _) = "DCon " ++ show n ++ " " ++ show k ++ " " ++ show m
show PCon = "PCon"
show (ICon c) = "ICon " ++ show c
show CCon = "CCon"
show (FunCall n _) = "FunCall " ++ show n
show (Universe s) = "Universe " ++ show s
show (DataType n k) = "DataType " ++ show n ++ " " ++ show k
show Interval = "Interval"
show (Path s) = "Path " ++ show s
show At = "At"
show Coe = "Coe"
show Iso = "Iso"
show Squeeze = "Squeeze"
show (Case ts) = "Case " ++ show ts
show (Conds n _) = "Conds " ++ show n
show (FieldAcc n k m _) = "FieldAcc " ++ show n ++ " " ++ show k ++ " " ++ show m
instance Eq Level where
l1 == l2 = level l1 == level l2
instance Ord Level where
compare l1 l2 = compare (level l1) (level l2)
instance Show Level where
show NoLevel = ""
show (Level l) = show l
instance Read Level where
readsPrec _ s = case reads s of
[] -> [(NoLevel, s)]
is -> map (\(i,r) -> (Level i, r)) is
instance Enum Level where
toEnum 0 = NoLevel
toEnum n = Level n
fromEnum = level
level :: Level -> Int
level (Level l) = l
level NoLevel = 0
class POrd a where
pcompare :: a -> a -> Maybe Ordering
class POrd a => DOrd a where
dmax :: a -> a -> a
dmaximum :: [a] -> a
dmaximum [] = error "dmaximum: empty list"
dmaximum xs = foldl1 dmax xs
lessOrEqual :: POrd a => a -> a -> Bool
lessOrEqual t t' = case pcompare t t' of
Just r | r == EQ || r == LT -> True
_ -> False
instance POrd Sort where
pcompare Contr Contr = Just EQ
pcompare Contr _ = Just LT
pcompare _ Contr = Just GT
pcompare Prop Prop = Just EQ
pcompare Prop _ = Just LT
pcompare _ Prop = Just GT
pcompare (Set a) (Set b) = Just (compare a b)
pcompare (TypeK a) (TypeK b) = Just (compare a b)
pcompare (Set a) (TypeK b) = if a <= b then Just LT else Nothing
pcompare (TypeK a) (Set b) = if a >= b then Just GT else Nothing
instance DOrd Sort where
dmax a b = case pcompare a b of
Just LT -> b
Just _ -> a
Nothing -> case (a, b) of
(Set l1, TypeK l2) -> TypeK (max l1 l2)
(TypeK l1, Set l2) -> TypeK (max l1 l2)
_ -> a
dmaximum [] = TypeK NoLevel
dmaximum ks = foldl1 dmax ks
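-- Illustrative consequences of the instances above (the name below is new
-- here and not used elsewhere):
sortOrderExamples :: Bool
sortOrderExamples = and
  [ pcompare (Set (Level 1)) (TypeK NoLevel) == Nothing -- incomparable
  , dmax (Set (Level 1)) (TypeK NoLevel) == TypeK (Level 1)
  , lessOrEqual Prop (Set NoLevel)
  ]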
instance Show Sort where
show Contr = "Contr"
show Prop = "Prop"
show (Set a) = "Set" ++ show a
show (TypeK a) = "Type" ++ show a
instance Read Sort where
readsPrec _ ('C':'o':'n':'t':'r':s) = [(Contr,s)]
readsPrec _ ('P':'r':'o':'p':s) = [(Prop,s)]
readsPrec _ ('S':'e':'t':s) = map (\(l,s) -> (Set l, s)) (reads s)
readsPrec _ ('T':'y':'p':'e':s) = map (\(l,s) -> (TypeK l, s)) (reads s)
readsPrec _ _ = []
instance Enum Sort where
succ Contr = Prop
succ Prop = Set NoLevel
succ (Set l) = TypeK (succ l)
succ (TypeK l) = TypeK (succ l)
toEnum n = TypeK (toEnum n)
fromEnum Contr = -2
fromEnum Prop = -1
fromEnum (Set l) = fromEnum l
fromEnum (TypeK l) = fromEnum l
|
bitemyapp/hoq
|
src/Semantics/Value.hs
|
gpl-2.0
| 4,782 | 0 | 14 | 1,541 | 2,333 | 1,169 | 1,164 | 144 | 2 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- Module : Network.AWS.OpsWorks.DetachElasticLoadBalancer
-- Copyright : (c) 2013-2014 Brendan Hay <[email protected]>
-- License : This Source Code Form is subject to the terms of
-- the Mozilla Public License, v. 2.0.
-- A copy of the MPL can be found in the LICENSE file or
-- you can obtain it at http://mozilla.org/MPL/2.0/.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : experimental
-- Portability : non-portable (GHC extensions)
--
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- | Detaches a specified Elastic Load Balancing instance from its layer.
--
-- Required Permissions: To use this action, an IAM user must have a Manage
-- permissions level for the stack, or an attached policy that explicitly grants
-- permissions. For more information on user permissions, see <http://docs.aws.amazon.com/opsworks/latest/userguide/opsworks-security-users.html Managing UserPermissions>.
--
-- <http://docs.aws.amazon.com/opsworks/latest/APIReference/API_DetachElasticLoadBalancer.html>
module Network.AWS.OpsWorks.DetachElasticLoadBalancer
(
-- * Request
DetachElasticLoadBalancer
-- ** Request constructor
, detachElasticLoadBalancer
-- ** Request lenses
, delbElasticLoadBalancerName
, delbLayerId
-- * Response
, DetachElasticLoadBalancerResponse
-- ** Response constructor
, detachElasticLoadBalancerResponse
) where
import Network.AWS.Data (Object)
import Network.AWS.Prelude
import Network.AWS.Request.JSON
import Network.AWS.OpsWorks.Types
import qualified GHC.Exts
data DetachElasticLoadBalancer = DetachElasticLoadBalancer
{ _delbElasticLoadBalancerName :: Text
, _delbLayerId :: Text
} deriving (Eq, Ord, Read, Show)
-- | 'DetachElasticLoadBalancer' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'delbElasticLoadBalancerName' @::@ 'Text'
--
-- * 'delbLayerId' @::@ 'Text'
--
detachElasticLoadBalancer :: Text -- ^ 'delbElasticLoadBalancerName'
-> Text -- ^ 'delbLayerId'
-> DetachElasticLoadBalancer
detachElasticLoadBalancer p1 p2 = DetachElasticLoadBalancer
{ _delbElasticLoadBalancerName = p1
, _delbLayerId = p2
}
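-- A minimal usage sketch (the load balancer name and layer ID below are
-- invented for illustration):
--
-- > detachElasticLoadBalancer "my-load-balancer" "layer-id-1234"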
-- | The Elastic Load Balancing instance's name.
delbElasticLoadBalancerName :: Lens' DetachElasticLoadBalancer Text
delbElasticLoadBalancerName =
lens _delbElasticLoadBalancerName
(\s a -> s { _delbElasticLoadBalancerName = a })
-- | The ID of the layer that the Elastic Load Balancing instance is attached to.
delbLayerId :: Lens' DetachElasticLoadBalancer Text
delbLayerId = lens _delbLayerId (\s a -> s { _delbLayerId = a })
data DetachElasticLoadBalancerResponse = DetachElasticLoadBalancerResponse
deriving (Eq, Ord, Read, Show, Generic)
-- | 'DetachElasticLoadBalancerResponse' constructor.
detachElasticLoadBalancerResponse :: DetachElasticLoadBalancerResponse
detachElasticLoadBalancerResponse = DetachElasticLoadBalancerResponse
instance ToPath DetachElasticLoadBalancer where
toPath = const "/"
instance ToQuery DetachElasticLoadBalancer where
toQuery = const mempty
instance ToHeaders DetachElasticLoadBalancer
instance ToJSON DetachElasticLoadBalancer where
toJSON DetachElasticLoadBalancer{..} = object
[ "ElasticLoadBalancerName" .= _delbElasticLoadBalancerName
, "LayerId" .= _delbLayerId
]
instance AWSRequest DetachElasticLoadBalancer where
type Sv DetachElasticLoadBalancer = OpsWorks
type Rs DetachElasticLoadBalancer = DetachElasticLoadBalancerResponse
request = post "DetachElasticLoadBalancer"
response = nullResponse DetachElasticLoadBalancerResponse
|
romanb/amazonka
|
amazonka-opsworks/gen/Network/AWS/OpsWorks/DetachElasticLoadBalancer.hs
|
mpl-2.0
| 4,267 | 0 | 9 | 857 | 424 | 259 | 165 | 57 | 1 |
----------------------------------------------------------------------------
--
-- Pretty-printing of common Cmm types
--
-- (c) The University of Glasgow 2004-2006
--
-----------------------------------------------------------------------------
--
-- This is where we walk over Cmm emitting an external representation,
-- suitable for parsing, in a syntax strongly reminiscent of C--. This
-- is the "External Core" for the Cmm layer.
--
-- As such, this should be a well-defined syntax: we want it to look nice.
-- Thus, we try wherever possible to use syntax defined in [1],
-- "The C-- Reference Manual", http://www.cminusminus.org/. We differ
-- slightly, in some cases. For one, we use I8 .. I64 for types, rather
-- than C--'s bits8 .. bits64.
--
-- We try to ensure that all information available in the abstract
-- syntax is reproduced, or reproducible, in the concrete syntax.
-- Data that is not printed out can be reconstructed according to
-- conventions used in the pretty printer. There are at least two such
-- cases:
-- 1) if a value has wordRep type, the type is not appended in the
-- output.
-- 2) MachOps that operate over wordRep type are printed in a
-- C-style, rather than as their internal MachRep name.
--
-- These conventions produce much more readable Cmm output.
--
-- A useful example pass over Cmm is in nativeGen/MachCodeGen.hs
--
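-- As a rough, hand-written illustration (not generated output), a small data
-- section comes out along these lines from 'pprTop' and 'pprStatic' below:
--
--      section "data" {
--          SomeLabel:
--          const 42;
--      }
--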
{-# OPTIONS -fno-warn-tabs #-}
-- The above warning suppression flag is a temporary kludge.
-- While working on this module you are encouraged to remove it and
-- detab the module (please do the detabbing in a separate patch). See
-- http://hackage.haskell.org/trac/ghc/wiki/Commentary/CodingStyle#TabsvsSpaces
-- for details
module PprCmmDecl
( writeCmms, pprCmms, pprCmmGroup, pprSection, pprStatic
)
where
import CLabel
import PprCmmExpr
import Cmm
import Outputable
import Platform
import FastString
import Data.List
import System.IO
-- Temp Jan08
import SMRep
#include "../includes/rts/storage/FunTypes.h"
pprCmms :: (PlatformOutputable info, PlatformOutputable g)
=> Platform -> [GenCmmGroup CmmStatics info g] -> SDoc
pprCmms platform cmms = pprCode CStyle (vcat (intersperse separator $ map (pprPlatform platform) cmms))
where
separator = space $$ ptext (sLit "-------------------") $$ space
writeCmms :: (PlatformOutputable info, PlatformOutputable g)
=> Platform -> Handle -> [GenCmmGroup CmmStatics info g] -> IO ()
writeCmms platform handle cmms = printForC handle (pprCmms platform cmms)
-----------------------------------------------------------------------------
instance (PlatformOutputable d, PlatformOutputable info, PlatformOutputable i)
=> PlatformOutputable (GenCmmDecl d info i) where
pprPlatform platform t = pprTop platform t
instance PlatformOutputable CmmStatics where
pprPlatform = pprStatics
instance PlatformOutputable CmmStatic where
pprPlatform = pprStatic
instance PlatformOutputable CmmInfoTable where
pprPlatform = pprInfoTable
-----------------------------------------------------------------------------
pprCmmGroup :: (PlatformOutputable d,
PlatformOutputable info,
PlatformOutputable g)
=> Platform -> GenCmmGroup d info g -> SDoc
pprCmmGroup platform tops
= vcat $ intersperse blankLine $ map (pprTop platform) tops
-- --------------------------------------------------------------------------
-- Top level `procedure' blocks.
--
pprTop :: (PlatformOutputable d, PlatformOutputable info, PlatformOutputable i)
=> Platform -> GenCmmDecl d info i -> SDoc
pprTop platform (CmmProc info lbl graph)
= vcat [ pprCLabel platform lbl <> lparen <> rparen
, nest 8 $ lbrace <+> pprPlatform platform info $$ rbrace
, nest 4 $ pprPlatform platform graph
, rbrace ]
-- --------------------------------------------------------------------------
-- We follow [1], 4.5
--
-- section "data" { ... }
--
pprTop platform (CmmData section ds) =
(hang (pprSection section <+> lbrace) 4 (pprPlatform platform ds))
$$ rbrace
-- --------------------------------------------------------------------------
-- Info tables.
pprInfoTable :: Platform -> CmmInfoTable -> SDoc
pprInfoTable _ CmmNonInfoTable
= empty
pprInfoTable platform
(CmmInfoTable { cit_lbl = lbl, cit_rep = rep
, cit_prof = prof_info
, cit_srt = _srt })
= vcat [ ptext (sLit "label:") <+> pprPlatform platform lbl
, ptext (sLit "rep:") <> ppr rep
, case prof_info of
NoProfilingInfo -> empty
ProfilingInfo ct cd -> vcat [ ptext (sLit "type:") <+> pprWord8String ct
, ptext (sLit "desc: ") <> pprWord8String cd ] ]
instance PlatformOutputable C_SRT where
pprPlatform _ (NoC_SRT) = ptext (sLit "_no_srt_")
pprPlatform platform (C_SRT label off bitmap)
= parens (pprPlatform platform label <> comma <> ppr off
<> comma <> text (show bitmap))
instance Outputable ForeignHint where
ppr NoHint = empty
ppr SignedHint = quotes(text "signed")
-- ppr AddrHint = quotes(text "address")
-- Temp Jan08
ppr AddrHint = (text "PtrHint")
instance PlatformOutputable ForeignHint where
pprPlatform _ = ppr
-- --------------------------------------------------------------------------
-- Static data.
-- Strings are printed as C strings, and we print them as I8[],
-- following C--
--
pprStatics :: Platform -> CmmStatics -> SDoc
pprStatics platform (Statics lbl ds) = vcat ((pprCLabel platform lbl <> colon) : map (pprPlatform platform) ds)
pprStatic :: Platform -> CmmStatic -> SDoc
pprStatic platform s = case s of
CmmStaticLit lit -> nest 4 $ ptext (sLit "const") <+> pprLit platform lit <> semi
CmmUninitialised i -> nest 4 $ text "I8" <> brackets (int i)
CmmString s' -> nest 4 $ text "I8[]" <+> text (show s')
-- --------------------------------------------------------------------------
-- data sections
--
pprSection :: Section -> SDoc
pprSection s = case s of
Text -> section <+> doubleQuotes (ptext (sLit "text"))
Data -> section <+> doubleQuotes (ptext (sLit "data"))
ReadOnlyData -> section <+> doubleQuotes (ptext (sLit "readonly"))
ReadOnlyData16 -> section <+> doubleQuotes (ptext (sLit "readonly16"))
RelocatableReadOnlyData
-> section <+> doubleQuotes (ptext (sLit "relreadonly"))
UninitialisedData -> section <+> doubleQuotes (ptext (sLit "uninitialised"))
OtherSection s' -> section <+> doubleQuotes (text s')
where
section = ptext (sLit "section")
|
mcmaniac/ghc
|
compiler/cmm/PprCmmDecl.hs
|
bsd-3-clause
| 6,755 | 4 | 16 | 1,421 | 1,353 | 706 | 647 | 86 | 7 |
{-# LANGUAGE UnboxedTuples #-}
-- See Note [Float coercions (unlifted)] in Simplify
-- This one gave a CoreLint error when compiled optimised
--
-- See also #1718, of which this is a simplified version
module ShouldCompile where
bar :: Bool -> Int
bar x = case (case x of { True -> (# 2,3 #); False -> error "urk" }) of
(# p,q #) -> p+q
|
sdiehl/ghc
|
testsuite/tests/simplCore/should_compile/simpl018.hs
|
bsd-3-clause
| 356 | 0 | 10 | 85 | 75 | 44 | 31 | 5 | 2 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- Module : Network.AWS.ElastiCache.RevokeCacheSecurityGroupIngress
-- Copyright : (c) 2013-2014 Brendan Hay <[email protected]>
-- License : This Source Code Form is subject to the terms of
-- the Mozilla Public License, v. 2.0.
-- A copy of the MPL can be found in the LICENSE file or
-- you can obtain it at http://mozilla.org/MPL/2.0/.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : experimental
-- Portability : non-portable (GHC extensions)
--
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- | The /RevokeCacheSecurityGroupIngress/ action revokes ingress from a cache
-- security group. Use this action to disallow access from an Amazon EC2
-- security group that had been previously authorized.
--
-- <http://docs.aws.amazon.com/AmazonElastiCache/latest/APIReference/API_RevokeCacheSecurityGroupIngress.html>
module Network.AWS.ElastiCache.RevokeCacheSecurityGroupIngress
(
-- * Request
RevokeCacheSecurityGroupIngress
-- ** Request constructor
, revokeCacheSecurityGroupIngress
-- ** Request lenses
, rcsgiCacheSecurityGroupName
, rcsgiEC2SecurityGroupName
, rcsgiEC2SecurityGroupOwnerId
-- * Response
, RevokeCacheSecurityGroupIngressResponse
-- ** Response constructor
, revokeCacheSecurityGroupIngressResponse
-- ** Response lenses
, rcsgirCacheSecurityGroup
) where
import Network.AWS.Prelude
import Network.AWS.Request.Query
import Network.AWS.ElastiCache.Types
import qualified GHC.Exts
data RevokeCacheSecurityGroupIngress = RevokeCacheSecurityGroupIngress
{ _rcsgiCacheSecurityGroupName :: Text
, _rcsgiEC2SecurityGroupName :: Text
, _rcsgiEC2SecurityGroupOwnerId :: Text
} deriving (Eq, Ord, Read, Show)
-- | 'RevokeCacheSecurityGroupIngress' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'rcsgiCacheSecurityGroupName' @::@ 'Text'
--
-- * 'rcsgiEC2SecurityGroupName' @::@ 'Text'
--
-- * 'rcsgiEC2SecurityGroupOwnerId' @::@ 'Text'
--
revokeCacheSecurityGroupIngress :: Text -- ^ 'rcsgiCacheSecurityGroupName'
-> Text -- ^ 'rcsgiEC2SecurityGroupName'
-> Text -- ^ 'rcsgiEC2SecurityGroupOwnerId'
-> RevokeCacheSecurityGroupIngress
revokeCacheSecurityGroupIngress p1 p2 p3 = RevokeCacheSecurityGroupIngress
{ _rcsgiCacheSecurityGroupName = p1
, _rcsgiEC2SecurityGroupName = p2
, _rcsgiEC2SecurityGroupOwnerId = p3
}
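-- A minimal usage sketch (the group names and account number below are
-- invented for illustration):
--
-- > revokeCacheSecurityGroupIngress "my-cache-sg" "my-ec2-sg" "123456789012"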
-- | The name of the cache security group to revoke ingress from.
rcsgiCacheSecurityGroupName :: Lens' RevokeCacheSecurityGroupIngress Text
rcsgiCacheSecurityGroupName =
lens _rcsgiCacheSecurityGroupName
(\s a -> s { _rcsgiCacheSecurityGroupName = a })
-- | The name of the Amazon EC2 security group to revoke access from.
rcsgiEC2SecurityGroupName :: Lens' RevokeCacheSecurityGroupIngress Text
rcsgiEC2SecurityGroupName =
lens _rcsgiEC2SecurityGroupName
(\s a -> s { _rcsgiEC2SecurityGroupName = a })
-- | The AWS account number of the Amazon EC2 security group owner. Note that this
-- is not the same thing as an AWS access key ID - you must provide a valid AWS
-- account number for this parameter.
rcsgiEC2SecurityGroupOwnerId :: Lens' RevokeCacheSecurityGroupIngress Text
rcsgiEC2SecurityGroupOwnerId =
lens _rcsgiEC2SecurityGroupOwnerId
(\s a -> s { _rcsgiEC2SecurityGroupOwnerId = a })
newtype RevokeCacheSecurityGroupIngressResponse = RevokeCacheSecurityGroupIngressResponse
{ _rcsgirCacheSecurityGroup :: Maybe CacheSecurityGroup
} deriving (Eq, Read, Show)
-- | 'RevokeCacheSecurityGroupIngressResponse' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'rcsgirCacheSecurityGroup' @::@ 'Maybe' 'CacheSecurityGroup'
--
revokeCacheSecurityGroupIngressResponse :: RevokeCacheSecurityGroupIngressResponse
revokeCacheSecurityGroupIngressResponse = RevokeCacheSecurityGroupIngressResponse
{ _rcsgirCacheSecurityGroup = Nothing
}
rcsgirCacheSecurityGroup :: Lens' RevokeCacheSecurityGroupIngressResponse (Maybe CacheSecurityGroup)
rcsgirCacheSecurityGroup =
lens _rcsgirCacheSecurityGroup
(\s a -> s { _rcsgirCacheSecurityGroup = a })
instance ToPath RevokeCacheSecurityGroupIngress where
toPath = const "/"
instance ToQuery RevokeCacheSecurityGroupIngress where
toQuery RevokeCacheSecurityGroupIngress{..} = mconcat
[ "CacheSecurityGroupName" =? _rcsgiCacheSecurityGroupName
, "EC2SecurityGroupName" =? _rcsgiEC2SecurityGroupName
, "EC2SecurityGroupOwnerId" =? _rcsgiEC2SecurityGroupOwnerId
]
instance ToHeaders RevokeCacheSecurityGroupIngress
instance AWSRequest RevokeCacheSecurityGroupIngress where
type Sv RevokeCacheSecurityGroupIngress = ElastiCache
type Rs RevokeCacheSecurityGroupIngress = RevokeCacheSecurityGroupIngressResponse
request = post "RevokeCacheSecurityGroupIngress"
response = xmlResponse
instance FromXML RevokeCacheSecurityGroupIngressResponse where
parseXML = withElement "RevokeCacheSecurityGroupIngressResult" $ \x -> RevokeCacheSecurityGroupIngressResponse
<$> x .@? "CacheSecurityGroup"
|
kim/amazonka
|
amazonka-elasticache/gen/Network/AWS/ElastiCache/RevokeCacheSecurityGroupIngress.hs
|
mpl-2.0
| 5,735 | 0 | 9 | 1,073 | 559 | 341 | 218 | 75 | 1 |
module Dwarf.Types
( -- * Dwarf information
DwarfInfo(..)
, pprDwarfInfo
, pprAbbrevDecls
-- * Dwarf address range table
, DwarfARange(..)
, pprDwarfARange
-- * Dwarf frame
, DwarfFrame(..), DwarfFrameProc(..), DwarfFrameBlock(..)
, pprDwarfFrame
-- * Utilities
, pprByte
, pprHalf
, pprData4'
, pprDwWord
, pprWord
, pprLEBWord
, pprLEBInt
, wordAlign
, sectionOffset
)
where
import Debug
import CLabel
import CmmExpr ( GlobalReg(..) )
import Encoding
import FastString
import Outputable
import Platform
import Unique
import Reg
import Dwarf.Constants
import Data.Bits
import Data.List ( mapAccumL )
import qualified Data.Map as Map
import Data.Word
import Data.Char
import CodeGen.Platform
-- | Individual dwarf records. Each one will be encoded as an entry in
-- the .debug_info section.
data DwarfInfo
= DwarfCompileUnit { dwChildren :: [DwarfInfo]
, dwName :: String
, dwProducer :: String
, dwCompDir :: String
, dwLowLabel :: CLabel
, dwHighLabel :: CLabel
, dwLineLabel :: LitString }
| DwarfSubprogram { dwChildren :: [DwarfInfo]
, dwName :: String
, dwLabel :: CLabel }
| DwarfBlock { dwChildren :: [DwarfInfo]
, dwLabel :: CLabel
, dwMarker :: CLabel }
-- | Abbreviation codes used for encoding above records in the
-- .debug_info section.
data DwarfAbbrev
= DwAbbrNull -- ^ Pseudo, used for marking the end of lists
| DwAbbrCompileUnit
| DwAbbrSubprogram
| DwAbbrBlock
deriving (Eq, Enum)
-- | Generate assembly for the given abbreviation code
pprAbbrev :: DwarfAbbrev -> SDoc
pprAbbrev = pprLEBWord . fromIntegral . fromEnum
-- | Abbreviation declaration. This explains the binary encoding we
-- use for representing 'DwarfInfo'. Be aware that this must be updated
-- along with 'pprDwarfInfo'.
pprAbbrevDecls :: Bool -> SDoc
pprAbbrevDecls haveDebugLine =
let mkAbbrev abbr tag chld flds =
let fld (tag, form) = pprLEBWord tag $$ pprLEBWord form
in pprAbbrev abbr $$ pprLEBWord tag $$ pprByte chld $$
vcat (map fld flds) $$ pprByte 0 $$ pprByte 0
in dwarfAbbrevSection $$
ptext dwarfAbbrevLabel <> colon $$
mkAbbrev DwAbbrCompileUnit dW_TAG_compile_unit dW_CHILDREN_yes
([(dW_AT_name, dW_FORM_string)
, (dW_AT_producer, dW_FORM_string)
, (dW_AT_language, dW_FORM_data4)
, (dW_AT_comp_dir, dW_FORM_string)
, (dW_AT_use_UTF8, dW_FORM_flag_present) -- not represented in body
, (dW_AT_low_pc, dW_FORM_addr)
, (dW_AT_high_pc, dW_FORM_addr)
] ++
(if haveDebugLine
then [ (dW_AT_stmt_list, dW_FORM_data4) ]
else [])) $$
mkAbbrev DwAbbrSubprogram dW_TAG_subprogram dW_CHILDREN_yes
[ (dW_AT_name, dW_FORM_string)
, (dW_AT_MIPS_linkage_name, dW_FORM_string)
, (dW_AT_external, dW_FORM_flag)
, (dW_AT_low_pc, dW_FORM_addr)
, (dW_AT_high_pc, dW_FORM_addr)
, (dW_AT_frame_base, dW_FORM_block1)
] $$
mkAbbrev DwAbbrBlock dW_TAG_lexical_block dW_CHILDREN_yes
[ (dW_AT_name, dW_FORM_string)
, (dW_AT_low_pc, dW_FORM_addr)
, (dW_AT_high_pc, dW_FORM_addr)
] $$
pprByte 0
-- | Generate assembly for DWARF data
pprDwarfInfo :: Bool -> DwarfInfo -> SDoc
pprDwarfInfo haveSrc d
= pprDwarfInfoOpen haveSrc d $$
vcat (map (pprDwarfInfo haveSrc) (dwChildren d)) $$
pprDwarfInfoClose
-- | Prints assembler data corresponding to DWARF info records. Note
-- that the binary format of this is parameterized in @abbrevDecls@ and
-- has to be kept in sync.
pprDwarfInfoOpen :: Bool -> DwarfInfo -> SDoc
pprDwarfInfoOpen haveSrc (DwarfCompileUnit _ name producer compDir lowLabel
highLabel lineLbl) =
pprAbbrev DwAbbrCompileUnit
$$ pprString name
$$ pprString producer
$$ pprData4 dW_LANG_Haskell
$$ pprString compDir
$$ pprWord (ppr lowLabel)
$$ pprWord (ppr highLabel)
$$ if haveSrc
then sectionOffset (ptext lineLbl) (ptext dwarfLineLabel)
else empty
pprDwarfInfoOpen _ (DwarfSubprogram _ name label) = sdocWithDynFlags $ \df ->
pprAbbrev DwAbbrSubprogram
$$ pprString name
$$ pprString (renderWithStyle df (ppr label) (mkCodeStyle CStyle))
$$ pprFlag (externallyVisibleCLabel label)
$$ pprWord (ppr label)
$$ pprWord (ppr $ mkAsmTempEndLabel label)
$$ pprByte 1
$$ pprByte dW_OP_call_frame_cfa
pprDwarfInfoOpen _ (DwarfBlock _ label marker) = sdocWithDynFlags $ \df ->
pprAbbrev DwAbbrBlock
$$ pprString (renderWithStyle df (ppr label) (mkCodeStyle CStyle))
$$ pprWord (ppr marker)
$$ pprWord (ppr $ mkAsmTempEndLabel marker)
-- | Close a DWARF info record with children
pprDwarfInfoClose :: SDoc
pprDwarfInfoClose = pprAbbrev DwAbbrNull
-- | A DWARF address range. This is used by the debugger to quickly locate
-- which compilation unit a given address belongs to. This type assumes
-- a non-segmented address-space.
data DwarfARange
= DwarfARange
{ dwArngStartLabel :: CLabel
, dwArngEndLabel :: CLabel
, dwArngUnitUnique :: Unique
-- ^ from which the corresponding label in @.debug_info@ is derived
}
-- | Print assembler directives corresponding to a DWARF @.debug_aranges@
-- address table entry.
pprDwarfARange :: DwarfARange -> SDoc
pprDwarfARange arng = sdocWithPlatform $ \plat ->
let wordSize = platformWordSize plat
paddingSize = 4 :: Int
-- header is 12 bytes long.
-- entry is 8 bytes (32-bit platform) or 16 bytes (64-bit platform).
-- pad such that first entry begins at multiple of entry size.
pad n = vcat $ replicate n $ pprByte 0
initialLength = 8 + paddingSize + 2*2*wordSize
length = ppr (dwArngEndLabel arng)
<> char '-' <> ppr (dwArngStartLabel arng)
in pprDwWord (ppr initialLength)
$$ pprHalf 2
$$ sectionOffset (ppr $ mkAsmTempLabel $ dwArngUnitUnique arng)
(ptext dwarfInfoLabel)
$$ pprByte (fromIntegral wordSize)
$$ pprByte 0
$$ pad paddingSize
-- beginning of body
$$ pprWord (ppr $ dwArngStartLabel arng)
$$ pprWord length
-- terminus
$$ pprWord (char '0')
$$ pprWord (char '0')
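-- A worked example of the sizes above (numbers only, not normative): on a
-- 64-bit platform the word size is 8, so the initial length comes out as
-- 8 + 4 + 2*2*8 = 44 bytes; counting the 4-byte length field itself, the
-- 12-byte header plus 4 bytes of padding puts the first entry at offset 16,
-- a multiple of the 16-byte entry size.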
-- | Information about unwind instructions for a procedure. This
-- corresponds to a "Common Information Entry" (CIE) in DWARF.
data DwarfFrame
= DwarfFrame
{ dwCieLabel :: CLabel
, dwCieInit :: UnwindTable
, dwCieProcs :: [DwarfFrameProc]
}
-- | Unwind instructions for an individual procedure. Corresponds to a
-- "Frame Description Entry" (FDE) in DWARF.
data DwarfFrameProc
= DwarfFrameProc
{ dwFdeProc :: CLabel
, dwFdeHasInfo :: Bool
, dwFdeBlocks :: [DwarfFrameBlock]
-- ^ List of blocks. Order must match asm!
}
-- | Unwind instructions for a block. Will become part of the
-- containing FDE.
data DwarfFrameBlock
= DwarfFrameBlock
{ dwFdeBlock :: CLabel
, dwFdeBlkHasInfo :: Bool
, dwFdeUnwind :: UnwindTable
}
-- | Header for the .debug_frame section. Here we emit the "Common
-- Information Entry" record that etablishes general call frame
-- parameters and the default stack layout.
pprDwarfFrame :: DwarfFrame -> SDoc
pprDwarfFrame DwarfFrame{dwCieLabel=cieLabel,dwCieInit=cieInit,dwCieProcs=procs}
= sdocWithPlatform $ \plat ->
let cieStartLabel= mkAsmTempDerivedLabel cieLabel (fsLit "_start")
cieEndLabel = mkAsmTempEndLabel cieLabel
length = ppr cieEndLabel <> char '-' <> ppr cieStartLabel
spReg = dwarfGlobalRegNo plat Sp
retReg = dwarfReturnRegNo plat
wordSize = platformWordSize plat
pprInit (g, uw) = pprSetUnwind plat g (Nothing, uw)
in vcat [ ppr cieLabel <> colon
, pprData4' length -- Length of CIE
, ppr cieStartLabel <> colon
, pprData4' (ptext (sLit "-1"))
-- Common Information Entry marker (-1 = 0xf..f)
, pprByte 3 -- CIE version (we require DWARF 3)
, pprByte 0 -- Augmentation (none)
, pprByte 1 -- Code offset multiplicator
, pprByte (128-fromIntegral wordSize)
-- Data offset multiplicator
-- (stacks grow down => "-w" in signed LEB128)
, pprByte retReg -- virtual register holding return address
] $$
-- Initial unwind table
vcat (map pprInit $ Map.toList cieInit) $$
vcat [ -- RET = *CFA
pprByte (dW_CFA_offset+retReg)
, pprByte 0
-- Sp' = CFA
-- (we need to set this manually as our Sp register is
-- often not the architecture's default stack register)
, pprByte dW_CFA_val_offset
, pprLEBWord (fromIntegral spReg)
, pprLEBWord 0
] $$
wordAlign $$
ppr cieEndLabel <> colon $$
-- Procedure unwind tables
vcat (map (pprFrameProc cieLabel cieInit) procs)
-- | Writes a "Frame Description Entry" for a procedure. This consists
-- mainly of referencing the CIE and writing state machine
-- instructions to describe how the frame base (CFA) changes.
pprFrameProc :: CLabel -> UnwindTable -> DwarfFrameProc -> SDoc
pprFrameProc frameLbl initUw (DwarfFrameProc procLbl hasInfo blocks)
= let fdeLabel = mkAsmTempDerivedLabel procLbl (fsLit "_fde")
fdeEndLabel = mkAsmTempDerivedLabel procLbl (fsLit "_fde_end")
procEnd = mkAsmTempEndLabel procLbl
ifInfo str = if hasInfo then text str else empty
-- see [Note: Info Offset]
in vcat [ pprData4' (ppr fdeEndLabel <> char '-' <> ppr fdeLabel)
, ppr fdeLabel <> colon
, pprData4' (ppr frameLbl <> char '-' <>
ptext dwarfFrameLabel) -- Reference to CIE
, pprWord (ppr procLbl <> ifInfo "-1") -- Code pointer
, pprWord (ppr procEnd <> char '-' <>
ppr procLbl <> ifInfo "+1") -- Block byte length
] $$
vcat (snd $ mapAccumL pprFrameBlock initUw blocks) $$
wordAlign $$
ppr fdeEndLabel <> colon
-- | Generates unwind information for a block. We only generate
-- instructions where unwind information actually changes. This small
-- optimisation saves a lot of space, as subsequent blocks often have
-- the same unwind information.
pprFrameBlock :: UnwindTable -> DwarfFrameBlock -> (UnwindTable, SDoc)
pprFrameBlock oldUws (DwarfFrameBlock blockLbl hasInfo uws)
| uws == oldUws
= (oldUws, empty)
| otherwise
= (,) uws $ sdocWithPlatform $ \plat ->
let lbl = ppr blockLbl <> if hasInfo then text "-1" else empty
-- see [Note: Info Offset]
isChanged g v | old == Just v = Nothing
| otherwise = Just (old, v)
where old = Map.lookup g oldUws
changed = Map.toList $ Map.mapMaybeWithKey isChanged uws
died = Map.toList $ Map.difference oldUws uws
in pprByte dW_CFA_set_loc $$ pprWord lbl $$
vcat (map (uncurry $ pprSetUnwind plat) changed) $$
vcat (map (pprUndefUnwind plat . fst) died)
-- [Note: Info Offset]
--
-- GDB was pretty much written with C-like programs in mind, and as a
-- result they assume that once you have a return address, it is a
-- good idea to look at (PC-1) to unwind further - as that's where the
-- "call" instruction is supposed to be.
--
-- Now on one hand, code generated by GHC looks nothing like what GDB
-- expects, and in fact going up from a return pointer is guaranteed
-- to land us inside an info table! On the other hand, that actually
-- gives us some wiggle room, as we expect IP to never *actually* end
-- up inside the info table, so we can "cheat" by putting whatever GDB
-- expects to see there. This is probably pretty safe, as GDB cannot
-- assume (PC-1) to be a valid code pointer in the first place - and I
-- have seen no code trying to correct this.
--
-- Note that this will not prevent GDB from failing to look up the
-- correct function name for the frame, as that uses the symbol table,
-- which we cannot manipulate as easily.
-- | Get DWARF register ID for a given GlobalReg
dwarfGlobalRegNo :: Platform -> GlobalReg -> Word8
dwarfGlobalRegNo p = maybe 0 (dwarfRegNo p . RegReal) . globalRegMaybe p
-- | Generate code for setting the unwind information for a register,
-- optimized using its known old value in the table. Note that "Sp" is
-- special: We see it as synonym for the CFA.
pprSetUnwind :: Platform -> GlobalReg -> (Maybe UnwindExpr, UnwindExpr) -> SDoc
pprSetUnwind _ Sp (Just (UwReg s _), UwReg s' o') | s == s'
= if o' >= 0
then pprByte dW_CFA_def_cfa_offset $$ pprLEBWord (fromIntegral o')
else pprByte dW_CFA_def_cfa_offset_sf $$ pprLEBInt o'
pprSetUnwind plat Sp (_, UwReg s' o')
= if o' >= 0
then pprByte dW_CFA_def_cfa $$
pprLEBWord (fromIntegral $ dwarfGlobalRegNo plat s') $$
pprLEBWord (fromIntegral o')
else pprByte dW_CFA_def_cfa_sf $$
pprLEBWord (fromIntegral $ dwarfGlobalRegNo plat s') $$
pprLEBInt o'
pprSetUnwind _ Sp (_, uw)
= pprByte dW_CFA_def_cfa_expression $$ pprUnwindExpr False uw
pprSetUnwind plat g (_, UwDeref (UwReg Sp o))
| o < 0 && ((-o) `mod` platformWordSize plat) == 0 -- expected case
= pprByte (dW_CFA_offset + dwarfGlobalRegNo plat g) $$
pprLEBWord (fromIntegral ((-o) `div` platformWordSize plat))
| otherwise
= pprByte dW_CFA_offset_extended_sf $$
pprLEBWord (fromIntegral (dwarfGlobalRegNo plat g)) $$
pprLEBInt o
pprSetUnwind plat g (_, UwDeref uw)
= pprByte dW_CFA_expression $$
pprLEBWord (fromIntegral (dwarfGlobalRegNo plat g)) $$
pprUnwindExpr True uw
pprSetUnwind plat g (_, uw)
= pprByte dW_CFA_val_expression $$
pprLEBWord (fromIntegral (dwarfGlobalRegNo plat g)) $$
pprUnwindExpr True uw
-- | Generates a DWARF expression for the given unwind expression. If
-- @spIsCFA@ is true, we see @Sp@ as the frame base CFA where it gets
-- mentioned.
pprUnwindExpr :: Bool -> UnwindExpr -> SDoc
pprUnwindExpr spIsCFA expr
= sdocWithPlatform $ \plat ->
let ppr (UwConst i)
| i >= 0 && i < 32 = pprByte (dW_OP_lit0 + fromIntegral i)
| otherwise = pprByte dW_OP_consts $$ pprLEBInt i -- lazy...
ppr (UwReg Sp i) | spIsCFA
= if i == 0
then pprByte dW_OP_call_frame_cfa
else ppr (UwPlus (UwReg Sp 0) (UwConst i))
ppr (UwReg g i) = pprByte (dW_OP_breg0+dwarfGlobalRegNo plat g) $$
pprLEBInt i
ppr (UwDeref u) = ppr u $$ pprByte dW_OP_deref
ppr (UwPlus u1 u2) = ppr u1 $$ ppr u2 $$ pprByte dW_OP_plus
ppr (UwMinus u1 u2) = ppr u1 $$ ppr u2 $$ pprByte dW_OP_minus
ppr (UwTimes u1 u2) = ppr u1 $$ ppr u2 $$ pprByte dW_OP_mul
in ptext (sLit "\t.byte 1f-.-1") $$
ppr expr $$
ptext (sLit "1:")
-- | Generate code for re-setting the unwind information for a
-- register to "undefined"
pprUndefUnwind :: Platform -> GlobalReg -> SDoc
pprUndefUnwind _ Sp = panic "pprUndefUnwind Sp" -- should never happen
pprUndefUnwind plat g = pprByte dW_CFA_undefined $$
pprLEBWord (fromIntegral $ dwarfGlobalRegNo plat g)
-- | Align assembly at (machine) word boundary
wordAlign :: SDoc
wordAlign = sdocWithPlatform $ \plat ->
ptext (sLit "\t.align ") <> case platformOS plat of
OSDarwin -> case platformWordSize plat of
8 -> text "3"
4 -> text "2"
_other -> error "wordAlign: Unsupported word size!"
_other -> ppr (platformWordSize plat)
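-- For example (sketch): on 64-bit Darwin this emits "\t.align 3", since the
-- argument there is an exponent (2^3 = 8 bytes), whereas the other platforms
-- are simply given the word size, e.g. "\t.align 8".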
-- | Assembly for a single byte of constant DWARF data
pprByte :: Word8 -> SDoc
pprByte x = ptext (sLit "\t.byte ") <> ppr (fromIntegral x :: Word)
-- | Assembly for a two-byte constant integer
pprHalf :: Word16 -> SDoc
pprHalf x = ptext (sLit "\t.hword ") <> ppr (fromIntegral x :: Word)
-- | Assembly for a constant DWARF flag
pprFlag :: Bool -> SDoc
pprFlag f = pprByte (if f then 0xff else 0x00)
-- | Assembly for 4 bytes of dynamic DWARF data
pprData4' :: SDoc -> SDoc
pprData4' x = ptext (sLit "\t.long ") <> x
-- | Assembly for 4 bytes of constant DWARF data
pprData4 :: Word -> SDoc
pprData4 = pprData4' . ppr
-- | Assembly for a DWARF word of dynamic data. This means 32 bit, as
-- we are generating 32 bit DWARF.
pprDwWord :: SDoc -> SDoc
pprDwWord = pprData4'
-- | Assembly for a machine word of dynamic data. Depends on the
-- architecture we are currently generating code for.
pprWord :: SDoc -> SDoc
pprWord s = (<> s) . sdocWithPlatform $ \plat ->
case platformWordSize plat of
4 -> ptext (sLit "\t.long ")
8 -> ptext (sLit "\t.quad ")
n -> panic $ "pprWord: Unsupported target platform word length " ++
show n ++ "!"
-- | Prints a number in "little endian base 128" format. The idea is
-- to optimize for small numbers by stopping once all further bytes
-- would be 0. The highest bit in every byte signals whether there
-- are further bytes to read.
pprLEBWord :: Word -> SDoc
pprLEBWord x | x < 128 = pprByte (fromIntegral x)
| otherwise = pprByte (fromIntegral $ 128 .|. (x .&. 127)) $$
pprLEBWord (x `shiftR` 7)
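-- A worked example (illustrative only): for 300 the low seven bits (44) are
-- emitted with the continuation bit set, giving 172, followed by the
-- remaining bits (2); so the output is ".byte 172" and then ".byte 2".
--
-- > pprLEBWord 300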
-- | Same as @pprLEBWord@, but for a signed number
pprLEBInt :: Int -> SDoc
pprLEBInt x | x >= -64 && x < 64
= pprByte (fromIntegral (x .&. 127))
| otherwise = pprByte (fromIntegral $ 128 .|. (x .&. 127)) $$
pprLEBInt (x `shiftR` 7)
-- | Generates a dynamic null-terminated string. If required the
-- caller needs to make sure that the string is escaped properly.
pprString' :: SDoc -> SDoc
pprString' str = ptext (sLit "\t.asciz \"") <> str <> char '"'
-- | Generate a string constant. We take care to escape the string.
pprString :: String -> SDoc
pprString str
= pprString' $ hcat $ map escapeChar $
if utf8EncodedLength str == length str
then str
else map (chr . fromIntegral) $ bytesFS $ mkFastString str
-- | Escape a single non-unicode character
escapeChar :: Char -> SDoc
escapeChar '\\' = ptext (sLit "\\\\")
escapeChar '\"' = ptext (sLit "\\\"")
escapeChar '\n' = ptext (sLit "\\n")
escapeChar c
| isAscii c && isPrint c && c /= '?' -- prevents trigraph warnings
= char c
| otherwise
= char '\\' <> char (intToDigit (ch `div` 64)) <>
char (intToDigit ((ch `div` 8) `mod` 8)) <>
char (intToDigit (ch `mod` 8))
where ch = ord c
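-- A couple of worked examples (illustrative only): escapeChar '\n' yields the
-- two characters \n, and a non-printable character such as '\SOH' (code 1)
-- falls through to the octal case and is rendered as \001.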
-- | Generate an offset into another section. This is tricky because
-- this is handled differently depending on platform: Mac OS expects
-- us to calculate the offset using assembler arithmetic. Linux expects
-- us to just reference the target directly, and will figure out on
-- its own that we actually need an offset. Finally, Windows has
-- a special directive to refer to relative offsets. Fun.
sectionOffset :: SDoc -> SDoc -> SDoc
sectionOffset target section = sdocWithPlatform $ \plat ->
case platformOS plat of
OSDarwin -> pprDwWord (target <> char '-' <> section)
OSMinGW32 -> text "\t.secrel32 " <> target
_other -> pprDwWord target
|
anton-dessiatov/ghc
|
compiler/nativeGen/Dwarf/Types.hs
|
bsd-3-clause
| 19,485 | 0 | 21 | 5,060 | 4,301 | 2,255 | 2,046 | 344 | 8 |
module ListCompIn2 where
main
= sum [x + 4 | (x, y, z) <- zipthree [1, 3 ..]
['h' .. 'o']
[False ..],
x > 0]
zipthree :: [a] -> [b] -> [c] -> [(a, b, c)]
zipthree
= \ xs ys zs ->
case (xs, ys, zs) of
([], _, _) -> []
(_, [], _) -> []
(_, _, []) -> []
((a : as), (b : bs), (c : cs))
-> (a, b, c) : (zipthree as bs cs)
|
SAdams601/HaRe
|
old/testing/removeDef/ListCompIn2_AstOut.hs
|
bsd-3-clause
| 380 | 10 | 11 | 145 | 259 | 151 | 108 | 15 | 4 |
-----------------------------------------------------------------------------
-- |
-- Module : Distribution.Simple.Program.Builtin
-- Copyright : Isaac Jones 2006, Duncan Coutts 2007-2009
--
-- Maintainer : [email protected]
-- Portability : portable
--
-- The module defines all the known built-in 'Program's.
--
-- Where possible we try to find their version numbers.
--
module Distribution.Simple.Program.Builtin (
-- * The collection of unconfigured and configured programs
builtinPrograms,
-- * Programs that Cabal knows about
ghcProgram,
ghcPkgProgram,
runghcProgram,
ghcjsProgram,
ghcjsPkgProgram,
lhcProgram,
lhcPkgProgram,
hmakeProgram,
jhcProgram,
haskellSuiteProgram,
haskellSuitePkgProgram,
uhcProgram,
gccProgram,
arProgram,
stripProgram,
happyProgram,
alexProgram,
hsc2hsProgram,
c2hsProgram,
cpphsProgram,
hscolourProgram,
doctestProgram,
haddockProgram,
greencardProgram,
ldProgram,
tarProgram,
cppProgram,
pkgConfigProgram,
hpcProgram,
) where
import Prelude ()
import Distribution.Compat.Prelude
import Distribution.Simple.Program.Find
import Distribution.Simple.Program.Internal
import Distribution.Simple.Program.Run
import Distribution.Simple.Program.Types
import Distribution.Simple.Utils
import Distribution.Compat.Exception
import Distribution.Verbosity
import Distribution.Version
import qualified Data.Map as Map
-- ------------------------------------------------------------
-- * Known programs
-- ------------------------------------------------------------
-- | The default list of programs.
-- These programs are typically used internally to Cabal.
builtinPrograms :: [Program]
builtinPrograms =
[
-- compilers and related progs
ghcProgram
, runghcProgram
, ghcPkgProgram
, ghcjsProgram
, ghcjsPkgProgram
, haskellSuiteProgram
, haskellSuitePkgProgram
, hmakeProgram
, jhcProgram
, lhcProgram
, lhcPkgProgram
, uhcProgram
, hpcProgram
-- preprocessors
, hscolourProgram
, doctestProgram
, haddockProgram
, happyProgram
, alexProgram
, hsc2hsProgram
, c2hsProgram
, cpphsProgram
, greencardProgram
-- platform toolchain
, gccProgram
, arProgram
, stripProgram
, ldProgram
, tarProgram
-- configuration tools
, pkgConfigProgram
]
ghcProgram :: Program
ghcProgram = (simpleProgram "ghc") {
programFindVersion = findProgramVersion "--numeric-version" id,
-- Workaround for https://ghc.haskell.org/trac/ghc/ticket/8825
-- (spurious warning on non-english locales)
programPostConf = \_verbosity ghcProg ->
do let ghcProg' = ghcProg {
programOverrideEnv = ("LANGUAGE", Just "en")
: programOverrideEnv ghcProg
}
-- Only the 7.8 branch seems to be affected. Fixed in 7.8.4.
affectedVersionRange = intersectVersionRanges
(laterVersion $ mkVersion [7,8,0])
(earlierVersion $ mkVersion [7,8,4])
return $ maybe ghcProg
(\v -> if withinRange v affectedVersionRange
then ghcProg' else ghcProg)
(programVersion ghcProg)
}
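-- As a worked example of the workaround above (version numbers are
-- illustrative): a configured GHC reporting 7.8.2 falls inside the affected
-- range (> 7.8.0 && < 7.8.4) and so gets the extra LANGUAGE=en environment
-- override, while e.g. a 7.10.2 GHC is returned unchanged.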
runghcProgram :: Program
runghcProgram = (simpleProgram "runghc") {
programFindVersion = findProgramVersion "--version" $ \str ->
case words str of
-- "runghc 7.10.3"
(_:ver:_) -> ver
_ -> ""
}
ghcPkgProgram :: Program
ghcPkgProgram = (simpleProgram "ghc-pkg") {
programFindVersion = findProgramVersion "--version" $ \str ->
-- Invoking "ghc-pkg --version" gives a string like
-- "GHC package manager version 6.4.1"
case words str of
(_:_:_:_:ver:_) -> ver
_ -> ""
}
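-- For instance (sketch): for 'ghcPkgProgram' above, words applied to
-- "GHC package manager version 6.4.1" gives
-- ["GHC","package","manager","version","6.4.1"], so the pattern selects
-- "6.4.1".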
ghcjsProgram :: Program
ghcjsProgram = (simpleProgram "ghcjs") {
programFindVersion = findProgramVersion "--numeric-ghcjs-version" id
}
-- note: the reported version is the version of the GHC that ghcjs-pkg was built with
ghcjsPkgProgram :: Program
ghcjsPkgProgram = (simpleProgram "ghcjs-pkg") {
programFindVersion = findProgramVersion "--version" $ \str ->
-- Invoking "ghcjs-pkg --version" gives a string like
-- "GHCJS package manager version 6.4.1"
case words str of
(_:_:_:_:ver:_) -> ver
_ -> ""
}
lhcProgram :: Program
lhcProgram = (simpleProgram "lhc") {
programFindVersion = findProgramVersion "--numeric-version" id
}
lhcPkgProgram :: Program
lhcPkgProgram = (simpleProgram "lhc-pkg") {
programFindVersion = findProgramVersion "--version" $ \str ->
-- Invoking "lhc-pkg --version" gives a string like
-- "LHC package manager version 0.7"
case words str of
(_:_:_:_:ver:_) -> ver
_ -> ""
}
hmakeProgram :: Program
hmakeProgram = (simpleProgram "hmake") {
programFindVersion = findProgramVersion "--version" $ \str ->
-- Invoking "hmake --version" gives a string line
-- "/usr/local/bin/hmake: 3.13 (2006-11-01)"
case words str of
(_:ver:_) -> ver
_ -> ""
}
jhcProgram :: Program
jhcProgram = (simpleProgram "jhc") {
programFindVersion = findProgramVersion "--version" $ \str ->
-- invoking "jhc --version" gives a string like
-- "jhc 0.3.20080208 (wubgipkamcep-2)
-- compiled by ghc-6.8 on a x86_64 running linux"
case words str of
(_:ver:_) -> ver
_ -> ""
}
uhcProgram :: Program
uhcProgram = (simpleProgram "uhc") {
programFindVersion = findProgramVersion "--version-dotted" id
}
hpcProgram :: Program
hpcProgram = (simpleProgram "hpc")
{
programFindVersion = findProgramVersion "version" $ \str ->
case words str of
(_ : _ : _ : ver : _) -> ver
_ -> ""
}
-- This represents a haskell-suite compiler. Of course, the compiler
-- itself probably is not called "haskell-suite", so this is not a real
-- program. (But we don't know statically the name of the actual compiler,
-- so this is the best we can do.)
--
-- Having this Program value serves two purposes:
--
-- 1. We can accept options for the compiler in the form of
--
-- --haskell-suite-option(s)=...
--
-- 2. We can find a program later using this static id (with
-- requireProgram).
--
-- The path to the real compiler is found and recorded in the ProgramDb
-- during the configure phase.
haskellSuiteProgram :: Program
haskellSuiteProgram = (simpleProgram "haskell-suite") {
-- pretend that the program exists, otherwise it won't be in the
-- "configured" state
programFindLocation = \_verbosity _searchPath ->
return $ Just ("haskell-suite-dummy-location", [])
}
-- This represents a haskell-suite package manager. See the comments for
-- haskellSuiteProgram.
haskellSuitePkgProgram :: Program
haskellSuitePkgProgram = (simpleProgram "haskell-suite-pkg") {
programFindLocation = \_verbosity _searchPath ->
return $ Just ("haskell-suite-pkg-dummy-location", [])
}
happyProgram :: Program
happyProgram = (simpleProgram "happy") {
programFindVersion = findProgramVersion "--version" $ \str ->
-- Invoking "happy --version" gives a string like
-- "Happy Version 1.16 Copyright (c) ...."
case words str of
(_:_:ver:_) -> ver
_ -> ""
}
alexProgram :: Program
alexProgram = (simpleProgram "alex") {
programFindVersion = findProgramVersion "--version" $ \str ->
-- Invoking "alex --version" gives a string like
-- "Alex version 2.1.0, (c) 2003 Chris Dornan and Simon Marlow"
case words str of
(_:_:ver:_) -> takeWhile (\x -> isDigit x || x == '.') ver
_ -> ""
}
gccProgram :: Program
gccProgram = (simpleProgram "gcc") {
programFindVersion = findProgramVersion "-dumpversion" id
}
arProgram :: Program
arProgram = simpleProgram "ar"
stripProgram :: Program
stripProgram = (simpleProgram "strip") {
programFindVersion = \verbosity ->
findProgramVersion "--version" stripExtractVersion (lessVerbose verbosity)
}
hsc2hsProgram :: Program
hsc2hsProgram = (simpleProgram "hsc2hs") {
programFindVersion =
findProgramVersion "--version" $ \str ->
-- Invoking "hsc2hs --version" gives a string like "hsc2hs version 0.66"
case words str of
(_:_:ver:_) -> ver
_ -> ""
}
c2hsProgram :: Program
c2hsProgram = (simpleProgram "c2hs") {
programFindVersion = findProgramVersion "--numeric-version" id
}
cpphsProgram :: Program
cpphsProgram = (simpleProgram "cpphs") {
programFindVersion = findProgramVersion "--version" $ \str ->
-- Invoking "cpphs --version" gives a string like "cpphs 1.3"
case words str of
(_:ver:_) -> ver
_ -> ""
}
hscolourProgram :: Program
hscolourProgram = (simpleProgram "hscolour") {
programFindLocation = \v p -> findProgramOnSearchPath v p "HsColour",
programFindVersion = findProgramVersion "-version" $ \str ->
-- Invoking "HsColour -version" gives a string like "HsColour 1.7"
case words str of
(_:ver:_) -> ver
_ -> ""
}
-- TODO: Ensure that doctest is built against the same GHC as the one
-- that's being used. Same for haddock. @phadej pointed this out.
doctestProgram :: Program
doctestProgram = (simpleProgram "doctest") {
programFindLocation = \v p -> findProgramOnSearchPath v p "doctest"
, programFindVersion = findProgramVersion "--version" $ \str ->
-- "doctest version 0.11.2"
case words str of
(_:_:ver:_) -> ver
_ -> ""
}
haddockProgram :: Program
haddockProgram = (simpleProgram "haddock") {
programFindVersion = findProgramVersion "--version" $ \str ->
-- Invoking "haddock --version" gives a string like
-- "Haddock version 0.8, (c) Simon Marlow 2006"
case words str of
(_:_:ver:_) -> takeWhile (`elem` ('.':['0'..'9'])) ver
_ -> ""
}
greencardProgram :: Program
greencardProgram = simpleProgram "greencard"
ldProgram :: Program
ldProgram = simpleProgram "ld"
tarProgram :: Program
tarProgram = (simpleProgram "tar") {
-- See #1901. Some versions of 'tar' (OpenBSD, NetBSD, ...) don't support the
-- '--format' option.
programPostConf = \verbosity tarProg -> do
tarHelpOutput <- getProgramInvocationOutput
verbosity (programInvocation tarProg ["--help"])
-- Some versions of tar don't support '--help'.
`catchIO` (\_ -> return "")
let k = "Supports --format"
v = if ("--format" `isInfixOf` tarHelpOutput) then "YES" else "NO"
m = Map.insert k v (programProperties tarProg)
return $ tarProg { programProperties = m }
}
cppProgram :: Program
cppProgram = simpleProgram "cpp"
pkgConfigProgram :: Program
pkgConfigProgram = (simpleProgram "pkg-config") {
programFindVersion = findProgramVersion "--version" id
}
|
mydaum/cabal
|
Cabal/Distribution/Simple/Program/Builtin.hs
|
bsd-3-clause
| 11,157 | 0 | 17 | 2,736 | 2,078 | 1,187 | 891 | 222 | 2 |
{-# LANGUAGE OverloadedStrings #-}
-- |Generate a Storable instance for ROS msg types.
module Instances.Storable (genStorableInstance) where
import Control.Monad ((>=>))
import Data.ByteString.Char8 (ByteString, pack)
import qualified Data.ByteString.Char8 as B
import Data.Maybe (fromJust)
import Types
import Analysis
rosPoke :: MsgType -> ByteString
rosPoke (RFixedArray _ t) = B.concat [ "V.mapM_ (", rosPoke t, ")" ]
rosPoke _ = "SM.poke"
rosPeek :: MsgType -> ByteString
rosPeek (RFixedArray n t) = B.concat [ "V.replicateM ", B.pack (show n),
" (", rosPeek t, ")" ]
rosPeek _ = "SM.peek"
genStorableInstance :: Msg -> MsgInfo (ByteString, ByteString)
genStorableInstance msg
| null (fields msg) = return ("import Foreign.Storable (Storable(..))\n",
singletonStorable)
| otherwise = isFlat msg >>= aux
where aux False = return ("", "")
aux True = do sz <- totalSize msg
return (smImp, stInst sz)
--peekFields = map (const "SM.peek") (fields msg)
peekFields = map (rosPeek . fieldType) (fields msg)
-- pokeFields = map ((\n -> B.concat ["SM.poke (", n, " obj')"]) .
-- fieldName)
-- (fields msg)
pokeFields = map (\f -> B.concat [ rosPoke (fieldType f)
, " (", fieldName f
, " obj')"])
(fields msg)
name = pack (shortName msg)
stInst sz = B.concat ["instance Storable ", name, " where\n",
" sizeOf _ = ", sz,"\n",
" alignment _ = 8\n",
" peek = SM.runStorable (", name, " <$> ",
B.intercalate " <*> " peekFields, ")\n",
" poke ptr' obj' = ",
"SM.runStorable store' ptr'\n",
" where store' = ",
B.intercalate " *> " pokeFields,"\n\n"]
smImp = B.concat [ "import Foreign.Storable (Storable(..))\n"
, "import qualified Ros.Internal.Util.StorableMonad"
, " as SM\n" ]
singletonStorable = B.concat [ "instance Storable ", name, " where\n"
, " sizeOf _ = 1\n"
, " alignment _ = 1\n"
, " peek _ = pure ", name, "\n"
, " poke _ _ = pure ()\n\n" ]
totalSize :: Msg -> MsgInfo ByteString
totalSize msg = B.intercalate sep `fmap` mapM (aux . fieldType) (fields msg)
where aux = getTypeInfo >=> return . fromJust . size
sep = B.append " +\n" $ B.replicate 13 ' '
|
bitemyapp/roshask
|
src/executable/Instances/Storable.hs
|
bsd-3-clause
| 2,917 | 0 | 14 | 1,198 | 616 | 337 | 279 | -1 | -1 |
{-# LANGUAGE CPP #-}
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveFunctor #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE KindSignatures #-}
{-# LANGUAGE MagicHash #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE PolyKinds #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE StandaloneDeriving #-}
{-# LANGUAGE Trustworthy #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeInType #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE TypeSynonymInstances #-}
{-# LANGUAGE UndecidableInstances #-}
-----------------------------------------------------------------------------
-- |
-- Module : GHC.Generics
-- Copyright : (c) Universiteit Utrecht 2010-2011, University of Oxford 2012-2014
-- License : see libraries/base/LICENSE
--
-- Maintainer : [email protected]
-- Stability : internal
-- Portability : non-portable
--
-- @since 4.6.0.0
--
-- If you're using @GHC.Generics@, you should consider using the
-- <http://hackage.haskell.org/package/generic-deriving> package, which
-- contains many useful generic functions.
module GHC.Generics (
-- * Introduction
--
-- |
--
-- Datatype-generic functions are based on the idea of converting values of
-- a datatype @T@ into corresponding values of a (nearly) isomorphic type @'Rep' T@.
-- The type @'Rep' T@ is
-- built from a limited set of type constructors, all provided by this module. A
-- datatype-generic function is then an overloaded function with instances
-- for most of these type constructors, together with a wrapper that performs
-- the mapping between @T@ and @'Rep' T@. By using this technique, we merely need
-- a few generic instances in order to implement functionality that works for any
-- representable type.
--
-- Representable types are collected in the 'Generic' class, which defines the
-- associated type 'Rep' as well as conversion functions 'from' and 'to'.
-- Typically, you will not define 'Generic' instances by hand, but have the compiler
-- derive them for you.
-- ** Representing datatypes
--
-- |
--
-- The key to defining your own datatype-generic functions is to understand how to
-- represent datatypes using the given set of type constructors.
--
-- Let us look at an example first:
--
-- @
-- data Tree a = Leaf a | Node (Tree a) (Tree a)
-- deriving 'Generic'
-- @
--
-- The above declaration (which requires the language pragma @DeriveGeneric@)
-- causes the following representation to be generated:
--
-- @
-- instance 'Generic' (Tree a) where
-- type 'Rep' (Tree a) =
-- 'D1' ('MetaData \"Tree\" \"Main\" \"package-name\" 'False)
-- ('C1' ('MetaCons \"Leaf\" 'PrefixI 'False)
--             ('S1' ('MetaSel 'Nothing
-- 'NoSourceUnpackedness
-- 'NoSourceStrictness
-- 'DecidedLazy)
-- ('Rec0' a))
-- ':+:'
-- 'C1' ('MetaCons \"Node\" 'PrefixI 'False)
-- ('S1' ('MetaSel 'Nothing
-- 'NoSourceUnpackedness
-- 'NoSourceStrictness
-- 'DecidedLazy)
-- ('Rec0' (Tree a))
-- ':*:'
-- 'S1' ('MetaSel 'Nothing
-- 'NoSourceUnpackedness
-- 'NoSourceStrictness
-- 'DecidedLazy)
-- ('Rec0' (Tree a))))
-- ...
-- @
--
-- /Hint:/ You can obtain information about the code being generated from GHC by passing
-- the @-ddump-deriv@ flag. In GHCi, you can expand a type family such as 'Rep' using
-- the @:kind!@ command.
--
-- This is a lot of information! However, most of it is actually merely meta-information
-- that makes names of datatypes, constructors, and more available at the type level.
--
-- Here is a reduced representation for 'Tree' with nearly all meta-information removed,
-- for now keeping only the most essential aspects:
--
-- @
-- instance 'Generic' (Tree a) where
-- type 'Rep' (Tree a) =
-- 'Rec0' a
-- ':+:'
-- ('Rec0' (Tree a) ':*:' 'Rec0' (Tree a))
-- @
--
-- The @Tree@ datatype has two constructors. The representation of individual constructors
-- is combined using the binary type constructor ':+:'.
--
-- The first constructor consists of a single field, which is the parameter @a@. This is
-- represented as @'Rec0' a@.
--
-- The second constructor consists of two fields. Each is a recursive field of type @Tree a@,
-- represented as @'Rec0' (Tree a)@. Representations of individual fields are combined using
-- the binary type constructor ':*:'.
--
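-- To get a rough feel for the conversion functions (a sketch only; the
-- metadata wrappers of the full representation are omitted for readability):
--
-- @
-- 'from' (Leaf x)   is essentially  'L1' ('K1' x)
-- 'from' (Node l r) is essentially  'R1' ('K1' l ':*:' 'K1' r)
-- @
--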
-- Now let us explain the additional tags being used in the complete representation:
--
-- * The @'S1' ('MetaSel 'Nothing 'NoSourceUnpackedness 'NoSourceStrictness
-- 'DecidedLazy)@ tag indicates several things. The @'Nothing@ indicates
-- that there is no record field selector associated with this field of
-- the constructor (if there were, it would have been marked @'Just
-- \"recordName\"@ instead). The other types contain meta-information on
-- the field's strictness:
--
-- * There is no @{\-\# UNPACK \#-\}@ or @{\-\# NOUNPACK \#-\}@ annotation
-- in the source, so it is tagged with @'NoSourceUnpackedness@.
--
-- * There is no strictness (@!@) or laziness (@~@) annotation in the
-- source, so it is tagged with @'NoSourceStrictness@.
--
-- * The compiler infers that the field is lazy, so it is tagged with
-- @'DecidedLazy@. Bear in mind that what the compiler decides may be
-- quite different from what is written in the source. See
-- 'DecidedStrictness' for a more detailed explanation.
--
-- The @'MetaSel@ type is also an instance of the type class 'Selector',
-- which can be used to obtain information about the field at the value
-- level.
--
-- * The @'C1' ('MetaCons \"Leaf\" 'PrefixI 'False)@ and
-- @'C1' ('MetaCons \"Node\" 'PrefixI 'False)@ invocations indicate that the enclosed part is
-- the representation of the first and second constructor of datatype @Tree@, respectively.
-- Here, the meta-information regarding constructor names, fixity and whether
-- it has named fields or not is encoded at the type level. The @'MetaCons@
-- type is also an instance of the type class 'Constructor'. This type class can be used
-- to obtain information about the constructor at the value level.
--
-- * The @'D1' ('MetaData \"Tree\" \"Main\" \"package-name\" 'False)@ tag
-- indicates that the enclosed part is the representation of the
-- datatype @Tree@. Again, the meta-information is encoded at the type level.
-- The @'MetaData@ type is an instance of class 'Datatype', which
-- can be used to obtain the name of a datatype, the module it has been
-- defined in, the package it is located under, and whether it has been
-- defined using @data@ or @newtype@ at the value level.
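--
-- As a small, sketched illustration of querying this meta-information at the
-- value level (using the @Tree@ example above):
--
-- @
-- datatypeName (from (Leaf ())) == \"Tree\"
-- moduleName   (from (Leaf ())) == \"Main\"
-- @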
-- ** Derived and fundamental representation types
--
-- |
--
-- There are many datatype-generic functions that do not distinguish between positions that
-- are parameters or positions that are recursive calls. There are also many datatype-generic
-- functions that do not care about the names of datatypes and constructors at all. To keep
-- the number of cases to consider in generic functions in such a situation to a minimum,
-- it turns out that many of the type constructors introduced above are actually synonyms,
-- defining them to be variants of a smaller set of constructors.
-- *** Individual fields of constructors: 'K1'
--
-- |
--
-- The type constructor 'Rec0' is a variant of 'K1':
--
-- @
-- type 'Rec0' = 'K1' 'R'
-- @
--
-- Here, 'R' is a type-level proxy that does not have any associated values.
--
-- There used to be another variant of 'K1' (namely 'Par0'), but it has since
-- been deprecated.
-- *** Meta information: 'M1'
--
-- |
--
-- The type constructors 'S1', 'C1' and 'D1' are all variants of 'M1':
--
-- @
-- type 'S1' = 'M1' 'S'
-- type 'C1' = 'M1' 'C'
-- type 'D1' = 'M1' 'D'
-- @
--
-- The types 'S', 'C' and 'D' are once again type-level proxies, just used to create
-- several variants of 'M1'.
-- *** Additional generic representation type constructors
--
-- |
--
-- Next to 'K1', 'M1', ':+:' and ':*:' there are a few more type constructors that occur
-- in the representations of other datatypes.
-- **** Empty datatypes: 'V1'
--
-- |
--
-- For empty datatypes, 'V1' is used as a representation. For example,
--
-- @
-- data Empty deriving 'Generic'
-- @
--
-- yields
--
-- @
-- instance 'Generic' Empty where
-- type 'Rep' Empty =
-- 'D1' ('MetaData \"Empty\" \"Main\" \"package-name\" 'False) 'V1'
-- @
-- **** Constructors without fields: 'U1'
--
-- |
--
-- If a constructor has no arguments, then 'U1' is used as its representation. For example
-- the representation of 'Bool' is
--
-- @
-- instance 'Generic' Bool where
-- type 'Rep' Bool =
-- 'D1' ('MetaData \"Bool\" \"Data.Bool\" \"package-name\" 'False)
-- ('C1' ('MetaCons \"False\" 'PrefixI 'False) 'U1' ':+:' 'C1' ('MetaCons \"True\" 'PrefixI 'False) 'U1')
-- @
-- *** Representation of types with many constructors or many fields
--
-- |
--
-- As ':+:' and ':*:' are just binary operators, one might ask what happens if the
-- datatype has more than two constructors, or a constructor with more than two
-- fields. The answer is simple: the operators are used several times, to combine
-- all the constructors and fields as needed. However, users /should not rely on
-- a specific nesting strategy/ for ':+:' and ':*:' being used. The compiler is
-- free to choose any nesting it prefers. (In practice, the current implementation
-- tries to produce a more or less balanced nesting, so that the traversal of the
-- structure of the datatype from the root to a particular component can be performed
-- in logarithmic rather than linear time.)
-- ** Defining datatype-generic functions
--
-- |
--
-- A datatype-generic function comprises two parts:
--
-- 1. /Generic instances/ for the function, implementing it for most of the representation
-- type constructors introduced above.
--
-- 2. A /wrapper/ that for any datatype that is in `Generic`, performs the conversion
-- between the original value and its `Rep`-based representation and then invokes the
-- generic instances.
--
-- As an example, let us look at a function 'encode' that produces a naive, but lossless
-- bit encoding of values of various datatypes. So we are aiming to define a function
--
-- @
-- encode :: 'Generic' a => a -> [Bool]
-- @
--
-- where we use 'Bool' as our datatype for bits.
--
-- For part 1, we define a class @Encode'@. Perhaps surprisingly, this class is parameterized
-- over a type constructor @f@ of kind @* -> *@. This is a technicality: all the representation
-- type constructors operate with kind @* -> *@ as base kind. But the type argument is never
-- being used. This may be changed at some point in the future. The class has a single method,
-- and we use the type we want our final function to have, but we replace the occurrences of
-- the generic type argument @a@ with @f p@ (where the @p@ is any argument; it will not be used).
--
-- @
-- class Encode' f where
--   encode' :: f p -> [Bool]
-- @
--
-- With the goal in mind to make @encode@ work on @Tree@ and other datatypes, we now define
-- instances for the representation type constructors 'V1', 'U1', ':+:', ':*:', 'K1', and 'M1'.
-- *** Definition of the generic representation types
--
-- |
--
-- In order to be able to do this, we need to know the actual definitions of these types:
--
-- @
-- data 'V1' p -- lifted version of Empty
-- data 'U1' p = 'U1' -- lifted version of ()
-- data (':+:') f g p = 'L1' (f p) | 'R1' (g p) -- lifted version of 'Either'
-- data (':*:') f g p = (f p) ':*:' (g p) -- lifted version of (,)
-- newtype 'K1' i c p = 'K1' { 'unK1' :: c } -- a container for a c
-- newtype 'M1' i t f p = 'M1' { 'unM1' :: f p } -- a wrapper
-- @
--
-- So, 'U1' is just the unit type, ':+:' is a binary choice like 'Either',
-- ':*:' is a binary pair like the pair constructor @(,)@, 'K1' is a container
-- for a value of a specific type @c@, and 'M1' wraps a value of the generic
-- type argument, which in the lifted world is an @f p@ (where we do not care
-- about @p@).
-- *** Generic instances
--
-- |
--
-- The instance for 'V1' is slightly awkward (but also rarely used):
--
-- @
-- instance Encode' 'V1' where
-- encode' x = undefined
-- @
--
-- There are no values of type @V1 p@ to pass (except @undefined@), so this
-- case can never actually be reached. Why define an instance for 'V1' at all,
-- then? Because an empty type can still appear as an argument to a non-empty
-- type, and you might want to encode the resulting type. As a somewhat
-- contrived example, consider @[Empty]@, which is not an empty type, but
-- whose only value is the empty list. The 'V1' instance ensures that we
-- can call the generic function on such types.
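--
-- (As a sketch: assuming @Encode Empty@ and @Encode a => Encode [a]@
-- instances that use the generic default defined further below,
-- @encode ([] :: [Empty])@ evaluates to @[False]@, a single bit selecting
-- the nil constructor; the 'V1' instance is never reached.)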
--
-- There is exactly one value of type 'U1', so encoding it requires no
-- knowledge, and we can use zero bits:
--
-- @
-- instance Encode' 'U1' where
-- encode' 'U1' = []
-- @
--
-- In the case for ':+:', we produce 'False' or 'True' depending on whether
-- the constructor of the value provided is located on the left or on the right:
--
-- @
-- instance (Encode' f, Encode' g) => Encode' (f ':+:' g) where
-- encode' ('L1' x) = False : encode' x
-- encode' ('R1' x) = True : encode' x
-- @
--
-- In the case for ':*:', we append the encodings of the two subcomponents:
--
-- @
-- instance (Encode' f, Encode' g) => Encode' (f ':*:' g) where
-- encode' (x ':*:' y) = encode' x ++ encode' y
-- @
--
-- The case for 'K1' is rather interesting. Here, we recursively call the
-- final function 'encode' that we have yet to define. We will use another
-- type class 'Encode' for that function:
--
-- @
-- instance (Encode c) => Encode' ('K1' i c) where
-- encode' ('K1' x) = encode x
-- @
--
-- Note how mapping 'Rec0' (and the now-deprecated 'Par0') to 'K1' allows us
-- to define a single uniform instance here.
--
-- Similarly, we can define a uniform instance for 'M1', because we completely
-- disregard all meta-information:
--
-- @
-- instance (Encode' f) => Encode' ('M1' i t f) where
-- encode' ('M1' x) = encode' x
-- @
--
-- Unlike in 'K1', the instance for 'M1' refers to 'encode'', not 'encode'.
-- *** The wrapper and generic default
--
-- |
--
-- We now define class 'Encode' for the actual 'encode' function:
--
-- @
-- class Encode a where
-- encode :: a -> [Bool]
-- default encode :: (Generic a, Encode' (Rep a)) => a -> [Bool]
-- encode x = encode' ('from' x)
-- @
--
-- The incoming @x@ is converted using 'from', then we dispatch to the
-- generic instances using @encode'@. We use this as a default definition
-- for 'encode'. We need the explicit @default encode@ signature (made
-- possible by the @DefaultSignatures@ language extension) because ordinary
-- Haskell default methods must not introduce additional class constraints,
-- but our generic default does.
--
-- Defining a particular instance is now as simple as saying
--
-- @
-- instance (Encode a) => Encode (Tree a)
-- @
--
#if 0
-- /TODO:/ Add usage example?
--
#endif
-- The generic default is being used. In the future, it will hopefully be
-- possible to use @deriving Encode@ as well, but GHC does not yet support
-- that syntax for this situation.
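--
-- As a quick usage sketch (assuming an @Encode Bool@ instance that likewise
-- relies on the generic default, and the @Tree@ datatype from above):
--
-- @
-- encode (Node (Leaf True) (Leaf False))
-- @
--
-- should evaluate to @[True, False, True, False, False]@: one bit selecting
-- the @Node@ constructor, followed by the encodings of the two leaves.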
--
-- Having 'Encode' as a class has the advantage that we can define
-- non-generic special cases, which is particularly useful for abstract
-- datatypes that have no structural representation. For example, given
-- a suitable integer encoding function 'encodeInt', we can define
--
-- @
-- instance Encode Int where
-- encode = encodeInt
-- @
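--
-- A minimal sketch of such an @encodeInt@ (a hypothetical helper; it relies
-- on "Data.Bits" and simply spells out the native-width two's-complement
-- bits) could be
--
-- @
-- encodeInt :: Int -> [Bool]
-- encodeInt n = [ testBit n i | i <- [0 .. finiteBitSize n - 1] ]
-- @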
-- *** Omitting generic instances
--
-- |
--
-- It is not always required to provide instances for all the generic
-- representation types, but omitting instances restricts the set of
-- datatypes the functions will work for:
--
-- * If no ':+:' instance is given, the function may still work for
-- empty datatypes or datatypes that have a single constructor,
-- but will fail on datatypes with more than one constructor.
--
-- * If no ':*:' instance is given, the function may still work for
-- datatypes where each constructor has just zero or one field,
-- in particular for enumeration types.
--
-- * If no 'K1' instance is given, the function may still work for
-- enumeration types, where no constructor has any fields.
--
-- * If no 'V1' instance is given, the function may still work for
-- any datatype that is not empty.
--
-- * If no 'U1' instance is given, the function may still work for
-- any datatype where each constructor has at least one field.
--
-- An 'M1' instance is always required (but it can just ignore the
-- meta-information, as is the case for 'encode' above).
#if 0
-- *** Using meta-information
--
-- |
--
-- TODO
#endif
-- ** Generic constructor classes
--
-- |
--
-- Datatype-generic functions as defined above work for a large class
-- of datatypes, including parameterized datatypes. (We have used 'Tree'
-- as our example above, which is of kind @* -> *@.) However, the
-- 'Generic' class ranges over types of kind @*@, and therefore, the
-- resulting generic functions (such as 'encode') must be parameterized
-- by a generic type argument of kind @*@.
--
-- What if we want to define generic classes that range over type
-- constructors (such as 'Functor', 'Traversable', or 'Foldable')?
-- *** The 'Generic1' class
--
-- |
--
-- Like 'Generic', there is a class 'Generic1' that defines a
-- representation 'Rep1' and conversion functions 'from1' and 'to1',
-- only that 'Generic1' ranges over types of kind @* -> *@. (More generally,
-- it can range over types of kind @k -> *@, for any kind @k@, if the
-- @PolyKinds@ extension is enabled. More on this later.)
-- The 'Generic1' class is also derivable.
--
-- The representation 'Rep1' is ever so slightly different from 'Rep'.
-- Let us look at 'Tree' as an example again:
--
-- @
-- data Tree a = Leaf a | Node (Tree a) (Tree a)
-- deriving 'Generic1'
-- @
--
-- The above declaration causes the following representation to be generated:
--
-- @
-- instance 'Generic1' Tree where
-- type 'Rep1' Tree =
-- 'D1' ('MetaData \"Tree\" \"Main\" \"package-name\" 'False)
-- ('C1' ('MetaCons \"Leaf\" 'PrefixI 'False)
-- ('S1' ('MetaSel 'Nothing
-- 'NoSourceUnpackedness
-- 'NoSourceStrictness
-- 'DecidedLazy)
-- 'Par1')
-- ':+:'
-- 'C1' ('MetaCons \"Node\" 'PrefixI 'False)
-- ('S1' ('MetaSel 'Nothing
-- 'NoSourceUnpackedness
-- 'NoSourceStrictness
-- 'DecidedLazy)
-- ('Rec1' Tree)
-- ':*:'
-- 'S1' ('MetaSel 'Nothing
-- 'NoSourceUnpackedness
-- 'NoSourceStrictness
-- 'DecidedLazy)
-- ('Rec1' Tree)))
-- ...
-- @
--
-- The representation reuses 'D1', 'C1', 'S1' (and thereby 'M1') as well
-- as ':+:' and ':*:' from 'Rep'. (This reusability is the reason that we
-- carry around the dummy type argument for kind-@*@-types, but there are
-- already enough different names involved without duplicating each of
-- these.)
--
-- What's different is that we now use 'Par1' to refer to the parameter
-- (and that parameter, which used to be @a@, is not mentioned explicitly by
-- name anywhere), and we use 'Rec1' to refer to a recursive use of @Tree a@.
-- *** Representation of @* -> *@ types
--
-- |
--
-- Unlike 'Rec0', the 'Par1' and 'Rec1' type constructors do not
-- map to 'K1'. They are defined directly, as follows:
--
-- @
-- newtype 'Par1' p = 'Par1' { 'unPar1' :: p } -- gives access to parameter p
-- newtype 'Rec1' f p = 'Rec1' { 'unRec1' :: f p } -- a wrapper
-- @
--
-- In 'Par1', the parameter @p@ is used for the first time, whereas 'Rec1' simply
-- wraps an application of @f@ to @p@.
--
-- Note that 'K1' (in the guise of 'Rec0') can still occur in a 'Rep1' representation,
-- namely when the datatype has a field that does not mention the parameter.
--
-- The declaration
--
-- @
-- data WithInt a = WithInt Int a
-- deriving 'Generic1'
-- @
--
-- yields
--
-- @
-- instance 'Generic1' WithInt where
-- type 'Rep1' WithInt =
-- 'D1' ('MetaData \"WithInt\" \"Main\" \"package-name\" 'False)
-- ('C1' ('MetaCons \"WithInt\" 'PrefixI 'False)
-- ('S1' ('MetaSel 'Nothing
-- 'NoSourceUnpackedness
-- 'NoSourceStrictness
-- 'DecidedLazy)
-- ('Rec0' Int)
-- ':*:'
-- 'S1' ('MetaSel 'Nothing
-- 'NoSourceUnpackedness
-- 'NoSourceStrictness
-- 'DecidedLazy)
-- 'Par1'))
-- @
--
-- If the parameter @a@ appears underneath a composition of other type constructors,
-- then the representation involves composition, too:
--
-- @
-- data Rose a = Fork a [Rose a]
-- @
--
-- yields
--
-- @
-- instance 'Generic1' Rose where
-- type 'Rep1' Rose =
-- 'D1' ('MetaData \"Rose\" \"Main\" \"package-name\" 'False)
-- ('C1' ('MetaCons \"Fork\" 'PrefixI 'False)
-- ('S1' ('MetaSel 'Nothing
-- 'NoSourceUnpackedness
-- 'NoSourceStrictness
-- 'DecidedLazy)
-- 'Par1'
-- ':*:'
-- 'S1' ('MetaSel 'Nothing
-- 'NoSourceUnpackedness
-- 'NoSourceStrictness
-- 'DecidedLazy)
-- ([] ':.:' 'Rec1' Rose)))
-- @
--
-- where
--
-- @
-- newtype (':.:') f g p = 'Comp1' { 'unComp1' :: f (g p) }
-- @
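--
-- To see how these representation types are consumed, here is a minimal
-- sketch of a 'Generic1'-based generic map, in the same style as the
-- 'encode' example above. The class and method names (@GFunctor@,
-- @GFunctor'@, @gmap@, @gmap'@) are made up for illustration, and the
-- wrapper relies on the @DefaultSignatures@ extension:
--
-- @
-- class GFunctor' f where
--   gmap' :: (a -> b) -> f a -> f b
--
-- instance GFunctor' 'U1' where
--   gmap' _ 'U1' = 'U1'
--
-- instance GFunctor' 'Par1' where
--   gmap' f ('Par1' a) = 'Par1' (f a)
--
-- instance GFunctor' ('K1' i c) where
--   gmap' _ ('K1' c) = 'K1' c               -- constant fields are untouched
--
-- instance GFunctor' f => GFunctor' ('M1' i t f) where
--   gmap' f ('M1' x) = 'M1' (gmap' f x)     -- meta-information is ignored
--
-- instance (GFunctor' f, GFunctor' g) => GFunctor' (f ':+:' g) where
--   gmap' f ('L1' x) = 'L1' (gmap' f x)
--   gmap' f ('R1' x) = 'R1' (gmap' f x)
--
-- instance (GFunctor' f, GFunctor' g) => GFunctor' (f ':*:' g) where
--   gmap' f (x ':*:' y) = gmap' f x ':*:' gmap' f y
--
-- instance GFunctor f => GFunctor' ('Rec1' f) where
--   gmap' f ('Rec1' x) = 'Rec1' (gmap f x)  -- recursion goes via the wrapper
--
-- instance (Functor f, GFunctor' g) => GFunctor' (f ':.:' g) where
--   gmap' f ('Comp1' x) = 'Comp1' (fmap (gmap' f) x)
--
-- class GFunctor f where
--   gmap :: (a -> b) -> f a -> f b
--   default gmap :: (Generic1 f, GFunctor' (Rep1 f)) => (a -> b) -> f a -> f b
--   gmap f = 'to1' . gmap' f . 'from1'
--
-- instance GFunctor Tree   -- uses the generic default
-- @
--
-- With these instances, @gmap (+1) (Node (Leaf 1) (Leaf 2))@ should evaluate
-- to @Node (Leaf 2) (Leaf 3)@.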
-- *** Representation of @k -> *@ types
--
-- |
--
-- The 'Generic1' class can be generalized to range over types of kind
-- @k -> *@, for any kind @k@. To do so, derive a 'Generic1' instance with the
-- @PolyKinds@ extension enabled. For example, the declaration
--
-- @
-- data Proxy (a :: k) = Proxy deriving 'Generic1'
-- @
--
-- yields a slightly different instance depending on whether @PolyKinds@ is
-- enabled. If compiled without @PolyKinds@, then @'Rep1' Proxy :: * -> *@, but
-- if compiled with @PolyKinds@, then @'Rep1' Proxy :: k -> *@.
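--
-- In both cases the shape of the representation is the same; with the
-- metadata arguments elided, it is roughly
--
-- @
-- type 'Rep1' Proxy = 'D1' ('MetaData ...) ('C1' ('MetaCons \"Proxy\" 'PrefixI 'False) 'U1')
-- @
--
-- only the kind at which it is used differs.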
-- *** Representation of unlifted types
--
-- |
--
-- If one were to attempt to derive a 'Generic' instance for a datatype with an
-- unlifted argument (for example, 'Int#'), one might expect the occurrence of
-- the 'Int#' argument to be marked with @'Rec0' 'Int#'@. This won't work,
-- though, since 'Int#' is of an unlifted kind, and 'Rec0' expects a type of
-- kind @*@.
--
-- One solution would be to represent an occurrence of 'Int#' with @'Rec0' 'Int'@
-- instead. With this approach, however, the programmer has no way of knowing
-- whether the 'Int' is actually an 'Int#' in disguise.
--
-- Instead of reusing 'Rec0', a separate data family 'URec' is used to mark
-- occurrences of common unlifted types:
--
-- @
-- data family 'URec' a p
--
-- data instance 'URec' ('Ptr' ()) p = 'UAddr' { 'uAddr#' :: 'Addr#' }
-- data instance 'URec' 'Char' p = 'UChar' { 'uChar#' :: 'Char#' }
-- data instance 'URec' 'Double' p = 'UDouble' { 'uDouble#' :: 'Double#' }
-- data instance 'URec' 'Float' p = 'UFloat' { 'uFloat#' :: 'Float#' }
-- data instance 'URec' 'Int' p = 'UInt' { 'uInt#' :: 'Int#' }
-- data instance 'URec' 'Word' p = 'UWord' { 'uWord#' :: 'Word#' }
-- @
--
-- Several type synonyms are provided for convenience:
--
-- @
-- type 'UAddr' = 'URec' ('Ptr' ())
-- type 'UChar' = 'URec' 'Char'
-- type 'UDouble' = 'URec' 'Double'
-- type 'UFloat' = 'URec' 'Float'
-- type 'UInt' = 'URec' 'Int'
-- type 'UWord' = 'URec' 'Word'
-- @
--
-- The declaration
--
-- @
-- data IntHash = IntHash Int#
-- deriving 'Generic'
-- @
--
-- yields
--
-- @
-- instance 'Generic' IntHash where
-- type 'Rep' IntHash =
-- 'D1' ('MetaData \"IntHash\" \"Main\" \"package-name\" 'False)
-- ('C1' ('MetaCons \"IntHash\" 'PrefixI 'False)
-- ('S1' ('MetaSel 'Nothing
-- 'NoSourceUnpackedness
-- 'NoSourceStrictness
-- 'DecidedLazy)
-- 'UInt'))
-- @
--
-- Currently, only the six unlifted types listed above are generated, but this
-- may be extended to encompass more unlifted types in the future.
#if 0
-- *** Limitations
--
-- |
--
-- /TODO/
--
-- /TODO:/ Also clear up confusion about 'Rec0' and 'Rec1' not really indicating recursion.
--
#endif
-----------------------------------------------------------------------------
-- * Generic representation types
V1, U1(..), Par1(..), Rec1(..), K1(..), M1(..)
, (:+:)(..), (:*:)(..), (:.:)(..)
-- ** Unboxed representation types
, URec(..)
, type UAddr, type UChar, type UDouble
, type UFloat, type UInt, type UWord
-- ** Synonyms for convenience
, Rec0, R
, D1, C1, S1, D, C, S
-- * Meta-information
, Datatype(..), Constructor(..), Selector(..)
, Fixity(..), FixityI(..), Associativity(..), prec
, SourceUnpackedness(..), SourceStrictness(..), DecidedStrictness(..)
, Meta(..)
-- * Generic type classes
, Generic(..), Generic1(..)
) where
-- We use some base types
import Data.Either ( Either (..) )
import Data.Maybe ( Maybe(..), fromMaybe )
import GHC.Integer ( Integer, integerToInt )
import GHC.Prim ( Addr#, Char#, Double#, Float#, Int#, Word# )
import GHC.Ptr ( Ptr )
import GHC.Types
-- Needed for instances
import GHC.Arr ( Ix )
import GHC.Base ( Alternative(..), Applicative(..), Functor(..)
, Monad(..), MonadPlus(..), String )
import GHC.Classes ( Eq(..), Ord(..) )
import GHC.Enum ( Bounded, Enum )
import GHC.Read ( Read(..), lex, readParen )
import GHC.Show ( Show(..), showString )
-- Needed for metadata
import Data.Proxy ( Proxy(..) )
import GHC.TypeLits ( Nat, Symbol, KnownSymbol, KnownNat, symbolVal, natVal )
--------------------------------------------------------------------------------
-- Representation types
--------------------------------------------------------------------------------
-- | Void: used for datatypes without constructors
data V1 (p :: k)
deriving (Functor, Generic, Generic1)
deriving instance Eq (V1 p)
deriving instance Ord (V1 p)
deriving instance Read (V1 p)
deriving instance Show (V1 p)
-- | Unit: used for constructors without arguments
data U1 (p :: k) = U1
deriving (Generic, Generic1)
-- | @since 4.9.0.0
instance Eq (U1 p) where
_ == _ = True
-- | @since 4.9.0.0
instance Ord (U1 p) where
compare _ _ = EQ
-- | @since 4.9.0.0
instance Read (U1 p) where
readsPrec d = readParen (d > 10) (\r -> [(U1, s) | ("U1",s) <- lex r ])
-- | @since 4.9.0.0
instance Show (U1 p) where
showsPrec _ _ = showString "U1"
-- | @since 4.9.0.0
instance Functor U1 where
fmap _ _ = U1
-- | @since 4.9.0.0
instance Applicative U1 where
pure _ = U1
_ <*> _ = U1
-- | @since 4.9.0.0
instance Alternative U1 where
empty = U1
_ <|> _ = U1
-- | @since 4.9.0.0
instance Monad U1 where
_ >>= _ = U1
-- | @since 4.9.0.0
instance MonadPlus U1
-- | Used for marking occurrences of the parameter
newtype Par1 p = Par1 { unPar1 :: p }
deriving (Eq, Ord, Read, Show, Functor, Generic, Generic1)
-- | @since 4.9.0.0
instance Applicative Par1 where
pure a = Par1 a
Par1 f <*> Par1 x = Par1 (f x)
-- | @since 4.9.0.0
instance Monad Par1 where
Par1 x >>= f = f x
-- | Recursive calls of kind @* -> *@ (or kind @k -> *@, when @PolyKinds@
-- is enabled)
newtype Rec1 (f :: k -> *) (p :: k) = Rec1 { unRec1 :: f p }
deriving (Eq, Ord, Read, Show, Functor, Generic, Generic1)
-- | @since 4.9.0.0
instance Applicative f => Applicative (Rec1 f) where
pure a = Rec1 (pure a)
Rec1 f <*> Rec1 x = Rec1 (f <*> x)
-- | @since 4.9.0.0
instance Alternative f => Alternative (Rec1 f) where
empty = Rec1 empty
Rec1 l <|> Rec1 r = Rec1 (l <|> r)
-- | @since 4.9.0.0
instance Monad f => Monad (Rec1 f) where
Rec1 x >>= f = Rec1 (x >>= \a -> unRec1 (f a))
-- | @since 4.9.0.0
instance MonadPlus f => MonadPlus (Rec1 f)
-- | Constants, additional parameters and recursion of kind @*@
newtype K1 (i :: *) c (p :: k) = K1 { unK1 :: c }
deriving (Eq, Ord, Read, Show, Functor, Generic, Generic1)
-- | @since 4.9.0.0
instance Applicative f => Applicative (M1 i c f) where
pure a = M1 (pure a)
M1 f <*> M1 x = M1 (f <*> x)
-- | @since 4.9.0.0
instance Alternative f => Alternative (M1 i c f) where
empty = M1 empty
M1 l <|> M1 r = M1 (l <|> r)
-- | @since 4.9.0.0
instance Monad f => Monad (M1 i c f) where
M1 x >>= f = M1 (x >>= \a -> unM1 (f a))
-- | @since 4.9.0.0
instance MonadPlus f => MonadPlus (M1 i c f)
-- | Meta-information (constructor names, etc.)
newtype M1 (i :: *) (c :: Meta) (f :: k -> *) (p :: k) = M1 { unM1 :: f p }
deriving (Eq, Ord, Read, Show, Functor, Generic, Generic1)
-- | Sums: encode choice between constructors
infixr 5 :+:
data (:+:) (f :: k -> *) (g :: k -> *) (p :: k) = L1 (f p) | R1 (g p)
deriving (Eq, Ord, Read, Show, Functor, Generic, Generic1)
-- | Products: encode multiple arguments to constructors
infixr 6 :*:
data (:*:) (f :: k -> *) (g :: k -> *) (p :: k) = f p :*: g p
deriving (Eq, Ord, Read, Show, Functor, Generic, Generic1)
-- | @since 4.9.0.0
instance (Applicative f, Applicative g) => Applicative (f :*: g) where
pure a = pure a :*: pure a
(f :*: g) <*> (x :*: y) = (f <*> x) :*: (g <*> y)
-- | @since 4.9.0.0
instance (Alternative f, Alternative g) => Alternative (f :*: g) where
empty = empty :*: empty
(x1 :*: y1) <|> (x2 :*: y2) = (x1 <|> x2) :*: (y1 <|> y2)
-- | @since 4.9.0.0
instance (Monad f, Monad g) => Monad (f :*: g) where
(m :*: n) >>= f = (m >>= \a -> fstP (f a)) :*: (n >>= \a -> sndP (f a))
where
fstP (a :*: _) = a
sndP (_ :*: b) = b
-- | @since 4.9.0.0
instance (MonadPlus f, MonadPlus g) => MonadPlus (f :*: g)
-- | Composition of functors
infixr 7 :.:
newtype (:.:) (f :: k2 -> *) (g :: k1 -> k2) (p :: k1) =
Comp1 { unComp1 :: f (g p) }
deriving (Eq, Ord, Read, Show, Functor, Generic, Generic1)
-- | @since 4.9.0.0
instance (Applicative f, Applicative g) => Applicative (f :.: g) where
pure x = Comp1 (pure (pure x))
Comp1 f <*> Comp1 x = Comp1 (fmap (<*>) f <*> x)
-- | @since 4.9.0.0
instance (Alternative f, Applicative g) => Alternative (f :.: g) where
empty = Comp1 empty
Comp1 x <|> Comp1 y = Comp1 (x <|> y)
-- | Constants of unlifted kinds
--
-- @since 4.9.0.0
data family URec (a :: *) (p :: k)
-- | Used for marking occurrences of 'Addr#'
--
-- @since 4.9.0.0
data instance URec (Ptr ()) (p :: k) = UAddr { uAddr# :: Addr# }
deriving (Eq, Ord, Functor, Generic, Generic1)
-- | Used for marking occurrences of 'Char#'
--
-- @since 4.9.0.0
data instance URec Char (p :: k) = UChar { uChar# :: Char# }
deriving (Eq, Ord, Show, Functor, Generic, Generic1)
-- | Used for marking occurrences of 'Double#'
--
-- @since 4.9.0.0
data instance URec Double (p :: k) = UDouble { uDouble# :: Double# }
deriving (Eq, Ord, Show, Functor, Generic, Generic1)
-- | Used for marking occurrences of 'Float#'
--
-- @since 4.9.0.0
data instance URec Float (p :: k) = UFloat { uFloat# :: Float# }
deriving (Eq, Ord, Show, Functor, Generic, Generic1)
-- | Used for marking occurrences of 'Int#'
--
-- @since 4.9.0.0
data instance URec Int (p :: k) = UInt { uInt# :: Int# }
deriving (Eq, Ord, Show, Functor, Generic, Generic1)
-- | Used for marking occurrences of 'Word#'
--
-- @since 4.9.0.0
data instance URec Word (p :: k) = UWord { uWord# :: Word# }
deriving (Eq, Ord, Show, Functor, Generic, Generic1)
-- | Type synonym for @'URec' 'Addr#'@
--
-- @since 4.9.0.0
type UAddr = URec (Ptr ())
-- | Type synonym for @'URec' 'Char#'@
--
-- @since 4.9.0.0
type UChar = URec Char
-- | Type synonym for @'URec' 'Double#'@
--
-- @since 4.9.0.0
type UDouble = URec Double
-- | Type synonym for @'URec' 'Float#'@
--
-- @since 4.9.0.0
type UFloat = URec Float
-- | Type synonym for @'URec' 'Int#'@
--
-- @since 4.9.0.0
type UInt = URec Int
-- | Type synonym for @'URec' 'Word#'@
--
-- @since 4.9.0.0
type UWord = URec Word
-- | Tag for K1: recursion (of kind @*@)
data R
-- | Type synonym for encoding recursion (of kind @*@)
type Rec0 = K1 R
-- | Tag for M1: datatype
data D
-- | Tag for M1: constructor
data C
-- | Tag for M1: record selector
data S
-- | Type synonym for encoding meta-information for datatypes
type D1 = M1 D
-- | Type synonym for encoding meta-information for constructors
type C1 = M1 C
-- | Type synonym for encoding meta-information for record selectors
type S1 = M1 S
-- | Class for datatypes that represent datatypes
class Datatype d where
-- | The name of the datatype (unqualified)
datatypeName :: t d (f :: k -> *) (a :: k) -> [Char]
-- | The fully-qualified name of the module where the type is declared
moduleName :: t d (f :: k -> *) (a :: k) -> [Char]
-- | The package name of the module where the type is declared
--
-- @since 4.9.0.0
packageName :: t d (f :: k -> *) (a :: k) -> [Char]
-- | Marks if the datatype is actually a newtype
--
-- @since 4.7.0.0
isNewtype :: t d (f :: k -> *) (a :: k) -> Bool
isNewtype _ = False
-- | @since 4.9.0.0
instance (KnownSymbol n, KnownSymbol m, KnownSymbol p, SingI nt)
=> Datatype ('MetaData n m p nt) where
datatypeName _ = symbolVal (Proxy :: Proxy n)
moduleName _ = symbolVal (Proxy :: Proxy m)
packageName _ = symbolVal (Proxy :: Proxy p)
isNewtype _ = fromSing (sing :: Sing nt)
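-- Usage sketch: for a hypothetical type T with a derived Generic instance,
-- the metadata can be queried from the D1 wrapper produced by 'from', e.g.
--
--   datatypeName (from (undefined :: T))   -- yields "T"
--
-- The argument is never inspected; it only fixes the type.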
-- | Class for datatypes that represent data constructors
class Constructor c where
-- | The name of the constructor
conName :: t c (f :: k -> *) (a :: k) -> [Char]
-- | The fixity of the constructor
conFixity :: t c (f :: k -> *) (a :: k) -> Fixity
conFixity _ = Prefix
-- | Marks if this constructor is a record
conIsRecord :: t c (f :: k -> *) (a :: k) -> Bool
conIsRecord _ = False
-- | @since 4.9.0.0
instance (KnownSymbol n, SingI f, SingI r)
=> Constructor ('MetaCons n f r) where
conName _ = symbolVal (Proxy :: Proxy n)
conFixity _ = fromSing (sing :: Sing f)
conIsRecord _ = fromSing (sing :: Sing r)
-- | Datatype to represent the fixity of a constructor. An infix
-- declaration directly corresponds to an application of 'Infix'.
data Fixity = Prefix | Infix Associativity Int
deriving (Eq, Show, Ord, Read, Generic)
-- | This variant of 'Fixity' appears at the type level.
--
-- @since 4.9.0.0
data FixityI = PrefixI | InfixI Associativity Nat
-- | Get the precedence of a fixity value.
prec :: Fixity -> Int
prec Prefix = 10
prec (Infix _ n) = n
-- | Datatype to represent the associativity of a constructor
data Associativity = LeftAssociative
| RightAssociative
| NotAssociative
deriving (Eq, Show, Ord, Read, Enum, Bounded, Ix, Generic)
-- | The unpackedness of a field as the user wrote it in the source code. For
-- example, in the following data type:
--
-- @
-- data E = ExampleConstructor Int
-- {\-\# NOUNPACK \#-\} Int
-- {\-\# UNPACK \#-\} Int
-- @
--
-- The fields of @ExampleConstructor@ have 'NoSourceUnpackedness',
-- 'SourceNoUnpack', and 'SourceUnpack', respectively.
--
-- @since 4.9.0.0
data SourceUnpackedness = NoSourceUnpackedness
| SourceNoUnpack
| SourceUnpack
deriving (Eq, Show, Ord, Read, Enum, Bounded, Ix, Generic)
-- | The strictness of a field as the user wrote it in the source code. For
-- example, in the following data type:
--
-- @
-- data E = ExampleConstructor Int ~Int !Int
-- @
--
-- The fields of @ExampleConstructor@ have 'NoSourceStrictness',
-- 'SourceLazy', and 'SourceStrict', respectively.
--
-- @since 4.9.0.0
data SourceStrictness = NoSourceStrictness
| SourceLazy
| SourceStrict
deriving (Eq, Show, Ord, Read, Enum, Bounded, Ix, Generic)
-- | The strictness that GHC infers for a field during compilation. Whereas
-- there are nine different combinations of 'SourceUnpackedness' and
-- 'SourceStrictness', the strictness that GHC decides will ultimately be one
-- of lazy, strict, or unpacked. What GHC decides is affected both by what the
-- user writes in the source code and by GHC flags. As an example, consider
-- this data type:
--
-- @
-- data E = ExampleConstructor {\-\# UNPACK \#-\} !Int !Int Int
-- @
--
-- * If compiled without optimization or other language extensions, then the
-- fields of @ExampleConstructor@ will have 'DecidedStrict', 'DecidedStrict',
-- and 'DecidedLazy', respectively.
--
-- * If compiled with @-XStrictData@ enabled, then the fields will have
-- 'DecidedStrict', 'DecidedStrict', and 'DecidedStrict', respectively.
--
-- * If compiled with @-O2@ enabled, then the fields will have 'DecidedUnpack',
-- 'DecidedStrict', and 'DecidedLazy', respectively.
--
-- @since 4.9.0.0
data DecidedStrictness = DecidedLazy
| DecidedStrict
| DecidedUnpack
deriving (Eq, Show, Ord, Read, Enum, Bounded, Ix, Generic)
-- | Class for datatypes that represent records
class Selector s where
-- | The name of the selector
selName :: t s (f :: k -> *) (a :: k) -> [Char]
-- | The selector's unpackedness annotation (if any)
--
-- @since 4.9.0.0
selSourceUnpackedness :: t s (f :: k -> *) (a :: k) -> SourceUnpackedness
-- | The selector's strictness annotation (if any)
--
-- @since 4.9.0.0
selSourceStrictness :: t s (f :: k -> *) (a :: k) -> SourceStrictness
-- | The strictness that the compiler inferred for the selector
--
-- @since 4.9.0.0
selDecidedStrictness :: t s (f :: k -> *) (a :: k) -> DecidedStrictness
-- | @since 4.9.0.0
instance (SingI mn, SingI su, SingI ss, SingI ds)
=> Selector ('MetaSel mn su ss ds) where
selName _ = fromMaybe "" (fromSing (sing :: Sing mn))
selSourceUnpackedness _ = fromSing (sing :: Sing su)
selSourceStrictness _ = fromSing (sing :: Sing ss)
selDecidedStrictness _ = fromSing (sing :: Sing ds)
-- | Representable types of kind @*@.
-- This class is derivable in GHC with the @DeriveGeneric@ flag on.
class Generic a where
-- | Generic representation type
type Rep a :: * -> *
-- | Convert from the datatype to its representation
from :: a -> (Rep a) x
-- | Convert from the representation to the datatype
to :: (Rep a) x -> a
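-- For a derived instance, 'from' and 'to' are mutually inverse: for any
-- finite, total value x, to (from x) == x, and likewise from (to r) == r.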
-- | Representable types of kind @* -> *@ (or kind @k -> *@, when @PolyKinds@
-- is enabled).
-- This class is derivable in GHC with the @DeriveGeneric@ flag on.
class Generic1 (f :: k -> *) where
-- | Generic representation type
type Rep1 f :: k -> *
-- | Convert from the datatype to its representation
from1 :: f a -> (Rep1 f) a
-- | Convert from the representation to the datatype
to1 :: (Rep1 f) a -> f a
--------------------------------------------------------------------------------
-- Meta-data
--------------------------------------------------------------------------------
-- | Datatype to represent metadata associated with a datatype (@MetaData@),
-- constructor (@MetaCons@), or field selector (@MetaSel@).
--
-- * In @MetaData n m p nt@, @n@ is the datatype's name, @m@ is the module in
-- which the datatype is defined, @p@ is the package in which the datatype
-- is defined, and @nt@ is @'True@ if the datatype is a @newtype@.
--
-- * In @MetaCons n f s@, @n@ is the constructor's name, @f@ is its fixity,
-- and @s@ is @'True@ if the constructor contains record selectors.
--
-- * In @MetaSel mn su ss ds@, if the field uses record syntax, then @mn@ is
-- 'Just' the record name. Otherwise, @mn@ is 'Nothing'. @su@ and @ss@ are
-- the field's unpackedness and strictness annotations, and @ds@ is the
-- strictness that GHC infers for the field.
--
-- @since 4.9.0.0
data Meta = MetaData Symbol Symbol Symbol Bool
| MetaCons Symbol FixityI Bool
| MetaSel (Maybe Symbol)
SourceUnpackedness SourceStrictness DecidedStrictness
--------------------------------------------------------------------------------
-- Derived instances
--------------------------------------------------------------------------------
deriving instance Generic [a]
deriving instance Generic (Maybe a)
deriving instance Generic (Either a b)
deriving instance Generic Bool
deriving instance Generic Ordering
deriving instance Generic (Proxy t)
deriving instance Generic ()
deriving instance Generic ((,) a b)
deriving instance Generic ((,,) a b c)
deriving instance Generic ((,,,) a b c d)
deriving instance Generic ((,,,,) a b c d e)
deriving instance Generic ((,,,,,) a b c d e f)
deriving instance Generic ((,,,,,,) a b c d e f g)
deriving instance Generic1 []
deriving instance Generic1 Maybe
deriving instance Generic1 (Either a)
deriving instance Generic1 Proxy
deriving instance Generic1 ((,) a)
deriving instance Generic1 ((,,) a b)
deriving instance Generic1 ((,,,) a b c)
deriving instance Generic1 ((,,,,) a b c d)
deriving instance Generic1 ((,,,,,) a b c d e)
deriving instance Generic1 ((,,,,,,) a b c d e f)
--------------------------------------------------------------------------------
-- Copied from the singletons package
--------------------------------------------------------------------------------
-- | The singleton kind-indexed data family.
data family Sing (a :: k)
-- | A 'SingI' constraint is essentially an implicitly-passed singleton.
-- If you need to satisfy this constraint with an explicit singleton, please
-- see 'withSingI'.
class SingI (a :: k) where
-- | Produce the singleton explicitly. You will likely need the @ScopedTypeVariables@
-- extension to use this method the way you want.
sing :: Sing a
-- | The 'SingKind' class is essentially a /kind/ class. It classifies all kinds
-- for which singletons are defined. The class supports converting between a singleton
-- type and the base (unrefined) type which it is built from.
class SingKind k where
-- | Get a base type from a proxy for the promoted kind. For example,
-- @DemoteRep Bool@ will be the type @Bool@.
type DemoteRep k :: *
-- | Convert a singleton to its unrefined version.
fromSing :: Sing (a :: k) -> DemoteRep k
-- Singleton symbols
data instance Sing (s :: Symbol) where
SSym :: KnownSymbol s => Sing s
-- | @since 4.9.0.0
instance KnownSymbol a => SingI a where sing = SSym
-- | @since 4.9.0.0
instance SingKind Symbol where
type DemoteRep Symbol = String
fromSing (SSym :: Sing s) = symbolVal (Proxy :: Proxy s)
-- Singleton booleans
data instance Sing (a :: Bool) where
STrue :: Sing 'True
SFalse :: Sing 'False
-- | @since 4.9.0.0
instance SingI 'True where sing = STrue
-- | @since 4.9.0.0
instance SingI 'False where sing = SFalse
-- | @since 4.9.0.0
instance SingKind Bool where
type DemoteRep Bool = Bool
fromSing STrue = True
fromSing SFalse = False
-- Singleton Maybe
data instance Sing (b :: Maybe a) where
SNothing :: Sing 'Nothing
SJust :: Sing a -> Sing ('Just a)
-- | @since 4.9.0.0
instance SingI 'Nothing where sing = SNothing
-- | @since 4.9.0.0
instance SingI a => SingI ('Just a) where sing = SJust sing
-- | @since 4.9.0.0
instance SingKind a => SingKind (Maybe a) where
type DemoteRep (Maybe a) = Maybe (DemoteRep a)
fromSing SNothing = Nothing
fromSing (SJust a) = Just (fromSing a)
-- Singleton Fixity
data instance Sing (a :: FixityI) where
SPrefix :: Sing 'PrefixI
SInfix :: Sing a -> Integer -> Sing ('InfixI a n)
-- | @since 4.9.0.0
instance SingI 'PrefixI where sing = SPrefix
-- | @since 4.9.0.0
instance (SingI a, KnownNat n) => SingI ('InfixI a n) where
sing = SInfix (sing :: Sing a) (natVal (Proxy :: Proxy n))
-- | @since 4.9.0.0
instance SingKind FixityI where
type DemoteRep FixityI = Fixity
fromSing SPrefix = Prefix
fromSing (SInfix a n) = Infix (fromSing a) (I# (integerToInt n))
-- Singleton Associativity
data instance Sing (a :: Associativity) where
SLeftAssociative :: Sing 'LeftAssociative
SRightAssociative :: Sing 'RightAssociative
SNotAssociative :: Sing 'NotAssociative
-- | @since 4.9.0.0
instance SingI 'LeftAssociative where sing = SLeftAssociative
-- | @since 4.9.0.0
instance SingI 'RightAssociative where sing = SRightAssociative
-- | @since 4.9.0.0
instance SingI 'NotAssociative where sing = SNotAssociative
-- | @since 4.9.0.0
instance SingKind Associativity where
type DemoteRep Associativity = Associativity
fromSing SLeftAssociative = LeftAssociative
fromSing SRightAssociative = RightAssociative
fromSing SNotAssociative = NotAssociative
-- Singleton SourceUnpackedness
data instance Sing (a :: SourceUnpackedness) where
SNoSourceUnpackedness :: Sing 'NoSourceUnpackedness
SSourceNoUnpack :: Sing 'SourceNoUnpack
SSourceUnpack :: Sing 'SourceUnpack
-- | @since 4.9.0.0
instance SingI 'NoSourceUnpackedness where sing = SNoSourceUnpackedness
-- | @since 4.9.0.0
instance SingI 'SourceNoUnpack where sing = SSourceNoUnpack
-- | @since 4.9.0.0
instance SingI 'SourceUnpack where sing = SSourceUnpack
-- | @since 4.9.0.0
instance SingKind SourceUnpackedness where
type DemoteRep SourceUnpackedness = SourceUnpackedness
fromSing SNoSourceUnpackedness = NoSourceUnpackedness
fromSing SSourceNoUnpack = SourceNoUnpack
fromSing SSourceUnpack = SourceUnpack
-- Singleton SourceStrictness
data instance Sing (a :: SourceStrictness) where
SNoSourceStrictness :: Sing 'NoSourceStrictness
SSourceLazy :: Sing 'SourceLazy
SSourceStrict :: Sing 'SourceStrict
-- | @since 4.9.0.0
instance SingI 'NoSourceStrictness where sing = SNoSourceStrictness
-- | @since 4.9.0.0
instance SingI 'SourceLazy where sing = SSourceLazy
-- | @since 4.9.0.0
instance SingI 'SourceStrict where sing = SSourceStrict
-- | @since 4.9.0.0
instance SingKind SourceStrictness where
type DemoteRep SourceStrictness = SourceStrictness
fromSing SNoSourceStrictness = NoSourceStrictness
fromSing SSourceLazy = SourceLazy
fromSing SSourceStrict = SourceStrict
-- Singleton DecidedStrictness
data instance Sing (a :: DecidedStrictness) where
SDecidedLazy :: Sing 'DecidedLazy
SDecidedStrict :: Sing 'DecidedStrict
SDecidedUnpack :: Sing 'DecidedUnpack
-- | @since 4.9.0.0
instance SingI 'DecidedLazy where sing = SDecidedLazy
-- | @since 4.9.0.0
instance SingI 'DecidedStrict where sing = SDecidedStrict
-- | @since 4.9.0.0
instance SingI 'DecidedUnpack where sing = SDecidedUnpack
-- | @since 4.9.0.0
instance SingKind DecidedStrictness where
type DemoteRep DecidedStrictness = DecidedStrictness
fromSing SDecidedLazy = DecidedLazy
fromSing SDecidedStrict = DecidedStrict
fromSing SDecidedUnpack = DecidedUnpack
|
snoyberg/ghc
|
libraries/base/GHC/Generics.hs
|
bsd-3-clause
| 47,373 | 0 | 12 | 10,608 | 6,506 | 3,953 | 2,553 | -1 | -1 |
module WhereIn2 where
--A definition can be demoted to the local 'where' binding of a friend declaration,
--if it is only used by this friend declaration.
--Demoting a definition narrows down the scope of the definition.
--In this example, demote the top level 'sq' to 'sumSquares'.
--This example also aims to test the renaming of clashed/captured names.
sumSquares x y = sq x + sq y +pow
where pow=2
sq 0 = 0
sq z = z^pow
pow=2
anotherFun 0 y = sq y
where sq x = x^2
|
kmate/HaRe
|
old/testing/demote/WhereIn2_TokOut.hs
|
bsd-3-clause
| 495 | 0 | 7 | 117 | 92 | 49 | 43 | 8 | 1 |
{-# LANGUAGE TypeFamilies #-}
module Instance1 where
import Definition
type instance F Int = Int
|
vladfi1/hs-misc
|
conflict/Instance1.hs
|
mit
| 100 | 0 | 4 | 18 | 18 | 12 | 6 | 4 | 0 |
-- Copyright (c) 2014 Vance King Saxbe A., Power Dominion Enterprise, Precieux Consulting and other contributors.
{-# LANGUAGE EmptyDataDecls, GADTs #-}
module GoldSaxMachine.Users where
data AllowEverything
data AllowProducts
data AllowPurchases
data Person = Person { firstName :: String, lastName :: String }
data User r where
Admin :: Person -> User AllowEverything
StoreManager :: Person -> User AllowEverything
StorePerson :: Person -> User AllowProducts
Client :: Person -> User AllowPurchases
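-- Usage sketch (hypothetical values): the phantom type index records the
-- permission level granted to the user, e.g.
--
--   admin :: User AllowEverything
--   admin = Admin (Person "Jane" "Doe")
--
--   client :: User AllowPurchases
--   client = Client (Person "John" "Doe")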
|
VanceKingSaxbeA/GoldSaxMachineStore
|
GoldSaxMachineModule13/src/Chapter13/Users.hs
|
mit
| 1,478 | 37 | 16 | 206 | 341 | 170 | 171 | -1 | -1 |