| code (stringlengths 5 to 1.03M) | repo_name (stringlengths 5 to 90) | path (stringlengths 4 to 158) | license (stringclasses, 15 values) | size (int64, 5 to 1.03M) | n_ast_errors (int64, 0 to 53.9k) | ast_max_depth (int64, 2 to 4.17k) | n_whitespaces (int64, 0 to 365k) | n_ast_nodes (int64, 3 to 317k) | n_ast_terminals (int64, 1 to 171k) | n_ast_nonterminals (int64, 1 to 146k) | loc (int64, -1 to 37.3k) | cycloplexity (int64, -1 to 1.31k) |
|---|---|---|---|---|---|---|---|---|---|---|---|---|
module Transputer ( Op(..), Transputer(..), Registers(..), StatusRegisters(..) ) where
import Data.ByteString
import Data.Int ( Int32 )
import Control.Lens
data Op = Prefix
| NegativePrefix
| Operate
| LoadConstant
| LoadLocal
| StoreLocal
| LoadLocalPointer
| AddConstant
| EqualsConstant
| Jump
| ConditionalJump
| LoadNonLocal
| StoreNonLocal
| LoadNonLocalPointer
| Call
| AdjustWorkspace
| Reverse
| Add
| Subtract
| Multiply
| Divide
| Remainder
| Sum
| Difference
| Product
| And
| Or
| ExclusiveOr
| BitwiseNot
| ShiftLeft
| ShiftRight
| GreaterThan
| LoopEnd
| ByteCount
| WordCount
| LoadPointerToInstruction
| MinimumInteger
| ByteSubscript
| WordSubscript
| MoveMessage
| InputMessage
| OutputMessage
| LoadByte
| StoreByte
| OutputByte
| OutputWord
| GeneralCall
| GeneralAdjustWorkspace
| Return
| StartProcess
| EndProcess
| RunProcess
| StopProcess
| LoadCurrentPriority
| LoadTimer
| TimerInput
| AltStart
| AltWait
| AltEnd
| TimerAltStart
| TimerAltWait
| EnableSkip
| DisableSkip
| EnableChannel
| DisableChannel
| EnableTimer
| DisableTimer
| CheckSubscriptFromZero
| CheckCountFromOne
| TestErrorFalseAndClear
| StopOnError
| SetError
| ExtendToWord
| CheckWord
| ExtendToDouble
| CheckSingle
| LongAdd
| LongSubtract
| LongSum
| LongDiff
| LongMultiply
| LongDivide
| LongShiftLeft
| LongShiftRight
| Normalise
| ResetChannel
| TestProcessorAnalysing
| StoreHighPriorityFrontPointer
| StoreLowPriorityFrontPointer
| StoreTimer
| StoreHighPriorityBackPointer
| StoreLowPriorityBackPointer
| SaveHighPriorityQueueRegisters
| SaveLowPriorityQueueRegisters
| ClearHaltOnError
| SetHaltOnError
| TestHaltOnError
| FractionalMultiply
| UnpackSingleLengthFpNumber
| RoundSingleLengthFpNumber
| PostNormaliseCorrectionOfSingleLengthFpNumber
| LoadSingleLengthInfinity
| CheckSingleLengthFpInfinityOrNaN
| DuplicateTopOfStack
| InitialiseDataForTwoDimensionalBlockMove
| TwoDimensionalBlockCopy
| TwoDimensionalBlockCopyNonZeroBytes
| TwoDimensionalBlockCopyZeroBytes
| CalculateCRCOnWord
| CalculateCRCOnByte
| CountBitsSetInWord
| ReverseBitsInWord
| ReverseBottomNBitsInWord
| FormDoubleWordSubscript
deriving (Show)
data Registers = Registers {
_iptr :: Int32,
_wptr :: Int32,
_areg :: Int32,
_breg :: Int32,
_creg :: Int32,
_oreg :: Int32,
_sreg :: StatusRegisters
} deriving (Show)
data StatusRegisters = StatusRegisters {
_errorFlag :: Bool,
_moveBit :: Bool,
_haltOnErr :: Bool,
_gotoSnp :: Bool,
_ioBit :: Bool,
_timeIns :: Bool,
_timeDel :: Bool,
_distAndIns :: Bool
} deriving (Show)
data Transputer = Transputer {
_registers :: Registers,
_memory :: ByteString,
_programEnd :: Int,
_id :: Int
} deriving (Show)
| rossng/transputer-simulator | src/Transputer.hs | apache-2.0 | 3,595 | 0 | 8 | 1,289 | 569 | 373 | 196 | 144 | 0 |
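The Transputer module above only defines types, so a minimal sketch of building an initial machine state may help. The zeroed registers and the 1 KiB of memory are illustrative assumptions, not values taken from the simulator.

-- Hypothetical usage sketch for the Transputer module above; the concrete
-- initial values are assumptions for illustration only.
import qualified Data.ByteString as BS
import Transputer

initialStatus :: StatusRegisters
initialStatus = StatusRegisters False False False False False False False False

initialRegisters :: Registers
initialRegisters = Registers 0 0 0 0 0 0 initialStatus

initialTransputer :: Transputer
initialTransputer = Transputer initialRegisters (BS.replicate 1024 0) 0 0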
<?xml version="1.0" encoding="UTF-8"?><!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<helpset version="2.0" xml:lang="ru-RU">
<title>Code Dx | ZAP Extension</title>
<maps>
<homeID>top</homeID>
<mapref location="map.jhm"/>
</maps>
<view>
<name>TOC</name>
<label>Contents</label>
<type>org.zaproxy.zap.extension.help.ZapTocView</type>
<data>toc.xml</data>
</view>
<view>
<name>Index</name>
<label>Index</label>
<type>javax.help.IndexView</type>
<data>index.xml</data>
</view>
<view>
<name>Search</name>
<label>Search</label>
<type>javax.help.SearchView</type>
<data engine="com.sun.java.help.search.DefaultSearchEngine">
JavaHelpSearch
</data>
</view>
<view>
<name>Favorites</name>
<label>Favorites</label>
<type>javax.help.FavoritesView</type>
</view>
</helpset> | secdec/zap-extensions | addOns/codedx/src/main/javahelp/org/zaproxy/zap/extension/codedx/resources/help_ru_RU/helpset_ru_RU.hs | apache-2.0 | 968 | 78 | 66 | 159 | 413 | 209 | 204 | -1 | -1 |
module Distribution.Sandbox.Utils where
import System.Directory (doesFileExist)
import System.FilePath ((</>))
import Data.Maybe (mapMaybe, listToMaybe)
import Codex.Internal
findSandbox :: FilePath -> IO (Maybe FilePath)
findSandbox prjDir = do
fileExists <- doesFileExist configFile
if fileExists then readSandboxDir else return Nothing where
readSandboxDir = do
fileContent <- readFile configFile
return $ removePrefixMany $ lines fileContent
configFile = prjDir </> "cabal.sandbox.config"
removePrefixMany = maybeFunctionMany $ removePrefix "prefix:"
maybeFunctionMany :: (a -> Maybe b) -> [a] -> Maybe b
maybeFunctionMany func list = listToMaybe $ mapMaybe func list
readSandboxSources :: FilePath -> IO [FilePath]
readSandboxSources sandboxPath = do
fileExists <- doesFileExist sourcesFile
if fileExists then readSources else return [] where
readSources = do
fileContent <- readFile sourcesFile
return $ projects fileContent where
projects :: String -> [FilePath]
projects x = sources x >>= (\x' -> fmap fst $ snd x')
sources :: String -> [(String, [(FilePath, Int)])]
sources x = read x
sourcesFile = sandboxPath </> "add-source-timestamps"
| aloiscochard/codex | src/Distribution/Sandbox/Utils.hs | apache-2.0 | 1,245 | 0 | 13 | 248 | 367 | 190 | 177 | 28 | 2 |
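A short, hypothetical driver for the sandbox helpers above: it looks for a cabal.sandbox.config under an example project path and prints any registered add-source directories. The path is made up.

-- Hypothetical driver for the sandbox utilities above; the path is an example only.
import Distribution.Sandbox.Utils (findSandbox, readSandboxSources)

main :: IO ()
main = do
  msandbox <- findSandbox "/path/to/project"   -- looks for cabal.sandbox.config
  case msandbox of
    Nothing      -> putStrLn "no sandbox found"
    Just sandbox -> readSandboxSources sandbox >>= mapM_ putStrLn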
{-# LANGUAGE RecordWildCards #-}
module NLP.FeatureStructure.Reid
(
-- * Pure operations
reidGraph
-- * Reid monad
, ReidT
, Reid
, runReidT
, runReid
-- * Primitives
, reid
, reidGraphM
, split
) where
import Control.Applicative ((<$>), (<*>))
import qualified Control.Monad.State.Strict as S
import Control.Monad.Identity (Identity(..))
import qualified Data.Traversable as Tr
import qualified Data.Map.Strict as M
import NLP.FeatureStructure.Core
import qualified NLP.FeatureStructure.Join as J
import qualified NLP.FeatureStructure.Graph as G
-- | State of the reidentification monad.
data ReidS i = ReidS {
-- | Base identifier: the maximal value (plus 1) assigned
-- to any of the keys in the mapping.
base :: !ID
-- | Current ID mapping.
, imap :: !(M.Map i ID)
} deriving (Show, Eq, Ord)
-- | The reidentification monad transformer.
type ReidT i m = S.StateT (ReidS i) m
-- | The reidentification monad.
type Reid i = ReidT i Identity
-- | Run the reidentification monad transformer.
runReidT :: Monad m => ReidT i m a -> m (a, ReidS i)
runReidT = flip S.runStateT $ ReidS 0 M.empty
-- | Run the reidentification monad.
runReid :: Reid i a -> (a, ReidS i)
runReid = runIdentity . runReidT
-- | Set split point.
split :: Monad m => ReidT i m ()
split = S.modify $ \s -> s {imap = M.empty}
-- | Re-identify a single identifier.
reid :: (Monad m, Ord i) => i -> ReidT i m ID
reid x = S.state $ \s@ReidS{..} -> case M.lookup x imap of
Just y -> (y, s)
Nothing -> ( base, ReidS
{ base = base + 1
, imap = M.insert x base imap } )
-- | Reidentify the graph within the monad.
reidGraphM
:: (Functor m, Monad m, Ord i)
=> G.Graph i f a
-> ReidT i m (G.Graph ID f a)
reidGraphM g =
G.Graph <$> reidNodeMap (G.nodeMap g)
where
reidNodeMap m = M.fromList <$> mapM reidNodePair (M.toList m)
reidNodePair (i, x) = (,) <$> reid i <*> reidNode x
reidNode (G.Interior m) = fmap G.Interior $ Tr.mapM reid m
reidNode (G.Frontier x) = return $ G.Frontier x
-- | Reidentify the graph.
reidGraph :: Ord i => G.Graph i f a -> J.Res i ID f a
reidGraph g = case runReid $ reidGraphM g of
(g', ReidS{..}) -> J.Res
{ J.resGraph = g'
, J.convID = \i -> case M.lookup i imap of
Just j -> j
Nothing -> error "reidGraph: no such identifier" }
| kawu/feature-structure | src/NLP/FeatureStructure/Reid.hs | bsd-2-clause | 2,414 | 0 | 14 | 604 | 800 | 442 | 358 | 58 | 2 |
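A graph-free sketch of how the reidentification monad above behaves, using only the exported runReid, reid and split; the identifiers in the comments are worked out from the definitions of ReidS and reid, not taken from the package's tests.

-- Illustrative only: re-identify string keys. Repeated keys map to the same fresh ID
-- until 'split' clears the mapping; the counter ('base') keeps growing across splits.
import NLP.FeatureStructure.Reid (runReid, reid, split)

example = fst $ runReid $ do
  xs <- mapM reid ["a", "b", "a"]   -- expected to yield IDs [0, 1, 0]
  split
  ys <- mapM reid ["a", "c"]        -- expected [2, 3], since 'base' is not reset
  return (xs, ys)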
module FractalFlame.Flam3.Types.Xform where
import FractalFlame.Types.Base
import FractalFlame.Types.LinearParams
import FractalFlame.Variation.Types.Variation
import FractalFlame.Variation.Types.VParams
-- | Basic unit sampled by Iterated Function System. Corresponds to an xform element in a flam3 file.
data Xform = Xform {
preParams :: Maybe LinearParams -- ^ params for transform before variations are run
, postParams :: Maybe LinearParams -- ^ params for transform after variations are run
, colorIx :: Maybe Coord -- ^ color index associated with these transforms. Nothing means keep old colorIx
, weight :: Coord -- ^ likelihood of selection by IFS, value from 0 to 1
, symmetry :: Coord
, variations :: [Variation] -- ^ weighted set of variations
}
| anthezium/fractal_flame_renderer_haskell | FractalFlame/Flam3/Types/Xform.hs | bsd-2-clause | 817 | 0 | 9 | 168 | 97 | 64 | 33 | 12 | 0 |
{-# LANGUAGE TemplateHaskell #-}
module Database.Relational.Schema.Oracle
( normalizeColumn, notNull, getType
, columnsQuerySQL, primaryKeyQuerySQL
) where
import Control.Applicative ((<|>), (<$>))
import Data.ByteString (ByteString)
import Data.Char (toLower)
import Data.Map (Map)
import qualified Data.Map as Map
import Data.Time (LocalTime)
import Language.Haskell.TH (TypeQ)
import Database.Relational.Query
import Database.Relational.Schema.OracleDataDictionary.ConsColumns (dbaConsColumns)
import qualified Database.Relational.Schema.OracleDataDictionary.ConsColumns as ConsCols
import Database.Relational.Schema.OracleDataDictionary.Constraints (dbaConstraints)
import qualified Database.Relational.Schema.OracleDataDictionary.Constraints as Cons
import Database.Relational.Schema.OracleDataDictionary.TabColumns (DbaTabColumns, dbaTabColumns)
import qualified Database.Relational.Schema.OracleDataDictionary.TabColumns as Cols
-- NOT COMPLETED
-- (ref: http://docs.oracle.com/cd/B28359_01/server.111/b28318/datatype.htm)
-- | Mapping between type in Oracle DB and Haskell type.
mapFromSqlDefault :: Map String TypeQ
mapFromSqlDefault = Map.fromList
[ ("CHAR", [t|String|])
, ("VARCHAR", [t|String|]) -- deprecated
, ("VARCHAR2", [t|String|])
, ("NCHAR", [t|String|])
, ("NVARCHAR2", [t|String|])
-- , ("NUMBER", [t|Integer or Double|]) see 'getType'
, ("BINARY_FLOAT", [t|Double|]) -- Float don't work
, ("BINARY_DOUBLE", [t|Double|])
, ("DATE", [t|LocalTime|])
, ("BLOB", [t|ByteString|])
, ("CLOB", [t|String|])
, ("NCLOB", [t|String|])
, ("LONG RAW", [t|ByteString|]) -- deprecated
, ("RAW", [t|ByteString|])
, ("ROWID", [t|String|])
, ("UROWID", [t|String|])
]
-- | Normalize column name string to query Oracle DB data dictionary.
normalizeColumn :: String -> String
normalizeColumn = map toLower
-- | Not-null attribute information of column.
notNull :: DbaTabColumns -> Bool
notNull = (== Just "N") . Cols.nullable
-- | Get column normalized name and column Haskell type.
getType :: Map String TypeQ -- ^ Type mapping specified by user
-> DbaTabColumns -- ^ Column info in data dictionary
-> Maybe (String, TypeQ) -- ^ Result normalized name and mapped Haskell type
getType mapFromSql cols = do
key <- Cols.dataType cols
typ <- if key == "NUMBER"
then return $ numberType $ Cols.dataScale cols
else Map.lookup key mapFromSql <|> Map.lookup key mapFromSqlDefault
return (normalizeColumn $ Cols.columnName cols, mayNull typ)
where
mayNull typ
| notNull cols = typ
| otherwise = [t|Maybe $(typ)|]
numberType Nothing = [t|Integer|]
numberType (Just n)
| n <= 0 = [t|Integer|]
| otherwise = [t|Double|]
-- | 'Relation' to query 'DbaTabColumns' from owner name and table name.
columnsRelationFromTable :: Relation (String, String) DbaTabColumns
columnsRelationFromTable = relation' $ do
cols <- query dbaTabColumns
(owner, ()) <- placeholder $ \owner ->
wheres $ cols ! Cols.owner' .=. owner
(name, ()) <- placeholder $ \name ->
wheres $ cols ! Cols.tableName' .=. name
asc $ cols ! Cols.columnId'
return (owner >< name, cols)
-- | Phantom typed 'Query' to get 'DbaTabColumns' from owner name and table name.
columnsQuerySQL :: Query (String, String) DbaTabColumns
columnsQuerySQL = relationalQuery columnsRelationFromTable
-- | 'Relation' to query primary key name from owner name and table name.
primaryKeyRelation :: Relation (String, String) (Maybe String)
primaryKeyRelation = relation' $ do
cons <- query dbaConstraints
cols <- query dbaTabColumns
consCols <- query dbaConsColumns
wheres $ cons ! Cons.owner' .=. just (cols ! Cols.owner')
wheres $ cons ! Cons.tableName' .=. cols ! Cols.tableName'
wheres $ consCols ! ConsCols.columnName' .=. just (cols ! Cols.columnName')
wheres $ cons ! Cons.constraintName' .=. consCols ! ConsCols.constraintName'
wheres $ cols ! Cols.nullable' .=. just (value "N")
wheres $ cons ! Cons.constraintType' .=. just (value "P")
(owner, ()) <- placeholder $ \owner ->
wheres $ cons ! Cons.owner' .=. just owner
(name, ()) <- placeholder $ \name ->
wheres $ cons ! Cons.tableName' .=. name
asc $ consCols ! ConsCols.position'
return (owner >< name, consCols ! ConsCols.columnName')
-- | Phantom typed 'Query' to get primary key name from owner name and table name.
primaryKeyQuerySQL :: Query (String, String) (Maybe String)
primaryKeyQuerySQL = relationalQuery primaryKeyRelation
| amutake/haskell-relational-record-driver-oracle | src/Database/Relational/Schema/Oracle.hs | bsd-3-clause | 4,619 | 0 | 13 | 848 | 1,192 | 696 | 496 | 85 | 3 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveFunctor #-}
{-# LANGUAGE PolyKinds #-}
{-# OPTIONS_HADDOCK not-home #-}
module Servant.API.Header (
Header(..),
) where
import Data.ByteString (ByteString)
import Data.Typeable (Typeable)
import GHC.TypeLits (Symbol)
-- | Extract the given header's value as a value of type @a@.
--
-- Example:
--
-- >>> newtype Referer = Referer Text deriving (Eq, Show)
-- >>>
-- >>> -- GET /view-my-referer
-- >>> type MyApi = "view-my-referer" :> Header "from" Referer :> Get '[JSON] Referer
data Header (sym :: Symbol) a = Header a
| MissingHeader
| UndecodableHeader ByteString
deriving (Typeable, Eq, Show, Functor)
-- $setup
-- >>> import Servant.API
-- >>> import Data.Aeson
-- >>> import Data.Text
| zerobuzz/servant | servant/src/Servant/API/Header.hs | bsd-3-clause | 913 | 0 | 6 | 256 | 108 | 73 | 35 | 14 | 0 |
import Criterion.Main
import qualified Data.Conduit as C
import qualified Data.Conduit.List as CL
import qualified Data.Conduit.Binary as CB
import Control.Pipe.Binary
import Control.Pipe.Combinators
import Control.Pipe
testFile :: FilePath
testFile = "bench/general.hs"
main :: IO ()
main = defaultMain
[ bench "bigsum-pipes" (whnfIO $ runPipe $ (mapM_ yield [1..1000 :: Int] >> return 0) >+> fold (+) 0)
, bench "bigsum-conduit" (whnfIO $ C.runResourceT $ CL.sourceList [1..1000 :: Int] C.$$ CL.fold (+) 0)
, bench "fileread-pipes" (whnfIO $ runPipe $ fileReader testFile >+> discard)
, bench "fileread-conduit" (whnfIO $ C.runResourceT $ CB.sourceFile testFile C.$$ CL.sinkNull)
, bench "map-pipes" (whnfIO $ runPipe $ (mapM_ yield [1..1000 :: Int] >> return 0) >+> pipe (+1) >+> fold (+) 0)
, bench "map-conduit" (whnfIO $ C.runResourceT $ CL.sourceList [1..1000 :: Int] C.$= CL.map (+ 1) C.$$ CL.fold (+) 0)
]
| pcapriotti/pipes-extra | bench/general.hs | bsd-3-clause | 946 | 0 | 15 | 165 | 367 | 201 | 166 | 17 | 1 |
{-# LANGUAGE NoMonomorphismRestriction #-}
module MiniMarkdown where
import Text.ParserCombinators.Parsec (Parser)
import Text.Parsec
import Control.Applicative (pure, liftA2, (*>),(<*),(<*>),(<$>))
import Data.List (intersperse)
import Debug.Trace
data Token = Head (Int, String)
| Par [Token]
| Bold String
| Emph String
| ICode String
| BCode [Token]
| Url (String, String)
| List [Token]
| MString String
| Blockquote [Token]
| Hrule
| EmptyLine
| LeadingWs
| Ws
| LeadingEmptyLine
| InlineNL
| EndPar
| Emptym
deriving Show
--instance Show Token where
htmlshow (Bold x) = wrap "strong" x
htmlshow (Emph x) = wrap "em" x
htmlshow (MString x) = x
htmlshow (Head (l, x)) = wrap ("h" ++ show l) (namedWrap "a" "name" x x)
htmlshow (Url (l, x)) = href x l
htmlshow (List x) = wrap "ul" $ wrap "li" $ concat $ map htmlshow x
htmlshow (ICode x) = wrap "code" x
htmlshow (BCode x) = wrap "pre" $ wrap "code" $ concat $
intersperse "\n" $ map htmlshow x
htmlshow (Par xs) = wrap "p" $ concat $ map htmlshow xs
htmlshow Hrule = "<hr />"
-- Empty Elems
htmlshow Ws = " "
htmlshow EndPar = ""
htmlshow Emptym = ""
htmlshow EmptyLine = ""
htmlshow InlineNL = " "
htmlshow LeadingWs = ""
htmlshow LeadingEmptyLine = ""
type HTML = String
type Markdown = String
wrap a b = concat ["<",a,">",b,"</",a,">"]
namedWrap a b c d = concat ["<", a," ",b,"=\"",c,"\">",d,"</",a,">"]
href a b = namedWrap "a" "href" a b
{- Block Parsers
-------------
Tries to find a markdown block; if none of the block parsers
is successful, all empty lines are consumed (handles \n at the
beginning or end) -}
parseMarkdownBlocks :: Parser [Token]
parseMarkdownBlocks = many $ choice (map try bParser)
where bParser = [wsp,lineOfWsp,swl,headl,hrule,list,bcode,par,endBlock]
-- consume whitespaces at beginning of line if less than 3
wsp :: Parser Token
wsp = do
upTo 3 ws (oneOf " \t") <|>
upTo 2 ws (oneOf "\t") <|>
upTo 1 ws (oneOf "\t")
return LeadingWs
lineOfWsp = do
many1 ws
try $ notFollowedBy (noneOf " \t")
lookAhead nl
return EmptyLine
upTo n p end = do
try $ count n p
notFollowedBy end
headl :: Parser Token
headl = Head <$> liftA2 (,) numB cont
where numB = fmap length (many1 (char '#') <* many1 space)
cont = many (noneOf "\n") <* nl
list :: Parser Token
list = List <$> between open close parseMarkdownInline
where open = (oneOf "*-+") <* space
close = lookAhead $ try endBlock
hrule :: Parser Token
hrule = rule '*' <|> rule '-' <|> rule '_'
where rule :: Char -> Parser Token
rule x = do
count 3 (char x)
many (char x) <* nl
return Hrule
bcode :: Parser Token
bcode = fmap BCode (blockCode <|> blockCode2)
blockCode :: Parser [Token]
blockCode = between del del (many purestring)
where del = (count 3 (char '~')) <* nl
blockCode2 :: Parser [Token]
blockCode2 = between open close (many purestring)
where open = try (string " ") <|> try (string "\t") -- NOTE how to throw a Failure
close = do
try $ nl
(notFollowedBy (string " ") <|> notFollowedBy (string "\t"))
return " "
-- Parse a string skipping initial ws including \n
purestring :: Parser Token
purestring = do
notFollowedBy $ string "~~~\n"
optional (many1 (oneOf " \t"))
cnt <- many1Till (noneOf "\n") nl
return $ MString cnt
par :: Parser Token
par = Par <$> parseMarkdownInline <* try endBlock
endBlock :: Parser Token
endBlock = string "\n\n" *> nl *> return EndPar
swl :: Parser Token
swl = nl *> notFollowedBy ws *> return LeadingEmptyLine
anyTillEndBlock :: Parser String
anyTillEndBlock = anyTill endBlock
anyTill :: Parser Token -> Parser String
anyTill close = many1Till anyChar $ lookAhead $ try $ close
-- optEOL = optional (many nl)
eol :: Parser Token
eol = nl *> pure Emptym
nl :: Parser Char
nl = char '\n'
ws :: Parser Char
ws = char ' '
{- Inline Parser -}
parseMarkdownInline :: Parser [Token]
parseMarkdownInline = many1 (markdownInline <|> mstring)
markdownInline = choice (map try [url, emph, bold, iCode, ws', inlineNL])
ws' :: Parser Token
ws' = do
try $ many1 $ oneOf " \t"
lookAhead $ try $ noneOf " \t"
return Ws
inlineNL :: Parser Token
inlineNL = nl *> notFollowedBy nl *> return InlineNL
url :: Parser Token
url = Url <$> liftA2 (,) name url
where name = between open close $ delChar "]"
where open = char '[' <* lookAhead (noneOf " *")
close = char ']'
url = between open close $ delChar ")"
where open = char '(' <* lookAhead (noneOf " *")
close = char ')'
delChar :: String -> Parser String
delChar x = many (noneOf ("\n" ++ x))
emph :: Parser Token
emph = Emph <$> (between open del $ delChar "*")
where open = del <* lookAhead (noneOf " *")
del = char '*'
iCode = ICode <$> (between del del $ delChar "`")
where del = char '`'
bold :: Parser Token
bold = Bold <$> (between open del $ delChar "*")
where open = del <* notFollowedBy space
del = count 2 $ char '*'
mstring :: Parser Token
mstring = MString <$> many1Till (noneOf " \t\n") newToken
where newToken = lookAhead markdownInline <|> lookAhead eol
many1Till p end = do
notFollowedBy end
p1 <- p
ps <- manyTill p end
return (p1:ps)
{- Parser -}
parseMarkdown :: String -> Either ParseError [Token]
parseMarkdown inp = parse parseMarkdownBlocks "(unknown)" inp
parse2HTML inp = do
res <- parseMarkdown inp
let res' = concat $ map htmlshow res
return res'
writeHTMLString :: Markdown -> HTML
writeHTMLString inp = stripEither $ parse2HTML (inp ++ "\n\n")
stripEither (Right x) = x
stripEither (Left x) = show x
| greole/calendula | MiniMarkdown.hs | bsd-3-clause | 6,218 | 0 | 13 | 1,835 | 2,151 | 1,091 | 1,060 | -1 | -1 |
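A one-line sketch of driving the MiniMarkdown entry point above; the HTML in the trailing comment is the expected shape of the output (worked out from htmlshow), not a verified golden result.

-- Illustrative use of the parser above.
import MiniMarkdown (writeHTMLString)

demoMd :: IO ()
demoMd = putStrLn $ writeHTMLString "# Title\nSome *emphasised* and **bold** text\n"
-- roughly: <h1><a name="Title">Title</a></h1><p>Some <em>emphasised</em> and <strong>bold</strong> text</p>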
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE DeriveFunctor #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE OverloadedStrings #-}
module RazorsLambda.Interactive where
import Control.Monad
import Control.Monad.Free
import Control.Monad.Free.TH
import qualified Data.Map as Map
import Data.Text (Text)
import qualified Text.PrettyPrint.ANSI.Leijen as ANSI
import Text.Trifecta.Result hiding (Result)
import RazorsLambda.AST
import RazorsLambda.Eval
import RazorsLambda.Parser
import RazorsLambda.TypeCheck
type IEnv = (TCEnv, EvalEnv)
iEmptyEnv :: IEnv
iEmptyEnv = (Map.empty, Map.empty)
data Interactive r
= IGetInput (Text -> r)
| ILoadFile FilePath (Text -> r)
| IParseCommand Text (Command -> r)
| IReportResult Result r
| IGetEnv (IEnv -> r)
| ISetEnv IEnv r
| IExit
deriving (Functor)
data UnknownCommand = UnknownCommand Text
deriving (Show)
data Command
= CEvalExpr Text
| CTypeOf Text
| CLoadFile FilePath
| CExit
| CUnknown Text
| CEmpty
data Result
= RValue Value
| RType Type
| RUnknownCommand Text
| RParseError ANSI.Doc
| REvalError EvalError
| RTCError TCError
| REmpty
$(makeFree ''Interactive)
repl :: Free Interactive ()
repl = forever $ do
l <- iGetInput
c <- iParseCommand l
iReportResult =<< runCmd c
runCmd :: Command -> Free Interactive Result
runCmd c = do
(tcEnv, eEnv) <- iGetEnv
case c of
CEvalExpr txt ->
case parseExpr txt of
Failure err -> return (RParseError err)
Success e ->
case runTCIn tcEnv (typeCheckExpr e) of
Left err -> return (RTCError err)
Right _ ->
case runEvalIn eEnv (evalExpr e) of
Left err -> return (REvalError err)
Right (v, _) -> return (RValue v)
CTypeOf txt ->
case parseExpr txt of
Failure err -> return (RParseError err)
Success e ->
case runTCIn tcEnv (typeCheckExpr e) of
Left err -> return (RTCError err)
Right t -> return (RType t)
CLoadFile fp -> do
txt <- iLoadFile fp
case parseModule txt of
Failure err -> return (RParseError err)
Success m ->
case runTCIn tcEnv (typeCheckModule m) of
Left err -> return (RTCError err)
Right tcEnv' -> do
case runEvalIn eEnv (evalModule m) of
Left err -> return (REvalError err)
Right (eEnv', _) -> do
iSetEnv (tcEnv', eEnv')
return REmpty
CExit -> iExit
CEmpty -> return REmpty
CUnknown cmd -> return (RUnknownCommand cmd)
| acfoltzer/RazorsLambda | src/RazorsLambda/Interactive.hs | bsd-3-clause | 2,642 | 0 | 26 | 738 | 825 | 423 | 402 | 89 | 14 |
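The Interactive functor above only describes effects; one plausible console interpreter folds it into IO roughly as below. This is a sketch that assumes foldFree from Control.Monad.Free, keeps the IEnv in an IORef, and stubs out command parsing and result rendering (runCmd and a real prompt would replace the stubs).

-- A rough console interpreter for the Interactive DSL above; not part of the package.
import Control.Monad.Free (Free, foldFree)
import Data.IORef
import qualified Data.Text.IO as T
import System.Exit (exitSuccess)
import RazorsLambda.Interactive

runConsole :: IORef IEnv -> Free Interactive a -> IO a
runConsole envRef = foldFree step
  where
    step :: Interactive x -> IO x
    step (IGetInput k)       = k <$> T.getLine
    step (ILoadFile fp k)    = k <$> T.readFile fp
    step (IParseCommand t k) = pure (k (CEvalExpr t))        -- stub: treat every line as an expression
    step (IReportResult _ r) = putStrLn "<result>" >> pure r -- stub rendering
    step (IGetEnv k)         = k <$> readIORef envRef
    step (ISetEnv env r)     = writeIORef envRef env >> pure r
    step IExit               = exitSuccess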
module Bindings.GObject (
module Bindings.GObject.BaseObjectType,
module Bindings.GObject.BoxedTypes,
module Bindings.GObject.Closures,
module Bindings.GObject.EnumerationAndFlagTypes,
module Bindings.GObject.GenericValues,
module Bindings.GObject.GParamSpec,
module Bindings.GObject.GTypeModule,
module Bindings.GObject.GTypePlugin,
module Bindings.GObject.ParametersAndValues,
module Bindings.GObject.Signals,
module Bindings.GObject.TypeInformation,
module Bindings.GObject.ValueArrays,
module Bindings.GObject.Varargs
) where
import Bindings.GObject.BaseObjectType
import Bindings.GObject.BoxedTypes
import Bindings.GObject.Closures
import Bindings.GObject.EnumerationAndFlagTypes
import Bindings.GObject.GenericValues
import Bindings.GObject.GParamSpec
import Bindings.GObject.GTypeModule
import Bindings.GObject.GTypePlugin
import Bindings.GObject.ParametersAndValues
import Bindings.GObject.Signals
import Bindings.GObject.TypeInformation
import Bindings.GObject.ValueArrays
import Bindings.GObject.Varargs
| Yuras/bindings-gobject | src/Bindings/GObject.hs | bsd-3-clause | 1,054 | 0 | 5 | 97 | 177 | 122 | 55 | 27 | 0 |
module GHCApi where
import Control.Monad
import CoreMonad
import DynFlags
import ErrMsg
import Exception
import GHC
import GHC.Paths (libdir)
import Types
----------------------------------------------------------------
withGHC :: (MonadPlus m) => Ghc (m a) -> IO (m a)
withGHC body = ghandle ignore $ runGhc (Just libdir) body
where
ignore :: (MonadPlus m) => SomeException -> IO (m a)
ignore _ = return mzero
----------------------------------------------------------------
initSession0 :: Options -> Ghc [PackageId]
initSession0 opt = getSessionDynFlags >>=
(>>= setSessionDynFlags) . setGhcFlags opt
initSession :: Options -> [String] -> [FilePath] -> Bool -> Ghc LogReader
initSession opt cmdOpts idirs logging = do
dflags <- getSessionDynFlags
let opts = map noLoc cmdOpts
(dflags',_,_) <- parseDynamicFlags dflags opts
(dflags'',readLog) <- liftIO . (>>= setLogger logging) . setGhcFlags opt . setFlags dflags' $ idirs
setSessionDynFlags dflags''
return readLog
----------------------------------------------------------------
setFlags :: DynFlags -> [FilePath] -> DynFlags
setFlags d idirs = d'
where
d' = d {
packageFlags = ghcPackage : packageFlags d
, importPaths = idirs
, ghcLink = NoLink
, hscTarget = HscInterpreted
}
ghcPackage :: PackageFlag
ghcPackage = ExposePackage "ghc"
setGhcFlags :: Monad m => Options -> DynFlags -> m DynFlags
setGhcFlags opt flagset =
do (flagset',_,_) <- parseDynamicFlags flagset (map noLoc (ghcOpts opt))
return flagset'
----------------------------------------------------------------
setTargetFile :: (GhcMonad m) => String -> m ()
setTargetFile file = do
target <- guessTarget file Nothing
setTargets [target]
| himura/ghc-mod | GHCApi.hs | bsd-3-clause | 1,760 | 0 | 13 | 320 | 526 | 272 | 254 | 41 | 1 |
{-# LANGUAGE TypeFamilies #-}
-----------------------------------------------------------------------------
-- |
-- Module : Geometry.ThreeD.Combinators
-- Copyright : (c) 2013-2017 diagrams team (see LICENSE)
-- License : BSD-style (see LICENSE)
-- Maintainer : [email protected]
--
-- Alignment combinators specialized for three dimensions. See
-- "Geometry.Combinators" for more general alignment combinators.
--
-- The basic idea is that alignment is achieved by moving objects'
-- local origins relative to their envelopes or traces (or some other
-- sort of boundary). For example, to align several objects along
-- their tops, we first move their local origins to the upper edge of
-- their boundary (using e.g. @map 'alignZMax'@), and then put them
-- together with their local origins along a line (using e.g. 'cat'
-- from "Geometry.Combinators").
--
-----------------------------------------------------------------------------
module Geometry.ThreeD.Combinators
( -- * Absolute alignment
-- ** Align by envelope
alignXMin, alignXMax, alignYMin, alignYMax, alignZMin, alignZMax
-- ** Align by trace
, snugXMin, snugXMax, snugYMin, snugYMax, snugZMin, snugZMax
-- * Relative alignment
, alignX, snugX, alignY, snugY, alignZ, snugZ
-- * Centering
, centerX, centerY, centerZ
, centerXY, centerXZ, centerYZ, centerXYZ
, snugCenterX, snugCenterY, snugCenterZ
) where
import Geometry.Combinators
import Geometry.Envelope
import Geometry.Space
import Geometry.ThreeD.Types
import Geometry.ThreeD.Vector
import Geometry.Trace
import Geometry.Transform
import Geometry.TwoD.Combinators
-- | Translate the object along @unitX@ so that all points have
-- positive x-values.
alignXMin :: (InSpace v n a, R1 v, Enveloped a, HasOrigin a) => a -> a
alignXMin = align unit_X
snugXMin :: (InSpace v n a, R1 v, Enveloped a, HasOrigin a, Traced a) => a -> a
snugXMin = snug unit_X
-- | Translate the object along @unitX@ so that all points have
-- negative x-values.
alignXMax :: (InSpace v n a, R1 v, Enveloped a, HasOrigin a) => a -> a
alignXMax = align unitX
snugXMax :: (InSpace v n a, R1 v, Enveloped a, HasOrigin a, Traced a) => a -> a
snugXMax = snug unitX
-- | Translate the object along @unitY@ so that all points have
-- positive y-values.
alignYMin :: (InSpace v n a, R2 v, Enveloped a, HasOrigin a) => a -> a
alignYMin = align unit_Y
snugYMin :: (InSpace v n a, R2 v, Enveloped a, HasOrigin a, Traced a) => a -> a
snugYMin = snug unit_Y
-- | Translate the object along @unitY@ so that all points have
-- negative y-values.
alignYMax :: (InSpace v n a, R2 v, Enveloped a, HasOrigin a) => a -> a
alignYMax = align unitY
snugYMax :: (InSpace v n a, R2 v, Enveloped a, HasOrigin a, Traced a) => a -> a
snugYMax = snug unitY
-- | Translate the object along @unitZ@ so that all points have
-- positive z-values.
alignZMin :: (InSpace v n a, R3 v, Enveloped a, HasOrigin a) => a -> a
alignZMin = align unit_Z
snugZMin :: (InSpace v n a, R3 v, Enveloped a, HasOrigin a, Traced a) => a -> a
snugZMin = snug unit_Z
-- | Translate the object along @unitZ@ so that all points have
-- negative z-values.
alignZMax :: (InSpace v n a, R3 v, Enveloped a, HasOrigin a) => a -> a
alignZMax = align unitZ
snugZMax :: (InSpace v n a, R3 v, Enveloped a, HasOrigin a, Traced a) => a -> a
snugZMax = snug unitZ
-- | Like 'alignX', but moving the local origin in the Z direction, with an
-- argument of @1@ corresponding to the top edge and @(-1)@ corresponding
-- to the bottom edge.
alignZ :: (InSpace v n a, R3 v, Enveloped a, HasOrigin a) => n -> a -> a
alignZ = alignBy unitZ
-- | See the documentation for 'alignZ'.
snugZ :: (InSpace v n a, Enveloped a, Traced a, HasOrigin a, R3 v) => n -> a -> a
snugZ = snugBy unitZ
-- | Center the local origin along the Z-axis.
centerZ :: (InSpace v n a, R3 v, Enveloped a, HasOrigin a) => a -> a
centerZ = alignBy unitZ 0
snugCenterZ :: (InSpace v n a, R3 v, Enveloped a, HasOrigin a, Traced a) => a -> a
snugCenterZ = snugBy unitZ 0
-- | Center along both the X- and Z-axes.
centerXZ :: (InSpace v n a, R3 v, Enveloped a, HasOrigin a) => a -> a
centerXZ = centerX . centerZ
-- | Center along both the Y- and Z-axes.
centerYZ :: (InSpace v n a, R3 v, Enveloped a, HasOrigin a) => a -> a
centerYZ = centerZ . centerY
-- | Center an object in three dimensions.
centerXYZ :: (InSpace v n a, R3 v, Enveloped a, HasOrigin a) => a -> a
centerXYZ = centerX . centerY . centerZ
| cchalmers/geometry | src/Geometry/ThreeD/Combinators.hs | bsd-3-clause | 4,587 | 0 | 7 | 945 | 1,160 | 644 | 516 | 55 | 1 |
{-# LANGUAGE NoMonomorphismRestriction #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE ScopedTypeVariables #-}
module Main (main) where
import Control.Concurrent
--import Codec.Text.Detect (detectEncodingName)
import qualified Data.ByteString as B
import qualified Data.ByteString.Lazy as L
import qualified Data.Text as T
import qualified Data.Text.IO as T
import qualified Data.Text.Encoding as T
import qualified Data.Text.Encoding.Error as T
import Debug.Trace (trace)
import System.Directory
import System.Environment
import System.FilePath
import System.Process
import Test.QuickCheck
import Language.CSharp.Arbitrary
import Language.CSharp.Lexer
import Language.CSharp.Parser
import Language.CSharp.Pretty
------------------------------------------------------------------------
-- QuickCheck
main :: IO ()
main = do
result <- quickCheckWithResult args prop_roundtrip
putStrLn $ "replay = read \"Just ("
++ show (usedSeed result) ++ ", "
++ show (usedSize result) ++ ")\""
where
args = stdArgs
{ maxSuccess = 1000
, maxSize = 3
, chatty = False
, replay = read "Just (969080827 2147483396, 2)"
}
prop_roundtrip (ast :: CompilationUnit) =
case parseCSharp sourceName tokens of
Left e -> trace (source ++ "\n" ++ show e) False
Right a -> a == ast
where
source = render' ast
sourceName = "[Generated C#]"
tokens = lexer sourceName (T.pack source)
------------------------------------------------------------------------
-- Parsing external files
{-
main' :: IO ()
main' = do
args <- getArgs
fs <- getFiles (if null args then "." else head args)
ts <- mapM analyzeFile fs
putStrLn $ "Lexed " ++ show (sum ts) ++ " tokens"
getFiles :: FilePath -> IO [FilePath]
getFiles path | csharp = return [path]
| otherwise = do
isDir <- doesDirectoryExist path
if not isDir
then return []
else do
fs <- getDirectoryContents path
fs' <- mapM getFiles (map prefix $ filter isReal fs)
return (concat fs')
where
csharp = takeExtension path == ".cs"
prefix = (path </>)
isReal "." = False
isReal ".." = False
isReal _ = True
analyzeFile :: FilePath -> IO Int
analyzeFile path = {-# SCC "analyze" #-} do
bs <- B.readFile path
let encoding = {-# SCC "detect" #-} detect bs
text = {-# SCC "decode" #-} decode' encoding bs
tokens = {-# SCC "lexer" #-} lexer path text
case {-# SCC "parse" #-} parseCSharp path tokens of
Left err -> error (show err)
Right xs -> writeFile path' $ render' xs ++ "\n"
putStrLn $ file ++ ": " ++ show (length tokens)
++ " tokens (" ++ encoding ++ ")"
let diff = "diff -sBI '\\w*//' -I '\\w*#'"
putStrLn $ diff ++ " " ++ file ++ " " ++ file'
pid <- runCommand $ diff ++ " " ++ path ++ " " ++ path'
waitForProcess pid
return (length tokens)
where
file = takeFileName path
path' = replaceExtension path ".roundtrip.cs"
file' = takeFileName path'
detect bs = case detectEncoding bs of
Nothing -> error $ file ++ ": could not detect encoding"
Just x -> x
decode' enc bs = case decode enc bs of
Nothing -> error $ file ++ ": " ++ enc ++ " is not a supported encoding"
Just x -> x
detectEncoding :: B.ByteString -> Maybe String
detectEncoding bs = detectEncodingName $ L.fromChunks [bs]
decode :: String -> B.ByteString -> Maybe T.Text
decode "UTF-8" = Just . T.decodeUtf8With T.lenientDecode
decode "UTF-16LE" = Just . T.decodeUtf16LE
decode "UTF-16BE" = Just . T.decodeUtf16BE
decode "UTF-32LE" = Just . T.decodeUtf32LE
decode "UTF-32BE" = Just . T.decodeUtf32BE
decode "ASCII" = Just . T.decodeASCII
decode "windows-1252" = Just . T.decodeASCII
decode _ = const Nothing
------------------------------------------------------------------------
forkJoin :: (a -> IO b) -> [a] -> IO [b]
forkJoin f xs = (fork f xs) >>= join
join :: [MVar b] -> IO [b]
join = mapM takeMVar
fork :: (a -> IO b) -> [a] -> IO [MVar b]
fork f = mapM (fork1 f)
fork1 :: (a -> IO b) -> a -> IO (MVar b)
fork1 f x = do
cell <- newEmptyMVar
forkIO $ do { result <- f x; putMVar cell $! result }
return cell
-}
| jystic/language-csharp | src/Main.hs | bsd-3-clause | 4,467 | 0 | 13 | 1,211 | 337 | 198 | 139 | 39 | 2 |
-- | This module provides the type and the rules for building the computation graph.
-- Refinement and evaluation steps depend on the (abstract) semantics.
-- Strategies for finding instances are should be implemented here.
module Jat.CompGraph
(
MkJGraph
, JGraph
, mkJGraph
, mkJGraph2Dot
, mkJGraph2TRS
, mkJGraphIO
, simplifySCC
)
where
import Jat.Constraints (PAFun, PAVar, PATerm, isTop)
import Jat.JatM
import Jat.PState
import Jat.Utils.Dot
import Jat.Utils.Fun
import Jat.Utils.Pretty as PP
import qualified Jinja.Program as P
import Data.Rewriting.Rule (Rule (..))
import Control.Monad.State (liftIO)
import Data.Graph.Inductive as Gr
import Data.GraphViz.Types.Canonical
import Data.Maybe (fromMaybe)
import System.IO (hFlush,stdout)
import qualified Control.Exception as E
import qualified Data.GraphViz.Attributes.Complete as GV
import qualified Data.Text.Lazy as T
import Data.List ((\\))
--import Debug.Trace
-- finding instance/merge nodes for backjumps
-- assumptions:
-- * code is unstructured, i.e. decomposition in general not possible
-- * if there are multiple candidates then one is the predecessor of the other one
-- i.e. c1 ->n -> c2 ->n e is valid but c1 ->n e, c2 ->n e, c1 /->n c2, c2 /-> c1 not
-- then the predecessor is defined wrt. the transitivity relation
-- finding a candidate reduces to (predc) topsort if we do not allow merging of nodes stemming from different back jump points
data ELabel =
EvaluationLabel PATerm P.Instruction
| InstanceLabel
| RefinementLabel PATerm
deriving Show
instance Pretty ELabel where
pretty (EvaluationLabel fm ins) = pretty fm <> colon <> pretty ins
pretty (RefinementLabel fm) = pretty fm
pretty l = text $ show l
type NLabel i a = PState i a
type JGraph i a = Gr (NLabel i a) ELabel
-- | A 'JContext' corresponds to a (non-terminal) leaf node.
type JContext i a = Context (NLabel i a) ELabel
-- | The type of the compuation graph.
data MkJGraph i a = MkJGraph (JGraph i a) [JContext i a]
-- | Builds the computation graph, given class name and method name.
mkJGraph :: (Monad m, IntDomain i, MemoryModel a) => P.ClassId -> P.MethodId -> JatM m (MkJGraph i a)
mkJGraph cn mn = mkInitialNode cn mn >>= mkSteps
mkInitialNode :: (Monad m, IntDomain i, MemoryModel a) => P.ClassId -> P.MethodId -> JatM m (MkJGraph i a)
mkInitialNode cn mn = do
k <- freshKey
st <- mkInitialState cn mn
let ctx = ([],k,st,[])
g = ctx & Gr.empty
return $ MkJGraph g [ctx]
mkSteps :: (Monad m, IntDomain i, MemoryModel a) => MkJGraph i a -> JatM m (MkJGraph i a)
mkSteps mg@(MkJGraph _ []) = return mg
mkSteps mg = mkStep mg >>= mkSteps
-- a single step constitues of
-- * finding a merge node
-- * if non can be found make an evaluation step
mkStep :: (Monad m, IntDomain i, MemoryModel a) => MkJGraph i a -> JatM m (MkJGraph i a)
mkStep (MkJGraph g (ctx:ctxs)) | isTerminal' ctx = return $ MkJGraph g ctxs
mkStep g = tryLoop g |>> mkEval g
state' :: JContext i a -> PState i a
state' = lab'
instruction' :: P.Program -> JContext i a -> P.Instruction
instruction' p = instruction p . state'
isTerminal' :: JContext i a -> Bool
isTerminal' (_,_,st,s) = null s && isTerminal st
isTarget' :: Monad m => JContext i a -> JatM m Bool
isTarget' = isTarget . state'
isSimilar' :: JContext i a -> JContext i a -> Bool
isSimilar' ctx1 ctx2 = isSimilar (state' ctx1) (state' ctx2)
leq' :: (Monad m, IntDomain i, MemoryModel a) => JContext i a -> JContext i a -> JatM m Bool
leq' ctx1 ctx2 = getProgram >>= \p -> return $ leq p (state' ctx1) (state' ctx2)
join' :: (Monad m, IntDomain i, MemoryModel a) => JContext i a -> JContext i a -> JatM m (PState i a)
join' ctx1 ctx2 = lub (state' ctx1) (state' ctx2)
-- if a similar predecessor can be found then try to make an instance node, if not possible join the nodes
tryLoop :: (Monad m, IntDomain i, MemoryModel a) => MkJGraph i a -> JatM m (Maybe (MkJGraph i a))
tryLoop (MkJGraph _ []) = error "Jat.CompGraph.tryInstance: empty context."
tryLoop mg@(MkJGraph g (ctx:_)) = do
b <- isTarget' ctx
if b then eval candidate else return Nothing
where
eval Nothing = return Nothing
eval (Just n) = Just `liftM` (tryInstance nctx mg |>> (mkJoin nctx mg >>= mkEval))
where nctx = context g n
candidate = rdfsnWith (condition ctx) (pre' ctx) g
condition ctx1 ctx2 =
isSimilar' ctx1 ctx2 && null [ undefined | (_,_,RefinementLabel _) <- inn' ctx2]
-- perform a backwards dfs wrt to minimum predecessor
-- if nodes are not merge nodes they have only one predecessor
-- if the node is a merge node then we follow the node with minimum key
rdfsnWith :: (JContext i a -> Bool) -> [Node] -> JGraph i a -> Maybe Node
rdfsnWith _ _ g | isEmpty g = Nothing
rdfsnWith _ [] _ = Nothing
rdfsnWith f (v:vs) g = case match v g of
(Just c , g') -> if f c then Just (node' c) else rdfsnWith f (predi c ++ vs) g'
(Nothing, g') -> rdfsnWith f vs g'
where
predi ctx = [ n | n <- minNode $ pre' ctx, n < node' ctx]
where
minNode [] = []
minNode l = [minimum l]
tryInstance :: (Monad m, IntDomain i, MemoryModel a) => JContext i a -> MkJGraph i a -> JatM m (Maybe (MkJGraph i a))
--tryInstance ctx2 (MkJGraph _ (ctx1:_)) | trace (">>> tryInstance: " ++ show (ctx2,ctx1)) False = undefined
tryInstance ctx2 mg@(MkJGraph _ (ctx1:_)) = do
b <- leq' ctx1 ctx2
if b then Just `liftM` mkInstanceNode ctx2 mg else return Nothing
tryInstance _ _ = return Nothing
mkInstanceNode :: Monad m => JContext i a -> MkJGraph i a -> JatM m (MkJGraph i a)
mkInstanceNode ctx2 (MkJGraph g (ctx1:ctxs)) = return $ MkJGraph g' ctxs
where g' = insEdge (node' ctx1, node' ctx2, InstanceLabel) g
mkInstanceNode _ _ = error "Jat.CompGraph.mkInstance: empty context."
-- make and integrate a join node
-- nodes and contexts are suitably removed
mkJoin :: (Monad m, IntDomain i, MemoryModel a) => JContext i a -> MkJGraph i a -> JatM m (MkJGraph i a)
--mkJoin ctx2 (MkJGraph _ (ctx1:_)) | trace (">>> mkJoin: \n" ++ show (ctx2,ctx1)) False = undefined
mkJoin ctx2 (MkJGraph g (ctx1:ctxs)) = do
k <- freshKey
st3 <- join' ctx1 ctx2
let edge = (InstanceLabel, node' ctx2)
ctx3 = ([edge],k,st3,[])
g1 = delNodes successors g
g2 = ctx3 & g1
{-g2 = ctx3 & g-}
return $ MkJGraph g2 (ctx3: filter (\lctx -> node' lctx `notElem` successors) ctxs)
{-return $ MkJGraph g2 (ctx3: ctxs)-}
where
ctxn = node' ctx2
successors = dfsUntil ((<= ctxn) . node') (suc' ctx2) g
mkJoin _ _ = error "Jat.CompGraph.mkInstance: empty context."
dfsUntil :: (JContext i a -> Bool) -> [Node] -> JGraph i a -> [Node]
dfsUntil _ _ g | isEmpty g = []
dfsUntil _ [] _ = []
dfsUntil f (v:vs) g = case match v g of
(Just c , g') -> if f c then dfsUntil f vs g' else node' c : dfsUntil f (suc' c ++ vs) g'
(Nothing, g') -> dfsUntil f vs g'
mkEval :: (Monad m, IntDomain i, MemoryModel a) => MkJGraph i a -> JatM m (MkJGraph i a)
mkEval mg@(MkJGraph _ (ctx:_)) = do
p <- getProgram
let st = state' ctx
ins = instruction' p ctx
step <- exec st
case step of
Evaluation e -> addNodes (flip EvaluationLabel ins) [e] mg
Refinement rs -> addNodes RefinementLabel rs mg
Abstraction a -> addNodes (const InstanceLabel) [a] mg
where
addNodes :: Monad m => (PATerm -> ELabel) -> [(PState i a, PATerm)] -> MkJGraph i a -> JatM m (MkJGraph i a)
addNodes label rs (MkJGraph g (origin:ctxs)) = foldM (addNode (node' origin)) (MkJGraph g ctxs) rs
where
addNode k1 (MkJGraph g1 ctxs1) (st,con) = do
k2 <- freshKey
let edge = (label con, k1)
ctx2 = ([edge],k2,st,[])
g2 = ctx2 & g1
return $ MkJGraph g2 (ctx2:ctxs1)
addNodes _ _ _ = error "Jat.CompGraph.mkEval: assertion error: unexpected case."
mkEval _ = error "Jat.CompGraph.mkEval: emtpy context."
-- | Returns the Dot representation of a constructed computation graph..
mkJGraph2Dot :: (Pretty a,IntDomain i) => MkJGraph i a -> DotGraph Int
mkJGraph2Dot (MkJGraph g ctxs) =
DotGraph {
strictGraph = True
, directedGraph = True
, graphID = Just (Str $ T.pack "g")
, graphStatements = DotStmts {
attrStmts = []
, subGraphs = []
, nodeStmts = map mkCNode (labNodes g) ++ map (mkCtxNode . labNode') ctxs
, edgeStmts = map mkCEdge $ labEdges g
}
}
where
mkCNode (k,st) =
DotNode {
nodeID = k
, nodeAttributes = [
GV.Label (GV.StrLabel . T.pack . display $ text "s" <> int k PP.<$> pretty st)
, GV.Shape GV.BoxShape
]
}
mkCtxNode (k,st) =
DotNode {
nodeID = -k
, nodeAttributes = [
GV.Label (GV.StrLabel . T.pack . display $ text "s" <> int k PP.<$> pretty st)
, GV.Shape GV.BoxShape
]
}
mkCEdge (k1,k2,l) =
DotEdge {
fromNode = k1
, toNode = k2
, edgeAttributes = [
GV.Label (GV.StrLabel . T.pack . display $ pretty l)
]
}
simplifySCC :: (IntDomain i, MemoryModel a) => MkJGraph i a -> MkJGraph i a
simplifySCC (MkJGraph gr ctx) = MkJGraph (Gr.delNodes (allnodes \\ sccnodes) gr) ctx
where
allnodes = Gr.nodes gr
sccnodes = concat . filter (not . trivial) $ Gr.scc gr
trivial [_] = True
trivial _ = False
-- | Returns pairs of rewrite rules and constraints of a constructed
-- computation graph.
mkJGraph2TRS :: (Monad m, IntDomain i, MemoryModel a) => MkJGraph i a ->
JatM m (JGraph i a, [(Rule PAFun PAVar, [PATerm])])
{-mkJGraph2TRS (MkJGraph gr _) | trace ("gr" ++ show (Gr.nodes gr)) False = undefined-}
mkJGraph2TRS (MkJGraph gr _) = do
p <- getProgram
rs <- mapM (rule p) ledges
return (gr,rs)
where
{-rule _ (k,k',_) | trace (show (k,k')) False = undefined-}
rule _ (k,k',InstanceLabel) = ruleM (tsl s s k) (tsr s s k') []
where s = lookupN k
rule _ (k,k',RefinementLabel _) = ruleM (tsl t t k) (tsr t t k') []
where t = lookupN k'
rule p (k,k',EvaluationLabel con _) =
case maybePutField p s of
Just q -> ruleM (tsl s s k) (tsStar q s t k') (mkCon con)
Nothing -> ruleM (tsl s s k) (tsr s t k') (mkCon con)
where s = lookupN k
t = lookupN k'
ruleM ms mt con = do
s <- ms
t <- mt
return (Rule {lhs = s, rhs = t}, con)
lnodes = labNodes gr
ledges = labEdges gr
lookupN k = errmsg `fromMaybe` lookup k lnodes
errmsg = error "Jat.CompGraph.mkGraph2TRS: unexpected key"
tsl = state2TRS Nothing LHS
tsr = state2TRS Nothing RHS
tsStar q = state2TRS (Just q) RHS
mkCon con = if isTop con then [] else [con]
-- Interactive
data Command = NSteps Int | Until Int | Run | Help | Exit deriving (Show, Read)
-- | Provides a simple prompt, allowing to analyze the construction of the
-- computation graph.
mkJGraphIO :: (IntDomain i, MemoryModel a) => P.ClassId -> P.MethodId -> JatM IO (MkJGraph i a)
mkJGraphIO cn mn = do
liftIO $ putStrLn ":> enter command: (Help to see the list of commands)"
mkInitialNode cn mn >>= mkJGraphPrompt
mkJGraphPrompt :: (IntDomain i, MemoryModel a) => MkJGraph i a -> JatM IO (MkJGraph i a)
mkJGraphPrompt mg@(MkJGraph _ []) = do
liftIO $ writeFile "gr.dot" (dot2String $ mkJGraph2Dot mg)
liftIO $ putStrLn "fin"
return mg
mkJGraphPrompt mg@(MkJGraph g _) = do
liftIO $ writeFile "gr.dot" (dot2String $ mkJGraph2Dot mg)
liftIO $ print g
liftIO $ putStr ">: "
liftIO $ hFlush stdout
ecmd <- liftIO parseCmd
case ecmd of
Left _ -> mkJGraphPrompt mg
Right cmd -> case cmd of
NSteps n -> mkNStepsIO n mg
Until n -> mkUStepsIO n mg
Run -> mkStepsIO mg
Help -> do
liftIO $ putStrLn "NSteps int, Until pc, Run, Help, Exit"
mkJGraphPrompt mg
Exit -> return mg
where
parseCmd = do
cmd <- liftIO getLine
E.try (E.evaluate (read cmd :: Command)) :: IO (Either E.SomeException Command)
mkNStepsIO :: (IntDomain i, MemoryModel a) => Int -> MkJGraph i a -> JatM IO (MkJGraph i a)
mkNStepsIO _ mg@(MkJGraph _ []) = mkJGraphPrompt mg
mkNStepsIO n mg | n < 1 = mkJGraphPrompt mg
mkNStepsIO n mg = mkStep mg >>= mkNStepsIO (n-1)
mkUStepsIO :: (IntDomain i, MemoryModel a) => Int -> MkJGraph i a -> JatM IO (MkJGraph i a)
mkUStepsIO _ _ = undefined
{-mkUStepsIO _ mg@(MkJGraph _ []) = mkJGraphPrompt mg-}
{-mkUStepsIO n mg | n == (pc .frm . state' . context mg) = mkJGraphPrompt mg-}
{-mkUStepsIO n mg = mkStep mg >>= mkNStepsIO (n-1)-}
mkStepsIO :: (IntDomain i, MemoryModel a) => MkJGraph i a -> JatM IO (MkJGraph i a)
mkStepsIO mg@(MkJGraph _ []) = mkJGraphPrompt mg
mkStepsIO mg = mkStep mg >>= mkStepsIO
| ComputationWithBoundedResources/jat | src/Jat/CompGraph.hs | bsd-3-clause | 13,058 | 0 | 17 | 3,368 | 4,575 | 2,341 | 2,234 | 236 | 6 |
{-# LANGUAGE FlexibleContexts, DeriveDataTypeable #-}
module Aws.Xml
where
import Aws.Response
import Control.Monad.IO.Class
import Data.Attempt (Attempt(..))
import Data.Conduit (($$))
import Data.IORef
import Data.Monoid
import Data.Typeable
import Text.XML.Cursor
import qualified Control.Exception as E
import qualified Control.Failure as F
import qualified Data.Conduit as C
import qualified Data.Text as T
import qualified Text.XML.Cursor as Cu
import qualified Text.XML as XML
newtype XmlException = XmlException { xmlErrorMessage :: String }
deriving (Show, Typeable)
instance E.Exception XmlException
elContent :: T.Text -> Cursor -> [T.Text]
elContent name = laxElement name &/ content
elCont :: T.Text -> Cursor -> [String]
elCont name = laxElement name &/ content &| T.unpack
force :: F.Failure XmlException m => String -> [a] -> m a
force = Cu.force . XmlException
forceM :: F.Failure XmlException m => String -> [m a] -> m a
forceM = Cu.forceM . XmlException
textReadInt :: (F.Failure XmlException m, Num a) => T.Text -> m a
textReadInt s = case reads $ T.unpack s of
[(n,"")] -> return $ fromInteger n
_ -> F.failure $ XmlException "Invalid Integer"
readInt :: (F.Failure XmlException m, Num a) => String -> m a
readInt s = case reads s of
[(n,"")] -> return $ fromInteger n
_ -> F.failure $ XmlException "Invalid Integer"
xmlCursorConsumer ::
(Monoid m)
=> (Cu.Cursor -> Response m a)
-> IORef m
-> HTTPResponseConsumer a
xmlCursorConsumer parse metadataRef _status _headers source
= do doc <- source $$ XML.sinkDoc XML.def
let cursor = Cu.fromDocument doc
let Response metadata x = parse cursor
liftIO $ tellMetadataRef metadataRef metadata
case x of
Failure err -> liftIO $ C.resourceThrow err
Success v -> return v
| jgm/aws | Aws/Xml.hs | bsd-3-clause | 2,110 | 0 | 12 | 640 | 638 | 339 | 299 | 48 | 2 |
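A rough, self-contained illustration of the XML helpers above, outside the AWS response plumbing. It assumes xml-conduit's parseLBS_ for building the cursor and the Failure-in-IO instance from the failure package; the payload and element names are invented.

{-# LANGUAGE OverloadedStrings #-}
import qualified Data.ByteString.Lazy.Char8 as L8
import qualified Text.XML as XML
import qualified Text.XML.Cursor as Cu
import Aws.Xml

demo :: IO ()
demo = do
  let doc    = XML.parseLBS_ XML.def (L8.pack "<item><Name>bucket</Name><Count>42</Count></item>")
      cursor = Cu.fromDocument doc
  name  <- force "missing Name" (elContent "Name" cursor)
  count <- forceM "bad Count" (map textReadInt (elContent "Count" cursor)) :: IO Integer
  print (name, count)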
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE CPP #-}
module VC.Tree where
import Filesystem.Path (basename)
import Shelly
import Control.Monad.Extra
import Prelude hiding (FilePath)
#if __GLASGOW_HASKELL__ < 710
import Data.Traversable
#endif
check :: FilePath -> Sh Managed
check dir = do
names <- ls dir
if checkVCDir names
then return Managed
else do
-- TODO exclude symlinks from files
(subdirs, files) <- partitionDirs =<< ls dir
managed <- traverse check subdirs
return $ summary dir (files == []) managed
checkVCDir :: [FilePath] -> Bool
checkVCDir xs = let names = map basename xs in
any (`elem` names)
[ ".git", "_darcs", ".hg", ".svn", "CVS", ".fslckout"]
data Managed =
UnManaged FilePath
| Managed
| Empty
| SomeManaged [FilePath]
summary :: FilePath -> Bool -> [Managed] -> Managed
summary dir nofiles xs
| all isEmpty xs = if nofiles
then Empty
else UnManaged dir
| all isManaged xs = if nofiles then Managed
else UnManaged dir
| all isUnmanaged xs = UnManaged dir
| otherwise = SomeManaged $ if nofiles
then concatMap directories xs
else dir : concatMap directories xs
partitionDirs :: [FilePath] -> Sh ([FilePath], [FilePath])
partitionDirs paths = do
(dirs, nondirs) <- partitionM test_d paths
files <- filterM (fmap not . test_s) nondirs -- exclude symlinks
return (dirs, files)
isEmpty :: Managed -> Bool
isEmpty Empty = True
isEmpty _ = False
isManaged :: Managed-> Bool
isManaged Managed = True
isManaged Empty = True
isManaged _ = False
isUnmanaged :: Managed -> Bool
isUnmanaged (UnManaged _) = True
isUnmanaged Empty = True
isUnmanaged _ = False
directories :: Managed -> [FilePath]
directories (SomeManaged ds) = ds
directories (UnManaged d) = [d]
directories _ = []
| bergey/vcatt | src/VC/Tree.hs | bsd-3-clause | 1,941 | 0 | 14 | 518 | 603 | 316 | 287 | 57 | 4 |
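A hypothetical driver for the version-control check above; the directory is an example and the report formatting is a stub.

{-# LANGUAGE OverloadedStrings #-}
import Shelly (shelly, toTextIgnore)
import qualified Data.Text as T
import VC.Tree

main :: IO ()
main = do
  result <- shelly (check "/home/user/src")
  case result of
    Managed        -> putStrLn "everything is under version control"
    Empty          -> putStrLn "only empty directories found"
    UnManaged d    -> putStrLn ("unmanaged tree: " ++ T.unpack (toTextIgnore d))
    SomeManaged ds -> mapM_ (putStrLn . ("unmanaged: " ++) . T.unpack . toTextIgnore) ds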
module Muster.Internal.Charset
( Charset(..)
, none
, any
, elem
, notElem
, insert
, remove
, intersect
, oneOf
) where
import Prelude hiding (any, elem, notElem)
import qualified Data.List as L
import Data.Maybe
import Test.QuickCheck
data Charset = AnyOf String
| NoneOf String
deriving (Show)
instance Eq Charset where
(AnyOf xs) == (AnyOf ys) = xs == ys
(AnyOf xs) == (NoneOf ys) =
case AnyOf xs `intersect` NoneOf ys of
AnyOf xs' -> xs == xs'
NoneOf ys' -> ys == ys'
(NoneOf xs) == (AnyOf ys) = AnyOf ys == NoneOf xs
(NoneOf xs) == (NoneOf ys) = xs == ys
instance Arbitrary Charset where
arbitrary = oneof
[ AnyOf . L.sort . L.nub <$> arbitrary
, NoneOf . L.sort . L.nub <$> arbitrary
]
shrink (AnyOf xs) = map AnyOf $ shrink xs
shrink (NoneOf xs) = map NoneOf $ shrink xs
none :: Charset
none = AnyOf []
any :: Charset
any = NoneOf []
elem :: Char -> Charset -> Bool
elem c (AnyOf cs) = c `L.elem` cs
elem c (NoneOf cs) = c `L.notElem` cs
notElem :: Char -> Charset -> Bool
notElem c cs = not (c `elem` cs)
insertMissing :: Ord a => a -> [a] -> [a]
insertMissing x (y:ys)
| x > y = y : insertMissing x ys
| x < y = x : ys
| otherwise = y:ys
insertMissing x [] = [x]
insert :: Char -> Charset -> Charset
insert c (AnyOf cs) = AnyOf (insertMissing c cs)
insert c (NoneOf cs) = NoneOf (L.delete c cs)
remove :: Char -> Charset -> Charset
remove c (AnyOf cs) = AnyOf (L.delete c cs)
remove c (NoneOf cs) = NoneOf (insertMissing c cs)
intersect :: Charset -> Charset -> Charset
intersect (AnyOf xs) (AnyOf ys) = AnyOf $ xs `L.intersect` ys
intersect (AnyOf xs) (NoneOf ys) = AnyOf $ filter (`elem` NoneOf ys) xs
intersect (NoneOf xs) (AnyOf ys) = AnyOf ys `intersect` NoneOf xs
intersect (NoneOf xs) (NoneOf ys) = NoneOf . L.sort $ xs `L.union` ys
oneOf :: Charset -> Maybe Char
oneOf (AnyOf xs) = listToMaybe xs
oneOf (NoneOf xs) = listToMaybe $ filter (`elem` NoneOf xs) [minBound..maxBound]
| DasIch/haskell-muster | src/Muster/Internal/Charset.hs | bsd-3-clause | 2,025 | 0 | 11 | 496 | 954 | 494 | 460 | 60 | 1 |
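The Charset operations above are pure, so a few worked examples can be checked directly against the definitions.

-- Worked examples that follow from the definitions above (values checked by hand).
import Prelude hiding (any, elem, notElem)
import Muster.Internal.Charset

examples :: [Bool]
examples =
  [ 'a' `elem` AnyOf "abc"                                -- True
  , 'a' `elem` NoneOf "xyz"                               -- True
  , (AnyOf "abc" `intersect` NoneOf "b") == AnyOf "ac"    -- True
  , (NoneOf "ab" `intersect` NoneOf "bc") == NoneOf "abc" -- True
  , insert 'd' (AnyOf "abc") == AnyOf "abcd"              -- True
  , oneOf (AnyOf "") == Nothing                           -- True: 'none' has no member
  ]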
module Container.Tree.PersistentCont where
import Annotation.Persistent (P)
import Control.Applicative
import Data.Binary
import Data.Maybe
import Generics.Cont
import Generics.Regular.Base ()
import Generics.Fixpoint
import Heap.Heap hiding (size)
import Prelude hiding (lookup, null)
import qualified Container.Tree.Abstract as F
import qualified Container.Tree.Cont as C
type Map k v = P (F.Tree k v) (FixA P (F.Tree k v))
empty :: (Binary k, Binary v) => HeapW (Map k v)
empty = mkAnnI C.empty
singleton :: (Binary k, Binary v) => k -> v -> HeapW (Map k v)
singleton k v = mkAnnI (C.singleton k v)
triplet :: (Binary k, Binary v) => k -> v -> k -> v -> k -> v -> HeapW (Map k v)
triplet a0 b0 a1 b1 a2 b2 = mkAnnI (C.triplet a0 b0 a1 b1 a2 b2)
lookup :: (Ord k, Binary k, Binary v) => k -> Map k v -> HeapR (Maybe v)
lookup k = mkAnnO (C.lookup k)
(!) :: (Ord k, Binary k, Binary a) => Map k a -> k -> HeapR a
(!) t k = fromMaybe (error "element not in the map") <$> lookup k t
member :: (Ord k, Binary k, Binary a) => k -> Map k a -> HeapR Bool
member k t = isJust <$> lookup k t
notMember :: (Ord k, Binary k, Binary a) => k -> Map k a -> HeapR Bool
notMember k t = not . isJust <$> lookup k t
size :: (Num c, Binary k, Binary v) => Map k v -> HeapR c
size = mkAnnO C.size
null :: (Binary k, Binary v) => Map k v -> HeapR Bool
null t = (== (0 :: Integer)) <$> size t
depth :: (Ord c, Num c, Binary k, Binary v) => Map k v -> HeapR c
depth = mkAnnO C.depth
alter :: (Ord k, Binary k, Binary v) => (Maybe v -> Maybe v) -> k -> Map k v -> HeapW (Map k v)
alter f k = mkAnnIO (C.alter f k)
insert :: (Ord k, Binary k, Binary v) => k -> v -> Map k v -> HeapW (Map k v)
insert k v = mkAnnIO (C.alter (const (Just v)) k)
delete :: (Ord k, Binary k, Binary v) => k -> Map k v -> HeapW (Map k v)
delete k = mkAnnIO (C.alter (const Nothing) k)
adjust :: (Ord k, Binary k, Binary v) => (v -> v) -> k -> Map k v -> HeapW (Map k v)
adjust f k = mkAnnIO (C.alter (fmap f) k)
| sebastiaanvisser/islay | src/Container/Tree/PersistentCont.hs | bsd-3-clause | 1,988 | 0 | 14 | 446 | 1,102 | 567 | 535 | 41 | 1 |
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE ExistentialQuantification #-}
{-# LANGUAGE RankNTypes #-}
module LazyCrossCheck.Utils where
import Data.Data
import LazyCrossCheck.Primitives
getConstructors :: forall a . Data a => Proxy a -> [Constr]
getConstructors _ = dataTypeConstrs $ dataTypeOf (undefined :: a)
childProxies :: forall a . Data a => Proxy a -> Constr -> [DataExists]
childProxies _ = gmapQ wrapUp . (fromConstr :: Constr -> a)
wrapUp :: forall d . Data d => d -> DataExists
wrapUp _ = DataExists (undefined :: Proxy d)
data DataExists = forall d . Data d => DataExists (Proxy d)
| TristanAllwood/lazyCrossCheck | LazyCrossCheck/Utils.hs | bsd-3-clause | 606 | 0 | 9 | 101 | 191 | 102 | 89 | 13 | 1 |
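A tiny illustration of getConstructors on a standard Data instance. It assumes Proxy is re-exported by LazyCrossCheck.Primitives (as the import in the module suggests); since the proxy argument is never evaluated, undefined is enough to pick the type.

-- Illustration only: list the constructors of a standard Data instance.
import Data.Data (Constr)
import LazyCrossCheck.Primitives (Proxy)
import LazyCrossCheck.Utils (getConstructors)

boolConstrs :: [Constr]
boolConstrs = getConstructors (undefined :: Proxy Bool)
-- map showConstr boolConstrs == ["False","True"]   (showConstr is from Data.Data)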
import CCO.ArithBool (eval)
import CCO.Component (component, printer, ioWrap)
import CCO.Tree (parser, Tree (fromTree, toTree))
import Control.Arrow (Arrow (arr), (>>>))
main = ioWrap $
parser >>> component toTree >>> component eval >>> arr fromTree >>> printer | UU-ComputerScience/uu-cco | uu-cco-examples/src/EvalArithBool.hs | bsd-3-clause | 273 | 0 | 9 | 46 | 99 | 58 | 41 | 6 | 1 |
-- $Id$
module Scorer.Util
( module Scorer.Util
, module Control.Types
)
where
import Control.Types
import Time
zeit :: IO String
zeit = do
clock <- getClockTime
cal <- toCalendarTime clock
return $ calendarTimeToString cal
-------------------------------------------------------------------------------
fill :: Int -> String -> String
fill n s = take n $ s ++ take n (repeat ' ')
sep :: Int -> String -> String
sep n s = fill n s ++ " | "
stretch :: Int -> String -> String
stretch = stretchWith ' '
stretchWith :: Char -> Int -> String -> String
stretchWith c n s =
let missing = max 0 ( n - length s )
in replicate missing c ++ s
-------------------------------------------------------------------------------
heading :: String -> String
heading s = unlines [ s, take (length s) (repeat '-') ]
-------------------------------------------------------------------------------
pairAdd :: (Num a) => (a,a) -> (a,a) -> (a,a)
pairAdd (a,b) (c,d) = (a+c,b+d)
| Erdwolf/autotool-bonn | src/Scorer/Util.hs | gpl-2.0 | 1,000 | 0 | 12 | 196 | 353 | 187 | 166 | 24 | 1 |
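Worked examples for the layout helpers above, with the expected results shown in the comments.

import Scorer.Util

padExamples :: [String]
padExamples =
  [ fill 6 "ok"            -- "ok    "   (pad on the right to 6 characters)
  , sep 4 "id"             -- "id   | "  (fill to 4, then the column separator)
  , stretch 5 "42"         -- "   42"    (pad on the left with spaces)
  , stretchWith '0' 5 "42" -- "00042"    (pad on the left with zeros)
  , heading "Score"        -- "Score\n-----\n"
  ]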
{-# LANGUAGE RecordWildCards, RankNTypes #-}
module Buildsome.Slave
( Slave, newWithUnmask
, target
, str
, wait, waitCatch
, cancel
) where
import qualified Buildsome.Color as Color
import Control.Concurrent.Async (Async)
import qualified Control.Concurrent.Async as Async
import qualified Control.Exception as E
import Lib.ColorText (ColorText)
import Lib.FilePath (FilePath)
import Lib.Makefile (Target)
import qualified Lib.Printer as Printer
import Lib.Show (show)
import Lib.TimeInstances ()
import Prelude.Compat hiding (show, FilePath)
data Slave a = Slave
{ slaveTarget :: Target
, slavePrinterId :: Printer.Id
, slaveOutputPaths :: [FilePath]
, slaveExecution :: Async a
}
target :: Slave a -> Target
target = slaveTarget
-- The annotation here is to prevent hlint from collapsing our lambda
-- below which is needed due to rank-2 typing.
--
-- The :: String in the annotation is needed because of GHC weirdness;
-- see
-- http://comments.gmane.org/gmane.comp.lang.haskell.glasgow.bugs/74427
{-# ANN newWithUnmask ("HLint: ignore Avoid lambda" :: String) #-}
newWithUnmask :: Target -> Printer.Id -> [FilePath] -> ((forall b. IO b -> IO b) -> IO a) -> IO (Slave a)
newWithUnmask tgt printerId outputPaths action =
E.uninterruptibleMask $ \unmaskUninterruptible ->
Slave tgt printerId outputPaths
<$> Async.asyncWithUnmask
-- NOTE: Using unmaskUninterruptible is not allowed in the
-- child thread! However, it is impossible to put
-- uninterruptibleMask just on the parent side of the thread
-- creation while still allowing child to inherit a mask state
-- EXCEPT using this undefined behavior. And without
-- uninterruptibleMask wrapping of this, double async
-- exception stops the exception handler, leaking threads.
(\unmask -> unmaskUninterruptible (action unmask))
str :: Slave a -> ColorText
str slave =
show (slavePrinterId slave) <> ": " <> cTarget (show (slaveOutputPaths slave))
where
Color.Scheme{..} = Color.scheme
wait :: Slave a -> IO a
wait = Async.wait . slaveExecution
waitCatch :: Slave a -> IO (Either E.SomeException a)
waitCatch = Async.waitCatch . slaveExecution
cancel :: Slave a -> IO ()
cancel = Async.cancel . slaveExecution
| buildsome/buildsome | src/Buildsome/Slave.hs | gpl-2.0 | 2,382 | 0 | 14 | 528 | 490 | 279 | 211 | 42 | 1 |
{-# LANGUAGE FlexibleInstances, TypeSynonymInstances #-}
module SSH.Session where
import Control.Concurrent.Chan
import Control.Monad.IO.Class
import Control.Monad.Trans.State
import Data.Binary (decode, encode)
import Data.Word
import System.IO
import qualified Codec.Crypto.SimpleAES as A
import qualified Data.ByteString as BS
import qualified Data.ByteString.Lazy as LBS
import qualified Data.Map as M
import SSH.Channel
import SSH.Crypto
import SSH.Debug
import SSH.NetReader
import SSH.Packet
import SSH.Sender
import SSH.Util
type Session = StateT SessionState IO
data SessionState
= Initial
{ ssConfig :: SessionConfig
, ssChannelConfig :: ChannelConfig
, ssThem :: Handle
, ssSend :: SenderMessage -> IO ()
, ssPayload :: LBS.ByteString
, ssTheirVersion :: String
, ssOurKEXInit :: LBS.ByteString
, ssInSeq :: Word32
}
| GotKEXInit
{ ssConfig :: SessionConfig
, ssChannelConfig :: ChannelConfig
, ssThem :: Handle
, ssSend :: SenderMessage -> IO ()
, ssPayload :: LBS.ByteString
, ssTheirVersion :: String
, ssOurKEXInit :: LBS.ByteString
, ssInSeq :: Word32
, ssTheirKEXInit :: LBS.ByteString
, ssOutCipher :: Cipher
, ssInCipher :: Cipher
, ssOutHMACPrep :: LBS.ByteString -> HMAC
, ssInHMACPrep :: LBS.ByteString -> HMAC
}
| Final
{ ssConfig :: SessionConfig
, ssChannelConfig :: ChannelConfig
, ssChannels :: M.Map Word32 (Chan ChannelMessage)
, ssID :: LBS.ByteString
, ssThem :: Handle
, ssSend :: SenderMessage -> IO ()
, ssPayload :: LBS.ByteString
, ssGotNEWKEYS :: Bool
, ssInSeq :: Word32
, ssInCipher :: Cipher
, ssInHMAC :: HMAC
, ssInKey :: BS.ByteString
, ssInVector :: BS.ByteString
, ssUser :: Maybe String
}
data SessionConfig =
SessionConfig
{ scAuthMethods :: [String]
, scAuthorize :: Authorize -> Session Bool
, scKeyPair :: KeyPair
}
data Authorize
= Password String String
| PublicKey String PublicKey
instance Sender Session where
send m = gets ssSend >>= io . ($ m)
defaultSessionConfig :: SessionConfig
defaultSessionConfig =
SessionConfig
{ scAuthMethods = ["publickey"]
, scAuthorize = const (return True)
, scKeyPair = RSAKeyPair (RSAPublicKey 0 0) 0
{-\(Password u p) ->-}
{-return $ u == "test" && p == "test"-}
}
net :: NetReader a -> Session a
net r = do
pl <- gets ssPayload
let (res, new) = runState r pl
modify (\s -> s { ssPayload = new })
return res
newChannelID :: Session Word32
newChannelID = gets ssChannels >>= return . findNext . M.keys
where
findNext :: [Word32] -> Word32
findNext ks = head . filter (not . (`elem` ks)) $ [0..]
getChannel :: Word32 -> Session (Chan ChannelMessage)
getChannel i = do
mc <- gets (M.lookup i . ssChannels)
case mc of
Just c -> return c
Nothing -> error $ "unknown channel: " ++ show i
decrypt :: LBS.ByteString -> Session LBS.ByteString
decrypt m
| m == LBS.empty = return m
| otherwise = do
s <- get
case s of
Final
{ ssInCipher = Cipher AES CBC bs@16 _
, ssInKey = key
, ssInVector = vector
} -> do
let blocks = toBlocks bs m
decrypted =
A.crypt A.CBC key vector A.Decrypt m
modify (\ss -> ss { ssInVector = strictLBS $ last blocks })
return decrypted
_ -> error "no decrypt for current state"
getPacket :: Session ()
getPacket = do
s <- get
h <- gets ssThem
case s of
Final
{ ssGotNEWKEYS = True
, ssInCipher = Cipher _ _ bs _
, ssInHMAC = HMAC ms f
, ssInSeq = is
} -> do
let firstChunk = max 8 bs
firstEnc <- liftIO $ LBS.hGet h firstChunk
first <- decrypt firstEnc
let packetLen = decode (LBS.take 4 first) :: Word32
paddingLen = decode (LBS.drop 4 first) :: Word8
dump ("got packet", is, first, packetLen, paddingLen)
restEnc <- liftIO $ LBS.hGet h (fromIntegral packetLen - firstChunk + 4)
dump ("got rest", restEnc)
rest <- decrypt restEnc
dump ("decrypted", rest)
let decrypted = first `LBS.append` rest
payload = extract packetLen paddingLen decrypted
dump ("getting hmac", ms)
mac <- liftIO $ LBS.hGet h ms
dump ("got mac", mac, decrypted, is)
dump ("hmac'd", f decrypted)
dump ("got mac, valid?", verify mac is decrypted f)
modify (\ss -> ss { ssPayload = payload })
_ -> do
first <- liftIO $ LBS.hGet h 5
let packetLen = decode (LBS.take 4 first) :: Word32
paddingLen = decode (LBS.drop 4 first) :: Word8
rest <- liftIO $ LBS.hGet h (fromIntegral packetLen - 5 + 4)
let payload = LBS.take (fromIntegral packetLen - fromIntegral paddingLen - 1) rest
modify (\ss -> ss { ssPayload = payload })
where
extract pkl pdl d = LBS.take (fromIntegral pkl - fromIntegral pdl - 1) (LBS.drop 5 d)
verify m is d f = m == f (encode (fromIntegral is :: Word32) `LBS.append` d)
| teh/teh-ssh | src/SSH/Session.hs | bsd-3-clause | 5,603 | 0 | 19 | 1,886 | 1,646 | 879 | 767 | 146 | 2 |
-----------------------------------------------------------------------------
-- |
-- Module : Data.Yaml.YamlLight
-- Copyright : Michael Ilseman (c) 2010
-- License : BSD-style (see the file LICENSE)
--
-- Maintainer : michael <dot> ilseman <at> gmail <dot> com
-- Stability : provisional
-- Portability : portable
--
-- A light-weight wrapper with utility functions around HsSyck
{-# LANGUAGE OverloadedStrings #-}
module Data.Yaml.YamlLight
( -- * YamlLight data type
YamlLight(..)
-- * YamlLight versions of Syck functions
, parseYaml, parseYamlFile, parseYamlBytes
-- * YamlLight utility functions
, fromYamlNode, lookupYL, lookupYLWith
, combineSequencedMaps, combineMappedSequences, getTerminalsKeys
-- ** Extractors
, unSeq, unMap, unStr
, (*!), (|!)
) where
import Control.Applicative
-- import Data.Data
import Data.List
import Data.Maybe
import Control.Arrow
import qualified Data.Yaml.Syck as Syck
import qualified Data.Map as Map
import qualified Data.ByteString as ByteString
{- | A light-weight, single ADT representation of a yaml document in contrast with what is provided by HsSyck.
Note that the YMap is an actual Map from
    Data.Map, so behavior with respect to identical keys and ordering of entries is as Data.Map
dictates. This behavior is also in compliance with the Yaml spec. If you currently rely on HsSyck's
preservation of ordering, you can also consider representing
such maps as sequences of single entry maps. See the examples of \"Ordered Mappings\" in the Yaml
spec: <http://www.yaml.org/spec/1.2/spec.html>.
-}
data YamlLight = YMap (Map.Map YamlLight YamlLight)
| YSeq [YamlLight]
| YStr ByteString.ByteString
| YNil
deriving (Show, Ord, Eq)
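-- A rough picture of the mapping (illustrative only, not part of the original
-- module's comments): the document "a: [1, 2]" parses to approximately
--   YMap (Map.fromList [(YStr "a", YSeq [YStr "1", YStr "2"])])
-- with every scalar arriving as a YStr of its textual form.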
convert :: (a -> Syck.YamlNode) -> (a -> YamlLight)
convert f = fromYamlNode . f
convertIO :: (a -> IO Syck.YamlNode) -> (a -> IO YamlLight)
convertIO f yn = fromYamlNode <$> f yn
-- | Parse a regular Haskell string
parseYaml :: String -> IO YamlLight
parseYaml = convertIO Syck.parseYaml
-- | Given a file name, parse contents of file
parseYamlFile :: String -> IO YamlLight
parseYamlFile = convertIO Syck.parseYamlFile
-- | Parse a ByteString buffer (this is faster)
parseYamlBytes :: ByteString.ByteString -> IO YamlLight
parseYamlBytes = convertIO Syck.parseYamlBytes
-- | Convert a Syck YamlNode to a YamlLight
fromYamlNode :: Syck.YamlNode -> YamlLight
fromYamlNode = yamlElemToLight . Syck.n_elem
yamlElemToLight :: Syck.YamlElem -> YamlLight
yamlElemToLight (Syck.EMap ms) = YMap . Map.fromList . map (\(a,b) -> (fromYamlNode a, fromYamlNode b)) $ ms
yamlElemToLight (Syck.ESeq s) = YSeq $ map fromYamlNode s
yamlElemToLight (Syck.EStr buf) = YStr buf
yamlElemToLight (Syck.ENil) = YNil
-- | Lookup the key's corresponding value in a Map. Returns Nothing if the YamlLight is not a map, or if
-- the key is not found
lookupYL :: YamlLight -> YamlLight -> Maybe YamlLight
lookupYL key (YMap m) = Map.lookup key m
lookupYL _ _ = Nothing
-- | General form of lookup. Will return the first element that satisfies predicate p, otherwise Nothing
lookupYLWith :: (YamlLight -> Bool) -> YamlLight -> Maybe YamlLight
lookupYLWith p (YMap m) = snd <$> (find (p . fst) $ Map.toList m)
lookupYLWith _ _ = Nothing
{- | Combine a sequence of YMaps into a list of (key,value) pairs. The ordering of the result preserves the ordering
of the sequence, but the ordering of the individual maps is as Data.Map handles it.
Example:
@
- key1: val1
key2: val2
- key3: val3
@
Would become:
@
[(key1,val1),(key2,val2),(key3,val3)]
@
where key1 and key2 might be arranged differently as Data.Map would
arrange them. This does not enforce uniqueness of keys across different maps.
Any items of the sequence that are not maps will not be present in the output list.
Returns Nothing if not called on a Sequence
-}
combineSequencedMaps :: YamlLight -> Maybe [(YamlLight, YamlLight)]
combineSequencedMaps (YSeq ys) = Just . concatMap Map.assocs . catMaybes $ map unMap ys
combineSequencedMaps _ = Nothing
{- | Take a YamlLight that is a YMap of keys to YSeqs, and return a list of (key,elem) pairs, where elem is an element
of the YSeq under key.
Example:
@
key1: [val1, val2, val3]
key2: [val4, val5]
@
Would become:
@
[(key1,val1),(key1,val2),(key1,val3),(key2,val4),(key2,val5)]
@
where the precise ordering of the key1 and key2 pairs depends on the ordering of Data.Map.
Any values of keys that are not sequences will not appear in the output list.
Returns Nothing if not called on a YMap.
-}
combineMappedSequences :: YamlLight -> Maybe [(YamlLight, YamlLight)]
combineMappedSequences (YMap m) = Just . concatMap flattenTags . removeSndMaybes $ mapThenList unSeq m
combineMappedSequences _ = Nothing
mapThenList :: (b -> Maybe [c]) -> Map.Map a b -> [(a, Maybe [c])]
mapThenList f m = Map.toList $ Map.map f m
removeSndMaybes :: [(a,Maybe [b])] -> [(a,[b])]
removeSndMaybes = map (second fromJust) . filter (isJust . snd)
flattenTags :: (a,[b]) -> [(a,b)]
flattenTags (a,bs) = map ((,) a) bs
{- | Create a list of all the terminal YStrs in a YamlLight tree, and couple them with a list of
all the keys above them.
Example:
@
- key1:
key1_1:
- \"str1\"
- \"str2\"
key1_2:
- \"str2\"
- \"str3\"
- key2:
\"str4\"
- \"str5\"
@
Would become:
@
[(\"str1\",[key1_1, key1]), (\"str2\", [key1_1, key1]), (\"str2\", [key1_2, key1]), (\"str3\",[key1_2, key1]), (\"str4\",[key2]), (\"str5\",[])
@
-}
getTerminalsKeys :: YamlLight -> [(ByteString.ByteString,[YamlLight])]
getTerminalsKeys = getTerminalsKeys' []
getTerminalsKeys' :: [YamlLight] -> YamlLight -> [(ByteString.ByteString,[YamlLight])]
getTerminalsKeys' hist (YStr s) = [(s,hist)]
getTerminalsKeys' hist (YSeq s) = concatMap (getTerminalsKeys' hist) s
getTerminalsKeys' hist (YMap m) = concat . Map.elems $ Map.mapWithKey (\k -> getTerminalsKeys' (k : hist)) m
getTerminalsKeys' _ _ = []
-- | Get the contents of a sequence
unSeq :: YamlLight -> Maybe [YamlLight]
unSeq (YSeq s) = Just s
unSeq _ = Nothing
-- | Get the contents of a map
unMap :: YamlLight -> Maybe (Map.Map YamlLight YamlLight)
unMap (YMap m) = Just m
unMap _ = Nothing
-- | Get the contents of a string
unStr :: YamlLight -> Maybe ByteString.ByteString
unStr (YStr s) = Just s
unStr _ = Nothing
-- | '(|!)' and '(*!)' are combinators that simplify accessing values within nested
-- maps. Consider the following YAML document.
--
-- @
-- foo:
-- a:
-- - 1
-- - 2
-- - 3
-- b: barry
-- bar:
-- c:
-- d: dale
-- e: estelle
-- @
--
--
-- @y *! \"foo\" |! \"a\" == Just (YSeq [YStr \"1\",YStr \"2\",YStr \"3\"])@
--
-- @y *! \"food\" |! \"a\" == Nothing@
--
-- @y *! \"bar\" == Just (YMap (fromList [(YStr \"c\",YMap
-- (fromList [(YStr \"d\",YStr \"dale\"),
-- (YStr \"e\",YStr \"estelle\")]))]))@
--
-- @y *! \"bar\" |! \"c\" |! \"d\" == Just (YStr \"dale\")@
--
-- Use an application of '*!' followed by zero or more applications of '|!' to get the
-- value you want.
--
-- For @y *! key@ will be returned if:
--
-- * @y@ not contructed with 'YMap'.
--
-- * @YStr key@ is not a key in the map.
--
-- Similarly for '|!'
--
(*!) :: YamlLight -> ByteString.ByteString -> Maybe YamlLight
(*!) = (|!) . Just
(|!) :: Maybe YamlLight -> ByteString.ByteString -> Maybe YamlLight
my |! s = do -- maybe monad
y <- my
lookupYL (YStr s) y
infixl 5 |!, *!
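-- An illustrative usage sketch; the name 'exampleLookup' and the inline YAML
-- document are not part of the original module and are assumed here only to
-- demonstrate the combinators. A missing key at any step yields Nothing.
exampleLookup :: IO (Maybe YamlLight)
exampleLookup = do
  y <- parseYaml "foo: {a: [1, 2, 3], b: barry}"
  return (y *! "foo" |! "a")  -- expected: Just (YSeq [YStr "1", YStr "2", YStr "3"])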
-- tests
performTest :: Show a => (YamlLight -> a) -> String -> IO ()
performTest f s = parseYaml s >>= print . f
cSeqMap1 = "[{key1: val1, key2: val2}, {key3: val3}]"
cMapSeq1 = "{key1: [val1, val2, val3], key2: [val4, val5]}"
gtKeys1 = " [{key1: \
\ { key1_1: [str1, str2] \
\ , key1_2: [str2, str3] }} \
\ , {key2: [str4]} \
\ , str5 \
\ ] "
gtKeys2 = "[a, b, c]"
gtKeys3 = "a: {b: [c, {d: [e, f]}]}"
gtKeys4 = "[{a: {b: [c1, c2], d: [e1, e2]}, f: [g]}, h]"
nestedMap = "foo:\n\
\ a:\n\
\ - 1\n\
\ - 2\n\
\ - 3\n\
\ b: barry\n\
\bar:\n\
\ c:\n\
\ d: dale\n\
\ e: estelle"
testCombineSequencedMaps1 = performTest combineSequencedMaps cSeqMap1
testCombineMappedSequences1 = performTest combineMappedSequences cMapSeq1
testGetTerminalsKeys1 = performTest getTerminalsKeys gtKeys1
testGetTerminalsKeys2 = performTest getTerminalsKeys gtKeys2
testGetTerminalsKeys3 = performTest getTerminalsKeys gtKeys3
testGetTerminalsKeys4 = performTest getTerminalsKeys gtKeys4
testNestedMap1 = performTest (\y -> y *! "foo" |! "a") nestedMap
testNestedMap2 = performTest (\y -> y *! "food" |! "a") nestedMap
testNestedMap3 = performTest (\y -> y *! "bar") nestedMap
testNestedMap4 = performTest (\y -> y *! "bar" |! "c" |! "d") nestedMap | ilseman2/yaml-light | Data/Yaml/YamlLight.hs | bsd-3-clause | 9,759 | 0 | 11 | 2,669 | 1,626 | 903 | 723 | 98 | 1 |
{-# LANGUAGE CPP #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE ScopedTypeVariables #-}
module Snap.Internal.Http.Server.Socket.Tests (tests) where
------------------------------------------------------------------------------
import Control.Applicative ((<$>))
import qualified Network.Socket as N
------------------------------------------------------------------------------
import Control.Concurrent (forkIO, killThread, newEmptyMVar, putMVar, readMVar, takeMVar)
import qualified Control.Exception as E
import Data.IORef (newIORef, readIORef, writeIORef)
import Test.Framework (Test)
import Test.Framework.Providers.HUnit (testCase)
import Test.HUnit (assertEqual)
------------------------------------------------------------------------------
import qualified Snap.Internal.Http.Server.Socket as Sock
import Snap.Test.Common (eatException, expectException, withSock)
------------------------------------------------------------------------------
#ifdef HAS_UNIX_SOCKETS
import System.Directory (getTemporaryDirectory)
import System.FilePath ((</>))
import qualified System.Posix as Posix
# if !MIN_VERSION_unix(2,6,0)
import Control.Monad.State (replicateM)
import Control.Monad.Trans.State.Strict as State
import qualified Data.Vector.Unboxed as V
import System.Directory (createDirectoryIfMissing)
import System.Random (StdGen, newStdGen, randomR)
# endif
#else
import Snap.Internal.Http.Server.Address (AddressNotSupportedException)
#endif
------------------------------------------------------------------------------
#ifdef HAS_UNIX_SOCKETS
mkdtemp :: String -> IO FilePath
# if MIN_VERSION_unix(2,6,0)
mkdtemp = Posix.mkdtemp
# else
tMPCHARS :: V.Vector Char
tMPCHARS = V.fromList $! ['a'..'z'] ++ ['0'..'9']
mkdtemp template = do
suffix <- newStdGen >>= return . State.evalState (chooseN 8 tMPCHARS)
let dir = template ++ suffix
createDirectoryIfMissing False dir
return dir
where
choose :: V.Vector Char -> State.State StdGen Char
choose v = do let sz = V.length v
idx <- State.state $ randomR (0, sz - 1)
return $! (V.!) v idx
chooseN :: Int -> V.Vector Char -> State.State StdGen String
chooseN n v = replicateM n $ choose v
#endif
#endif
------------------------------------------------------------------------------
tests :: [Test]
tests = [
testUnixSocketBind
#if !MIN_VERSION_network(3,0,0)
, testAcceptFailure
, testSockClosedOnListenException
#endif
]
------------------------------------------------------------------------------
-- TODO: fix these tests which rely on deprecated socket apis
#if !MIN_VERSION_network(3,0,0)
testSockClosedOnListenException :: Test
testSockClosedOnListenException = testCase "socket/closedOnListenException" $ do
ref <- newIORef Nothing
expectException $ Sock.bindSocketImpl (sso ref) bs ls "127.0.0.1" 4444
(Just sock) <- readIORef ref
let (N.MkSocket _ _ _ _ mvar) = sock
readMVar mvar >>= assertEqual "socket closed" N.Closed
where
sso ref sock _ _ = do
let (N.MkSocket _ _ _ _ mvar) = sock
readMVar mvar >>= assertEqual "socket not connected" N.NotConnected
writeIORef ref (Just sock) >> fail "set socket option"
bs _ _ = fail "bindsocket"
ls _ _ = fail "listen"
------------------------------------------------------------------------------
testAcceptFailure :: Test
testAcceptFailure = testCase "socket/acceptAndInitialize" $ do
sockmvar <- newEmptyMVar
donemvar <- newEmptyMVar
E.bracket (Sock.bindSocket "127.0.0.1" $ fromIntegral N.aNY_PORT)
(N.close)
(\s -> do
p <- fromIntegral <$> N.socketPort s
forkIO $ server s sockmvar donemvar
E.bracket (forkIO $ client p)
(killThread)
(\_ -> do
csock <- takeMVar sockmvar
takeMVar donemvar
N.isConnected csock >>=
assertEqual "closed" False
)
)
where
server sock sockmvar donemvar = serve `E.finally` putMVar donemvar ()
where
serve = eatException $ E.mask $ \restore ->
Sock.acceptAndInitialize sock restore $ \(csock, _) -> do
putMVar sockmvar csock
fail "error"
client port = withSock port (const $ return ())
#endif
testUnixSocketBind :: Test
#ifdef HAS_UNIX_SOCKETS
testUnixSocketBind = testCase "socket/unixSocketBind" $
withSocketPath $ \path -> do
#if !MIN_VERSION_network(3,0,0)
E.bracket (Sock.bindUnixSocket Nothing path) N.close $ \sock -> do
N.isListening sock >>= assertEqual "listening" True
#endif
expectException $ E.bracket (Sock.bindUnixSocket Nothing "a/relative/path")
N.close doNothing
expectException $ E.bracket (Sock.bindUnixSocket Nothing "/relative/../path")
N.close doNothing
expectException $ E.bracket (Sock.bindUnixSocket Nothing "/hopefully/not/existing/path")
N.close doNothing
#ifdef LINUX
-- Most (all?) BSD systems ignore access mode on unix sockets.
-- Should we still check it?
-- This is pretty much for 100% coverage
expectException $ E.bracket (Sock.bindUnixSocket Nothing "/")
N.close doNothing
let mode = 0o766
E.bracket (Sock.bindUnixSocket (Just mode) path) N.close $ \_ -> do
-- Should check sockFd instead of path?
sockMode <- fmap Posix.fileMode $ Posix.getFileStatus path
assertEqual "access mode" (fromIntegral mode) $
Posix.intersectFileModes Posix.accessModes sockMode
#endif
where
doNothing _ = return ()
withSocketPath act = do
tmpRoot <- getTemporaryDirectory
tmpDir <- mkdtemp $ tmpRoot </> "snap-server-test-"
let path = tmpDir </> "unixSocketBind.sock"
E.finally (act path) $ do
eatException $ Posix.removeLink path
eatException $ Posix.removeDirectory tmpDir
#else
testUnixSocketBind = testCase "socket/unixSocketBind" $ do
caught <- E.catch (Sock.bindUnixSocket Nothing "/tmp/snap-sock.sock" >> return False)
$ \(e :: AddressNotSupportedException) -> length (show e) `seq` return True
assertEqual "not supported" True caught
#endif
| sopvop/snap-server | test/Snap/Internal/Http/Server/Socket/Tests.hs | bsd-3-clause | 6,866 | 0 | 20 | 1,901 | 1,417 | 736 | 681 | 60 | 1 |
import System.IO
import TPM
doExport :: String -> TPM_PUBKEY -> IO ()
doExport fileName pubKey = do
handle <- openFile fileName WriteMode
hPutStrLn handle $ show pubKey
hClose handle | armoredsoftware/protocol | tpm/mainline/attestation/exportEK.hs | bsd-3-clause | 189 | 0 | 8 | 35 | 68 | 31 | 37 | 7 | 1 |
module Compiler.Parser(parse) where
import Compiler.Lp
import Compiler.Util
parse :: String -> Program
parse = map parseRule . chunks . filter (not . dull) . lines
where
dull x = all isSpace x || "#" `isPrefixOf` x
chunks = rep (\(x:xs) -> first (x:) $ break (not . isSpace . head) xs)
parseRule :: [String] -> Rule
parseRule [x] = Rule name args $ NoChoice $ parseSeq body
where (name:args,body) = break' "=" $ lexemes x
parseRule (x:ys) = Rule name args $ Choice $ map (parseAlt . lexemes) ys
where (name:args) = lexemes x
parseAlt :: [String] -> (Bind Pat,Seq)
parseAlt (x:"=":y) = (parseBind parsePat x, parseSeq y)
parseSeq :: [String] -> Seq
parseSeq xs = Seq (map (parseBind parseExp) a) (if null b then "res" else uncurly $ head b)
where (a,b) = break ("{" `isPrefixOf`) xs
parseBind :: (String -> a) -> String -> Bind a
parseBind f x | "@" `isPrefixOf` b = Bind (Just a) $ f $ unround $ tail b
| otherwise = Bind Nothing $ f $ unround x
where (a,b) = span isAlpha x
parsePat :: String -> Pat
parsePat "_" = PWildcard
parsePat x@('\"':_) = PLit $ read x
parsePat x = PPrim x
parseExp :: String -> Exp
parseExp x@('\"':_) = Lit $ read x
parseExp x = Prim name $ map parseExp args
where (name:args) = words x
---------------------------------------------------------------------
-- UTILITIES
break' :: (Show a, Eq a) => a -> [a] -> ([a],[a])
break' x xs | null b = error $ "Parse error, expected " ++ show a ++ " in " ++ unwords (map show xs)
| otherwise = (a, tail b)
where (a,b) = break (== x) xs
lexemes :: String -> [String]
lexemes = f . words
where
f (x:xs) | isJust v = unwords (a++[b]) : f bs
where v = getBracket x
(a,b:bs) = break (fromJust v `elem`) (x:xs)
f (x:xs) = x : f xs
f [] = []
getBracket ('(':xs) = Just ')'
getBracket ('{':xs) = Just '}'
getBracket (_:xs) = getBracket xs
getBracket [] = Nothing
unround ('(':xs) | ")" `isSuffixOf` xs = init xs
unround x = x
uncurly ('{':xs) | "}" `isSuffixOf` xs = init xs
uncurly x = x
| silkapp/tagsoup | dead/parser/Compiler/Parser.hs | bsd-3-clause | 2,105 | 0 | 14 | 527 | 1,058 | 546 | 512 | 48 | 3 |
{-
(c) The GRASP/AQUA Project, Glasgow University, 1992-1998
\section[RnNames]{Extracting imported and top-level names in scope}
-}
{-# LANGUAGE CPP, NondecreasingIndentation #-}
module RnNames (
rnImports, getLocalNonValBinders,
rnExports, extendGlobalRdrEnvRn,
gresFromAvails,
calculateAvails,
reportUnusedNames,
checkConName
) where
#include "HsVersions.h"
import DynFlags
import HsSyn
import TcEnv ( isBrackStage )
import RnEnv
import RnHsDoc ( rnHsDoc )
import LoadIface ( loadSrcInterface )
import TcRnMonad
import PrelNames
import Module
import Name
import NameEnv
import NameSet
import Avail
import HscTypes
import RdrName
import Outputable
import Maybes
import SrcLoc
import BasicTypes ( TopLevelFlag(..) )
import ErrUtils
import Util
import FastString
import ListSetOps
import Control.Monad
import Data.Map ( Map )
import qualified Data.Map as Map
import Data.List ( partition, (\\), find )
import qualified Data.Set as Set
import System.FilePath ((</>))
import System.IO
{-
************************************************************************
* *
\subsection{rnImports}
* *
************************************************************************
Note [Tracking Trust Transitively]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
When we import a package, as well as checking that the direct imports are safe
according to the rules outlined in the Note [HscMain . Safe Haskell Trust Check],
we must also check that these rules hold transitively for all dependent modules
and packages. Doing this without caching any trust information would be very
slow as we would need to touch all packages and interface files a module depends
on. To avoid this we make use of the property that if a module's Safe Haskell
mode changes, this triggers a recompilation from that module in the dependency
graph. So we can just worry mostly about direct imports.
There is one trust property that can change for a package though without
recompilation being triggered: package trust. So we must check that all
packages a module transitively depends on to be trusted are still trusted when
we are compiling this module (as due to recompilation avoidance some modules
below may not be considered trusted any more without recompilation being
triggered).
We handle this by augmenting the existing transitive list of packages a module M
depends on with a bool for each package that says if it must be trusted when the
module M is being checked for trust. This list of trust required packages for a
single import is gathered in the rnImportDecl function and stored in an
ImportAvails data structure. The union of these trust required packages for all
imports is done by the rnImports function using the combine function which calls
the plusImportAvails function that is a union operation for the ImportAvails
type. This gives us in an ImportAvails structure all packages required to be
trusted for the module we are currently compiling. Checking that these packages
are still trusted (and that direct imports are trusted) is done in
HscMain.checkSafeImports.
See the note below, [Trust Own Package] for a corner case in this method and
how its handled.
Note [Trust Own Package]
~~~~~~~~~~~~~~~~~~~~~~~~
There is a corner case of package trust checking that the usual transitive check
doesn't cover. (For how the usual check operates see the Note [Tracking Trust
Transitively] below). The case is when you import a -XSafe module M and M
imports a -XTrustworthy module N. If N resides in a different package than M,
then the usual check works as M will record a package dependency on N's package
and mark it as required to be trusted. If N resides in the same package as M
though, then importing M should require its own package be trusted due to N
(since M is -XSafe so doesn't create this requirement by itself). The usual
check fails as a module doesn't record a package dependency of its own package.
So instead we now have a bool field in a module's interface file that simply
states if the module requires its own package to be trusted. This field avoids
us having to load all interface files that the module depends on to see if one
is trustworthy.
Note [Trust Transitive Property]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
So there is an interesting design question in regards to transitive trust
checking. Say I have a module B compiled with -XSafe. B is dependent on a bunch
of modules and packages, some packages it requires to be trusted as it's using
-XTrustworthy modules from them. Now if I have a module A that doesn't use Safe
Haskell at all and simply imports B, should A inherit all the trust
requirements from B? Should A now also require that a package p is trusted since
B required it?
We currently say no but saying yes also makes sense. The difference is, if a
module M that doesn't use Safe Haskell imports a module N that does, should all
the trusted package requirements be dropped since M didn't declare that it cares
about Safe Haskell (so -XSafe is more strongly associated with the module doing
the importing) or should it be done still since the author of the module N that
uses Safe Haskell said they cared (so -XSafe is more strongly associated with
the module that was compiled with it).
Going with yes is a simpler semantics we think and harder for the user to stuff
up but it does mean that Safe Haskell will affect users who don't care about
Safe Haskell as they might grab a package from Cabal which uses Safe Haskell (say
network) and that packages imports -XTrustworthy modules from another package
(say bytestring), so requires that package is trusted. The user may now get
compilation errors in code that doesn't do anything with Safe Haskell simply
because they are using the network package. They will have to call 'ghc-pkg
trust network' to get everything working. Due to this invasive nature of going
with yes we have gone with no for now.
-}
-- | Process Import Decls
-- Do the non SOURCE ones first, so that we get a helpful warning for SOURCE
-- ones that are unnecessary
rnImports :: [LImportDecl RdrName]
-> RnM ([LImportDecl Name], GlobalRdrEnv, ImportAvails, AnyHpcUsage)
rnImports imports = do
this_mod <- getModule
let (source, ordinary) = partition is_source_import imports
is_source_import d = ideclSource (unLoc d)
stuff1 <- mapAndReportM (rnImportDecl this_mod) ordinary
stuff2 <- mapAndReportM (rnImportDecl this_mod) source
-- Safe Haskell: See Note [Tracking Trust Transitively]
let (decls, rdr_env, imp_avails, hpc_usage) = combine (stuff1 ++ stuff2)
return (decls, rdr_env, imp_avails, hpc_usage)
where
combine :: [(LImportDecl Name, GlobalRdrEnv, ImportAvails, AnyHpcUsage)]
-> ([LImportDecl Name], GlobalRdrEnv, ImportAvails, AnyHpcUsage)
combine = foldr plus ([], emptyGlobalRdrEnv, emptyImportAvails, False)
plus (decl, gbl_env1, imp_avails1,hpc_usage1)
(decls, gbl_env2, imp_avails2,hpc_usage2)
= ( decl:decls,
gbl_env1 `plusGlobalRdrEnv` gbl_env2,
imp_avails1 `plusImportAvails` imp_avails2,
hpc_usage1 || hpc_usage2 )
rnImportDecl :: Module -> LImportDecl RdrName
-> RnM (LImportDecl Name, GlobalRdrEnv, ImportAvails, AnyHpcUsage)
rnImportDecl this_mod
(L loc decl@(ImportDecl { ideclName = loc_imp_mod_name, ideclPkgQual = mb_pkg
, ideclSource = want_boot, ideclSafe = mod_safe
, ideclQualified = qual_only, ideclImplicit = implicit
, ideclAs = as_mod, ideclHiding = imp_details }))
= setSrcSpan loc $ do
when (isJust mb_pkg) $ do
pkg_imports <- xoptM Opt_PackageImports
when (not pkg_imports) $ addErr packageImportErr
-- If there's an error in loadInterface, (e.g. interface
-- file not found) we get lots of spurious errors from 'filterImports'
let imp_mod_name = unLoc loc_imp_mod_name
doc = ppr imp_mod_name <+> ptext (sLit "is directly imported")
-- Check for self-import, which confuses the typechecker (Trac #9032)
-- ghc --make rejects self-import cycles already, but batch-mode may not
-- at least not until TcIface.tcHiBootIface, which is too late to avoid
-- typechecker crashes. ToDo: what about indirect self-import?
-- But 'import {-# SOURCE #-} M' is ok, even if a bit odd
when (not want_boot &&
imp_mod_name == moduleName this_mod &&
(case mb_pkg of -- If we have import "<pkg>" M, then we should
-- check that "<pkg>" is "this" (which is magic)
-- or the name of this_mod's package. Yurgh!
-- c.f. GHC.findModule, and Trac #9997
Nothing -> True
Just pkg_fs -> pkg_fs == fsLit "this" ||
fsToPackageKey pkg_fs == modulePackageKey this_mod))
(addErr (ptext (sLit "A module cannot import itself:") <+> ppr imp_mod_name))
-- Check for a missing import list (Opt_WarnMissingImportList also
-- checks for T(..) items but that is done in checkDodgyImport below)
case imp_details of
Just (False, _) -> return () -- Explicit import list
_ | implicit -> return () -- Do not bleat for implicit imports
| qual_only -> return ()
| otherwise -> whenWOptM Opt_WarnMissingImportList $
addWarn (missingImportListWarn imp_mod_name)
ifaces <- loadSrcInterface doc imp_mod_name want_boot mb_pkg
-- Compiler sanity check: if the import didn't say
-- {-# SOURCE #-} we should not get a hi-boot file
WARN( not want_boot && any mi_boot ifaces, ppr imp_mod_name ) do
-- Another sanity check: we should not get multiple interfaces
-- if we're looking for an hi-boot file
WARN( want_boot && length ifaces /= 1, ppr imp_mod_name ) do
-- Issue a user warning for a redundant {- SOURCE -} import
-- NB that we arrange to read all the ordinary imports before
-- any of the {- SOURCE -} imports.
--
-- in --make and GHCi, the compilation manager checks for this,
-- and indeed we shouldn't do it here because the existence of
-- the non-boot module depends on the compilation order, which
-- is not deterministic. The hs-boot test can show this up.
dflags <- getDynFlags
warnIf (want_boot && any (not.mi_boot) ifaces && isOneShot (ghcMode dflags))
(warnRedundantSourceImport imp_mod_name)
when (mod_safe && not (safeImportsOn dflags)) $
addErr (ptext (sLit "safe import can't be used as Safe Haskell isn't on!")
$+$ ptext (sLit $ "please enable Safe Haskell through either "
++ "Safe, Trustworthy or Unsafe"))
let
qual_mod_name = as_mod `orElse` imp_mod_name
imp_spec = ImpDeclSpec { is_mod = imp_mod_name, is_qual = qual_only,
is_dloc = loc, is_as = qual_mod_name }
-- filter the imports according to the import declaration
(new_imp_details, gres) <- filterImports ifaces imp_spec imp_details
let gbl_env = mkGlobalRdrEnv (filterOut from_this_mod gres)
from_this_mod gre = nameModule (gre_name gre) == this_mod
-- True <=> import M ()
import_all = case imp_details of
Just (is_hiding, L _ ls) -> not is_hiding && null ls
_ -> False
-- should the import be safe?
mod_safe' = mod_safe
|| (not implicit && safeDirectImpsReq dflags)
|| (implicit && safeImplicitImpsReq dflags)
let imports
= foldr plusImportAvails emptyImportAvails (map
(\iface ->
(calculateAvails dflags iface mod_safe' want_boot) {
imp_mods = unitModuleEnv (mi_module iface)
[(qual_mod_name, import_all, loc, mod_safe')] })
ifaces)
-- Complain if we import a deprecated module
whenWOptM Opt_WarnWarningsDeprecations (
forM_ ifaces $ \iface ->
case mi_warns iface of
WarnAll txt -> addWarn $ moduleWarn imp_mod_name txt
_ -> return ()
)
let new_imp_decl = L loc (decl { ideclSafe = mod_safe'
, ideclHiding = new_imp_details })
return (new_imp_decl, gbl_env, imports, any mi_hpc ifaces)
-- | Calculate the 'ImportAvails' induced by an import of a particular
-- interface, but without 'imp_mods'.
calculateAvails :: DynFlags
-> ModIface
-> IsSafeImport
-> IsBootInterface
-> ImportAvails
calculateAvails dflags iface mod_safe' want_boot =
let imp_mod = mi_module iface
orph_iface = mi_orphan iface
has_finsts = mi_finsts iface
deps = mi_deps iface
trust = getSafeMode $ mi_trust iface
trust_pkg = mi_trust_pkg iface
-- If the module exports anything defined in this module, just
-- ignore it. Reason: otherwise it looks as if there are two
-- local definition sites for the thing, and an error gets
-- reported. Easiest thing is just to filter them out up
-- front. This situation only arises if a module imports
-- itself, or another module that imported it. (Necessarily,
      -- this involves a loop.)
--
-- We do this *after* filterImports, so that if you say
-- module A where
-- import B( AType )
-- type AType = ...
--
-- module B( AType ) where
-- import {-# SOURCE #-} A( AType )
--
-- then you won't get a 'B does not export AType' message.
-- Compute new transitive dependencies
orphans | orph_iface = ASSERT( not (imp_mod `elem` dep_orphs deps) )
imp_mod : dep_orphs deps
| otherwise = dep_orphs deps
finsts | has_finsts = ASSERT( not (imp_mod `elem` dep_finsts deps) )
imp_mod : dep_finsts deps
| otherwise = dep_finsts deps
pkg = modulePackageKey (mi_module iface)
-- Does this import mean we now require our own pkg
-- to be trusted? See Note [Trust Own Package]
ptrust = trust == Sf_Trustworthy || trust_pkg
(dependent_mods, dependent_pkgs, pkg_trust_req)
| pkg == thisPackage dflags =
-- Imported module is from the home package
-- Take its dependent modules and add imp_mod itself
-- Take its dependent packages unchanged
--
-- NB: (dep_mods deps) might include a hi-boot file
-- for the module being compiled, CM. Do *not* filter
-- this out (as we used to), because when we've
-- finished dealing with the direct imports we want to
-- know if any of them depended on CM.hi-boot, in
-- which case we should do the hi-boot consistency
-- check. See LoadIface.loadHiBootInterface
((moduleName imp_mod,want_boot):dep_mods deps,dep_pkgs deps,ptrust)
| otherwise =
-- Imported module is from another package
-- Dump the dependent modules
-- Add the package imp_mod comes from to the dependent packages
ASSERT2( not (pkg `elem` (map fst $ dep_pkgs deps))
, ppr pkg <+> ppr (dep_pkgs deps) )
([], (pkg, False) : dep_pkgs deps, False)
in ImportAvails {
imp_mods = emptyModuleEnv, -- this gets filled in later
imp_orphs = orphans,
imp_finsts = finsts,
imp_dep_mods = mkModDeps dependent_mods,
imp_dep_pkgs = map fst $ dependent_pkgs,
-- Add in the imported modules trusted package
-- requirements. ONLY do this though if we import the
-- module as a safe import.
-- See Note [Tracking Trust Transitively]
-- and Note [Trust Transitive Property]
imp_trust_pkgs = if mod_safe'
then map fst $ filter snd dependent_pkgs
else [],
-- Do we require our own pkg to be trusted?
-- See Note [Trust Own Package]
imp_trust_own_pkg = pkg_trust_req
}
warnRedundantSourceImport :: ModuleName -> SDoc
warnRedundantSourceImport mod_name
= ptext (sLit "Unnecessary {-# SOURCE #-} in the import of module")
<+> quotes (ppr mod_name)
{-
************************************************************************
* *
\subsection{importsFromLocalDecls}
* *
************************************************************************
From the top-level declarations of this module produce
* the lexical environment
* the ImportAvails
created by its bindings.
Note [Top-level Names in Template Haskell decl quotes]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
See also: Note [Interactively-bound Ids in GHCi] in HscTypes
Consider a Template Haskell declaration quotation like this:
module M where
f x = h [d| f = 3 |]
When renaming the declarations inside [d| ...|], we treat the
top level binders specially in two ways
1. We give them an Internal name, not (as usual) an External one.
Otherwise the NameCache gets confused by a second allocation of
M.f. (We used to invent a fake module ThFake to avoid this, but
that had other problems, notably in getting the correct answer for
nameIsLocalOrFrom in lookupFixity. So we now leave tcg_module
unaffected.)
2. We make them *shadow* the outer bindings. If we don't do that,
we'll get a complaint when extending the GlobalRdrEnv, saying that
there are two bindings for 'f'. There are several tricky points:
* This shadowing applies even if the binding for 'f' is in a
where-clause, and hence is in the *local* RdrEnv not the *global*
RdrEnv.
* The *qualified* name M.f from the enclosing module must certainly
still be available. So we don't nuke it entirely; we just make
     it seem like a qualified import.
* We only shadow *External* names (which come from the main module)
     Do not shadow *Internal* names because in the bracket
[d| class C a where f :: a
f = 4 |]
rnSrcDecls will first call extendGlobalRdrEnvRn with C[f] from the
class decl, and *separately* extend the envt with the value binding.
3. We find out whether we are inside a [d| ... |] by testing the TH
stage. This is a slight hack, because the stage field was really
meant for the type checker, and here we are not interested in the
fields of Brack, hence the error thunks in thRnBrack.
-}
extendGlobalRdrEnvRn :: [AvailInfo]
-> MiniFixityEnv
-> RnM (TcGblEnv, TcLclEnv)
-- Updates both the GlobalRdrEnv and the FixityEnv
-- We return a new TcLclEnv only because we might have to
-- delete some bindings from it;
-- see Note [Top-level Names in Template Haskell decl quotes]
extendGlobalRdrEnvRn avails new_fixities
= do { (gbl_env, lcl_env) <- getEnvs
; stage <- getStage
; isGHCi <- getIsGHCi
; let rdr_env = tcg_rdr_env gbl_env
fix_env = tcg_fix_env gbl_env
th_bndrs = tcl_th_bndrs lcl_env
th_lvl = thLevel stage
-- Delete new_occs from global and local envs
-- If we are in a TemplateHaskell decl bracket,
-- we are going to shadow them
-- See Note [Top-level Names in Template Haskell decl quotes]
inBracket = isBrackStage stage
lcl_env_TH = lcl_env { tcl_rdr = delLocalRdrEnvList (tcl_rdr lcl_env) new_occs }
lcl_env2 | inBracket = lcl_env_TH
| otherwise = lcl_env
rdr_env2 = extendGlobalRdrEnv (isGHCi && not inBracket) rdr_env avails
-- Shadowing only applies for GHCi decls outside brackets
-- e.g. (Trac #4127a)
-- ghci> runQ [d| class C a where f :: a
-- f = True
-- instance C Int where f = 2 |]
-- We don't want the f=True to shadow the f class-op
lcl_env3 = lcl_env2 { tcl_th_bndrs = extendNameEnvList th_bndrs
[ (n, (TopLevel, th_lvl))
| n <- new_names ] }
fix_env' = foldl extend_fix_env fix_env new_names
dups = findLocalDupsRdrEnv rdr_env2 new_names
gbl_env' = gbl_env { tcg_rdr_env = rdr_env2, tcg_fix_env = fix_env' }
; traceRn (text "extendGlobalRdrEnvRn 1" <+> (ppr avails $$ (ppr dups)))
; mapM_ (addDupDeclErr . map gre_name) dups
; traceRn (text "extendGlobalRdrEnvRn 2" <+> (pprGlobalRdrEnv True rdr_env2))
; return (gbl_env', lcl_env3) }
where
new_names = concatMap availNames avails
new_occs = map nameOccName new_names
-- If there is a fixity decl for the gre, add it to the fixity env
extend_fix_env fix_env name
| Just (L _ fi) <- lookupFsEnv new_fixities (occNameFS occ)
= extendNameEnv fix_env name (FixItem occ fi)
| otherwise
= fix_env
where
occ = nameOccName name
{-
@getLocalDeclBinders@ returns the names for an @HsDecl@. It's
used for source code.
*** See "THE NAMING STORY" in HsDecls ****
-}
getLocalNonValBinders :: MiniFixityEnv -> HsGroup RdrName
-> RnM ((TcGblEnv, TcLclEnv), NameSet)
-- Get all the top-level binders bound the group *except*
-- for value bindings, which are treated separately
-- Specifically we return AvailInfo for
-- * type decls (incl constructors and record selectors)
-- * class decls (including class ops)
-- * associated types
-- * foreign imports
-- * pattern synonyms
-- * value signatures (in hs-boot files)
getLocalNonValBinders fixity_env
(HsGroup { hs_valds = binds,
hs_tyclds = tycl_decls,
hs_instds = inst_decls,
hs_fords = foreign_decls })
= do { -- Process all type/class decls *except* family instances
; tc_avails <- mapM new_tc (tyClGroupConcat tycl_decls)
; traceRn (text "getLocalNonValBinders 1" <+> ppr tc_avails)
; envs <- extendGlobalRdrEnvRn tc_avails fixity_env
; setEnvs envs $ do {
-- Bring these things into scope first
-- See Note [Looking up family names in family instances]
-- Process all family instances
-- to bring new data constructors into scope
; nti_avails <- concatMapM new_assoc inst_decls
-- Finish off with value binders:
-- foreign decls and pattern synonyms for an ordinary module
-- type sigs in case of a hs-boot file only
; is_boot <- tcIsHsBootOrSig
; let val_bndrs | is_boot = hs_boot_sig_bndrs
| otherwise = for_hs_bndrs ++ patsyn_hs_bndrs
; val_avails <- mapM new_simple val_bndrs
; let avails = nti_avails ++ val_avails
new_bndrs = availsToNameSet avails `unionNameSet`
availsToNameSet tc_avails
; traceRn (text "getLocalNonValBinders 2" <+> ppr avails)
; envs <- extendGlobalRdrEnvRn avails fixity_env
; return (envs, new_bndrs) } }
where
ValBindsIn val_binds val_sigs = binds
for_hs_bndrs :: [Located RdrName]
for_hs_bndrs = hsForeignDeclsBinders foreign_decls
patsyn_hs_bndrs :: [Located RdrName]
patsyn_hs_bndrs = hsPatSynBinders val_binds
-- In a hs-boot file, the value binders come from the
-- *signatures*, and there should be no foreign binders
hs_boot_sig_bndrs = [ L decl_loc (unLoc n)
| L decl_loc (TypeSig ns _ _) <- val_sigs, n <- ns]
-- the SrcSpan attached to the input should be the span of the
-- declaration, not just the name
new_simple :: Located RdrName -> RnM AvailInfo
new_simple rdr_name = do{ nm <- newTopSrcBinder rdr_name
; return (Avail nm) }
new_tc tc_decl -- NOT for type/data instances
= do { let bndrs = hsLTyClDeclBinders tc_decl
; names@(main_name : _) <- mapM newTopSrcBinder bndrs
; return (AvailTC main_name names) }
new_assoc :: LInstDecl RdrName -> RnM [AvailInfo]
new_assoc (L _ (TyFamInstD {})) = return []
-- type instances don't bind new names
new_assoc (L _ (DataFamInstD { dfid_inst = d }))
= do { avail <- new_di Nothing d
; return [avail] }
new_assoc (L _ (ClsInstD { cid_inst = ClsInstDecl
{ cid_poly_ty = inst_ty
, cid_datafam_insts = adts } }))
| Just (_, _, L loc cls_rdr, _) <- splitLHsInstDeclTy_maybe inst_ty
= do { cls_nm <- setSrcSpan loc $ lookupGlobalOccRn cls_rdr
; mapM (new_di (Just cls_nm) . unLoc) adts }
| otherwise
= return [] -- Do not crash on ill-formed instances
-- Eg instance !Show Int Trac #3811c
new_di :: Maybe Name -> DataFamInstDecl RdrName -> RnM AvailInfo
new_di mb_cls ti_decl
= do { main_name <- lookupFamInstName mb_cls (dfid_tycon ti_decl)
; sub_names <- mapM newTopSrcBinder (hsDataFamInstBinders ti_decl)
; return (AvailTC (unLoc main_name) sub_names) }
-- main_name is not bound here!
{-
Note [Looking up family names in family instances]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider
module M where
type family T a :: *
type instance M.T Int = Bool
We might think that we can simply use 'lookupOccRn' when processing the type
instance to look up 'M.T'. Alas, we can't! The type family declaration is in
the *same* HsGroup as the type instance declaration. Hence, as we are
currently collecting the binders declared in that HsGroup, these binders will
not have been added to the global environment yet.
Solution is simple: process the type family declarations first, extend
the environment, and then process the type instances.
************************************************************************
* *
\subsection{Filtering imports}
* *
************************************************************************
@filterImports@ takes the @ExportEnv@ telling what the imported module makes
available, and filters it through the import spec (if any).
Note [Dealing with imports]
~~~~~~~~~~~~~~~~~~~~~~~~~~~
For import M( ies ), we take the mi_exports of M, and make
imp_occ_env :: OccEnv (Name, AvailInfo, Maybe Name)
One entry for each Name that M exports; the AvailInfo describes just
that Name.
The situation is made more complicated by associated types. E.g.
module M where
class C a where { data T a }
instance C Int where { data T Int = T1 | T2 }
instance C Bool where { data T Int = T3 }
Then M's export_avails are (recall the AvailTC invariant from Avails.hs)
C(C,T), T(T,T1,T2,T3)
Notice that T appears *twice*, once as a child and once as a parent.
From this we construct the imp_occ_env
C -> (C, C(C,T), Nothing
T -> (T, T(T,T1,T2,T3), Just C)
T1 -> (T1, T(T1,T2,T3), Nothing) -- similarly T2,T3
Note that the imp_occ_env will have entries for data constructors too,
although we never look up data constructors.
-}
filterImports
:: [ModIface]
-> ImpDeclSpec -- The span for the entire import decl
-> Maybe (Bool, Located [LIE RdrName]) -- Import spec; True => hiding
-> RnM (Maybe (Bool, Located [LIE Name]), -- Import spec w/ Names
[GlobalRdrElt]) -- Same again, but in GRE form
filterImports iface decl_spec Nothing
= return (Nothing, gresFromAvails prov (concatMap mi_exports iface))
where
prov = Imported [ImpSpec { is_decl = decl_spec, is_item = ImpAll }]
filterImports ifaces decl_spec (Just (want_hiding, L l import_items))
= do -- check for errors, convert RdrNames to Names
items1 <- mapM lookup_lie import_items
let items2 :: [(LIE Name, AvailInfo)]
items2 = concat items1
-- NB the AvailInfo may have duplicates, and several items
-- for the same parent; e.g N(x) and N(y)
names = availsToNameSet (map snd items2)
keep n = not (n `elemNameSet` names)
pruned_avails = filterAvails keep all_avails
hiding_prov = Imported [ImpSpec { is_decl = decl_spec, is_item = ImpAll }]
gres | want_hiding = gresFromAvails hiding_prov pruned_avails
| otherwise = concatMap (gresFromIE decl_spec) items2
return (Just (want_hiding, L l (map fst items2)), gres)
where
all_avails = concatMap mi_exports ifaces
-- See Note [Dealing with imports]
imp_occ_env :: OccEnv (Name, -- the name
AvailInfo, -- the export item providing the name
Maybe Name) -- the parent of associated types
imp_occ_env = mkOccEnv_C combine [ (nameOccName n, (n, a, Nothing))
| a <- all_avails, n <- availNames a]
where
-- See example in Note [Dealing with imports]
-- 'combine' is only called for associated types which appear twice
-- in the all_avails. In the example, we combine
-- T(T,T1,T2,T3) and C(C,T) to give (T, T(T,T1,T2,T3), Just C)
combine (name1, a1@(AvailTC p1 _), mp1)
(name2, a2@(AvailTC p2 _), mp2)
= ASSERT( name1 == name2 && isNothing mp1 && isNothing mp2 )
if p1 == name1 then (name1, a1, Just p2)
else (name1, a2, Just p1)
combine x y = pprPanic "filterImports/combine" (ppr x $$ ppr y)
lookup_name :: RdrName -> IELookupM (Name, AvailInfo, Maybe Name)
lookup_name rdr | isQual rdr = failLookupWith (QualImportError rdr)
| Just succ <- mb_success = return succ
| otherwise = failLookupWith BadImport
where
mb_success = lookupOccEnv imp_occ_env (rdrNameOcc rdr)
lookup_lie :: LIE RdrName -> TcRn [(LIE Name, AvailInfo)]
lookup_lie (L loc ieRdr)
= do (stuff, warns) <- setSrcSpan loc $
liftM (fromMaybe ([],[])) $
run_lookup (lookup_ie ieRdr)
mapM_ emit_warning warns
return [ (L loc ie, avail) | (ie,avail) <- stuff ]
where
-- Warn when importing T(..) if T was exported abstractly
emit_warning (DodgyImport n) = whenWOptM Opt_WarnDodgyImports $
addWarn (dodgyImportWarn n)
emit_warning MissingImportList = whenWOptM Opt_WarnMissingImportList $
addWarn (missingImportListItem ieRdr)
emit_warning BadImportW = whenWOptM Opt_WarnDodgyImports $
addWarn (lookup_err_msg BadImport)
run_lookup :: IELookupM a -> TcRn (Maybe a)
run_lookup m = case m of
Failed err -> addErr (lookup_err_msg err) >> return Nothing
Succeeded a -> return (Just a)
lookup_err_msg err = case err of
BadImport -> badImportItemErr (any mi_boot ifaces) decl_spec
ieRdr all_avails
IllegalImport -> illegalImportItemErr
QualImportError rdr -> qualImportItemErr rdr
-- For each import item, we convert its RdrNames to Names,
-- and at the same time construct an AvailInfo corresponding
-- to what is actually imported by this item.
-- Returns Nothing on error.
-- We return a list here, because in the case of an import
-- item like C, if we are hiding, then C refers to *both* a
-- type/class and a data constructor. Moreover, when we import
-- data constructors of an associated family, we need separate
-- AvailInfos for the data constructors and the family (as they have
-- different parents). See Note [Dealing with imports]
lookup_ie :: IE RdrName -> IELookupM ([(IE Name, AvailInfo)], [IELookupWarning])
lookup_ie ie = handle_bad_import $ do
case ie of
IEVar (L l n) -> do
(name, avail, _) <- lookup_name n
return ([(IEVar (L l name), trimAvail avail name)], [])
IEThingAll (L l tc) -> do
(name, avail@(AvailTC name2 subs), mb_parent) <- lookup_name tc
let warns | null (drop 1 subs) = [DodgyImport tc]
| not (is_qual decl_spec) = [MissingImportList]
| otherwise = []
case mb_parent of
-- non-associated ty/cls
Nothing -> return ([(IEThingAll (L l name), avail)], warns)
-- associated ty
Just parent -> return ([(IEThingAll (L l name),
AvailTC name2 (subs \\ [name])),
(IEThingAll (L l name),
AvailTC parent [name])],
warns)
IEThingAbs (L l tc)
| want_hiding -- hiding ( C )
-- Here the 'C' can be a data constructor
-- *or* a type/class, or even both
-> let tc_name = lookup_name tc
dc_name = lookup_name (setRdrNameSpace tc srcDataName)
in
case catIELookupM [ tc_name, dc_name ] of
[] -> failLookupWith BadImport
names -> return ([mkIEThingAbs l name | name <- names], [])
| otherwise
-> do nameAvail <- lookup_name tc
return ([mkIEThingAbs l nameAvail], [])
IEThingWith (L l rdr_tc) rdr_ns -> do
(name, AvailTC _ ns, mb_parent) <- lookup_name rdr_tc
-- Look up the children in the sub-names of the parent
let subnames = case ns of -- The tc is first in ns,
[] -> [] -- if it is there at all
-- See the AvailTC Invariant in Avail.hs
(n1:ns1) | n1 == name -> ns1
| otherwise -> ns
mb_children = lookupChildren subnames rdr_ns
children <- if any isNothing mb_children
then failLookupWith BadImport
else return (catMaybes mb_children)
case mb_parent of
-- non-associated ty/cls
Nothing -> return ([(IEThingWith (L l name) children,
AvailTC name (name:map unLoc children))],
[])
-- associated ty
Just parent -> return ([(IEThingWith (L l name) children,
AvailTC name (map unLoc children)),
(IEThingWith (L l name) children,
AvailTC parent [name])],
[])
_other -> failLookupWith IllegalImport
-- could be IEModuleContents, IEGroup, IEDoc, IEDocNamed
-- all errors.
where
mkIEThingAbs l (n, av, Nothing ) = (IEThingAbs (L l n),
trimAvail av n)
mkIEThingAbs l (n, _, Just parent) = (IEThingAbs (L l n),
AvailTC parent [n])
handle_bad_import m = catchIELookup m $ \err -> case err of
BadImport | want_hiding -> return ([], [BadImportW])
_ -> failLookupWith err
type IELookupM = MaybeErr IELookupError
data IELookupWarning
= BadImportW
| MissingImportList
| DodgyImport RdrName
-- NB. use the RdrName for reporting a "dodgy" import
data IELookupError
= QualImportError RdrName
| BadImport
| IllegalImport
failLookupWith :: IELookupError -> IELookupM a
failLookupWith err = Failed err
catchIELookup :: IELookupM a -> (IELookupError -> IELookupM a) -> IELookupM a
catchIELookup m h = case m of
Succeeded r -> return r
Failed err -> h err
catIELookupM :: [IELookupM a] -> [a]
catIELookupM ms = [ a | Succeeded a <- ms ]
{-
************************************************************************
* *
\subsection{Import/Export Utils}
* *
************************************************************************
-}
greExportAvail :: GlobalRdrElt -> AvailInfo
greExportAvail gre
= case gre_par gre of
ParentIs p -> AvailTC p [me]
NoParent | isTyConName me -> AvailTC me [me]
| otherwise -> Avail me
where
me = gre_name gre
plusAvail :: AvailInfo -> AvailInfo -> AvailInfo
plusAvail a1 a2
| debugIsOn && availName a1 /= availName a2
= pprPanic "RnEnv.plusAvail names differ" (hsep [ppr a1,ppr a2])
plusAvail a1@(Avail {}) (Avail {}) = a1
plusAvail (AvailTC _ []) a2@(AvailTC {}) = a2
plusAvail a1@(AvailTC {}) (AvailTC _ []) = a1
plusAvail (AvailTC n1 (s1:ss1)) (AvailTC n2 (s2:ss2))
= case (n1==s1, n2==s2) of -- Maintain invariant the parent is first
(True,True) -> AvailTC n1 (s1 : (ss1 `unionLists` ss2))
(True,False) -> AvailTC n1 (s1 : (ss1 `unionLists` (s2:ss2)))
(False,True) -> AvailTC n1 (s2 : ((s1:ss1) `unionLists` ss2))
(False,False) -> AvailTC n1 ((s1:ss1) `unionLists` (s2:ss2))
plusAvail a1 a2 = pprPanic "RnEnv.plusAvail" (hsep [ppr a1,ppr a2])
trimAvail :: AvailInfo -> Name -> AvailInfo
trimAvail (Avail n) _ = Avail n
trimAvail (AvailTC n ns) m = ASSERT( m `elem` ns) AvailTC n [m]
-- | filters 'AvailInfo's by the given predicate
filterAvails :: (Name -> Bool) -> [AvailInfo] -> [AvailInfo]
filterAvails keep avails = foldr (filterAvail keep) [] avails
-- | filters an 'AvailInfo' by the given predicate
filterAvail :: (Name -> Bool) -> AvailInfo -> [AvailInfo] -> [AvailInfo]
filterAvail keep ie rest =
case ie of
Avail n | keep n -> ie : rest
| otherwise -> rest
AvailTC tc ns ->
let left = filter keep ns in
if null left then rest else AvailTC tc left : rest
-- | Given an import\/export spec, construct the appropriate 'GlobalRdrElt's.
gresFromIE :: ImpDeclSpec -> (LIE Name, AvailInfo) -> [GlobalRdrElt]
gresFromIE decl_spec (L loc ie, avail)
= gresFromAvail prov_fn avail
where
is_explicit = case ie of
IEThingAll (L _ name) -> \n -> n == name
_ -> \_ -> True
prov_fn name = Imported [imp_spec]
where
imp_spec = ImpSpec { is_decl = decl_spec, is_item = item_spec }
item_spec = ImpSome { is_explicit = is_explicit name, is_iloc = loc }
mkChildEnv :: [GlobalRdrElt] -> NameEnv [Name]
mkChildEnv gres = foldr add emptyNameEnv gres
where
add (GRE { gre_name = n, gre_par = ParentIs p }) env = extendNameEnv_Acc (:) singleton env p n
add _ env = env
findChildren :: NameEnv [Name] -> Name -> [Name]
findChildren env n = lookupNameEnv env n `orElse` []
lookupChildren :: [Name] -> [Located RdrName] -> [Maybe (Located Name)]
-- (lookupChildren all_kids rdr_items) maps each rdr_item to its
-- corresponding Name all_kids, if the former exists
-- The matching is done by FastString, not OccName, so that
-- Cls( meth, AssocTy )
-- will correctly find AssocTy among the all_kids of Cls, even though
-- the RdrName for AssocTy may have a (bogus) DataName namespace
-- (Really the rdr_items should be FastStrings in the first place.)
lookupChildren all_kids rdr_items
-- = map (lookupFsEnv kid_env . occNameFS . rdrNameOcc) rdr_items
= map doOne rdr_items
where
doOne (L l r) = case (lookupFsEnv kid_env . occNameFS . rdrNameOcc) r of
Just n -> Just (L l n)
Nothing -> Nothing
kid_env = mkFsEnv [(occNameFS (nameOccName n), n) | n <- all_kids]
-- | Combines 'AvailInfo's from the same family
-- 'avails' may have several items with the same availName
-- E.g import Ix( Ix(..), index )
-- will give Ix(Ix,index,range) and Ix(index)
-- We want to combine these; addAvail does that
nubAvails :: [AvailInfo] -> [AvailInfo]
nubAvails avails = nameEnvElts (foldl add emptyNameEnv avails)
where
add env avail = extendNameEnv_C plusAvail env (availName avail) avail
{-
************************************************************************
* *
\subsection{Export list processing}
* *
************************************************************************
Processing the export list.
You might think that we should record things that appear in the export
list as ``occurrences'' (using @addOccurrenceName@), but you'd be
wrong. We do check (here) that they are in scope, but there is no
need to slurp in their actual declaration (which is what
@addOccurrenceName@ forces).
Indeed, doing so would cause big trouble when compiling @PrelBase@, because
it re-exports @GHC@, which includes @takeMVar#@, whose type includes
@ConcBase.StateAndSynchVar#@, and so on...
Note [Exports of data families]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Suppose you see (Trac #5306)
module M where
import X( F )
data instance F Int = FInt
What does M export? AvailTC F [FInt]
or AvailTC F [F,FInt]?
The former is strictly right because F isn't defined in this module.
But then you can never do an explicit import of M, thus
import M( F( FInt ) )
because F isn't exported by M. Nor can you import FInt alone from here
import M( FInt )
because we don't have syntax to support that. (It looks like an import of
the type FInt.)
At one point I implemented a compromise:
* When constructing exports with no export list, or with module M(
module M ), we add the parent to the exports as well.
* But not when you see module M( f ), even if f is a
class method with a parent.
* Nor when you see module M( module N ), with N /= M.
But the compromise seemed too much of a hack, so we backed it out.
You just have to use an explicit export list:
module M( F(..) ) where ...
-}
type ExportAccum -- The type of the accumulating parameter of
-- the main worker function in rnExports
= ([LIE Name], -- Export items with Names
ExportOccMap, -- Tracks exported occurrence names
[AvailInfo]) -- The accumulated exported stuff
-- Not nub'd!
emptyExportAccum :: ExportAccum
emptyExportAccum = ([], emptyOccEnv, [])
type ExportOccMap = OccEnv (Name, IE RdrName)
-- Tracks what a particular exported OccName
-- in an export list refers to, and which item
-- it came from. It's illegal to export two distinct things
-- that have the same occurrence name
rnExports :: Bool -- False => no 'module M(..) where' header at all
-> Maybe (Located [LIE RdrName]) -- Nothing => no explicit export list
-> TcGblEnv
-> RnM TcGblEnv
-- Complains if two distinct exports have same OccName
-- Warns about identical exports.
-- Complains about exports items not in scope
rnExports explicit_mod exports
tcg_env@(TcGblEnv { tcg_mod = this_mod,
tcg_rdr_env = rdr_env,
tcg_imports = imports })
= unsetWOptM Opt_WarnWarningsDeprecations $
-- Do not report deprecations arising from the export
-- list, to avoid bleating about re-exporting a deprecated
-- thing (especially via 'module Foo' export item)
do {
-- If the module header is omitted altogether, then behave
-- as if the user had written "module Main(main) where..."
-- EXCEPT in interactive mode, when we behave as if he had
-- written "module Main where ..."
-- Reason: don't want to complain about 'main' not in scope
-- in interactive mode
; dflags <- getDynFlags
; let real_exports
| explicit_mod = exports
| ghcLink dflags == LinkInMemory = Nothing
| otherwise
= Just (noLoc [noLoc (IEVar (noLoc main_RDR_Unqual))])
-- ToDo: the 'noLoc' here is unhelpful if 'main'
-- turns out to be out of scope
; (rn_exports, avails) <- exports_from_avail real_exports rdr_env imports this_mod
; let final_avails = nubAvails avails -- Combine families
; traceRn (text "rnExports: Exports:" <+> ppr final_avails)
; return (tcg_env { tcg_exports = final_avails,
tcg_rn_exports = case tcg_rn_exports tcg_env of
Nothing -> Nothing
Just _ -> rn_exports,
tcg_dus = tcg_dus tcg_env `plusDU`
usesOnly (availsToNameSet final_avails) }) }
exports_from_avail :: Maybe (Located [LIE RdrName])
-- Nothing => no explicit export list
-> GlobalRdrEnv
-> ImportAvails
-> Module
-> RnM (Maybe [LIE Name], [AvailInfo])
exports_from_avail Nothing rdr_env _imports _this_mod
= -- The same as (module M) where M is the current module name,
-- so that's how we handle it.
let
avails = [ greExportAvail gre
| gre <- globalRdrEnvElts rdr_env
, isLocalGRE gre ]
in
return (Nothing, avails)
exports_from_avail (Just (L _ rdr_items)) rdr_env imports this_mod
= do (ie_names, _, exports) <- foldlM do_litem emptyExportAccum rdr_items
return (Just ie_names, exports)
where
do_litem :: ExportAccum -> LIE RdrName -> RnM ExportAccum
do_litem acc lie = setSrcSpan (getLoc lie) (exports_from_item acc lie)
kids_env :: NameEnv [Name] -- Maps a parent to its in-scope children
kids_env = mkChildEnv (globalRdrEnvElts rdr_env)
imported_modules = [ qual_name
| xs <- moduleEnvElts $ imp_mods imports,
(qual_name, _, _, _) <- xs ]
exports_from_item :: ExportAccum -> LIE RdrName -> RnM ExportAccum
exports_from_item acc@(ie_names, occs, exports)
(L loc (IEModuleContents (L lm mod)))
| let earlier_mods = [ mod
| (L _ (IEModuleContents (L _ mod))) <- ie_names ]
, mod `elem` earlier_mods -- Duplicate export of M
= do { warn_dup_exports <- woptM Opt_WarnDuplicateExports ;
warnIf warn_dup_exports (dupModuleExport mod) ;
return acc }
| otherwise
= do { implicit_prelude <- xoptM Opt_ImplicitPrelude
; warnDodgyExports <- woptM Opt_WarnDodgyExports
; let { exportValid = (mod `elem` imported_modules)
|| (moduleName this_mod == mod)
; gres = filter (isModuleExported implicit_prelude mod)
(globalRdrEnvElts rdr_env)
; new_exports = map greExportAvail gres
; names = map gre_name gres }
; checkErr exportValid (moduleNotImported mod)
; warnIf (warnDodgyExports && exportValid && null names)
(nullModuleExport mod)
; addUsedRdrNames (concat [ [mkRdrQual mod occ, mkRdrUnqual occ]
| occ <- map nameOccName names ])
-- The qualified and unqualified version of all of
-- these names are, in effect, used by this export
; occs' <- check_occs (IEModuleContents (noLoc mod)) occs names
-- This check_occs not only finds conflicts
-- between this item and others, but also
-- internally within this item. That is, if
-- 'M.x' is in scope in several ways, we'll have
-- several members of mod_avails with the same
-- OccName.
; traceRn (vcat [ text "export mod" <+> ppr mod
, ppr new_exports ])
; return (L loc (IEModuleContents (L lm mod)) : ie_names,
occs', new_exports ++ exports) }
exports_from_item acc@(lie_names, occs, exports) (L loc ie)
| isDoc ie
= do new_ie <- lookup_doc_ie ie
return (L loc new_ie : lie_names, occs, exports)
| otherwise
= do (new_ie, avail) <- lookup_ie ie
if isUnboundName (ieName new_ie)
then return acc -- Avoid error cascade
else do
occs' <- check_occs ie occs (availNames avail)
return (L loc new_ie : lie_names, occs', avail : exports)
-------------
lookup_ie :: IE RdrName -> RnM (IE Name, AvailInfo)
lookup_ie (IEVar (L l rdr))
= do gre <- lookupGreRn rdr
return (IEVar (L l (gre_name gre)), greExportAvail gre)
lookup_ie (IEThingAbs (L l rdr))
= do gre <- lookupGreRn rdr
let name = gre_name gre
avail = greExportAvail gre
return (IEThingAbs (L l name), avail)
lookup_ie ie@(IEThingAll (L l rdr))
= do name <- lookupGlobalOccRn rdr
let kids = findChildren kids_env name
addUsedKids rdr kids
warnDodgyExports <- woptM Opt_WarnDodgyExports
when (null kids) $
if isTyConName name
then when warnDodgyExports $ addWarn (dodgyExportWarn name)
else -- This occurs when you export T(..), but
-- only import T abstractly, or T is a synonym.
addErr (exportItemErr ie)
return (IEThingAll (L l name), AvailTC name (name:kids))
lookup_ie ie@(IEThingWith (L l rdr) sub_rdrs)
= do name <- lookupGlobalOccRn rdr
if isUnboundName name
then return (IEThingWith (L l name) [], AvailTC name [name])
else do
let mb_names = lookupChildren (findChildren kids_env name) sub_rdrs
if any isNothing mb_names
then do addErr (exportItemErr ie)
return (IEThingWith (L l name) [], AvailTC name [name])
else do let names = catMaybes mb_names
addUsedKids rdr (map unLoc names)
return (IEThingWith (L l name) names
, AvailTC name (name:map unLoc names))
lookup_ie _ = panic "lookup_ie" -- Other cases covered earlier
-------------
lookup_doc_ie :: IE RdrName -> RnM (IE Name)
lookup_doc_ie (IEGroup lev doc) = do rn_doc <- rnHsDoc doc
return (IEGroup lev rn_doc)
lookup_doc_ie (IEDoc doc) = do rn_doc <- rnHsDoc doc
return (IEDoc rn_doc)
lookup_doc_ie (IEDocNamed str) = return (IEDocNamed str)
lookup_doc_ie _ = panic "lookup_doc_ie" -- Other cases covered earlier
-- In an export item M.T(A,B,C), we want to treat the uses of
-- A,B,C as if they were M.A, M.B, M.C
addUsedKids parent_rdr kid_names
= addUsedRdrNames $ map (mk_kid_rdr . nameOccName) kid_names
where
mk_kid_rdr = case isQual_maybe parent_rdr of
Nothing -> mkRdrUnqual
Just (modName, _) -> mkRdrQual modName
isDoc :: IE RdrName -> Bool
isDoc (IEDoc _) = True
isDoc (IEDocNamed _) = True
isDoc (IEGroup _ _) = True
isDoc _ = False
-------------------------------
isModuleExported :: Bool -> ModuleName -> GlobalRdrElt -> Bool
-- True if the thing is in scope *both* unqualified, *and* with qualifier M
isModuleExported implicit_prelude mod (GRE { gre_name = name, gre_prov = prov })
| implicit_prelude && isBuiltInSyntax name = False
-- Optimisation: filter out names for built-in syntax
-- They just clutter up the environment (esp tuples), and the parser
-- will generate Exact RdrNames for them, so the cluttered
-- envt is no use. To avoid doing this filter all the time,
-- we use -XNoImplicitPrelude as a clue that the filter is
-- worth while. Really, it's only useful for GHC.Base and GHC.Tuple.
--
-- It's worth doing because it makes the environment smaller for
-- every module that imports the Prelude
| otherwise
= case prov of
LocalDef | Just name_mod <- nameModule_maybe name
-> moduleName name_mod == mod
| otherwise -> False
Imported is -> any unQualSpecOK is && any (qualSpecOK mod) is
-------------------------------
check_occs :: IE RdrName -> ExportOccMap -> [Name] -> RnM ExportOccMap
check_occs ie occs names -- 'names' are the entities specified by 'ie'
= foldlM check occs names
where
check occs name
= case lookupOccEnv occs name_occ of
Nothing -> return (extendOccEnv occs name_occ (name, ie))
Just (name', ie')
| name == name' -- Duplicate export
-- But we don't want to warn if the same thing is exported
-- by two different module exports. See ticket #4478.
-> do unless (dupExport_ok name ie ie') $ do
warn_dup_exports <- woptM Opt_WarnDuplicateExports
warnIf warn_dup_exports (dupExportWarn name_occ ie ie')
return occs
| otherwise -- Same occ name but different names: an error
-> do { global_env <- getGlobalRdrEnv ;
addErr (exportClashErr global_env name' name ie' ie) ;
return occs }
where
name_occ = nameOccName name
dupExport_ok :: Name -> IE RdrName -> IE RdrName -> Bool
-- The Name is exported by both IEs. Is that ok?
-- "No" iff the name is mentioned explicitly in both IEs
-- or one of the IEs mentions the name *alone*
-- "Yes" otherwise
--
-- Examples of "no": module M( f, f )
-- module M( fmap, Functor(..) )
-- module M( module Data.List, head )
--
-- Example of "yes"
-- module M( module A, module B ) where
-- import A( f )
-- import B( f )
--
-- Example of "yes" (Trac #2436)
-- module M( C(..), T(..) ) where
-- class C a where { data T a }
--            instance C Int where { data T Int = TInt }
--
-- Example of "yes" (Trac #2436)
-- module Foo ( T ) where
-- data family T a
-- module Bar ( T(..), module Foo ) where
-- import Foo
-- data instance T Int = TInt
dupExport_ok n ie1 ie2
= not ( single ie1 || single ie2
|| (explicit_in ie1 && explicit_in ie2) )
where
explicit_in (IEModuleContents _) = False -- module M
explicit_in (IEThingAll r) = nameOccName n == rdrNameOcc (unLoc r) -- T(..)
explicit_in _ = True
single (IEVar {}) = True
single (IEThingAbs {}) = True
single _ = False
{-
*********************************************************
* *
\subsection{Unused names}
* *
*********************************************************
-}
reportUnusedNames :: Maybe (Located [LIE RdrName]) -- Export list
-> TcGblEnv -> RnM ()
reportUnusedNames _export_decls gbl_env
= do { traceRn ((text "RUN") <+> (ppr (tcg_dus gbl_env)))
; warnUnusedImportDecls gbl_env
; warnUnusedTopBinds unused_locals }
where
used_names :: NameSet
used_names = findUses (tcg_dus gbl_env) emptyNameSet
-- NB: currently, if f x = g, we only treat 'g' as used if 'f' is used
-- Hence findUses
-- Collect the defined names from the in-scope environment
defined_names :: [GlobalRdrElt]
defined_names = globalRdrEnvElts (tcg_rdr_env gbl_env)
-- Note that defined_and_used, defined_but_not_used
-- are both [GRE]; that's why we need defined_and_used
-- rather than just used_names
_defined_and_used, defined_but_not_used :: [GlobalRdrElt]
(_defined_and_used, defined_but_not_used)
= partition (gre_is_used used_names) defined_names
kids_env = mkChildEnv defined_names
-- This is done in mkExports too; duplicated work
gre_is_used :: NameSet -> GlobalRdrElt -> Bool
gre_is_used used_names (GRE {gre_name = name})
= name `elemNameSet` used_names
|| any (`elemNameSet` used_names) (findChildren kids_env name)
-- A use of C implies a use of T,
-- if C was brought into scope by T(..) or T(C)
-- Filter out the ones that are
-- (a) defined in this module, and
-- (b) not defined by a 'deriving' clause
-- The latter have an Internal Name, so we can filter them out easily
unused_locals :: [GlobalRdrElt]
unused_locals = filter is_unused_local defined_but_not_used
is_unused_local :: GlobalRdrElt -> Bool
is_unused_local gre = isLocalGRE gre && isExternalName (gre_name gre)
{-
*********************************************************
* *
\subsection{Unused imports}
* *
*********************************************************
This code finds which import declarations are unused. The
specification and implementation notes are here:
http://ghc.haskell.org/trac/ghc/wiki/Commentary/Compiler/UnusedImports
-}
type ImportDeclUsage
= ( LImportDecl Name -- The import declaration
, [AvailInfo] -- What *is* used (normalised)
, [Name] ) -- What is imported but *not* used
warnUnusedImportDecls :: TcGblEnv -> RnM ()
warnUnusedImportDecls gbl_env
= do { uses <- readMutVar (tcg_used_rdrnames gbl_env)
; let user_imports = filterOut (ideclImplicit . unLoc) (tcg_rn_imports gbl_env)
-- This whole function deals only with *user* imports
-- both for warning about unnecessary ones, and for
-- deciding the minimal ones
rdr_env = tcg_rdr_env gbl_env
; let usage :: [ImportDeclUsage]
usage = findImportUsage user_imports rdr_env (Set.elems uses)
; traceRn (vcat [ ptext (sLit "Uses:") <+> ppr (Set.elems uses)
, ptext (sLit "Import usage") <+> ppr usage])
; whenWOptM Opt_WarnUnusedImports $
mapM_ warnUnusedImport usage
; whenGOptM Opt_D_dump_minimal_imports $
printMinimalImports usage }
{-
Note [The ImportMap]
~~~~~~~~~~~~~~~~~~~~
The ImportMap is a short-lived intermediate data structure that records,
for each import declaration, which of the things brought into scope by
that declaration are actually used in the module.
The SrcLoc is the location of the END of a particular 'import'
declaration. Why *END*? Because we don't want to get confused
by the implicit Prelude import. Consider (Trac #7476) the module
import Foo( foo )
main = print foo
There is an implicit 'import Prelude(print)', and it gets a SrcSpan
of line 1:1 (just the point, not a span). If we use the *START* of
the SrcSpan to identify the import decl, we'll confuse the implicit
import Prelude with the explicit 'import Foo'. So we use the END.
It's just a cheap hack; we could equally well use the Span too.
The AvailInfos are the things imported from that decl (just a list,
not normalised).
-}
type ImportMap = Map SrcLoc [AvailInfo] -- See [The ImportMap]
findImportUsage :: [LImportDecl Name]
-> GlobalRdrEnv
-> [RdrName]
-> [ImportDeclUsage]
findImportUsage imports rdr_env rdrs
= map unused_decl imports
where
import_usage :: ImportMap
import_usage = foldr (extendImportMap rdr_env) Map.empty rdrs
unused_decl decl@(L loc (ImportDecl { ideclHiding = imps }))
= (decl, nubAvails used_avails, nameSetElems unused_imps)
where
used_avails = Map.lookup (srcSpanEnd loc) import_usage `orElse` []
-- srcSpanEnd: see Note [The ImportMap]
used_names = availsToNameSet used_avails
used_parents = mkNameSet [n | AvailTC n _ <- used_avails]
unused_imps -- Not trivial; see eg Trac #7454
= case imps of
Just (False, L _ imp_ies) ->
foldr (add_unused . unLoc) emptyNameSet imp_ies
_other -> emptyNameSet -- No explicit import list => no unused-name list
add_unused :: IE Name -> NameSet -> NameSet
add_unused (IEVar (L _ n)) acc = add_unused_name n acc
add_unused (IEThingAbs (L _ n)) acc = add_unused_name n acc
add_unused (IEThingAll (L _ n)) acc = add_unused_all n acc
add_unused (IEThingWith (L _ p) ns) acc
= add_unused_with p (map unLoc ns) acc
add_unused _ acc = acc
add_unused_name n acc
| n `elemNameSet` used_names = acc
| otherwise = acc `extendNameSet` n
add_unused_all n acc
| n `elemNameSet` used_names = acc
| n `elemNameSet` used_parents = acc
| otherwise = acc `extendNameSet` n
add_unused_with p ns acc
| all (`elemNameSet` acc1) ns = add_unused_name p acc1
| otherwise = acc1
where
acc1 = foldr add_unused_name acc ns
-- If you use 'signum' from Num, then the user may well have
-- imported Num(signum). We don't want to complain that
-- Num is not itself mentioned. Hence the two cases in add_unused_with.
extendImportMap :: GlobalRdrEnv -> RdrName -> ImportMap -> ImportMap
-- For a used RdrName, find all the import decls that brought
-- it into scope; choose one of them (bestImport), and record
-- the RdrName in that import decl's entry in the ImportMap
extendImportMap rdr_env rdr imp_map
| [gre] <- lookupGRE_RdrName rdr rdr_env
, Imported imps <- gre_prov gre
= add_imp gre (bestImport imps) imp_map
| otherwise
= imp_map
where
add_imp :: GlobalRdrElt -> ImportSpec -> ImportMap -> ImportMap
add_imp gre (ImpSpec { is_decl = imp_decl_spec }) imp_map
= Map.insertWith add decl_loc [avail] imp_map
where
add _ avails = avail : avails -- add is really just a specialised (++)
decl_loc = srcSpanEnd (is_dloc imp_decl_spec)
-- For srcSpanEnd see Note [The ImportMap]
avail = greExportAvail gre
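-- Prefer an import spec that brought the whole module into scope (ImpAll);
-- otherwise fall back to the textually first explicit import spec.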
bestImport :: [ImportSpec] -> ImportSpec
bestImport iss
= case partition isImpAll iss of
([], imp_somes) -> textuallyFirst imp_somes
(imp_alls, _) -> textuallyFirst imp_alls
textuallyFirst :: [ImportSpec] -> ImportSpec
textuallyFirst iss = case sortWith (is_dloc . is_decl) iss of
[] -> pprPanic "textuallyFirst" (ppr iss)
(is:_) -> is
isImpAll :: ImportSpec -> Bool
isImpAll (ImpSpec { is_item = ImpAll }) = True
isImpAll _other = False
warnUnusedImport :: ImportDeclUsage -> RnM ()
warnUnusedImport (L loc decl, used, unused)
| Just (False,L _ []) <- ideclHiding decl
= return () -- Do not warn for 'import M()'
| Just (True, L _ hides) <- ideclHiding decl
, not (null hides)
, pRELUDE_NAME == unLoc (ideclName decl)
= return () -- Note [Do not warn about Prelude hiding]
| null used = addWarnAt loc msg1 -- Nothing used; drop entire decl
| null unused = return () -- Everything imported is used; nop
| otherwise = addWarnAt loc msg2 -- Some imports are unused
where
msg1 = vcat [pp_herald <+> quotes pp_mod <+> pp_not_used,
nest 2 (ptext (sLit "except perhaps to import instances from")
<+> quotes pp_mod),
ptext (sLit "To import instances alone, use:")
<+> ptext (sLit "import") <+> pp_mod <> parens Outputable.empty ]
msg2 = sep [pp_herald <+> quotes (pprWithCommas ppr unused),
text "from module" <+> quotes pp_mod <+> pp_not_used]
pp_herald = text "The" <+> pp_qual <+> text "import of"
pp_qual
| ideclQualified decl = text "qualified"
| otherwise = Outputable.empty
pp_mod = ppr (unLoc (ideclName decl))
pp_not_used = text "is redundant"
{-
Note [Do not warn about Prelude hiding]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
We do not warn about
import Prelude hiding( x, y )
because even if nothing else from Prelude is used, it may be essential to hide
x,y to avoid name-shadowing warnings. Example (Trac #9061)
import Prelude hiding( log )
f x = log where log = ()
Note [Printing minimal imports]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
To print the minimal imports we walk over the user-supplied import
decls, and simply trim their import lists. NB that
* We do *not* change the 'qualified' or 'as' parts!
 * We do not discard a decl altogether; we might need instances
from it. Instead we just trim to an empty import list
-}
printMinimalImports :: [ImportDeclUsage] -> RnM ()
-- See Note [Printing minimal imports]
printMinimalImports imports_w_usage
= do { imports' <- mapM mk_minimal imports_w_usage
; this_mod <- getModule
; dflags <- getDynFlags
; liftIO $
do { h <- openFile (mkFilename dflags this_mod) WriteMode
; printForUser dflags h neverQualify (vcat (map ppr imports')) }
-- The neverQualify is important. We are printing Names
-- but they are in the context of an 'import' decl, and
-- we never qualify things inside there
-- E.g. import Blag( f, b )
-- not import Blag( Blag.f, Blag.g )!
}
where
mkFilename dflags this_mod
| Just d <- dumpDir dflags = d </> basefn
| otherwise = basefn
where
basefn = moduleNameString (moduleName this_mod) ++ ".imports"
mk_minimal (L l decl, used, unused)
| null unused
, Just (False, _) <- ideclHiding decl
= return (L l decl)
| otherwise
= do { let ImportDecl { ideclName = L _ mod_name
, ideclSource = is_boot
, ideclPkgQual = mb_pkg } = decl
; ifaces <- loadSrcInterface doc mod_name is_boot mb_pkg
; let lies = map (L l) (concatMap (to_ie ifaces) used)
; return (L l (decl { ideclHiding = Just (False, L l lies) })) }
where
doc = text "Compute minimal imports for" <+> ppr decl
to_ie :: [ModIface] -> AvailInfo -> [IE Name]
-- The main trick here is that if we're importing all the constructors
-- we want to say "T(..)", but if we're importing only a subset we want
-- to say "T(A,B,C)". So we have to find out what the module exports.
to_ie _ (Avail n)
= [IEVar (noLoc n)]
to_ie _ (AvailTC n [m])
| n==m = [IEThingAbs (noLoc n)]
to_ie ifaces (AvailTC n ns)
= case [xs | iface <- ifaces
, AvailTC x xs <- mi_exports iface
, x == n
, x `elem` xs -- Note [Partial export]
] of
[xs] | all_used xs -> [IEThingAll (noLoc n)]
| otherwise -> [IEThingWith (noLoc n)
(map noLoc (filter (/= n) ns))]
_other -> map (IEVar . noLoc) ns
where
all_used avail_occs = all (`elem` ns) avail_occs
{-
Note [Partial export]
~~~~~~~~~~~~~~~~~~~~~
Suppose we have
module A( op ) where
class C a where
op :: a -> a
module B where
import A
f = ..op...
Then the minimal import for module B is
import A( op )
not
import A( C( op ) )
which we would usually generate if C was exported from A. Hence
the (x `elem` xs) test when deciding what to generate.
************************************************************************
* *
\subsection{Errors}
* *
************************************************************************
-}
qualImportItemErr :: RdrName -> SDoc
qualImportItemErr rdr
= hang (ptext (sLit "Illegal qualified name in import item:"))
2 (ppr rdr)
badImportItemErrStd :: IsBootInterface -> ImpDeclSpec -> IE RdrName -> SDoc
badImportItemErrStd is_boot decl_spec ie
= sep [ptext (sLit "Module"), quotes (ppr (is_mod decl_spec)), source_import,
ptext (sLit "does not export"), quotes (ppr ie)]
where
source_import | is_boot = ptext (sLit "(hi-boot interface)")
| otherwise = Outputable.empty
badImportItemErrDataCon :: OccName
-> IsBootInterface
-> ImpDeclSpec
-> IE RdrName
-> SDoc
badImportItemErrDataCon dataType is_boot decl_spec ie
= vcat [ ptext (sLit "In module")
<+> quotes (ppr (is_mod decl_spec))
<+> source_import <> colon
, nest 2 $ quotes datacon
<+> ptext (sLit "is a data constructor of")
<+> quotes (ppr dataType)
, ptext (sLit "To import it use")
, nest 2 $ quotes (ptext (sLit "import"))
<+> ppr (is_mod decl_spec)
<> parens_sp (ppr dataType <> parens_sp datacon)
, ptext (sLit "or")
, nest 2 $ quotes (ptext (sLit "import"))
<+> ppr (is_mod decl_spec)
<> parens_sp (ppr dataType <> ptext (sLit "(..)"))
]
where
datacon_occ = rdrNameOcc $ ieName ie
datacon = parenSymOcc datacon_occ (ppr datacon_occ)
source_import | is_boot = ptext (sLit "(hi-boot interface)")
| otherwise = Outputable.empty
parens_sp d = parens (space <> d <> space) -- T( f,g )
badImportItemErr :: IsBootInterface
-> ImpDeclSpec
-> IE RdrName
-> [AvailInfo]
-> SDoc
badImportItemErr is_boot decl_spec ie avails
= case find checkIfDataCon avails of
Just con -> badImportItemErrDataCon (availOccName con) is_boot decl_spec ie
Nothing -> badImportItemErrStd is_boot decl_spec ie
where
checkIfDataCon (AvailTC _ ns) =
case find (\n -> importedFS == nameOccNameFS n) ns of
Just n -> isDataConName n
Nothing -> False
checkIfDataCon _ = False
availOccName = nameOccName . availName
nameOccNameFS = occNameFS . nameOccName
importedFS = occNameFS . rdrNameOcc $ ieName ie
illegalImportItemErr :: SDoc
illegalImportItemErr = ptext (sLit "Illegal import item")
dodgyImportWarn :: RdrName -> SDoc
dodgyImportWarn item = dodgyMsg (ptext (sLit "import")) item
dodgyExportWarn :: Name -> SDoc
dodgyExportWarn item = dodgyMsg (ptext (sLit "export")) item
dodgyMsg :: (OutputableBndr n, HasOccName n) => SDoc -> n -> SDoc
dodgyMsg kind tc
= sep [ ptext (sLit "The") <+> kind <+> ptext (sLit "item")
<+> quotes (ppr (IEThingAll (noLoc tc)))
<+> ptext (sLit "suggests that"),
quotes (ppr tc) <+> ptext (sLit "has (in-scope) constructors or class methods,"),
ptext (sLit "but it has none") ]
exportItemErr :: IE RdrName -> SDoc
exportItemErr export_item
= sep [ ptext (sLit "The export item") <+> quotes (ppr export_item),
ptext (sLit "attempts to export constructors or class methods that are not visible here") ]
exportClashErr :: GlobalRdrEnv -> Name -> Name -> IE RdrName -> IE RdrName
-> MsgDoc
exportClashErr global_env name1 name2 ie1 ie2
= vcat [ ptext (sLit "Conflicting exports for") <+> quotes (ppr occ) <> colon
, ppr_export ie1' name1'
, ppr_export ie2' name2' ]
where
occ = nameOccName name1
ppr_export ie name = nest 3 (hang (quotes (ppr ie) <+> ptext (sLit "exports") <+>
quotes (ppr name))
2 (pprNameProvenance (get_gre name)))
-- get_gre finds a GRE for the Name, so that we can show its provenance
get_gre name
= case lookupGRE_Name global_env name of
(gre:_) -> gre
[] -> pprPanic "exportClashErr" (ppr name)
get_loc name = greSrcSpan (get_gre name)
(name1', ie1', name2', ie2') = if get_loc name1 < get_loc name2
then (name1, ie1, name2, ie2)
else (name2, ie2, name1, ie1)
-- the SrcSpan that pprNameProvenance prints out depends on whether
-- the Name is defined locally or not: for a local definition the
-- definition site is used, otherwise the location of the import
-- declaration. We want to sort the export locations in
-- exportClashErr by this SrcSpan, we need to extract it:
greSrcSpan :: GlobalRdrElt -> SrcSpan
greSrcSpan gre
| Imported (is:_) <- gre_prov gre = is_dloc (is_decl is)
| otherwise = name_span
where
name_span = nameSrcSpan (gre_name gre)
addDupDeclErr :: [Name] -> TcRn ()
addDupDeclErr []
= panic "addDupDeclErr: empty list"
addDupDeclErr names@(name : _)
= addErrAt (getSrcSpan (last sorted_names)) $
-- Report the error at the later location
vcat [ptext (sLit "Multiple declarations of") <+>
quotes (ppr (nameOccName name)),
-- NB. print the OccName, not the Name, because the
-- latter might not be in scope in the RdrEnv and so will
-- be printed qualified.
ptext (sLit "Declared at:") <+>
vcat (map (ppr . nameSrcLoc) sorted_names)]
where
sorted_names = sortWith nameSrcLoc names
dupExportWarn :: OccName -> IE RdrName -> IE RdrName -> SDoc
dupExportWarn occ_name ie1 ie2
= hsep [quotes (ppr occ_name),
ptext (sLit "is exported by"), quotes (ppr ie1),
ptext (sLit "and"), quotes (ppr ie2)]
dupModuleExport :: ModuleName -> SDoc
dupModuleExport mod
= hsep [ptext (sLit "Duplicate"),
quotes (ptext (sLit "Module") <+> ppr mod),
ptext (sLit "in export list")]
moduleNotImported :: ModuleName -> SDoc
moduleNotImported mod
= ptext (sLit "The export item `module") <+> ppr mod <>
ptext (sLit "' is not imported")
nullModuleExport :: ModuleName -> SDoc
nullModuleExport mod
= ptext (sLit "The export item `module") <+> ppr mod <> ptext (sLit "' exports nothing")
missingImportListWarn :: ModuleName -> SDoc
missingImportListWarn mod
= ptext (sLit "The module") <+> quotes (ppr mod) <+> ptext (sLit "does not have an explicit import list")
missingImportListItem :: IE RdrName -> SDoc
missingImportListItem ie
= ptext (sLit "The import item") <+> quotes (ppr ie) <+> ptext (sLit "does not have an explicit import list")
moduleWarn :: ModuleName -> WarningTxt -> SDoc
moduleWarn mod (WarningTxt _ txt)
= sep [ ptext (sLit "Module") <+> quotes (ppr mod) <> ptext (sLit ":"),
nest 2 (vcat (map ppr txt)) ]
moduleWarn mod (DeprecatedTxt _ txt)
= sep [ ptext (sLit "Module") <+> quotes (ppr mod)
<+> ptext (sLit "is deprecated:"),
nest 2 (vcat (map ppr txt)) ]
packageImportErr :: SDoc
packageImportErr
= ptext (sLit "Package-qualified imports are not enabled; use PackageImports")
-- This data decl will parse OK
-- data T = a Int
-- treating "a" as the constructor.
-- It is really hard to make the parser spot this malformation.
-- So the renamer has to check that the constructor is legal
--
-- We can get an operator as the constructor, even in the prefix form:
-- data T = :% Int Int
-- from interface files, which always print in prefix form
checkConName :: RdrName -> TcRn ()
checkConName name = checkErr (isRdrDataCon name) (badDataCon name)
badDataCon :: RdrName -> SDoc
badDataCon name
= hsep [ptext (sLit "Illegal data constructor name"), quotes (ppr name)]
| forked-upstream-packages-for-ghcjs/ghc | compiler/rename/RnNames.hs | bsd-3-clause | 77,667 | 3 | 26 | 24,043 | 14,142 | 7,280 | 6,862 | -1 | -1 |
{-# LANGUAGE ForeignFunctionInterface #-}
module Layout.Foreign where
import Foreign.C.Types
import GHC.Exts
foreign import ccall "wxStyledTextCtrl_ShowLines" wxStyledTextCtrl_ShowLines :: Ptr (Int) -> CInt -> CInt -> IO ()
foreign export ccall "wxStyledTextCtrl_ShowLines" wxStyledTextCtrl_ShowLines :: Ptr (Int) -> CInt -> CInt -> IO ()
foo = 0
| RefactoringTools/HaRe | test/testdata/Layout/Foreign.hs | bsd-3-clause | 355 | 0 | 10 | 52 | 93 | 51 | 42 | 7 | 1 |
module Test13 where
f = [(let x = 45 in [x])]
| mpickering/HaRe | old/testing/refacSlicing/Test13.hs | bsd-3-clause | 47 | 0 | 10 | 12 | 29 | 17 | 12 | 2 | 1 |
{-# LANGUAGE EmptyDataDecls, TypeFamilies, UndecidableInstances,
ScopedTypeVariables, TypeOperators,
FlexibleInstances, NoMonomorphismRestriction,
MultiParamTypeClasses, FlexibleContexts #-}
module IndTypesPerfMerge where
data a :* b = a :* b
infixr 6 :*
data TRUE
data FALSE
data Zero
data Succ a
type family Equals m n
type instance Equals Zero Zero = TRUE
type instance Equals (Succ a) Zero = FALSE
type instance Equals Zero (Succ a) = FALSE
type instance Equals (Succ a) (Succ b) = Equals a b
type family LessThan m n
type instance LessThan Zero Zero = FALSE
type instance LessThan (Succ n) Zero = FALSE
type instance LessThan Zero (Succ n) = TRUE
type instance LessThan (Succ m) (Succ n) = LessThan m n
newtype Tagged n a = Tagged a deriving (Show,Eq)
type family Cond p a b
type instance Cond TRUE a b = a
type instance Cond FALSE a b = b
class Merger a where
type Merged a
type UnmergedLeft a
type UnmergedRight a
mkMerge :: a -> UnmergedLeft a -> UnmergedRight a -> Merged a
class Mergeable a b where
type MergerType a b
merger :: a -> b -> MergerType a b
{-
merge ::
forall a b.
(Merger (MergerType a b), Mergeable a b,
UnmergedLeft (MergerType a b) ~ a,
UnmergedRight (MergerType a b) ~ b) =>
a -> b -> Merged (MergerType a b)
-}
merge x y = mkMerge (merger x y) x y
data TakeRight a
data TakeLeft a
data DiscardRightHead a b c d
data LeftHeadFirst a b c d
data RightHeadFirst a b c d
data EndMerge
instance Mergeable () () where
type MergerType () () = EndMerge
merger = undefined
instance Mergeable () (a :* b) where
type MergerType () (a :* b) = TakeRight (a :* b)
merger = undefined
instance Mergeable (a :* b) () where
type MergerType (a :* b) () = TakeLeft (a :* b)
merger = undefined
instance Mergeable (Tagged m a :* t1) (Tagged n b :* t2) where
type MergerType (Tagged m a :* t1) (Tagged n b :* t2) =
Cond (Equals m n) (DiscardRightHead (Tagged m a) t1 (Tagged n b) t2)
(Cond (LessThan m n) (LeftHeadFirst (Tagged m a) t1 (Tagged n b) t2)
(RightHeadFirst (Tagged m a ) t1 (Tagged n b) t2))
merger = undefined
instance Merger EndMerge where
type Merged EndMerge = ()
type UnmergedLeft EndMerge = ()
type UnmergedRight EndMerge = ()
mkMerge _ () () = ()
instance Merger (TakeRight a) where
type Merged (TakeRight a) = a
type UnmergedLeft (TakeRight a) = ()
type UnmergedRight (TakeRight a) = a
mkMerge _ () a = a
instance Merger (TakeLeft a) where
type Merged (TakeLeft a) = a
type UnmergedLeft (TakeLeft a) = a
type UnmergedRight (TakeLeft a) = ()
mkMerge _ a () = a
instance
(Mergeable t1 t2,
Merger (MergerType t1 t2),
t1 ~ UnmergedLeft (MergerType t1 t2),
t2 ~ UnmergedRight (MergerType t1 t2)) =>
Merger (DiscardRightHead h1 t1 h2 t2) where
type Merged (DiscardRightHead h1 t1 h2 t2) = h1 :* Merged (MergerType t1 t2)
type UnmergedLeft (DiscardRightHead h1 t1 h2 t2) = h1 :* t1
type UnmergedRight (DiscardRightHead h1 t1 h2 t2) = h2 :* t2
mkMerge _ (h1 :* t1) (h2 :* t2) = h1 :* mkMerge (merger t1 t2) t1 t2
instance
(Mergeable t1 (h2 :* t2),
Merger (MergerType t1 (h2 :* t2)),
t1 ~ UnmergedLeft (MergerType t1 (h2 :* t2)),
(h2 :* t2) ~ UnmergedRight (MergerType t1 (h2 :* t2))) =>
Merger (LeftHeadFirst h1 t1 h2 t2) where
type Merged (LeftHeadFirst h1 t1 h2 t2) = h1 :* Merged (MergerType t1 (h2 :* t2))
type UnmergedLeft (LeftHeadFirst h1 t1 h2 t2) = h1 :* t1
type UnmergedRight (LeftHeadFirst h1 t1 h2 t2) = h2 :* t2
mkMerge _ (h1 :* t1) (h2 :* t2) = h1 :* mkMerge (merger t1 (h2 :* t2)) t1 (h2 :* t2)
instance
(Mergeable (h1 :* t1) t2,
Merger (MergerType (h1 :* t1) t2),
(h1 :* t1) ~ UnmergedLeft (MergerType (h1 :* t1) t2),
t2 ~ UnmergedRight (MergerType (h1 :* t1) t2)) =>
Merger (RightHeadFirst h1 t1 h2 t2) where
type Merged (RightHeadFirst h1 t1 h2 t2) = h2 :* Merged (MergerType (h1 :* t1) t2)
type UnmergedLeft (RightHeadFirst h1 t1 h2 t2) = h1 :* t1
type UnmergedRight (RightHeadFirst h1 t1 h2 t2) = h2 :* t2
mkMerge _ (h1 :* t1) (h2 :* t2) = h2 :* mkMerge (merger (h1 :* t1) t2) (h1 :* t1) t2
| urbanslug/ghc | testsuite/tests/indexed-types/should_compile/IndTypesPerfMerge.hs | bsd-3-clause | 4,264 | 0 | 12 | 1,073 | 1,696 | 904 | 792 | -1 | -1 |
module Bug1 where
-- | We should have different anchors for constructors and types\/classes. This
-- hyperlink should point to the type constructor by default: 'T'.
data T = T
| DavidAlphaFox/ghc | utils/haddock/html-test/src/Bug1.hs | bsd-3-clause | 179 | 0 | 5 | 33 | 13 | 9 | 4 | 2 | 0 |
{-# LANGUAGE DataKinds #-}
-- This bug related to type trimming, and
-- hence showed up only with -O0
module Bug() where
data UnaryTypeC a = UnaryDataC a
type Bug = 'UnaryDataC
| ezyang/ghc | testsuite/tests/polykinds/T5912.hs | bsd-3-clause | 183 | 0 | 6 | 38 | 28 | 19 | 9 | 4 | 0 |
data Vec3 = Vec3 !Double !Double !Double
deriving (Show)
infixl 6 ^+^, ^-^
infixr 7 *^, <.>
negateV :: Vec3 -> Vec3
negateV (Vec3 x y z) = Vec3 (-x) (-y) (-z)
(^+^), (^-^) :: Vec3 -> Vec3 -> Vec3
Vec3 x1 y1 z1 ^+^ Vec3 x2 y2 z2 = Vec3 (x1 + x2) (y1 + y2) (z1 + z2)
v ^-^ v' = v ^+^ negateV v'
(*^) :: Double -> Vec3 -> Vec3
s *^ Vec3 x y z = Vec3 (s * x) (s * y) (s * z)
(<.>) :: Vec3 -> Vec3 -> Double
Vec3 x1 y1 z1 <.> Vec3 x2 y2 z2 = x1 * x2 + y1 * y2 + z1 * z2
magnitudeSq :: Vec3 -> Double
magnitudeSq v = v <.> v
normalized :: Vec3 -> Vec3
normalized v = (1 / sqrt (magnitudeSq v)) *^ v
class Surface s where
intersectSurfaceWithRay :: s -> Vec3 -> Vec3 -> Maybe Vec3
data Sphere = Sphere Vec3 Double
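-- Ray/sphere intersection: with a unit direction d and c' = c - o, solving
-- |o + t*d - c|^2 = r^2 gives t = b +/- sqrt det, where b = c' <.> d and
-- det = b^2 - magnitudeSq c' + r^2. The nearer root beyond a small epsilon
-- is used, and the result is the unit surface normal at the hit point.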
instance Surface Sphere where
intersectSurfaceWithRay (Sphere c r) o d =
let c' = c ^-^ o
b = c' <.> d
det = b^2 - magnitudeSq c' + r^2
det' = sqrt det
t1 = b - det'
t2 = b + det'
returnIntersection t =
let x = o ^+^ t *^ d
in Just (normalized (x ^-^ c))
in if det < 0 then Nothing
else if t1 > 1e-6 then returnIntersection t1
else if t2 > 1e-6 then returnIntersection t2
else Nothing
iappend :: Maybe Vec3 -> Maybe Vec3 -> Maybe Vec3
Nothing `iappend` i2 = i2
i1 `iappend` _ = i1
main :: IO ()
main = print $ foldl combine Nothing [Sphere (Vec3 0 0 0) 1]
where combine accum surf = accum `iappend`
intersectSurfaceWithRay surf (Vec3 0 0 5) (Vec3 0 0 (-1))
| olsner/ghc | testsuite/tests/numeric/should_run/T9407.hs | bsd-3-clause | 1,556 | 0 | 16 | 498 | 727 | 374 | 353 | 48 | 1 |
module FilenameDescr where
import Data.Char
import Data.Either
import Data.List
import BuildInfo
import Utils
import Tar
-- We can't just compare plain filenames, because versions numbers of GHC
-- and the libraries will vary. So we use FilenameDescr instead, which
-- abstracts out the version numbers.
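-- For example (illustrative only), a path like "ghc-<version>/lib" could be
-- described as [FP "ghc-", VersionOf "ghc", FP "/lib"].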
type FilenameDescr = [FilenameDescrBit]
data FilenameDescrBit = VersionOf String
| HashOf String
| FP String
| Ways
deriving (Show, Eq, Ord)
normaliseDescr :: FilenameDescr -> FilenameDescr
normaliseDescr [] = []
normaliseDescr [x] = [x]
normaliseDescr (FP x1 : FP x2 : xs) = normaliseDescr (FP (x1 ++ x2) : xs)
normaliseDescr (x : xs) = x : normaliseDescr xs
-- Sanity check that the FilenameDescr matches the filename in the tar line
checkContent :: BuildInfo -> (FilenameDescr, TarLine) -> Errors
checkContent buildInfo (fd, tl)
= let fn = tlFileName tl
in case flattenFilenameDescr buildInfo fd of
Right fn' ->
if fn' == fn
then []
else if all isAscii fn
then ["checkContent: Can't happen: filename mismatch: "
++ show fn]
else [] -- Ugly kludge; don't worry too much if filepaths
-- containing non-ASCII chars have gone wrong
Left errs ->
errs
flattenFilenameDescr :: BuildInfo -> FilenameDescr
-> Either Errors FilePath
flattenFilenameDescr buildInfo fd = case partitionEithers (map f fd) of
([], strs) -> Right (concat strs)
(errs, _) -> Left (concat errs)
where f (FP fp) = Right fp
f (VersionOf thing)
= case lookup thing (biThingVersionMap buildInfo) of
Just v -> Right v
Nothing -> Left ["Can't happen: thing has no version in mapping"]
f (HashOf thing)
= case lookup thing (biThingHashMap buildInfo) of
Just v -> Right v
Nothing -> Left ["Can't happen: thing has no hash in mapping"]
f Ways = case biMaybeWays buildInfo of
Just ways -> Right $ intercalate "-" ways
Nothing -> Left ["Can't happen: No ways, but Ways is used"]
| urbanslug/ghc | distrib/compare/FilenameDescr.hs | bsd-3-clause | 2,287 | 0 | 14 | 749 | 554 | 286 | 268 | 48 | 8 |
{-# LANGUAGE ScopedTypeVariables #-}
import Control.Exception
import Control.Monad
import GHC.Conc
main = join $ atomically $ do
catchSTM
(throwSTM ThreadKilled `orElse` return (putStrLn "wtf"))
(\(e::SomeException) -> return (putStrLn "ok"))
| urbanslug/ghc | testsuite/tests/rts/T8035.hs | bsd-3-clause | 255 | 0 | 13 | 41 | 83 | 44 | 39 | 8 | 1 |
module Foundation where
import Prelude
import Yesod
import Yesod.Static
import Yesod.Auth
import Yesod.Auth.BrowserId
import Yesod.Auth.GoogleEmail
import Yesod.Default.Config
import Yesod.Default.Util (addStaticContentExternal)
import Network.HTTP.Conduit (Manager)
import qualified Settings
import Settings.Development (development)
import qualified Database.Persist
import Database.Persist.Sql (SqlPersistT)
import Settings.StaticFiles
import Settings (widgetFile, Extra (..))
import Model
import Text.Jasmine (minifym)
import Text.Hamlet (hamletFile)
import Yesod.Core.Types (Logger)
import Game.Lexicon
import Util.Message
-- | The site argument for your application. This can be a good place to
-- keep settings and values requiring initialization before your application
-- starts running, such as database connections. Every handler will have
-- access to the data present here.
data App = App
{ settings :: AppConfig DefaultEnv Extra
, getStatic :: Static -- ^ Settings for static file serving.
, connPool :: Database.Persist.PersistConfigPool Settings.PersistConf -- ^ Database connection pool.
, httpManager :: Manager
, persistConfig :: Settings.PersistConf
, appLogger :: Logger
, appLexicon :: Lexicon
}
-- Set up i18n messages. See the message folder.
mkMessage "App" "messages" "en"
-- This is where we define all of the routes in our application. For a full
-- explanation of the syntax, please see:
-- http://www.yesodweb.com/book/routing-and-handlers
--
-- Note that this is really half the story; in Application.hs, mkYesodDispatch
-- generates the rest of the code. Please see the linked documentation for an
-- explanation for this split.
mkYesodData "App" $(parseRoutesFile "config/routes")
type Form x = Html -> MForm (HandlerT App IO) (FormResult x, Widget)
-- Please see the documentation for the Yesod typeclass. There are a number
-- of settings which can be configured by overriding methods here.
instance Yesod App where
approot = ApprootMaster $ appRoot . settings
-- Store session data on the client in encrypted cookies,
-- default session idle timeout is 120 minutes
makeSessionBackend _ = fmap Just $ defaultClientSessionBackend
(120 * 60) -- 120 minutes
"config/client_session_key.aes"
defaultLayout widget = do
master <- getYesod
mmsg <- getMessage
mEntity <- maybeAuth
let mUsername = fmap (userIdent . entityVal) mEntity
-- We break up the default layout into two components:
-- default-layout is the contents of the body tag, and
-- default-layout-wrapper is the entire page. Since the final
-- value passed to hamletToRepHtml cannot be a widget, this allows
-- you to use normal widget features in default-layout.
pc <- widgetToPageContent $ do
addScriptRemote "//code.jquery.com/jquery-1.10.2.min.js"
$(combineStylesheets 'StaticR
[ css_normalize_css
])
addStylesheetRemote "//netdna.bootstrapcdn.com/bootstrap/3.0.3/css/bootstrap.min.css"
addStylesheetRemote "//netdna.bootstrapcdn.com/bootstrap/3.0.3/css/bootstrap-theme.min.css"
addScriptRemote "//netdna.bootstrapcdn.com/bootstrap/3.0.3/js/bootstrap.min.js"
$(widgetFile "default-layout")
giveUrlRenderer $(hamletFile "templates/default-layout-wrapper.hamlet")
-- This is done to provide an optimization for serving static files from
-- a separate domain. Please see the staticRoot setting in Settings.hs
urlRenderOverride y (StaticR s) =
Just $ uncurry (joinPath y (Settings.staticRoot $ settings y)) $ renderRoute s
urlRenderOverride _ _ = Nothing
-- The page to be redirected to when authentication is required.
authRoute _ = Just $ AuthR LoginR
-- This function creates static content files in the static folder
-- and names them based on a hash of their content. This allows
-- expiration dates to be set far in the future without worry of
-- users receiving stale content.
addStaticContent =
addStaticContentExternal minifym genFileName Settings.staticDir (StaticR . flip StaticRoute [])
where
-- Generate a unique filename based on the content itself
genFileName lbs
| development = "autogen-" ++ base64md5 lbs
| otherwise = base64md5 lbs
-- Place Javascript at bottom of the body tag so the rest of the page loads first
jsLoader _ = BottomOfBody
-- What messages should be logged. The following includes all messages when
-- in development, and warnings and errors in production.
shouldLog _ _source level =
development || level == LevelWarn || level == LevelError
makeLogger = return . appLogger
-- How to run database actions.
instance YesodPersist App where
type YesodPersistBackend App = SqlPersistT
runDB = defaultRunDB persistConfig connPool
instance YesodPersistRunner App where
getDBRunner = defaultGetDBRunner connPool
instance YesodAuth App where
type AuthId App = UserId
-- Where to send a user after successful login
loginDest _ = HomeR
-- Where to send a user after logout
logoutDest _ = HomeR
getAuthId creds = runDB $ do
x <- getBy $ UniqueUser $ credsIdent creds
case x of
Just (Entity uid _) -> return $ Just uid
Nothing -> do
fmap Just $ insert $ User (credsIdent creds)
-- You can add other plugins like BrowserID, email or OAuth here
authPlugins _ = [authBrowserId def, authGoogleEmail]
authHttpManager = httpManager
onLogin = setMessageSuccess "Successfully logged in."
onLogout = setMessageSuccess "Successfully logged out."
-- This instance is required to use forms. You can modify renderMessage to
-- achieve customized and internationalized form validation messages.
instance RenderMessage App FormMessage where
renderMessage _ _ = defaultFormMessage
-- | Get the 'Extra' value, used to hold data from the settings.yml file.
getExtra :: Handler Extra
getExtra = fmap (appExtra . settings) getYesod
getLexicon :: Handler Lexicon
getLexicon = fmap appLexicon getYesod
-- Note: previous versions of the scaffolding included a deliver function to
-- send emails. Unfortunately, there are too many different options for us to
-- give a reasonable default. Instead, the information is available on the
-- wiki:
--
-- https://github.com/yesodweb/yesod/wiki/Sending-email
| dphilipson/word_guesser_web | Foundation.hs | mit | 6,537 | 0 | 17 | 1,381 | 951 | 513 | 438 | -1 | -1 |
module Database.Posts (
createPost,
getPostsSince,
getPost,
postChildren,
) where
import BasePrelude
import Control.Concurrent.Chan
import Data.Text (Text)
import Data.Time.Clock (getCurrentTime, UTCTime)
import Database.Internal
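-- toPost assumes the column order produced by postQuery below:
-- posts.(id, id_user, content, id_parent, at), the child count, then
-- users.(id, name, email); the duplicated users.id column is ignored.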
toPost :: [SqlValue] -> ResolvedPost
toPost [idPost, idUser, content, idParent, at, count, _, name, email] =
( Post { postID = fromSql idPost
, postContent = fromSql content
, postAt = fromSql at
, postUserID = fromSql idUser
, postParentID = fromSql idParent
, postCount = fromSql count
}
, User { userID = fromSql idUser
, userName = fromSql name
, userEmail = fromSql email
}
)
postQuery :: Connection -> String -> [SqlValue] -> IO [ResolvedPost]
postQuery conn whereClause args = fmap toPost <$> quickQuery' conn query args
where query = unlines [ "select posts.*, count(children.id), users.* from posts"
, "left outer join posts as children"
, " on children.id_parent = posts.id"
, "inner join users on users.id = posts.id_user"
, whereClause
, "group by posts.id"
, "order by posts.id desc"
]
getPost :: Database -> ID -> IO (Maybe ResolvedPost)
getPost Database{dbConn} idPost = listToMaybe <$>
postQuery dbConn "where posts.id = ?" [toSql idPost]
postChildren :: Database -> ID -> IO [ResolvedPost]
postChildren Database{dbConn} idPost =
postQuery dbConn "where posts.id_parent = ?" [toSql idPost]
getPostsSince :: Database -> Maybe ID -> IO [ResolvedPost]
getPostsSince Database{dbConn} Nothing = postQuery dbConn "" []
getPostsSince Database{dbConn} (Just idPost) =
postQuery dbConn "where posts.id > ?" [toSql idPost]
insertPost :: Database -> User -> Text -> Maybe ID -> UTCTime -> IO (Maybe ResolvedPost)
insertPost db@(Database{dbConn, dbPostChan}) User{userID = idUser} content idParent at =
insertRow dbConn query args >>= maybe (return Nothing) report
where
report idPost = do
post <- fromJust <$> getPost db idPost
writeChan dbPostChan post
return (Just post)
query = unlines [ "insert into posts"
, "(id_user, content, id_parent, at)"
, "values (?, ?, ?, ?)"
]
args = [toSql idUser, toSql content, toSql idParent, toSql at]
createPost :: Database -> User -> Text -> Maybe ID -> IO (Maybe ResolvedPost)
createPost db user content parentID =
insertPost db user content parentID =<< getCurrentTime
| hlian/basilica | Database/Posts.hs | mit | 2,620 | 0 | 12 | 714 | 716 | 378 | 338 | -1 | -1 |
module Antiqua.Graphics.Window(
Window,
WindowSettings(..),
createWindow,
useWindow,
getKey,
getScroll,
resetScroll
) where
import qualified Graphics.UI.GLFW as GLFW
import Data.IORef
import System.IO.Unsafe
import Control.DeepSeq
import Antiqua.Common
import Graphics.Rendering.OpenGL.Raw
import Data.Bits ( (.|.) )
import System.Exit ( exitWith, ExitCode(..) )
initGL :: GLFW.Window -> IO ()
initGL win = do
glEnable gl_TEXTURE_2D
glShadeModel gl_SMOOTH
glClearColor 0 0 0 0
glClearDepth 1
glEnable gl_DEPTH_TEST
glDepthFunc gl_LEQUAL
glHint gl_PERSPECTIVE_CORRECTION_HINT gl_NICEST
(w,h) <- GLFW.getFramebufferSize win
glEnable gl_BLEND
glBlendFunc gl_SRC_ALPHA gl_ONE_MINUS_SRC_ALPHA
resizeScene win w h
resizeScene :: GLFW.WindowSizeCallback
resizeScene win w 0 = resizeScene win w 1
resizeScene _ w h = do
glViewport 0 0 (fromIntegral w) (fromIntegral h)
glMatrixMode gl_PROJECTION
glLoadIdentity
glOrtho 0 (fromIntegral w) (fromIntegral h) 0 0.1 100
glMatrixMode gl_MODELVIEW
glLoadIdentity
glFlush
drawScene :: GLuint -> IO () -> IO ()
drawScene tex render = do
glClear $ fromIntegral $ gl_COLOR_BUFFER_BIT
.|. gl_DEPTH_BUFFER_BIT
glLoadIdentity
glTranslatef 0 0 (-5)
glBindTexture gl_TEXTURE_2D tex
render
glFlush
shutdown :: GLFW.WindowCloseCallback
shutdown win = do
GLFW.destroyWindow win
GLFW.terminate
exitWith ExitSuccess
return ()
keyPressed :: GLFW.KeyCallback
keyPressed win GLFW.Key'Escape _ GLFW.KeyState'Pressed _ = shutdown win
keyPressed _ _ _ _ _ = return ()
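-- Module-level mutable scroll state. NOINLINE is needed so the
-- unsafePerformIO'd IORef is created exactly once rather than being
-- inlined into several independent references.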
{-# NOINLINE scrollRef #-}
scrollRef :: IORef Int
scrollRef = unsafePerformIO $ newIORef 0
resetScroll :: IO ()
resetScroll = do
atomicModifyIORef scrollRef (\_ -> (0,()))
getScroll :: IO Int
getScroll = do
state <- readIORef scrollRef
return state
scroll :: GLFW.ScrollCallback
scroll _ _ dy = do
let y = (floor . signum) dy
a <- atomicModifyIORef scrollRef (\_ -> (y,()))
a `deepseq` return ()
windowClosed :: GLFW.WindowCloseCallback
windowClosed win = shutdown win
class WindowSettings where
width :: Int
height :: Int
title :: String
createWindow :: WindowSettings => IO Window
createWindow = do
True <- GLFW.init
GLFW.defaultWindowHints
Just win <- GLFW.createWindow width height title Nothing Nothing
GLFW.makeContextCurrent (Just win)
GLFW.setScrollCallback win (Just scroll)
initGL win
GLFW.setWindowCloseCallback win (Just windowClosed)
GLFW.setFramebufferSizeCallback win (Just resizeScene)
GLFW.setKeyCallback win (Just keyPressed)
return $ Window win
data Window = Window GLFW.Window
getKey :: Window -> GLFW.Key -> IO GLFW.KeyState
getKey (Window win) key = GLFW.getKey win key
useWindow :: Window -> Texture -> IO () -> IO ()
useWindow (Window win) text action = do
GLFW.pollEvents
drawScene text action
GLFW.swapBuffers win
| olive/antiqua-prime | src/Antiqua/Graphics/Window.hs | mit | 3,037 | 0 | 12 | 677 | 978 | 473 | 505 | -1 | -1 |
-- source: http://stackoverflow.com/a/23124701/499478
{-# LANGUAGE MultiParamTypeClasses, TypeFamilies,
FlexibleInstances, UndecidableInstances, IncoherentInstances #-}
module Augment (augmentWith) where
class Augment a b f h where
augmentWith :: (a -> b) -> f -> h
instance (a ~ c, h ~ b) => Augment a b c h where
augmentWith = ($)
instance (Augment a b d h', h ~ (c -> h')) => Augment a b (c -> d) h where
augmentWith g f = augmentWith g . f
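-- Hypothetical usage sketch (added here; not part of the original answer):
-- post-compose a monomorphic function onto the result of a curried
-- two-argument function. The concrete type signature is what drives
-- instance selection.
_exampleAugment :: Int -> Int -> Bool
_exampleAugment = augmentWith (even :: Int -> Bool) ((+) :: Int -> Int -> Int)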
| Wizek/fuzzy-match | src/Augment.hs | mit | 460 | 0 | 9 | 92 | 154 | 83 | 71 | 9 | 0 |
module Quipp.Vmp where
import Debug.Trace
import Control.Applicative ((<$>), (<*>))
import Control.Monad.Trans.Class (lift)
-- import transformers-0.3.0.0:Control.Monad.Trans.Class (lift)
import Data.Foldable (foldlM)
import Data.List (elemIndex)
import Data.Map (Map, (!))
import qualified Data.Map as Map
import Data.Maybe (fromJust)
import Data.Random (RVarT, RVar, normalT)
import Data.Random.Distribution.Exponential (exponentialT)
import Quipp.ExpFam
import Quipp.Factor
import Quipp.Util
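-- Single-variable update strategies over a factor graph: VMP replaces a
-- variable's entry with its new variational likelihood, Gibbs samples a
-- concrete value from that likelihood, and MH proposes from that likelihood
-- and applies a Metropolis-Hastings correction when the variable currently
-- holds a known value.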
updateVarVmp :: Eq v => FactorGraph v -> FactorGraphState v -> VarId -> Maybe (FactorGraphState v)
updateVarVmp graph state varid = do
likelihood <- newVarLikelihood graph state varid
return $ Map.insert varid likelihood state
stepVmp :: Eq v => FactorGraph v -> FactorGraphState v -> Maybe (FactorGraphState v)
stepVmp graph state =
foldlM (\st varid -> updateVarVmp graph st varid) state (Map.keys $ factorGraphVars graph)
updateVarGibbs :: Eq v => FactorGraph v -> FactorGraphState v -> VarId -> RVarT Maybe (FactorGraphState v)
updateVarGibbs graph state varid = do
likelihood <- lift (newVarLikelihood graph state varid)
value <- sampleRVar $ sampleLikelihood (fst $ factorGraphVars graph ! varid) $ likelihood
return $ Map.insert varid (KnownValue value) state
stepGibbs :: Eq v => FactorGraph v -> FactorGraphState v -> RVarT Maybe (FactorGraphState v)
stepGibbs graph state =
foldlM (\st varid -> updateVarGibbs graph st varid) state (Map.keys $ factorGraphVars graph)
updateVarMH :: Eq v => FactorGraph v -> FactorGraphState v -> VarId -> RVarT Maybe (FactorGraphState v)
updateVarMH graph state varid = do
let (ef, factorids) = factorGraphVars graph ! varid
likelihood <- lift (newVarLikelihood graph state varid)
proposal <- sampleRVar $ sampleLikelihood ef $ likelihood
let proposalState = Map.insert varid (KnownValue proposal) state
case state ! varid of
NatParam _ -> return proposalState
KnownValue oldValue -> do
proposalStateLikelihood <- lift (newVarLikelihood graph proposalState varid)
let stateLp s = sum (map (factorExpLogValue graph s) factorids)
mhLog = stateLp proposalState - likelihoodLogProbability ef likelihood proposal
- stateLp state + likelihoodLogProbability ef proposalStateLikelihood oldValue
if mhLog >= 0 then return proposalState
else do
          logUnitInterval <- exponentialT 1.0
          -- exponentialT 1.0 samples E = -log U with U uniform on (0,1), so
          -- accepting when mhLog >= -E accepts with probability exp mhLog.
          return $ if mhLog >= negate logUnitInterval then proposalState else state
stepMH :: Eq v => FactorGraph v -> FactorGraphState v -> RVarT Maybe (FactorGraphState v)
stepMH graph state =
foldlM (\st varid -> updateVarMH graph st varid) state (Map.keys $ factorGraphVars graph)
| jessica-taylor/quipp2 | src/Quipp/Vmp.hs | mit | 2,688 | 0 | 19 | 463 | 897 | 445 | 452 | 49 | 4 |
{-# LANGUAGE NoImplicitPrelude, OverloadedStrings, TupleSections, GADTs #-}
module DayX where
import AdventPrelude
import Data.List (iterate)
input :: String
input = "......^.^^.....^^^^^^^^^...^.^..^^.^^^..^.^..^.^^^.^^^^..^^.^.^.....^^^^^..^..^^^..^^.^.^..^^..^^^.."
-- input = "..^^."
-- input = ".^^.^.^^^^"
showRow :: [Bool] -> String
showRow = fmap showCell
where showCell c | c = '^'
| otherwise = '.'
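-- A tile is a trap exactly when its left and right "parents" differ: the
-- four rules in the puzzle statement reduce to l /= r, with the centre
-- parent cancelling out. Padding with False models safe walls at both ends.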
nextRow :: [Bool] -> [Bool]
nextRow = nextCell . (False :) . (<> [False])
where nextCell (l: rs@(c:r:_)) =
let n = (l && c && not r) || (c && r && not l) ||
(l && not c && not r) || (r && not c && not l)
in n : nextCell rs
nextCell _ = []
result1 =
let r0 = fmap (== '^') input
rs = take 40 (iterate nextRow r0)
in sum . fmap ((1 -) . fromEnum) $ concat rs
result2 =
let r0 = fmap (== '^') input
rs = take 400000 (iterate nextRow r0)
in sum . fmap ((1 -) . fromEnum) $ concat rs
| farrellm/advent-2016 | src/Day18.hs | mit | 985 | 0 | 16 | 260 | 383 | 202 | 181 | 25 | 2 |
main = putStrLn $ show solve
solve :: Int
solve = numSpiralDiagSum 1001
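-- The diagonal values of the spiral are the corners of each square "layer".
-- zip (1:[2,4..]) numSpiral pairs each layer with its corner stride: every
-- 2nd element of layer [2..9] gives the corners 3,5,7,9, every 4th of
-- [10..25] gives 13,17,21,25, and so on; the centre layer [1] contributes 1.
-- A 1001x1001 spiral has (1001+1) `div` 2 = 501 layers.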
numSpiralDiagSum :: Int -> Int
numSpiralDiagSum n = sum $ map (sum . (uncurry every)) $ take (div (n+1) 2) $ zip (1:[2,4..]) numSpiral
where
f n xs = every n xs
-- Each layer of the infinite number spiral (e.g. [[1], [2,3,4,5,6,7,8,9], ..])
numSpiral :: [[Int]]
numSpiral = [1] : f 3 [2..]
where
f n k = let t = 4*(n-1) in (take t k) : f (n+2) (drop t k)
every n xs = case drop (n-1) xs of
[] -> []
(y:ys) -> y : every n ys
| pshendry/project-euler-solutions | 0028/solution.hs | mit | 549 | 0 | 13 | 158 | 280 | 144 | 136 | 12 | 2 |
{-# LANGUAGE OverloadedStrings #-}
import Debug.Trace
{-
CODE CHALLENGE: Solve the Change Problem. The DPCHANGE pseudocode is reproduced below for your convenience.
Input: An integer money and an array Coins = (coin1, ..., coind).
Output: The minimum number of coins with denominations Coins that changes money.
Sample Input:
40
50,25,20,10,5,1
Sample Output:
2
DPCHANGE(money, Coins)
MinNumCoins(0) ← 0
for m ← 1 to money
MinNumCoins(m) ← ∞
for i ← 1 to |Coins|
if m ≥ coini
if MinNumCoins(m - coini) + 1 < MinNumCoins(m)
MinNumCoins(m) ← MinNumCoins(m - coini) + 1
output MinNumCoins(money)
-}
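-- The pseudocode above is the dynamic-programming algorithm; dpChange below
-- uses a simpler greedy strategy (largest usable coin first), which is not
-- guaranteed to be minimal (for the sample input it selects 25+10+5, i.e.
-- 3 coins, where 20+20 = 2 coins is optimal). As an added sketch that is
-- not used by main, a direct transcription of DPCHANGE with a lazy table:
dpChangeMinCoins :: [Int] -> Int -> Int
dpChangeMinCoins coins money = table !! money
  where
    -- table !! m is MinNumCoins(m)
    table = 0 : map best [1 .. money]
    best m = case [ table !! (m - c) + 1 | c <- coins, c <= m ] of
               [] -> maxBound `div` 2 -- no coin fits; stands in for infinity
               xs -> minimum xs
-- For example, dpChangeMinCoins [50,25,20,10,5,1] 40 == 2.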
main = do
putStrLn "Enter the amount?"
n <- getLine
putStrLn "Enter the coins"
sCoins <- getLine
--let coins = map read $ words sCoins :: [Int]
let coins = map read $ wordsWhen (==',') sCoins :: [Int]
selectedCoins = dpChange coins (read n :: Int)
putStrLn $ "The result' is " ++ show selectedCoins
putStrLn $ show $ length selectedCoins
dpChange :: [Int] -> Int -> [Int]
dpChange coins amount = loop coins [] amount
where loop :: [Int] -> [Int] -> Int -> [Int]
loop coins selectedCoins amount
| amount == 0 = selectedCoins
| otherwise =
let (selectedCoin, coins') = selectCoin coins amount
remAmount = amount - selectedCoin
in loop coins' (selectedCoin:selectedCoins) remAmount
selectCoin :: [Int] -> Int -> (Int, [Int])
selectCoin allCoins@(c:coins) amount
| c < amount = (c, allCoins)
| c == amount = (c, coins)
| otherwise = selectCoin coins amount
wordsWhen :: (Char -> Bool) -> String -> [String]
wordsWhen p s = case dropWhile p s of
"" -> []
s' -> w : wordsWhen p s''
where (w, s'') = break p s'
| tinkerthaler/bioinformatics3 | src/DPChange.hs | mit | 1,937 | 0 | 12 | 600 | 439 | 223 | 216 | 30 | 2 |
{-# LANGUAGE DeriveGeneric #-}
module Lattice where
import GHC.Generics (Generic)
class Ord a => Lattice a where
top :: a
bottom :: a
(\/) :: a -> a -> a
(/\) :: a -> a -> a
flowsTo :: a -> a -> Bool
-- | slightly more interesting lattice
-- H
-- / \
-- MA MB
-- \ /
-- L
data GLabel = L | MA | MB | H | Any deriving (Show, Eq, Ord, Generic)
-- there must be a better way than this!
-- this needs to be generated algorithmically (see the 'joinFromFlows'
-- sketch after the instance below). To discuss
instance Lattice GLabel where
top = H
bottom = L
-- joins
L \/ L = L
L \/ MA = MA
L \/ MB = MB
L \/ H = H
L \/ Any = Any
MA \/ L = MA
MA \/ MA = MA
MA \/ MB = H
MA \/ H = H
MA \/ Any = Any
MB \/ L = MB
MB \/ MA = H
MB \/ MB = MB
MB \/ H = H
MB \/ Any = Any
H \/ L = H
H \/ MA = H
H \/ MB = H
H \/ H = H
H \/ Any = H
Any \/ L = Any
Any \/ MA = Any
Any \/ MB = Any
Any \/ H = H
Any \/ Any = Any
-- meets
L /\ L = L
L /\ MA = L
L /\ MB = L
L /\ H = L
L /\ Any = L
MA /\ L = L
MA /\ MA = MA
MA /\ MB = L
MA /\ H = MA
MA /\ Any = Any
MB /\ L = L
MB /\ MA = L
MB /\ MB = MB
MB /\ H = MB
MB /\ Any = Any
H /\ L = L
H /\ MA = MA
H /\ MB = MB
H /\ H = H
H /\ Any = Any
Any /\ L = L
Any /\ MA = Any
Any /\ MB = Any
Any /\ H = Any
Any /\ Any = Any
-- permissible flows
flowsTo L L = True
flowsTo L MA = True
flowsTo L MB = True
flowsTo L H = True
flowsTo L Any = True
flowsTo MA L = False
flowsTo MA MA = True
flowsTo MA MB = False
flowsTo MA H = True
flowsTo MA Any = True
flowsTo MB L = False
flowsTo MB MA = False
flowsTo MB MB = True
flowsTo MB H = True
flowsTo MB Any = True
flowsTo H L = False
flowsTo H MA = False
flowsTo H MB = False
flowsTo H H = True
flowsTo H Any = True
flowsTo Any L = True
flowsTo Any MA = True
flowsTo Any MB = True
flowsTo Any H = True
flowsTo Any Any = True
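-- Illustrative sketch (added, not used by the instance above): for the four
-- ordinary labels the join can be derived from 'flowsTo' as the least upper
-- bound. 'Any' is special-cased in the instance (it flows both ways), so it
-- is deliberately left out here.
ordinaryLabels :: [GLabel]
ordinaryLabels = [L, MA, MB, H]
joinFromFlows :: GLabel -> GLabel -> GLabel
joinFromFlows x y = head [z | z <- ubs, all (z `flowsTo`) ubs]
  where ubs = [z | z <- ordinaryLabels, x `flowsTo` z, y `flowsTo` z]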
| kellino/TypeSystems | gradualSecurity/src/Lattice.hs | mit | 2,151 | 0 | 8 | 921 | 951 | 466 | 485 | 88 | 0 |
{-# LANGUAGE PatternGuards #-}
{-# LANGUAGE CPP #-}
module Stackage.Select
( select
, defaultSelectSettings
) where
import Data.Either (partitionEithers)
import qualified Data.Map as Map
import Data.Maybe (mapMaybe)
import Data.Set (empty)
import qualified Data.Set as Set
import Distribution.Text (simpleParse)
import Distribution.Version (withinRange)
import Prelude hiding (pi)
import Stackage.Config
import Stackage.InstallInfo
import Stackage.Types
import Stackage.Util
defaultSelectSettings :: GhcMajorVersion -> SelectSettings
defaultSelectSettings version = SelectSettings
{ extraCore = defaultExtraCore version
, expectedFailuresSelect = defaultExpectedFailures version
, stablePackages = defaultStablePackages version
, haskellPlatformDir = "hp"
, requireHaskellPlatform = True
, excludedPackages = empty
, flags = \coreMap ->
Set.fromList (words "blaze_html_0_5 small_base") `Set.union`
#if defined(mingw32_HOST_OS) || defined(__MINGW32__)
-- Needed on Windows to get unix-compat to compile
(if version >= GhcMajorVersion 7 6 then Set.empty else Set.fromList (words "old-time"))
`Set.union`
#endif
-- Support for containers-unicode-symbols
(case Map.lookup (PackageName "containers") coreMap of
Just v | Just range <- simpleParse "< 0.5", v `withinRange` range
-> Set.singleton "containers-old"
_ -> Set.empty)
, disabledFlags = Set.fromList $ words "bytestring-in-base"
, allowedPackage = const $ Right ()
, useGlobalDatabase = False
, skippedTests = empty
, selectGhcVersion = version
, selectTarballDir = "patching/tarballs"
}
select :: SelectSettings -> IO BuildPlan
select settings' = do
ii <- getInstallInfo settings'
bt <-
case iiBuildTools ii of
Left s -> error $ "Could not topologically sort build tools: " ++ s
Right x -> return x
return BuildPlan
{ bpTools = bt
, bpPackages = iiPackages ii
, bpOptionalCore = iiOptionalCore ii
, bpCore = iiCore ii
, bpSkippedTests = skippedTests settings'
}
-- | Get all of the build tools required.
iiBuildTools :: InstallInfo -> Either String [String]
iiBuildTools InstallInfo { iiPackageDB = PackageDB m, iiPackages = packages } =
fmap (map packageVersionString)
$ topSort
$ map addDependencies
$ filter (flip Set.notMember coreTools . fst)
$ Set.toList
$ Set.fromList
$ mapMaybe (flip Map.lookup buildToolMap)
$ Set.toList
$ Set.unions
$ map piBuildTools
$ Map.elems
$ Map.filterWithKey isSelected m
where
isSelected name _ = name `Set.member` selected
selected = Set.fromList $ Map.keys packages
-- Build tools shipped with GHC which we should not attempt to build
-- ourselves.
coreTools = Set.fromList $ map PackageName $ words "hsc2hs"
-- The map from build tool name to the package it comes from.
buildToolMap :: Map Executable (PackageName, Version)
buildToolMap = Map.unions $ map toBuildToolMap $ Map.toList m
toBuildToolMap :: (PackageName, PackageInfo) -> Map Executable (PackageName, Version)
toBuildToolMap (pn, pi) = Map.unions
$ map (flip Map.singleton (pn, piVersion pi))
$ Set.toList
$ piExecs pi
addDependencies :: (PackageName, Version) -> ((PackageName, Version), Set (PackageName, Version))
addDependencies (pn, pv) =
((pn, pv), deps)
where
deps =
case Map.lookup pn m of
Nothing -> Set.empty
Just pi -> Set.fromList
$ mapMaybe (flip Map.lookup buildToolMap)
$ Set.toList
$ piBuildTools pi
topSort :: (Show a, Ord a) => [(a, Set a)] -> Either String [a]
topSort orig =
uncurry go . partitionEithers . map (splitter . limitDeps) $ orig
where
splitter (x, y)
| Set.null y = Left x
| otherwise = Right (x, y)
go x [] = Right x
go [] y = Left $ "The following form a cycle: " ++ show (map fst y)
go (x:xs) ys = do
let (xs', ys') = partitionEithers $ map (splitter . dropDep x) ys
rest <- go (xs ++ xs') ys'
return $ x : rest
dropDep x (y, z) = (y, Set.delete x z)
allVertices = Set.fromList $ map fst orig
limitDeps (x, y) = (x, Set.intersection allVertices y)
| sinelaw/stackage | Stackage/Select.hs | mit | 4,652 | 0 | 18 | 1,383 | 1,279 | 678 | 601 | 99 | 3 |
{-# LANGUAGE BangPatterns, RankNTypes #-}
{-# OPTIONS_GHC -funbox-strict-fields #-}
{-# OPTIONS_GHC -fspec-constr #-}
{-# OPTIONS_GHC -fdicts-cheap #-}
{- OPTIONS_GHC -optlo-globalopt #-}
{- OPTIONS_GHC -optlo-loop-unswitch #-}
{- OPTIONS_GHC -optlo-mem2reg #-}
{- OPTIONS_GHC -optlo-prune-eh #-}
{-# OPTIONS_GHC -optlo-O3 -optlc-O3 #-} -- this is fast...
module Main where
import Data.Vector as V
import GHC.Enum as E
import Data.Vector.Fusion.Stream as VS
import Data.Vector.Fusion.Stream.Monadic as M
import Data.Vector.Fusion.Stream.Size as VS
import Criterion.Main as C
import Control.Monad.ST
import qualified Data.Vector.Unboxed.Mutable as VUM
import qualified Data.Vector.Unboxed as VU
import HERMIT.Optimization.StreamFusion.Vector
-- | Note: Data.Vector.concatMap = Data.Vector.Generic.concatMap
-- which is implemented in terms of flatten (with entire
-- inner vector in state, so not properly fused).
-- We cannot hope to optimize this.
concatTestV :: Int -> Int
concatTestV n = V.sum $ V.concatMap (\(!x) -> V.enumFromN 1 x) $ V.enumFromN 1 n
{-# NOINLINE concatTestV #-}
concatTestS :: Int -> Int
concatTestS n = VS.foldl' (+) 0 $ VS.concatMap (\(!x) -> VS.enumFromStepN 1 1 x) $ VS.enumFromStepN 1 1 n
{-# NOINLINE concatTestS #-}
-- | And again, this time we flatten the resulting stream. If this is fast
-- (enough), we can start the fusion process on ADP.
--
-- NOTE This does actually reduce to the desired tight loop.
flattenTest :: Int -> Int
flattenTest !n = VS.foldl' (+) 0 $ VS.flatten mk step Unknown $ VS.enumFromStepN 1 1 n
where
mk !x = (1,x)
{-# INLINE mk #-}
step (!i,!max)
| i<=max = VS.Yield i (i+1,max)
-- | max>(0::Int) = VS.Yield i (i+1,max-1) -- 10% faster
| otherwise = VS.Done
{-# INLINE step #-}
{-# NOINLINE flattenTest #-}
flattenTestDown :: Int -> Int
flattenTestDown !n = VS.foldl' (+) 0 $ VS.flatten mk step Unknown $ VS.enumFromStepN 1 1 n
where
mk !x = (x,1)
{-# INLINE mk #-}
step (!i,!min)
| i>=min = VS.Yield i (i-1,min)
| otherwise = VS.Done
{-# INLINE step #-}
{-# NOINLINE flattenTestDown #-}
-- nestedConcatS 3 = sum [1,1,2,2,1,2,3,2,3,3]
nestedConcatS :: Int -> Int
nestedConcatS n = VS.foldl' (+) 0 $ VS.concatMap (\(!x) -> VS.concatMap (\(!y) -> VS.enumFromStepN y 1 x) $ VS.enumFromStepN 1 1 x) $ VS.enumFromStepN 1 1 n
{-# NOINLINE nestedConcatS #-}
concatMapMonadic :: Int -> Int
concatMapMonadic k = runST $ do
tbl <- VU.thaw $ VU.fromList [0 .. k]
M.foldl' (+) 0 $ M.concatMapM (\(!x) -> VUM.unsafeRead tbl x >>= \z -> return $ M.enumFromStepN 1 1 z) $ M.enumFromStepN 1 1 k
{-# NOINLINE concatMapMonadic #-}
{-
nestedFlatten :: Int -> Int
nestedFlatten !n = VS.foldl' (+) 0 $ VS.flatten mk step Unknown $ VS.enumFromStepN 1 1 n
where
mk !x = (1,1,x)
{-# INLINE mk #-}
step (!i,!j,!x)
| (i<=x) && (j<=i) = VS.Yield j (i,j+1,x)
| i<=x = VS.Skip (i+1,1,x)
| otherwise = VS.Done
{-# INLINE step #-}
{-# NOINLINE nestedFlatten #-}
-}
main = do
-- print $ concatTestV 1000
print $ concatTestS 1000
print $ flattenTest 1000
print $ concatMapMonadic 1000
-- print $ flattenTestDown 1000
putStrLn $ "nestedConcatS: " Prelude.++ (show $ nestedConcatS 100)
-- putStrLn $ "nestedFlatten: " Prelude.++ (show $ nestedFlatten 100)
defaultMain
[ bgroup "concat tests / 100"
[ bench "concatTestS" $ whnf concatTestS 100
-- , bench "concatTestV" $ whnf concatTestV 100
, bench "flattenTest" $ whnf flattenTest 100
, bench "concatMapMonadic" $ whnf concatMapMonadic 100
]
, bgroup "concat tests / 1000"
[ bench "concatTestS" $ whnf concatTestS 1000
-- , bench "concatTestV" $ whnf concatTestV 1000
, bench "flattenTest" $ whnf flattenTest 1000
, bench "concatMapMonadic" $ whnf concatMapMonadic 1000
]
{- for paper
, bgroup "concat tests / 5000"
[ bench "concatTestS" $ whnf concatTestS 5000
, bench "flattenTest" $ whnf flattenTest 5000
]
, bgroup "concat tests / 10000"
[ bench "concatTestS" $ whnf concatTestS 10000
, bench "flattenTest" $ whnf flattenTest 10000
]
, bgroup "concat tests / 20000"
[ bench "concatTestS" $ whnf concatTestS 20000
, bench "flattenTest" $ whnf flattenTest 20000
]
-}
, bgroup "nested tests / 100"
[ bench "nestedConcatS" $ whnf nestedConcatS 100
]
{-
, bgroup "nested tests / 10"
[ bench "nestedConcatS" $ whnf nestedConcatS 10
, bench "nestedFlatten" $ whnf nestedFlatten 10
]
, bgroup "nested tests / 100"
[ bench "nestedConcatS" $ whnf nestedConcatS 100
, bench "nestedFlatten" $ whnf nestedFlatten 100
]
-}
]
| ku-fpg/hermit-streamfusion | Concat.hs | mit | 4,909 | 0 | 17 | 1,226 | 911 | 488 | 423 | 64 | 1 |
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE TypeFamilies #-}
module Math.REPL.Evaluator.Cmd (evalCmd) where
--------------------------------------------------------------------------------
import Math.REPL.Evaluator.Expr (evalExpr)
import Math.REPL.Evaluator.Func (execFunc)
import Math.REPL.Help (help)
import Math.REPL.Prim.Bindings (Bindings, addFun, addVar,
display)
import Math.REPL.Prim.Cmd (Cmd (..))
import Math.REPL.Prim.Definitions (defBinds)
import Math.REPL.Prim.Expr (Expr (Message, Constant))
import Math.REPL.Prim.Function (testFunc)
import Math.REPL.Prim.Result (Result (..))
--------------------------------------------------------------------------------
import Math.REPL.Prim.Bindings (getFun)
import Math.REPL.Prim.Function (Function, apply, arity)
import Graphics.Rendering.Plot.Gtk.UI (plotWithArity)
import Graphics.UI.Gtk (initGUI, mainGUI)
import Data.List (foldl1')
import Data.Maybe
--------------------------------------------------------------------------------
evalCmd :: Bindings -> Cmd -> Result
evalCmd _ Help = Text . unlines $ help
evalCmd b Display = Text . unlines . display $ b
evalCmd _ Reset = NewBindings defBinds
evalCmd b (Assign s e) =
case evalExpr b e of
Message xxs -> Error . unlines $ xxs
Constant c -> NewBindings . addVar (s, c) $ b
_ -> Error " ~~ Erroneous Result ~~ "
evalCmd b fun@(Func i _ _) = let f = execFunc b fun
in case testFunc f 0 of
Nothing -> NewBindings . addFun (i, f) $ b
Just ms -> Error . unlines $ ms
--------------------------------------------------------------------------------
evalCmd b (Plot s rs) =
case getFun s b of
Nothing -> Error $ "Unknown function: " ++ s
Just f -> let pairMap fn (x, y) = (fn x, fn y)
rangeExps = map (pairMap (evalExpr b)) rs
rangeErrs = map (uncurry grabMessage) rangeExps
allErrors = foldl1' grabMessage rangeErrs
ranges = map (pairMap (\(Constant c) -> c)) rangeExps
fromMsg = (\(Message ms) -> ms)
in if null . fromMsg $ allErrors
then plotFunction f ranges
else Error . unlines . fromMsg $ allErrors
grabMessage :: Expr -> Expr -> Expr
grabMessage (Message m1) (Message m2) = Message (m1 ++ m2)
grabMessage m1@(Message _) _ = m1
grabMessage _ m2@(Message _) = m2
grabMessage _ _ = Message []
plotFunction :: Function -> [(Double, Double)] -> Result
plotFunction f ranges =
if length ranges /= arity f
then Error $ "Invalid parameters: " ++
"Required " ++ show (arity f) ++ " range(s). " ++
"Provided " ++ show (length ranges) ++ "."
else Action $ do
_ <- initGUI
fromJust $ plotWithArity (arity f) ((\(Right x) -> x) . apply f) ranges
mainGUI
--------------------------------------------------------------------------------
| sumitsahrawat/calculator | src/Math/REPL/Evaluator/Cmd.hs | gpl-2.0 | 3,396 | 0 | 18 | 1,122 | 921 | 501 | 420 | 59 | 6 |
{-# LANGUAGE OverloadedStrings #-}
module MirakelBot.Handlers.Users where
import Control.Lens
import Control.Monad hiding (forM_)
import Data.Foldable
import qualified Data.Map as M
import Data.Maybe
import Data.Monoid
import qualified Data.Text as T
import MirakelBot.HandlerHelpers
import MirakelBot.Handlers
import MirakelBot.Message.Send
import MirakelBot.Types
init :: Irc ()
init = void $ registerBangHandlerWithHelp "users" "show all online users" $ \_ -> do
msg <- getMessage
let toChannel x = case x of ToChannel c -> Just c; _ -> Nothing
let mdest = msg ^? privateDestination
forM_ (maybeToList mdest >>= mapMaybe toChannel) $ \c -> do
users <- getUserList c
answer (T.unwords $ map showUser $ M.toList users)
showUser :: (Nick,UserMode) -> T.Text
showUser (Nick nick,mode) = showt mode <> nick
| azapps/mirakelbot | src/MirakelBot/Handlers/Users.hs | gpl-3.0 | 977 | 0 | 17 | 286 | 270 | 143 | 127 | 23 | 2 |
module Main where
import Parser
import Control.Monad.Trans
import System.Console.Haskeline
import Text.Parsec.Prim
process :: String -> IO ()
process line = do
let res = parseToplevel line
case res of
Left err -> print err
Right ex -> mapM_ print ex
main :: IO ()
main = runInputT defaultSettings loop
where
loop = do
minput <- getInputLine "ready> "
case minput of
Nothing -> outputStrLn "Goodbye."
Just input -> (liftIO $ process input) >> loop | Le-Chiffre/HsC | Code/Compiler/Main.hs | gpl-3.0 | 486 | 0 | 15 | 113 | 168 | 83 | 85 | 18 | 2 |
-- |
-- Module: Math.NumberTheory.Canon.SpecialFunctions
-- Copyright: (c) 2018-2019 Frederick Schneider
-- Licence: MIT
-- Maintainer: Frederick Schneider <[email protected]>
-- Stability: Provisional
--
-- This module defines numerous functions associated with massive numbers.
-- This is an excellent resource: http://googology.wikia.com/wiki/Googology_Wiki
module Math.NumberTheory.Canon.SpecialFunctions (
moserFunc,
moserTriangle,
moserSquare,
moserPentagon,
mega,
megiston,
moser,
knuth,
conwayChain,
conwayGuy,
genGrahamFunc,
grahamFunc,
grahamsNumber,
ackermann,
ackermann3
-- , sudan
)
where
import Math.NumberTheory.Canon
moserFunc :: Canon -> Canon -> Canon -> Canon
moserTriangle, moserSquare :: Canon -> Canon
moserPentagon, mega, megiston, moser :: Canon
-- | Generalized Moser function: https://en.wikipedia.org/wiki/Steinhaus%E2%80%93Moser_notation
moserFunc nP mP pP
| cIntegral nP && cIntegral mP && cIntegral pP && nP >= c1 && pP >= c3
= m' nP mP pP
| otherwise = error "The parameters to the Moser function must all be integral with n >= 1 and p >= 3."
where m' n m p | n < 1 = error "n must be >= 1 in the Moser function"
| m > c1 = m' (m' n c1 p) (m-c1) p
| p > c3 = m' n n (p-c1)
| otherwise = n <^ n
-- to do: non-recursive definition?
-- | Moser Triangle (see Wikipedia link above)
moserTriangle n = moserFunc n c1 c3
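-- Illustrative note (added): with these arguments the general definition
-- reduces to n <^ n, i.e. a number n in a triangle is n^n.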
-- | Moser Square (see Wikipedia link above)
moserSquare n = moserFunc n c1 c4
-- | Moser Pentagon (see Wikipedia link above)
moserPentagon = mega
-- | Mega: "2 in a circle" (see Wikipedia link above)
mega = moserFunc c2 c1 c5
-- | Megiston: "10 in a circle" (see Wikipedia link above)
megiston = moserFunc c10 c1 c5
where c10 = makeCanon 10
-- | Moser number; "2 in a mega-gon" (see Wikipedia link above)
moser = moserFunc c2 c1 mega
ackermann :: Canon -> Canon -> Canon
ackermann3 :: Canon -> Canon -> Canon -> Canon
-- | <https://en.wikipedia.org/wiki/Ackermann_function Ackermann function>
ackermann m n
| cIntegral m && cIntegral n && m >= c0 && n >= c0
= a m n
| otherwise = error "m and n must both be integral in the Ackermann function with m >= 0 and n >= 0"
where a m' n' | m' == c0 = n' + c1
| m' < c3 && n' == c0 = a (m' - c1) c1
                | m' < c3               = a (m' - c1) $ a m' (n' - c1)
                | otherwise             = -3 + conwayChain [2, n'+3, m'-2]
-- | The original 3 parameter Ackermann function
ackermann3 mP nP pP
| cIntegral mP && cIntegral nP && cIntegral pP && nP >= c0 && pP >= c0
= a3 mP nP pP
| otherwise = error "m, n and p must all be integral in the Ackermann3 function"
where a3 m n p | n < c0 || p < c0 = error "ackermann3 Both n and p must be >= 0"
| p == c0 = m + n
| p == c1 = m * n
| p == c2 = m <^ n
| p == c3 = m <^> (n + c1)
| n == c0 = m
| p == c4 && n == 2 = m <^> (1 + m <^> (m + c1)) -- Found while testing. Helps along calculation
| p == c4 && n > 2 = m <^> (1 + a3 m (n - c1) p)
| otherwise = a3 m (a3 m (n - c1) p) (p - c1)
{- Status
ackermann3 2 2 4 = 2 <^> 17 -- could also be written as 2 <^> (1 + 2<^>3) so this is between 2 <<^>> 3 and 2 <<^>> 4
ackermann3 2 3 4 = 2 <^> {1 + 2 <^> 17}
ackermann3 2 4 4 ... Generated error saying special cases in cHyperOp not covered when more than two items. XXX
ackermann3 3 2 4 = 3 <^> (1 + 3 <^> (2*2))
ackermann3 3 3 4 ... Hung initially but workaround added
ackermann3 7 3 4 = 7 <^> {1 + 7 <^> {1 + 7 <^> (2^3)}}
ackermann3 5 4 4 = 5 <^> {1 + 5 <^> {1 + 5 <^> {1 + 5 <^> (2 * 3)}}} -- note the folding based on the second term
ackermann3 2 2 5 ... Hangs
Here's why (stepping through the logic)
a3 2 2 5 = a3 2 (a3 2 1 5) 4
where a3 2 1 5 = a3 2 (a3 2 0 5) 4 = a3 2 2 4
a3 2 2 5 = a3 2 (a3 2 2 4) 4 = a3 2 (2<^>17) 4. So, this folding step would have to be done an incredible number of times.
ToDo: Is there an elegant closed form expression? x n 4 is between x <<^>> n+ 1 and x <<^>> n + 2.
-}
{- ToDo: Fix and add later
-- | The Sudan function created by Gabriel Sudan, a student of David Hilbert (https://en.wikipedia.org/wiki/Sudan_function)
sudan :: Canon -> Canon -> Canon -> Canon
sudan n x y | not (cIntegral n) || not (cIntegral x) || not (cIntegral y) || n < 0 || x < 0 || y < 0
= error "All input to the sudan function must be integral and >= 0"
| otherwise = s n x y
where s n x y | n == 0 = x + y
| n > 0 && y == 0 = x
| n == 1 = s c1 c0 y + x * 2 <^ y
| otherwise = s (n-1) snxym1 (snxym1 + y)
where snxym1 = s n x (y-1)
-}
genGrahamFunc :: Canon -> Integer -> Canon
grahamFunc :: Integer -> Canon
grahamsNumber :: Canon
-- | Calls the generalized Graham function with value 3
grahamFunc = genGrahamFunc c3
-- | <https://en.wikipedia.org/wiki/Graham%27s_number Graham's Number>
grahamsNumber = grahamFunc 64
-- | Generalized Graham Function
genGrahamFunc cP nP
| cIntegral cP && cP >= c1 && nP >= 1
= gGF cP nP
| otherwise = error "c and n must be Integral and both c and n >= 1 in the generalized Graham function"
where gGF c n | n > 1 = cApplyHy (gGF c (n -1)) [c,c] True -- recursively defined
| otherwise = c <<<^>>> c -- Hexation or 4 arrows
knuth :: Canon -> Canon -> Canon -> Canon
-- | <https://en.wikipedia.org/wiki/Knuth%27s_up-arrow_notation Knuth's Up Arrow Notation>, analogous to hyperoperations
knuth a n b = cApplyHy (c2 + n) [a,b] True
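-- Illustrative note (added), assuming the standard Canon numbering in which
-- hyperoperation level 3 is exponentiation: @knuth a 1 b@ is a^b (one arrow)
-- and @knuth a 2 b@ is tetration, since n arrows map to level n + 2.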
-- | <https://en.wikipedia.org/wiki/Conway_chained_arrow_notation Conway Chained-Arrow Notation>:
-- This function will try to reduce generalized conway chain notation down to humble hyperoperations (or better).
conwayChain :: [Canon] -> Canon
conwayChain l'
| all (\c -> cIntegral c && c > c0) l' = cc l'
| otherwise = error "ConwayChain: Each element in the input list/chain must be integral and > 0"
  where cc ch | null ch      = error "Logic Error: conwayChain requires a non-empty list."
| head ch == c1 = c1
| otherwise = f (takeWhile (/= c1) ch)
f c = case l of
0 -> c1 -- in this context we have stripped out the 1s so we can assume 1
1 -> p
2 -> p <^ q
3 -> knuth p r q -- "simple" hyperoperation
               -- Beyond length 3, we may never come back. Note: We strip out the 1s
_ | p == c2 && q == c2 -> c4 -- Property #6
| otherwise -> cc $ x ++ [cc (x ++ [s-1, v])] ++ [v-1] -- Rule #4
where l = length c
(p, q, r) = (head c, c !! 1, c!! 2)
x = take (l-2) c -- x is like the prefix chain from the wiki formula
(s, v) = (c !! (l-2), last c) -- second to last AND "very" last terms
-- Note: conwayChain [x,2,2,2] = x <H(x^2 + 1)> x. (e.g. conwayChain [3,2,2,2] = 3 ~^~ 3, which is the hyperoperator for level 10)
{- Some low-level level 4 examples
v = map (\l -> (l, conwayChain $ map makeCanon l)) [[3,2,2,2], [3,2,3,2], [3,3,2,2], [3,3,3,2], [3,2,2,3], [3,3,2,3]]
mapM_ (putStrLn . show) v
([3,2,2,2], 3 ~^~ 3) -- Level 10 = 3^2 + 1 Hyper Operation. Note: The library converts: x <HO: h> 2 TO x <HO: h-1> x
([3,2,3,2], 3 <H{1 + 3 ~^~ 3}> 3) -- which is 3 <H{1 + conwayChain[3,2,2,2])> 3
([3,3,2,2],3 ~~|<<<<^>>>>|~~ 3) -- Level 29 = 3^3 + 2 Hyper Operation
([3,3,3,2],3 <H{2 + 3 ~~|<<<<^>>>>|~~ 3}> 3) -- which is 3 <H{2 + conwayChain[3,3,2,2])> 3
([3,2,2,3],3 <H{1 + 3 <H{1 + 3 <H{1 + 3 <H{1 + 3 <H{1 + 3 <H{1 + 3 <H{1 + 3 ~^~ 3}> 3}> 3}> 3}> 3}> 3}> 3}> 3)
([3,3,2,3],3 <H{2 + 3 <H{2 + 3 <H{2 + 3 <H{2 + 3 <H{2 + 3 <H{2 + 3 <H{2 + 3 <H{2 + 3 <H{2 + 3 <H{2 + 3 <H{2 + 3 <H{2 + 3 <H{2 + 3 <H{2 + 3 <H{2 + 3 <H{2 + 3 <H{2 + 3 <H{2 + 3 <H{2 + 3 <H{2 + 3 <H{2 + 3 <H{2 + 3 <H{2 + 3 <H{2 + 3 <H{2 + 3 ~~|<<<<^>>>>|~~ 3}> 3}> 3}> 3}> 3}> 3}> 3}> 3}> 3}> 3}> 3}> 3}> 3}> 3}> 3}> 3}> 3}> 3}> 3}> 3}> 3}> 3}> 3}> 3}> 3}> 3)
Note: conwayChain [3,3,3,3] = conwayChain [3,3, [3,3,2,3], 2] so you have to iteratively embed the hyper operations a massive number of times
Note: For perspective, Graham's number has been shown to be between [3,3,64,2] and [3,3,65,2]!
-}
conwayGuy :: Canon -> Canon
-- | Conway-Guy function is a conwayChain of n copies of n.
conwayGuy n = conwayChain (replicate (fromIntegral n) n)
-- Kind of unrelated but interesting: goodstein rep: https://en.wikipedia.org/wiki/Knuth%27s_up-arrow_notation#Numeration_systems_based_on_the_hyperoperation_sequence
| grandpascorpion/canon | Math/NumberTheory/Canon/SpecialFunctions.hs | gpl-3.0 | 9,094 | 0 | 18 | 2,806 | 1,525 | 781 | 744 | 91 | 5 |
-- | This module is the processor for 'RedisMessage's. It bridges the Redis interface
-- created by "RedisServer" and a 'Store'.
module Ltc.Network.RedisAdapter (
redisProxyD
) where
import Control.Applicative ( (<$>) )
import Control.Exception ( handle )
import Control.Monad ( forM, unless )
import Control.Proxy ( Proxy, Pipe, runIdentityP, request, respond, lift )
import Data.ByteString.Lazy.Char8 ( ByteString )
import Data.Set ( Set )
import Ltc.Network.RedisProtocol ( RedisMessage(..) )
import Ltc.Store ( Store(..), Storable, SetCmd(..), Key(..), Version
, TypeMismatchError(..) )
import qualified Data.ByteString.Char8 as BS
import qualified Data.ByteString.Lazy.Char8 as BL
import qualified Data.Set as S
import System.Log.Logger ( debugM, warningM )
import Text.Printf ( printf )
----------------------
-- Debugging
----------------------
-- | Debugging tag for this module
tag :: String
tag = "RedisAdapter"
----------------------
-- Redis proxy
----------------------
-- | Process 'RedisMessage's in a synchronous fashion. Note that not all Redis messages
-- are supported; unsupported commands will return "not supported" errors.
--
-- All Redis entries in a 'Store' will have type 'ByteString', or @Set ByteString@; this
-- matches Redis' own stringly-typed interface.
redisProxyD :: (Proxy p, Store s) => s -> () -> Pipe p RedisMessage RedisMessage IO ()
redisProxyD store () = runIdentityP loop
where
loop = do
cmd <- request ()
lift $ debugM tag "handling command"
(reply, stop) <- lift $
handle (\(TypeMismatchError { expectedType = typ }) ->
resply (toError (printf "WRONGTYPE expected: %s" (show typ)))) $ do
case cmd of
MultiBulk ["PING"] ->
resply (Status "PONG")
MultiBulk ["QUIT"] ->
return (Status "OK", True)
MultiBulk ["SET", Bulk key, Bulk value] -> do
_ <- set store (mkKey key) (lazy value)
resply (Status "OK")
MultiBulk ["GET", Bulk key] -> do
(mv :: Maybe (ByteString, Version)) <- getLatest store (mkKey key)
case mv of
Nothing -> resply Nil
Just (s, _) -> resply (Bulk (strict s))
MultiBulk ["KEYS", Bulk pat] ->
handleKeys pat
MultiBulk ["INCR", Bulk key] ->
handleIncr key 1
MultiBulk ["INCRBY", Bulk key, Integer delta] ->
handleIncr key delta
MultiBulk ["DECR", Bulk key] ->
handleIncr key (-1)
MultiBulk ["DECRBY", Bulk key, Integer delta] ->
handleIncr key (-delta)
MultiBulk ["APPEND", Bulk key, Bulk value] -> do
handleAppend key value
MultiBulk ["STRLEN", Bulk key] -> do
v <- getWithDefault (mkKey key) ""
resply (Integer (fromIntegral (BL.length v)))
MultiBulk ["GETRANGE", Bulk key, Integer start, Integer end] -> do
handleGetRange key start end
-- SETRANGE is not supported because Francesco is a pedant.
MultiBulk ("MGET" : ks) -> do
messagesToKeys ks handleMGet
MultiBulk ("MSET" : ks) -> do
messagesToKeyValues ks handleMSet
MultiBulk ["SADD", Bulk key, Bulk s] -> do
handleSAdd key (lazy s)
MultiBulk ["SADD", Bulk key, Integer n] -> do
let s = BL.pack (show n)
handleSAdd key s
MultiBulk ("SINTER" : ks) ->
messagesToKeys ks handleSInter
MultiBulk ["SMEMBERS", key] ->
messagesToKeys [key] handleSInter
MultiBulk ["SISMEMBER", Bulk key, Bulk value] -> do
checkIsMember key (lazy value)
MultiBulk ["SISMEMBER", Bulk key, Integer value] -> do
let s = BL.pack (show value)
checkIsMember key s
MultiBulk ["SCARD", Bulk key] -> do
(vs :: Maybe (Set ByteString, Version)) <- getLatest store (mkKey key)
case vs of
Nothing ->
resply (Integer 0)
Just (s, _) ->
resply (Integer (fromIntegral (S.size s)))
command -> do
warningM tag (printf "invalid command: %s" (show command))
resply (Error "ERR invalid command")
respond reply
unless stop loop
handleKeys pat = do
case globToRegex (BL.unpack (lazy pat)) of
Nothing ->
resply (Error "ERR bad pattern")
Just reg -> do
ks <- keys store (BL.unpack (BL.fromStrict reg))
let ks' = map (\(Key k) -> strict k) (S.toList ks)
resply (MultiBulk (map Bulk ks'))
handleIncr key delta = do
s <- getWithDefault (mkKey key) ("0" :: ByteString)
case maybeRead s of
Nothing ->
resply (toError (printf "WRONGTYPE key %s does not hold an int" (show key)))
Just n -> do
_ <- set store (mkKey key) (BL.pack (show (n + delta)))
resply (Integer (n + delta))
handleAppend key value = do
s <- getWithDefault (mkKey key) ""
_ <- set store (mkKey key) (BL.append s (lazy value))
resply (Integer (fromIntegral (BL.length s + fromIntegral (BS.length value))))
handleGetRange key start end = do
s <- getWithDefault (mkKey key) ""
let normalize n = if n < 0 then fromIntegral (BL.length s) + n else n
start' = fromIntegral (normalize start)
end' = fromIntegral (normalize end)
resply (Bulk (strict (BL.take (end' - start' + 1) (BL.drop start' s))))
handleMGet ks = withWriteLock store $ do
values <- forM ks $ \key -> do
(mv :: Maybe (ByteString, Version)) <- getLatest store key
return $ case mv of
Just (s, _) -> Bulk (strict s)
_ -> Nil
resply (MultiBulk values)
handleMSet kvs = do
_ <- mset store (map (uncurry SetCmd) kvs)
resply (Status "OK")
handleSAdd key s = do
ss <- getWithDefault (mkKey key) (S.empty :: Set ByteString)
let size = S.size ss
ss' = S.insert s ss
_ <- set store (mkKey key) ss'
let size' = S.size ss'
resply (Integer (fromIntegral (size' - size)))
checkIsMember key value = do
vs <- getWithDefault (mkKey key) (S.empty :: Set ByteString)
resply (toRedisBool (value `S.member` vs))
handleSInter [] =
resply (MultiBulk [])
handleSInter (k:ks) = do
(mv :: Maybe (Set ByteString, Version)) <- getLatest store k
case mv of
Nothing ->
resply (MultiBulk [])
Just (s, _) -> do
mss <- getStringSets ks
case mss of
Nothing ->
resply (toError "WRONGTYPE some arguments are not sets")
Just ss -> do
let isct = foldl S.intersection s ss
resply (MultiBulk (map (Bulk . strict) (S.toList isct :: [ByteString])))
-- | Get all the sets associated with the given keys. Any missing values default to
-- empty sets. If any of the sets are not sets of strings, return 'Nothing'.
getStringSets :: [Key] -> IO (Maybe [Set ByteString])
getStringSets ks = do
handle (\(TypeMismatchError {}) -> return Nothing ) $
Just . map (maybe S.empty fst) <$> forM ks (getLatest store)
-- | Convert a list of 'RedisMessage's to a list of 'Key's. This is useful for
-- commands with variable numbers of arguments.
messagesToKeys :: [RedisMessage]
-> ([Key] -> IO (RedisMessage, Bool))
-> IO (RedisMessage, Bool)
messagesToKeys ks act = go [] ks
where
go acc [] = act (reverse acc)
go acc (Bulk k : kt) = go (mkKey k : acc) kt
go _ _ = resply (toError "WRONGTYPE some arguments are not keys")
messagesToKeyValues :: [RedisMessage]
-> ([(Key, ByteString)] -> IO (RedisMessage, Bool))
-> IO (RedisMessage, Bool)
messagesToKeyValues ks act = go [] ks
where
go acc [] = act (reverse acc)
go _ [_] = resply (toError "ERR last key had no value")
go acc (Bulk k : Bulk v : kt) = go ((mkKey k, lazy v) : acc) kt
go acc (Bulk k : Integer v : kt) = go ((mkKey k, BL.pack (show v)) : acc) kt
    go _ _                           = resply (toError "WRONGTYPE some arguments were not keys")
-- | Because, usually, we want to not stop the loop.
resply :: (Monad m) => RedisMessage -> m (RedisMessage, Bool)
resply msg = return (msg, False)
-- | Get the value of a key, or return the given default if the key does not exist.
getWithDefault :: (Storable a) => Key -> a -> IO a
getWithDefault key def = do
mv <- getLatest store key
return (maybe def fst mv)
--------------------------------
-- Helpers
--------------------------------
-- | Convert a 'String' to an 'Error' 'RedisMessage'.
toError :: String -> RedisMessage
toError = Error . strict . BL.pack
-- | Convert a 'Bool' to an 'Integer' 'RedisMessage'
toRedisBool :: Bool -> RedisMessage
toRedisBool = Integer . fromIntegral . fromEnum
-- | Make a strict 'ByteString' lazy.
lazy :: BS.ByteString -> ByteString
lazy s = BL.fromChunks [s]
-- | Make a 'Key' from a strict 'BS.ByteString'.
mkKey :: BS.ByteString -> Key
mkKey = Key . lazy
-- | Make a lazy 'ByteString' strict.
strict :: ByteString -> BS.ByteString
strict = BS.concat . BL.toChunks
-- | Try to read a value; return 'Nothing' if the parse fails.
maybeRead :: (Read a) => ByteString -> Maybe a
maybeRead bs =
let s = BL.unpack bs in
case readsPrec 0 s of
[(x, "")] -> Just x
_ -> Nothing
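-- | Convert a glob pattern into an anchored regular expression. Illustrative
-- examples (added): @globToRegex "user:*" == Just "^user:.*$"@ and
-- @globToRegex "h?llo" == Just "^h.llo$"@.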
globToRegex :: (Monad m, Functor m) => String -> m BS.ByteString
globToRegex s = do
r <- go s
return (BS.concat ["^", r, "$"])
where
go "" = return ""
go ('*':cs) = BS.cons '.' . BS.cons '*' <$> go cs
go ('?':cs) = BS.cons '.' <$> go cs
go ('[':c:cs) = BS.cons '[' . BS.cons c <$> charClass cs
go (c:cs) = BS.cons c <$> go cs
charClass (']':cs) = BS.cons ']' <$> go cs
charClass (c:cs) = BS.cons c <$> charClass cs
charClass _ = fail "unterminated character class"
| scvalex/ltc | src/Ltc/Network/RedisAdapter.hs | gpl-3.0 | 11,188 | 0 | 28 | 4,063 | 3,462 | 1,734 | 1,728 | -1 | -1 |
module Language.Prolog.IsString (module IsString) where
import IsString
| nishiuramakoto/logiku | prolog/Prolog/Language/Prolog/IsString.hs | gpl-3.0 | 72 | 0 | 4 | 7 | 16 | 11 | 5 | 2 | 0 |
-----------------------------------------------------------------------------
-- |
-- Module : Hoodle.Type.Undo
-- Copyright : (c) 2011-2013 Ian-Woo Kim
--
-- License : BSD3
-- Maintainer : Ian-Woo Kim <[email protected]>
-- Stability : experimental
-- Portability : GHC
--
-----------------------------------------------------------------------------
module Hoodle.Type.Undo where
import Data.Hoodle.Zipper
data UndoTable a = UndoTable { undo_allowednum :: Int
, undo_totalnum :: Int
, undo_zipper :: Maybe (SeqZipper a)
}
emptyUndo :: Int -> UndoTable a
emptyUndo n | n > 0 = UndoTable n 0 Nothing
| otherwise = error "undo table must be larger than 0"
singletonUndo :: Int -> a -> UndoTable a
singletonUndo n e = addToUndo (emptyUndo n) e
addToUndo :: UndoTable a -> a -> UndoTable a
addToUndo utable e =
let tn = undo_totalnum utable
an = undo_allowednum utable
mzs = undo_zipper utable
in case mzs of
Nothing -> UndoTable an 1 . Just . singletonSZ $ e
Just zs ->
if tn < an
then UndoTable an (tn+1) . Just . appendGoLast zs $ e
else UndoTable an an . chopFirst . appendGoLast zs $ e
getCurrentUndoItem :: UndoTable a -> Maybe a
getCurrentUndoItem = fmap current . undo_zipper
getPrevUndo :: UndoTable a -> Maybe (a, UndoTable a)
getPrevUndo t = do
newzs <- moveLeft =<< undo_zipper t
return (current newzs, t {undo_zipper = Just newzs})
getNextUndo :: UndoTable a -> Maybe (a, UndoTable a)
getNextUndo t = do
newzs <- moveRight =<< undo_zipper t
return (current newzs, t {undo_zipper = Just newzs})
numOfUndo :: UndoTable a -> Int
numOfUndo = undo_totalnum
| wavewave/hoodle-core | src/Hoodle/Type/Undo.hs | gpl-3.0 | 1,778 | 0 | 16 | 464 | 489 | 248 | 241 | 33 | 3 |
module While.Compiler where
import While.AST
compileAExpr :: AExpr -> String
compileAExpr (Var s) = s
compileAExpr (IntConst n) = show n
compileAExpr (AUnary op a) = "(" ++ compileAUnaOp op ++ " " ++ compileAExpr a ++ ")"
compileAExpr (ABinary op a b) = "(" ++ compileAExpr a ++ " " ++ compileABinOp op++ " " ++ compileAExpr b ++ ")"
compileAUnaOp :: AUnaOp -> String
compileAUnaOp Neg = "-"
compileABinOp :: ABinOp -> String
compileABinOp Add = "+"
compileABinOp Subtract = "-"
compileABinOp Multiply = "*"
compileABinOp Divide = "/"
compileBExpr :: BExpr -> String
compileBExpr (BoolConst True) = "true"
compileBExpr (BoolConst False) = "false"
compileBExpr (BUnary op a) = "(" ++ compileBUnaOp op ++ " " ++ compileBExpr a ++ ")"
compileBExpr (BBinary op a b) = "(" ++ compileBExpr a ++ " " ++ compileBBinOp op ++ " " ++ compileBExpr b ++ ")"
compileBExpr (RBinary op a b) = "(" ++ compileAExpr a ++ " " ++ compileRBinOp op ++ " " ++ compileAExpr b ++ ")"
compileBUnaOp :: BUnaOp -> String
compileBUnaOp Not = "not"
compileBBinOp :: BBinOp -> String
compileBBinOp And = "and"
compileBBinOp Or = "or"
compileRBinOp :: RBinOp -> String
compileRBinOp Equal = "=="
compileRBinOp NotEqual = "<>"
compileRBinOp Greater = ">"
compileRBinOp GreaterEqual = ">="
compileRBinOp Less = "<"
compileRBinOp LessEqual = "<="
compileStmt :: Stmt -> String
compileStmt Skip = "skip"
compileStmt (Seq a@(Seq _ _) b) = "(" ++ compileStmt a ++ "); " ++ compileStmt b
compileStmt (Seq a b) = compileStmt a ++ "; " ++ compileStmt b
compileStmt (Assign var expr) = var ++ " := " ++ compileAExpr expr
compileStmt (If test a b) = "if " ++ compileBExpr test ++ " then (" ++ compileStmt a ++ ") else (" ++ compileStmt b ++ ")"
compileStmt (While test expr) = "while " ++ compileBExpr test ++ " do (" ++ compileStmt expr ++ ")"
| fpoli/abstat | src/While/Compiler.hs | gpl-3.0 | 1,811 | 0 | 11 | 330 | 682 | 335 | 347 | 39 | 1 |
-- Copyright 2013 - 2017 John F. Miller
{-# LANGUAGE ScopedTypeVariables, OverloadedStrings, RankNTypes, NamedFieldPuns #-}
-- | Data structures for the Abstract Syntax Tree
module AST where
-- import Scope (Value, Scope)
import Var
import Name
import Parameters
import LineParser
-- | Primary Abstract Syntax Tree
data Exp = Exp {node::Node, position::Position} deriving Show
data Node =
EVar Var -- ^ a possibly scoped variable
| EInt Integer -- ^ Integer literal
| EFloat Double -- ^ real number literal
| EString String -- ^ string literal
| ExString [Exp] -- ^ concat all elements together as a string
| EAtom Name -- ^ atom literal (:foo)
| EArray [Exp] -- ^ an Array Literal
| EHash [(Exp,Exp)] -- ^ a Hash Literal
  | EIVar Name -- ^ Named instance variable (\@foo)
| ECVar Name -- ^ Named class variable (\@@foo)
| ENil | EFalse | ETrue
  -- | Operator-embedded equation
  --
  --   Operator strings of the form `a * b + c - d`.  The first expression
  --   (`a`) is the first argument.  The list contains pairs of operators and
  --   expressions ( [(*,b),(+,c),(-,d)] in the example). This is turned
  --   into a tree structure using Dijkstra's shunting-yard algorithm found
  --   in Eval.hs.
| OpStr Exp [(Op,Exp)]
| Index Exp [Exp] -- ^ an expression followed by an index (foo[3])
  | Lambda Parameter [Exp] -- ^ an anonymous function declaration
  | Def Visibility Order Name Parameter [Exp] -- ^ a method declaration
  -- | a method declaration of the form `def self.xxx`
| DefSelf Visibility Order Name Parameter [Exp]
-- | application of the actual params [Exp] to the function found at var
| Apply Var [Exp] Visibility
  -- | application of the actual params [Exp] to the function derived
  --   from Exp
| ApplyFn Exp [Exp]
| Call Exp Name [Exp] -- ^ method invocation (foo.bar(x))
| Send Exp Name [Exp] -- ^ concurrent method invocation (foo->bar(x))
| ESuper (Maybe [Exp]) -- ^ a call to `super` with or without args
| Assign LHS Exp -- ^ assignment of a var (see LHS)
| OpAssign LHS Op Exp -- ^ assign new value based on the old ( a += 12)
-- | conditional expression if the predicate is not false or nil evaluate
-- the consequent otherwise evaluate the alternate.
| If {predicate :: Exp,consequent :: Exp ,alternate :: Maybe Exp}
| While Exp Exp -- ^ evaluate the second Exp while the first is true
| Until Exp Exp -- ^ evaluate the second Exp until the first becomes true
  -- | Build a new class or reopen an existing one at Var with (Maybe Var)
  --   as super class. The Exp will be run in the context of the
-- class.
| EClass Var (Maybe Var) CodeBlock -- ^ Create or reopen a class
| Module Var CodeBlock -- ^ Create or reopen a module
| Block [Exp]-- ^ A block of sequential expressions.
-- | EValue (forall m. Scope m => Value m) -- ^ allows values to be "shoved" back into expressions
deriving Show
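-- Illustrative note (added): the surface expression `a * b + c` parses to an
-- 'OpStr' node of roughly the shape @OpStr a [(*, b), (+, c)]@, which Eval.hs
-- then rebuilds into a tree by operator precedence.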
-- | Left Hand Side data structure
--
-- This represents anything that can be assigned to -- that is, could go on the
-- left hand side of an `=`. These include local and scoped vars, IVars,
-- CVars, indexed vars, and method calls (via the foo= convention).
data LHS =
LVar Var
| LIVar Name
| LCVar Name
| LIndex Exp [Exp]
| LCall Exp Name [Exp]
| LSend Exp Name [Exp] deriving Show
| antarestrader/sapphire | AST.hs | gpl-3.0 | 3,368 | 0 | 9 | 785 | 441 | 284 | 157 | 45 | 0 |
{-# LANGUAGE DeriveFunctor,OverloadedStrings #-}
module Lang.LintRich where
import Lang.Rich
import Lang.Type
import Control.Applicative
import Control.Monad
import Control.Monad.Writer
import Control.Monad.Reader
import Data.Map (Map)
import qualified Data.Map as M
import Text.PrettyPrint
import Lang.PrettyRich
type LintM v a = WriterT [Err v] (Reader (Map v (Type v))) a
lint :: Ord v => LintM v a -> [Err v]
lint m = runReader (execWriterT m) M.empty
type TypedExpr v = Expr (Typed v)
data Err v
= AlreadyBound v (Type v) (Type v)
| BoundAsOtherType v (Type v) (Type v)
| ExprTypeDisagrees (TypedExpr v) (Type v) (Type v)
| VarIncorrectlyApplied (TypedExpr v)
| NotFunctionType (TypedExpr v) (Type v)
| IncorrectApplication (TypedExpr v) (Type v) (Type v)
| ScrutineeVarIllTyped (TypedExpr v) (Type v) (Type v)
| CaseWithoutAlts (TypedExpr v)
| AltsRHSIllTyped (TypedExpr v) [Type v]
| ConstructorIncorrectlyApplied (Pattern (Typed v))
| IllTypedPattern (Type v) (Pattern (Typed v))
deriving (Show,Functor)
ppErr :: (v -> Doc) -> Err v -> Doc
ppErr p err = case err of
AlreadyBound v t1 t2 -> sep
[p v,"is bound as:",ppType 0 p t1,", but rebound as:",ppType 0 p t2]
BoundAsOtherType v t1 t2 -> sep
[p v,"is bound as:",ppType 0 p t1,", but used as:",ppType 0 p t2]
ExprTypeDisagrees e t1 t2 -> sep
[ppExpr 0 k e,"has type:",ppType 0 p t1,", but exprType thinks:",ppType 0 p t2]
VarIncorrectlyApplied e -> "Variable incorrectly applied: " <+> ppExpr 0 k e
NotFunctionType e t -> sep
[ppExpr 0 k e,"should be of function type, but is:",ppType 0 p t]
IncorrectApplication e t1 t2 -> sep
[ppExpr 0 k e,"incorrectly applied. Argument should be:",ppType 0 p t1,"but is:",ppType 0 p t2]
ScrutineeVarIllTyped e t1 t2 -> sep
    [ppExpr 0 k e,"scrutinee should be:",ppType 0 p t1,"but is:",ppType 0 p t2]
CaseWithoutAlts e -> "Case without alternatives: " <+> ppExpr 0 k e
AltsRHSIllTyped e ts -> sep $
"Alternatives in case differ in type:":ppExpr 0 k e:map (ppType 0 p) ts
ConstructorIncorrectlyApplied pat -> "Constructor incorrectly applied:" <+> ppPat k pat
IllTypedPattern t pat -> ppPat k pat <+> "pattern illtyped, has type:" <+> ppType 0 p t
where
k = (p . forget_type,ppTyped p)
report :: Err v -> LintM v ()
report = tell . (:[])
insertVar :: Ord v => Typed v -> LintM v a -> LintM v a
insertVar (v ::: t) m = do
mt <- asks (M.lookup v)
case mt of
Just t' -> report (AlreadyBound v t t') >> m
Nothing -> local (M.insert v t) m
insertVars :: Ord v => [Typed v] -> LintM v a -> LintM v a
insertVars xs m = foldr insertVar m xs
lintVar :: Ord v => Typed v -> LintM v ()
lintVar (v ::: t) = do
mt <- asks (M.lookup v)
case mt of
Just t' | not (t `eqType` t') -> report (BoundAsOtherType v t t')
_ -> return ()
lintFns :: Ord v => [Function (Typed v)] -> LintM v ()
lintFns fns = lintFnsAnd fns (return ())
lintFnsAnd :: Ord v => [Function (Typed v)] -> LintM v a -> LintM v a
lintFnsAnd fns m = insertVars (map fn_name fns)
(mapM_ (lintExpr . fn_body) fns >> m)
lintExpr :: Ord v => TypedExpr v -> LintM v (Type v)
lintExpr e0 = chk_ret $ case e0 of
Var v@(_ ::: ty) ts -> do
lintVar v
let (tvs,ty') = collectForalls ty
when (length ts /= length tvs) (report (VarIncorrectlyApplied e0))
return (substManyTys (zip tvs (map forget ts)) ty')
App e1 e2 -> do
t1 <- lintExpr e1
t2 <- lintExpr e2
case t1 of
ArrTy ta tb -> do
unless (ta `eqType` t2) (report (IncorrectApplication e0 ta t2))
return tb
_ -> do
report (NotFunctionType e1 t1)
return Star
Lit _ (t ::: _) -> return (TyCon t [])
String (t ::: _) -> return (TyCon t [])
Lam x@(_ ::: t) e -> insertVar x (ArrTy t <$> lintExpr e)
Case e mx {- @(_ ::: tx) -} alts -> do
ts <- lintExpr e
case mx of
Just (_ ::: tx) | not (ts `eqType` tx)
-> report (ScrutineeVarIllTyped e0 ts tx)
_ -> return ()
tys <- maybe id insertVar mx (mapM (lintAlt ts) alts)
case tys of
[] -> report (CaseWithoutAlts e0) >> return Star
t:tys' -> do
unless (all (eqType t) tys') (report (AltsRHSIllTyped e0 tys))
return t
Let fns e -> lintFnsAnd fns (lintExpr e)
where
chk_ret m = do
t <- m
let t' = exprType e0
unless (t `eqType` t') (report (ExprTypeDisagrees e0 t t'))
return t
lintAlt :: Ord v => Type v -> Alt (Typed v) -> LintM v (Type v)
lintAlt t0 (p,rhs) = lintPat t0 p >> lintExpr rhs
lintPat :: Ord v => Type v -> Pattern (Typed v) -> LintM v ()
lintPat t0 p = case p of
Default -> return ()
ConPat (_ ::: t) tys args -> do
let (tvs,ty) = collectForalls t
when (length tys /= length tvs) (report (ConstructorIncorrectlyApplied p))
let ty' = substManyTys (zip tvs (map forget tys)) ty
(args_ty,res_ty) = collectArrTy ty'
when (length args_ty /= length args) (report (ConstructorIncorrectlyApplied p))
sequence_
[ unless (t1 `eqType` t2) (report (ConstructorIncorrectlyApplied p))
| (_ ::: t1,t2) <- zip args args_ty
]
unless (res_ty `eqType` t0) (report (IllTypedPattern t0 p))
LitPat _ (t ::: _) -> when (TyCon t [] /= t0) (report (IllTypedPattern t0 p))
| danr/tfp1 | Lang/LintRich.hs | gpl-3.0 | 5,584 | 0 | 20 | 1,592 | 2,442 | 1,195 | 1,247 | 125 | 11 |
{- |
mtlstats
Copyright (C) 1984, 1985, 2019, 2020, 2021 Rhéal Lamothe
<[email protected]>
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or (at
your option) any later version.
This program is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <https://www.gnu.org/licenses/>.
-}
{-# LANGUAGE LambdaCase #-}
module Mtlstats.Control.EditStandings (editStandingsC) where
import Lens.Micro ((^.))
import qualified UI.NCurses as C
import Mtlstats.Format
import Mtlstats.Menu
import Mtlstats.Menu.EditStandings
import Mtlstats.Prompt
import Mtlstats.Prompt.EditStandings
import Mtlstats.Types
import Mtlstats.Types.Menu
-- | Controller for the edit standings menu
editStandingsC :: EditStandingsMode -> Controller
editStandingsC = \case
ESMMenu -> menuControllerWith header editStandingsMenu
ESMHome m -> editHomeStandingsC m
ESMAway m -> editAwayStandingsC m
editHomeStandingsC :: ESMSubMode -> Controller
editHomeStandingsC = \case
ESMSubMenu -> menuC editHomeStandingsMenu
ESMEditWins -> promptC editHomeWinsPrompt
ESMEditLosses -> promptC editHomeLossesPrompt
ESMEditOvertime -> promptC editHomeOvertimePrompt
ESMEditGoalsFor -> promptC editHomeGoalsForPrompt
ESMEditGoalsAgainst -> promptC editHomeGoalsAgainstPrompt
editAwayStandingsC :: ESMSubMode -> Controller
editAwayStandingsC = \case
ESMSubMenu -> menuC editAwayStandingsMenu
ESMEditWins -> promptC editAwayWinsPrompt
ESMEditLosses -> promptC editAwayLossesPrompt
ESMEditOvertime -> promptC editAwayOvertimePrompt
ESMEditGoalsFor -> promptC editAwayGoalsForPrompt
ESMEditGoalsAgainst -> promptC editAwayGoalsAgainstPrompt
menuC :: Menu () -> Controller
menuC = menuControllerWith header
promptC :: Prompt -> Controller
promptC = promptControllerWith header
header :: ProgState -> C.Update ()
header = do
db <- (^.database)
let
home = db^.dbHomeGameStats
away = db^.dbAwayGameStats
table = numTable [" W", " L", " OT", " GF", " GA"]
[ ( "HOME", valsFor home )
, ( "ROAD", valsFor away )
]
return $ C.drawString $ unlines $ table ++ [""]
valsFor :: GameStats -> [Int]
valsFor gs =
[ gs^.gmsWins
, gs^.gmsLosses
, gs^.gmsOvertime
, gs^.gmsGoalsFor
, gs^.gmsGoalsAgainst
]
| mtlstats/mtlstats | src/Mtlstats/Control/EditStandings.hs | gpl-3.0 | 2,725 | 0 | 13 | 499 | 485 | 259 | 226 | 53 | 6 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.FirebaseHosting.Sites.Domains.List
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Lists the domains for the specified site.
--
-- /See:/ <https://firebase.google.com/docs/hosting/ Firebase Hosting API Reference> for @firebasehosting.sites.domains.list@.
module Network.Google.Resource.FirebaseHosting.Sites.Domains.List
(
-- * REST Resource
SitesDomainsListResource
-- * Creating a Request
, sitesDomainsList
, SitesDomainsList
-- * Request Lenses
, sdlParent
, sdlXgafv
, sdlUploadProtocol
, sdlAccessToken
, sdlUploadType
, sdlPageToken
, sdlPageSize
, sdlCallback
) where
import Network.Google.FirebaseHosting.Types
import Network.Google.Prelude
-- | A resource alias for @firebasehosting.sites.domains.list@ method which the
-- 'SitesDomainsList' request conforms to.
type SitesDomainsListResource =
"v1beta1" :>
Capture "parent" Text :>
"domains" :>
QueryParam "$.xgafv" Xgafv :>
QueryParam "upload_protocol" Text :>
QueryParam "access_token" Text :>
QueryParam "uploadType" Text :>
QueryParam "pageToken" Text :>
QueryParam "pageSize" (Textual Int32) :>
QueryParam "callback" Text :>
QueryParam "alt" AltJSON :>
Get '[JSON] ListDomainsResponse
-- | Lists the domains for the specified site.
--
-- /See:/ 'sitesDomainsList' smart constructor.
data SitesDomainsList =
SitesDomainsList'
{ _sdlParent :: !Text
, _sdlXgafv :: !(Maybe Xgafv)
, _sdlUploadProtocol :: !(Maybe Text)
, _sdlAccessToken :: !(Maybe Text)
, _sdlUploadType :: !(Maybe Text)
, _sdlPageToken :: !(Maybe Text)
, _sdlPageSize :: !(Maybe (Textual Int32))
, _sdlCallback :: !(Maybe Text)
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'SitesDomainsList' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'sdlParent'
--
-- * 'sdlXgafv'
--
-- * 'sdlUploadProtocol'
--
-- * 'sdlAccessToken'
--
-- * 'sdlUploadType'
--
-- * 'sdlPageToken'
--
-- * 'sdlPageSize'
--
-- * 'sdlCallback'
sitesDomainsList
:: Text -- ^ 'sdlParent'
-> SitesDomainsList
sitesDomainsList pSdlParent_ =
SitesDomainsList'
{ _sdlParent = pSdlParent_
, _sdlXgafv = Nothing
, _sdlUploadProtocol = Nothing
, _sdlAccessToken = Nothing
, _sdlUploadType = Nothing
, _sdlPageToken = Nothing
, _sdlPageSize = Nothing
, _sdlCallback = Nothing
}
-- | Required. The parent for which to list domains, in the format:
-- 'sites\/site-name'
sdlParent :: Lens' SitesDomainsList Text
sdlParent
= lens _sdlParent (\ s a -> s{_sdlParent = a})
-- | V1 error format.
sdlXgafv :: Lens' SitesDomainsList (Maybe Xgafv)
sdlXgafv = lens _sdlXgafv (\ s a -> s{_sdlXgafv = a})
-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
sdlUploadProtocol :: Lens' SitesDomainsList (Maybe Text)
sdlUploadProtocol
= lens _sdlUploadProtocol
(\ s a -> s{_sdlUploadProtocol = a})
-- | OAuth access token.
sdlAccessToken :: Lens' SitesDomainsList (Maybe Text)
sdlAccessToken
= lens _sdlAccessToken
(\ s a -> s{_sdlAccessToken = a})
-- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
sdlUploadType :: Lens' SitesDomainsList (Maybe Text)
sdlUploadType
= lens _sdlUploadType
(\ s a -> s{_sdlUploadType = a})
-- | The next_page_token from a previous request, if provided.
sdlPageToken :: Lens' SitesDomainsList (Maybe Text)
sdlPageToken
= lens _sdlPageToken (\ s a -> s{_sdlPageToken = a})
-- | The page size to return. Defaults to 50.
sdlPageSize :: Lens' SitesDomainsList (Maybe Int32)
sdlPageSize
= lens _sdlPageSize (\ s a -> s{_sdlPageSize = a}) .
mapping _Coerce
-- | JSONP
sdlCallback :: Lens' SitesDomainsList (Maybe Text)
sdlCallback
= lens _sdlCallback (\ s a -> s{_sdlCallback = a})
instance GoogleRequest SitesDomainsList where
type Rs SitesDomainsList = ListDomainsResponse
type Scopes SitesDomainsList =
'["https://www.googleapis.com/auth/cloud-platform",
"https://www.googleapis.com/auth/cloud-platform.read-only",
"https://www.googleapis.com/auth/firebase",
"https://www.googleapis.com/auth/firebase.readonly"]
requestClient SitesDomainsList'{..}
= go _sdlParent _sdlXgafv _sdlUploadProtocol
_sdlAccessToken
_sdlUploadType
_sdlPageToken
_sdlPageSize
_sdlCallback
(Just AltJSON)
firebaseHostingService
where go
= buildClient
(Proxy :: Proxy SitesDomainsListResource)
mempty
| brendanhay/gogol | gogol-firebasehosting/gen/Network/Google/Resource/FirebaseHosting/Sites/Domains/List.hs | mpl-2.0 | 5,639 | 0 | 18 | 1,405 | 888 | 514 | 374 | 127 | 1 |
{-# LANGUAGE OverloadedLists, OverloadedStrings #-}
module NicovideoTranslator.Translate (ApiKey, translate) where
import GHC.Exts (IsList (toList))
import Prelude hiding (lookup)
import Control.Concurrent.Async (concurrently)
import Control.Lens ((&), (.~), (^.))
import Data.Aeson.Types (Value (Array, Bool, Object, String), toJSON)
import Data.HashMap.Strict (lookup)
import Data.LanguageCodes (ISO639_1)
import Data.Text (Text, pack, toLower, unpack)
import Network.Wreq ( Options
, Response
, asJSON
, defaults
, param
, postWith
, responseBody
)
type ApiKey = Text
apiUrl :: String
apiUrl = "https://translation.googleapis.com/language/translate/v2"
translate :: ApiKey -> ISO639_1 -> [Text] -> IO [Text]
translate apiKey target texts =
case splitAt 128 texts of
([], _) -> return []
(head, []) -> translate' apiKey target head
(head, tail) -> do
let trans = translate apiKey target
(headResult, tailResult) <- concurrently (trans head) (trans tail)
return $ headResult ++ tailResult
translate' :: ApiKey -> ISO639_1 -> [Text] -> IO [Text]
translate' apiKey target texts = do
response <- (asJSON =<< postWith query apiUrl params) :: IO (Response Value)
let Object body = response ^. responseBody
Just (Object data') = lookup "data" body
Just (Array translations) = lookup "translations" data'
return $ [ s
| Object r <- toList translations
, Just (String s) <- [lookup "translatedText" r]
]
where
query :: Options
query = defaults & param "key" .~ [apiKey]
params :: Value
params = Object
[ ("target", String $ toLower . pack . show $ target)
, ("source", String "ja")
, ("prettyprint", Bool False)
, ("format", String "text")
, ("q", toJSON texts)
]
| dahlia/nicovideo-translator | lib/NicovideoTranslator/Translate.hs | agpl-3.0 | 2,000 | 0 | 13 | 585 | 622 | 344 | 278 | 47 | 3 |
-- |Helper functions for retrying, for example a connection.
module Retry where
import Control.Concurrent (threadDelay)
import Control.Exception
import Control.Monad (when)
import Data.Time.Clock.POSIX (getPOSIXTime)
-- |This function runs the given action repeatedly until it succeeds. On
-- every failure, the reporter function is called. If the action failed less
-- than 5 minutes after it was started, keep a 5 minute break before
-- retrying; otherwise retry immediately.
foreverRetry :: (Exception e) => IO a -> (e -> IO ()) -> IO a
foreverRetry act reporter = do
before <- getPOSIXTime
result <- try act
case result of
Right a -> return a -- Graceful exit
Left e -> do
after <- getPOSIXTime
reporter e
-- 5 minutes delay if last fail was less than 5 minutes ago
when (after - before < 300) $ threadDelay 300000000
foreverRetry act reporter
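-- Example usage (added; 'openConnection' is a placeholder for any IO action
-- that may throw):
--
-- > conn <- foreverRetry openConnection printException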
-- |Simple retry reporter if you don't want to write one
-- yourself. Using it also makes the retry handler accept all kinds of
-- exceptions (SomeException).
printException :: SomeException -> IO ()
printException e = putStrLn $ "Exception received (retrying): " ++ show e
-- |Shorthand for an easy retry.
foreverRetryPrintEx :: IO a -> IO a
foreverRetryPrintEx = flip foreverRetry printException
| koodilehto/kryptoradio | data_sources/exchange/Retry.hs | agpl-3.0 | 1,237 | 0 | 16 | 239 | 250 | 128 | 122 | 20 | 2 |
--
-- Copyright 2017 Azad Bolour
-- Licensed under GNU Affero General Public License v3.0 -
-- https://github.com/azadbolour/boardgame/blob/master/LICENSE.md
--
{-# LANGUAGE ScopedTypeVariables #-}
module Main where
import Data.IORef
main :: IO ()
add1 :: IORef Int -> IO ()
add1 ref = modifyIORef' ref (+1)
main = do
box <- newIORef 4
val1 <- readIORef box
print val1
add1 box
readIORef box >>= print | azadbolour/boardgame | haskell-server/trial/IORefSample.hs | agpl-3.0 | 419 | 0 | 8 | 81 | 106 | 53 | 53 | 12 | 1 |
{-# LANGUAGE FlexibleContexts #-}
{-|
Module : Control.Concurrent.NQE.Publisher
Copyright : No rights reserved
License : UNLICENSE
Maintainer : [email protected]
Stability : experimental
Portability : POSIX
A publisher is a process that forwards messages to subscribers. NQE publishers
are simple, and do not implement filtering directly, although that can be done
on the 'STM' 'Listen' actions that forward messages to subscribers.
If a subscriber has been added to a publisher using the 'subscribe' function, it
needs to be removed later using 'unsubscribe' when it is no longer needed, or
the publisher will continue calling its 'Listen' action in the future, likely
causing memory leaks.
-}
module Control.Concurrent.NQE.Publisher
( Subscriber
, PublisherMessage(..)
, Publisher
, withSubscription
, subscribe
, unsubscribe
, withPublisher
, publisher
, publisherProcess
, publish
, publishSTM
) where
import Control.Concurrent.NQE.Process
import Control.Concurrent.Unique
import Control.Monad.Reader
import Data.Function
import Data.Hashable
import Data.List
import UnliftIO
-- | Handle of a subscriber to a process. Should be kept in order to
-- unsubscribe.
data Subscriber msg = Subscriber (Listen msg) Unique
instance Eq (Subscriber msg) where
(==) = (==) `on` f
where
f (Subscriber _ u) = u
instance Hashable (Subscriber msg) where
hashWithSalt i (Subscriber _ u) = hashWithSalt i u
-- | Messages that a publisher will take.
data PublisherMessage msg
= Subscribe !(Listen msg) !(Listen (Subscriber msg))
| Unsubscribe !(Subscriber msg)
| Event msg
-- | Alias for a publisher process.
type Publisher msg = Process (PublisherMessage msg)
publish :: MonadIO m => msg -> Publisher msg -> m ()
publish = send . Event
publishSTM :: msg -> Publisher msg -> STM ()
publishSTM = sendSTM . Event
-- | Create a mailbox, subscribe it to a publisher and pass it to the supplied
-- function. The subscription ends when the function returns.
withSubscription ::
MonadUnliftIO m => Publisher msg -> (Inbox msg -> m a) -> m a
withSubscription pub f = do
inbox <- newInbox
let sub = subscribe pub (`sendSTM` inbox)
unsub = unsubscribe pub
bracket sub unsub $ \_ -> f inbox
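-- | Illustrative usage sketch (added, not part of the original API): publish
-- one event and read it back through a temporary subscription.
exampleRoundTrip :: MonadUnliftIO m => msg -> m msg
exampleRoundTrip x =
    withPublisher $ \pub ->
        withSubscription pub $ \inbox -> do
            publish x pub
            receive inbox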
-- | 'Listen' to events from a publisher.
subscribe :: MonadIO m => Publisher msg -> Listen msg -> m (Subscriber msg)
subscribe pub sub = Subscribe sub `query` pub
-- | Stop listening to events from a publisher. Must provide 'Subscriber' that
-- was returned from corresponding 'subscribe' action.
unsubscribe :: MonadIO m => Publisher msg -> Subscriber msg -> m ()
unsubscribe pub sub = Unsubscribe sub `send` pub
-- | Start a publisher in the background and pass it to a function. The
-- publisher will be stopped when the function returns.
withPublisher :: MonadUnliftIO m => (Publisher msg -> m a) -> m a
withPublisher = withProcess publisherProcess
-- | Start a publisher in the background.
publisher :: MonadUnliftIO m => m (Publisher msg)
publisher = process publisherProcess
-- | Start a publisher in the current thread.
publisherProcess :: MonadUnliftIO m => Inbox (PublisherMessage msg) -> m ()
publisherProcess inbox = newTVarIO [] >>= runReaderT go
where
go = forever $ receive inbox >>= publisherMessage
-- | Internal function to dispatch a publisher message.
publisherMessage ::
(MonadIO m, MonadReader (TVar [Subscriber msg]) m)
=> PublisherMessage msg
-> m ()
publisherMessage (Subscribe sub r) =
ask >>= \box -> do
u <- liftIO newUnique
let s = Subscriber sub u
atomically $ do
modifyTVar box (`union` [s])
r s
publisherMessage (Unsubscribe sub) =
ask >>= \box -> atomically (modifyTVar box (delete sub))
publisherMessage (Event event) =
ask >>= \box ->
atomically $
readTVar box >>= \subs ->
forM_ subs $ \(Subscriber sub _) -> sub event
| xenog/nqe | src/Control/Concurrent/NQE/Publisher.hs | unlicense | 4,054 | 0 | 14 | 926 | 906 | 471 | 435 | 77 | 1 |
module LearnParsers where
import Text.Trifecta
import Text.ParserCombinators.ReadP ( ReadP )
import Control.Applicative
stop :: Parser a
stop = unexpected "stop"
{-
Exercises: Parsing Practice
1. Modified `one` and `oneTwo` to include `eof >> return ret`
-}
-- read a single character '1'
one = char '1' >>= \ret -> eof >> return ret
-- read a single character '1', then die
one' = one >> stop
-- before the eof change above, this was equivalent to char '1' >> stop
-- read two characters, '1', and '2'
oneTwo = char '1' >> char '2' >>= \ret -> eof >> return ret
-- read two characters, '1' and '2', then die
oneTwo' = oneTwo >> stop
testParse :: Parser Char -> IO ()
testParse p = print $ parseString p mempty "123"
{-
Exercises: Parsing Practice
2. Add `oneTwoThree` and `testParseString` to parse a string
-}
-- Parses up to "123"
oneTwoThree = string "123" <|> string "12" <|> string "1"
oneTwoThree' = (string "123" <|> string "12" <|> string "1") >> stop
testParseString :: Parser String -> String -> IO ()
testParseString p s = print $ parseString p mempty s
{-
Exercises: Parsing Practice
3.
-}
myStringParser :: String -> Parser String
myStringParser (x:xs) = char x >> myStringParser xs >> return (x:xs)
myStringParser [] = return []
myOneTwoThree = myStringParser "123"
<|> myStringParser "12"
<|> myStringParser "1"
pNL s = putStrLn ('\n' : s)
main = do
pNL "stop"
testParse stop
pNL "one:"
testParse one
pNL "one':"
testParse one'
pNL "oneTwo:"
testParse oneTwo
pNL "oneTwo':"
testParse oneTwo'
-- Exercises: Parsing Practice
-- 2.
pNL "oneTwoThree:"
testParseString oneTwoThree "1"
testParseString oneTwoThree "12"
testParseString oneTwoThree "123"
pNL "oneTwoThree':"
testParseString oneTwoThree' "1"
testParseString oneTwoThree' "12"
testParseString oneTwoThree' "123"
-- Exercises: Parsing Practice
-- 3.
pNL "oneTwoThree'':"
testParseString myOneTwoThree "1"
testParseString myOneTwoThree "12"
testParseString myOneTwoThree "123"
| dmp1ce/Haskell-Programming-Exercises | Chapter 24/src/LearnParsers.hs | unlicense | 2,022 | 0 | 9 | 404 | 523 | 246 | 277 | 46 | 1 |
{-
- Copyright (c) 2017 The Agile Monkeys S.L. <[email protected]>
-
- Licensed under the Apache License, Version 2.0 (the "License");
- you may not use this file except in compliance with the License.
- You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
-}
module HaskellDo.Toolbar.Types where
data State = State
{ projectPath :: String
, lastProject :: String
, projectConfig :: String
, projectOpened :: Bool
, createProject :: Bool
, directoryExists :: Bool
, directoryList :: ([String], [String]) -- (directories, files)
, newDirectoryPath :: String
} deriving (Read, Show)
data Action
= Compile
| OpenProject
| LoadPackageYaml
| NewPath String
| NewPackage String
| NewDirectoryModal
| NewDirectory String
| CreateNewDirectory
| LoadProject
| SavePackage
| ClosePackageModal
| ToggleEditor
| ToggleError
| ConvertToPDF
deriving (Read, Show)
| J2RGEZ/haskell-do | src/common/HaskellDo/Toolbar/Types.hs | apache-2.0 | 1,339 | 0 | 10 | 323 | 152 | 97 | 55 | 27 | 0 |
{-# LANGUAGE DeriveDataTypeable, NamedFieldPuns, RecordWildCards,
ScopedTypeVariables #-}
-- |
-- Module: Network.Riak.Connection.Pool
-- Copyright: (c) 2011 MailRank, Inc.
-- License: Apache
-- Maintainer: Nathan Hunter <[email protected]>
-- Stability: experimental
-- Portability: portable
--
-- A high-performance striped pooling abstraction for managing
-- connections to a Riak cluster. This is a thin wrapper around
-- 'Data.Pool'.
module Network.Riak.Connection.Pool
(
Pool
, client
, create
, idleTime
, maxConnections
, numStripes
, withConnection
) where
import Data.Time.Clock (NominalDiffTime)
import Data.Typeable (Typeable)
import Network.Riak (Client(clientID), Connection, connect, disconnect)
import Network.Riak.Connection (makeClientID)
import qualified Data.Pool as Pool
-- | A pool of connections to a Riak server.
--
-- This pool is \"striped\", i.e. it consists of several sub-pools
-- that are managed independently.
--
-- The total number of connections that can possibly be open at once
-- is 'maxConnections' * 'numStripes'.
data Pool = Pool {
client :: Client
-- ^ Client specification. The client ID is ignored, and always
-- regenerated automatically for each new connection.
, pool :: Pool.Pool Connection
} deriving (Typeable)
instance Show Pool where
show p = "Pool { client = " ++ show (client p) ++ ", " ++
"numStripes = " ++ show (numStripes p) ++ ", " ++
"idleTime = " ++ show (idleTime p) ++ ", " ++
"maxConnections = " ++ show (maxConnections p) ++ "}"
instance Eq Pool where
a == b = client a == client b && numStripes a == numStripes b &&
idleTime a == idleTime b && maxConnections a == maxConnections b
-- | Create a new connection pool.
create :: Client
-- ^ Client configuration. The client ID is ignored, and
-- always regenerated automatically for each new connection.
-> Int
-- ^ Stripe count. The number of distinct sub-pools to
-- maintain. The smallest acceptable value is 1.
-> NominalDiffTime
-- ^ Amount of time for which an unused connection is kept
-- open. The smallest acceptable value is 0.5 seconds.
--
-- The elapsed time before closing may be a little longer than
-- requested, as the reaper thread wakes at 2-second intervals.
-> Int
-- ^ Maximum number of connections to keep open per stripe.
-- The smallest acceptable value is 1.
--
-- Requests for connections will block if this limit is reached
-- on a single stripe, even if other stripes have idle
-- connections available.
-> IO Pool
create client ns it mc =
Pool client `fmap` Pool.createPool c disconnect ns it mc
where c = do
cid <- makeClientID
connect client { clientID = cid }
-- | Stripe count. The number of distinct sub-pools to maintain. The
-- smallest acceptable value is 1.
numStripes :: Pool -> Int
numStripes = Pool.numStripes . pool
-- | Amount of time for which an unused connection is kept open. The
-- smallest acceptable value is 0.5 seconds.
--
-- The elapsed time before closing may be a little longer than
-- requested, as the reaper thread wakes at 1-second intervals.
idleTime :: Pool -> NominalDiffTime
idleTime = Pool.idleTime . pool
-- | Maximum number of connections to keep open per stripe. The
-- smallest acceptable value is 1.
--
-- Requests for connections will block if this limit is reached on a
-- single stripe, even if other stripes have idle connections
-- available.
maxConnections :: Pool -> Int
maxConnections = Pool.maxResources . pool
-- | Temporarily take a connection from a 'Pool', perform an action
-- with it, and return it to the pool afterwards.
--
-- * If the pool has a connection available, it is used
-- immediately.
--
-- * Otherwise, if the maximum number of connections has not been
-- reached, a new connection is created and used.
--
-- * If the maximum number of connections has been reached, this
-- function blocks until a connection becomes available, then that
-- connection is used.
--
-- If the action throws an exception of any type, the 'Connection' is
-- destroyed, and not returned to the pool.
--
-- It probably goes without saying that you should never call
-- 'disconnect' on a connection, as doing so will cause a subsequent
-- user (who expects the connection to be valid) to throw an exception.
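--
-- An illustrative sketch ('myClient' and 'useConnection' are placeholders,
-- not part of this module):
--
-- > do pool <- create myClient 1 10 20
-- >    withConnection pool $ \conn -> useConnection conn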
withConnection :: Pool -> (Connection -> IO a) -> IO a
withConnection = Pool.withResource . pool
| janrain/riak-haskell-client | src/Network/Riak/Connection/Pool.hs | apache-2.0 | 4,640 | 0 | 19 | 1,071 | 548 | 323 | 225 | 46 | 1 |
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE TemplateHaskell #-}
module Network.MoeSocks.Type
(
module Network.MoeSocks.Type.Runtime
, module Network.MoeSocks.Type.Common
)
where
import Network.MoeSocks.Type.Runtime
import Network.MoeSocks.Type.Common
| nfjinjing/moesocks | src/Network/MoeSocks/Type.hs | apache-2.0 | 255 | 0 | 5 | 26 | 41 | 30 | 11 | 8 | 0 |
{-# LANGUAGE TupleSections, TypeOperators, Rank2Types, BangPatterns, FunctionalDependencies, MultiParamTypeClasses, MagicHash, ScopedTypeVariables, GADTs, FlexibleContexts, TypeFamilies, TypeSynonymInstances, FlexibleInstances #-}
module System.Mem.Concurrent.WeakMap (
WeakMap(..)
, new,new',copy'
, lookup
, insertWithMkWeak, insertWeak, mergeWeak
, deleteFinalized, finalizeEntry
, unionWithKey, extendWithKey, unionWithKey', mergeWithKey
, toMap,toMap'
, mapM_,mapM_',mapM'',purge, foldrM
) where
-- | Implementation of memo tables using hash tables and weak pointers as presented in http://community.haskell.org/~simonmar/papers/weak.pdf.
-- | Unlike the original, this concurrent variant stores entries in a strict
-- 'Map' and does not require the hashtables package.
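--
-- A hypothetical usage sketch ('myKey' and 'myValue' are placeholders):
--
-- > wm <- new
-- > insertWithMkWeak wm (MkWeak (Weak.mkWeak myKey)) myKey myValue
-- > mv <- lookup wm myKey  -- Nothing once 'myKey' is no longer reachable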
import Prelude hiding (lookup,mapM_)
import qualified Prelude
import Control.Exception
import Control.Concurrent.MVar
import Data.Atomics
import System.Mem.Weak.Exts (Weak(..),MkWeak(..))
import qualified System.Mem.Weak.Exts as Weak
import System.IO.Unsafe
import Control.Monad hiding (mapM_,foldM)
import qualified Control.Monad
import Data.Hashable
import GHC.Base
import Control.Monad.Trans
import Data.Unique
import Data.Strict.Tuple as Strict
import Data.Map.Strict (Map(..))
import qualified Data.Map.Strict as Map
import Data.IORef
import qualified Data.Foldable as Foldable
import Data.Strict.List as SList
import Debug
newtype WeakMap k v = WeakMap (WeakMap' k v :!: Weak (WeakMap' k v))
type WeakMap' k v = MVar (Map k (Weak v))
toMap :: MonadIO m => WeakMap k v -> m (Map k (Weak v))
toMap (WeakMap (tbl :!: _)) = liftIO $ readMVar tbl
toMap' :: (MonadIO m,Ord k) => WeakMap k v -> m (Map k v)
toMap' w = do
m <- toMap w
let add k w m = do
mb <- liftIO (Weak.deRefWeak w)
case mb of
Nothing -> m
Just v -> liftM (Map.insert k v) m
liftIO $ Map.foldrWithKey add (return Map.empty) m
{-# NOINLINE new #-}
new :: (Eq k,Hashable k) => IO (WeakMap k v)
new = do
tbl <- newMVar Map.empty
weak_tbl <- Weak.mkWeakKey tbl tbl $ Just $ table_finalizer tbl
return $ WeakMap (tbl :!: weak_tbl)
-- without finalization
{-# NOINLINE new' #-}
new' :: (Eq k,Hashable k) => IO (WeakMap k v)
new' = do
tbl <- newMVar Map.empty
weak_tbl <- Weak.mkWeakKey tbl tbl Nothing
return $ WeakMap (tbl :!: weak_tbl)
{-# NOINLINE copy' #-}
copy' :: (Eq k,Hashable k) => WeakMap k v -> IO (WeakMap k v)
copy' (WeakMap (src_tbl :!: _)) = do
tbl <- readMVar src_tbl >>= newMVar
weak_tbl <- Weak.mkWeakKey tbl tbl Nothing
return $ WeakMap (tbl :!: weak_tbl)
--{-# NOINLINE newFor #-}
---- | creates a new weak table that is uniquely identified by an argument value @a@
--newFor :: (Eq k,Hashable k) => a -> IO (WeakMap k v)
--newFor a = do
-- tbl <- CMap.empty
-- let (MkWeak mkWeak) = MkWeak (mkWeakKey tbl) `orMkWeak` MkWeak (Weak.mkWeak a)
-- weak_tbl <- mkWeak tbl $ Just $ table_finalizer tbl
-- return $ WeakMap (tbl :!: weak_tbl)
--
--newForMkWeak :: (Eq k,Hashable k) => MkWeak -> IO (WeakMap k v)
--newForMkWeak (MkWeak mkWeak) = do
-- tbl <- newIORef Map.empty
-- weak_tbl <- mkWeak tbl $ Just $ table_finalizer tbl
-- return $ WeakMap (tbl :!: weak_tbl)
finalize :: (Eq k,Hashable k) => WeakMap k v -> IO ()
finalize w_tbl@(WeakMap (_ :!: weak_tbl)) = do
mb <- Weak.deRefWeak weak_tbl
case mb of
Nothing -> return ()
Just weak_tbl' -> table_finalizer weak_tbl'
table_finalizer :: (Eq k,Hashable k) => WeakMap' k v -> IO ()
table_finalizer tbl = do
pairs <- readMVar tbl
Foldable.mapM_ Weak.finalize pairs
finalizeEntry :: Ord k => WeakMap k v -> k -> IO ()
finalizeEntry (WeakMap (_ :!: weak_tbl)) k = do
mb <- Weak.deRefWeak weak_tbl
case mb of
Nothing -> return ()
Just weak_tbl' -> do
tbl <- readMVar weak_tbl'
case Map.lookup k tbl of
Nothing -> return ()
Just w -> Weak.finalize w
--{-# INLINE insert #-}
--insert :: (Eq k,Hashable k) => WeakMap k v -> k -> v -> IO ()
--insert tbl k v = insertWith tbl k k v
--
---- | the key @k@ stores the entry for the value @a@ in the table
--insertWith :: (Eq k,Hashable k) => WeakMap k v -> a -> k -> v -> IO ()
--insertWith w_tbl@(WeakMap (tbl :!: weak_tbl)) a k v = do
-- weak <- Weak.mkWeak a v $ Just $ finalizeEntry' weak_tbl k
-- CMap.insert k weak tbl
--
insertWithMkWeak :: (Ord k,Hashable k) => WeakMap k v -> MkWeak -> k -> v -> IO ()
insertWithMkWeak w_tbl@(WeakMap (tbl :!: _)) (MkWeak mkWeak) k v = do
weak <- mkWeak v $ Just $ deleteFinalized w_tbl k
finalizeEntry w_tbl k
modifyMVarMasked_ tbl (return . Map.insert k weak)
{-# INLINE insertWeak #-}
insertWeak :: (Ord k,Hashable k,MonadIO m) => WeakMap k v -> k -> Weak v -> m ()
insertWeak (WeakMap (tbl :!: _)) k weak = liftIO $ modifyMVarMasked_ tbl (return . Map.insert k weak)
-- non-overlapping union
extendWeak :: (Ord k,Hashable k) => WeakMap k v -> k -> Weak v -> IO ()
extendWeak = mergeWeak (\_ _ -> return False)
-- non-overlapping union
mergeWeak :: (Ord k,Hashable k) => (v -> v -> IO Bool) -> WeakMap k v -> k -> Weak v -> IO ()
mergeWeak doOverwrite (WeakMap (tbl :!: _)) k weak = modifyMVarMasked_ tbl $ \m -> do
case Map.lookup k m of
Nothing -> do
return $ Map.insert k weak m
Just w -> do
mb <- liftIO $ Weak.deRefWeak w
case mb of
Nothing -> return $ Map.insert k weak m
Just oldv -> do
mb <- liftIO $ Weak.deRefWeak weak
case mb of
Nothing -> return m
Just newv -> do
b <- doOverwrite oldv newv
if b
then return $ Map.insert k weak m
else return m
-- only deletes the entry if it is already dead
deleteFinalized :: (Ord k,Hashable k) => WeakMap k v -> k -> IO ()
deleteFinalized (WeakMap (_ :!: weak_tbl)) = finalizeEntry' weak_tbl where
finalizeEntry' weak_tbl k = do
mb <- Weak.deRefWeak weak_tbl
case mb of
Nothing -> return ()
Just r -> modifyMVarMasked_ r $ \m -> do
case Map.lookup k m of
Nothing -> return m
Just w -> do
mb <- Weak.deRefWeak w
case mb of
Nothing -> return $ Map.delete k m
Just x -> return m
lookup :: (Ord k,Hashable k,MonadIO m) => WeakMap k v -> k -> m (Maybe v)
lookup (WeakMap (tbl :!: weak_tbl)) k = liftIO $ do
xs <- readMVar tbl
let mb = Map.lookup k xs
case mb of
Nothing -> return Nothing
Just w -> Weak.deRefWeak w
-- right-biased
-- the second @WeakMap@ is not accessed concurrently
unionWithKey :: (Ord k,Hashable k,MonadIO m) => (v -> MkWeak) -> WeakMap k v -> WeakMap k v -> m ()
unionWithKey getKey wmap m@(WeakMap (tbl :!: _)) = do
xs <- liftM Map.toList $ liftIO $ readMVar tbl
let addFinalizers (k,w) = do
mb <- Weak.deRefWeak w
case mb of
Nothing -> return ()
Just x -> do
let MkWeak mkWeak = getKey x
mkWeak () (Just $ deleteFinalized wmap k)
insertWeak wmap k w
liftIO $ Foldable.mapM_ addFinalizers xs
-- right-biased
-- the second @WeakMap@ is not accessed concurrently
-- without adding finalizers
unionWithKey' :: (Ord k,Hashable k,MonadIO m) => WeakMap k v -> WeakMap k v -> m ()
unionWithKey' wmap m@(WeakMap (tbl :!: _)) = do
xs <- liftM Map.toList $ liftIO $ readMVar tbl
let addFinalizers (k,w) = do
mb <- Weak.deRefWeak w
case mb of
Nothing -> return ()
Just x -> insertWeak wmap k w
liftIO $ Foldable.mapM_ addFinalizers xs
extendWithKey :: (Ord k,Hashable k) => (v -> MkWeak) -> WeakMap k v -> WeakMap k v -> IO ()
extendWithKey = mergeWithKey (\_ _ -> return False)
mergeWithKey :: (Ord k,Hashable k) => (v -> v -> IO Bool) -> (v -> MkWeak) -> WeakMap k v -> WeakMap k v -> IO ()
mergeWithKey merge getKey wmap m@(WeakMap (tbl :!: _)) = do
xs <- liftM Map.toList $ liftIO $ readMVar tbl
let addFinalizers (k,w) = do
mb <- liftIO $ Weak.deRefWeak w
case mb of
Nothing -> return ()
Just x -> do
let MkWeak mkWeak = getKey x
liftIO $ mkWeak () (Just $ deleteFinalized wmap k)
mergeWeak merge wmap k w
Foldable.mapM_ addFinalizers xs
purge :: (Ord k,Hashable k) => WeakMap k v -> IO ()
purge (WeakMap (_ :!: w_map)) = purgeWeak w_map where
purgeWeak :: (Ord k,Hashable k) => Weak (WeakMap' k v) -> IO ()
purgeWeak w_map = do
mb <- Weak.deRefWeak w_map
case mb of
Nothing -> return ()
Just wm -> modifyMVarMasked_ wm (\m -> Foldable.foldlM purgeMap Map.empty (Map.toList m))
purgeMap :: (Ord k,Hashable k) => Map k (Weak v) -> (k,Weak v) -> IO (Map k (Weak v))
purgeMap m (k,w) = do
mb <- Weak.deRefWeak w
case mb of
Nothing -> return m
Just v -> return $ Map.insert k w m
{-# INLINE mapM'' #-}
mapM'' :: Monad m => (forall x . IO x -> m x) -> (Weak v -> m a) -> WeakMap k v -> m [a]
mapM'' liftIO f (WeakMap (tbl :!: _)) = liftIO (readMVar tbl) >>= Control.Monad.mapM f . Map.elems
mapM_' :: Monad m => (forall x . IO x -> m x) -> ((k,v) -> m a) -> WeakMap k v -> m ()
mapM_' liftIO f (WeakMap (tbl :!: _)) = liftIO (readMVar tbl) >>= Control.Monad.mapM_ g . Map.toAscList where
g (k,w) = do
mb <- liftIO $ Weak.deRefWeak w
case mb of
Nothing -> return ()
Just v -> f (k,v) >> return ()
mapM_ :: MonadIO m => ((k,v) -> m a) -> WeakMap k v -> m ()
mapM_ = mapM_' liftIO
foldrM :: MonadIO m => ((k,v) -> b -> m b) -> b -> WeakMap k v -> m b
foldrM f z (WeakMap (tbl :!: _)) = do
xs <- liftIO $ readMVar tbl
let dof k w m = do
mb <- liftIO $ Weak.deRefWeak w
case mb of
Nothing -> m
Just v -> m >>= f (k,v)
Map.foldrWithKey dof (return z) xs
| cornell-pl/HsAdapton | src/System/Mem/Concurrent/WeakMap.hs | bsd-3-clause | 9,263 | 31 | 27 | 1,984 | 3,674 | 1,832 | 1,842 | 201 | 5 |
module Sync.Common where
import Control.Monad.State
import qualified Data.Map as M
import System.INotify
import System.Log.Logger as E
type SyncState = StateT Sync IO ()
type FileStructureFactory = FilePath -> StateT Sync IO (FileStructure)
data Sync = Sync {
getMap :: M.Map FilePath FileStructure,
getFileStructureFactory :: FileStructureFactory,
getBasePath :: FilePath
}
data FileStructure = FileStructure {
getName :: FilePath,
getChildren :: [FilePath],
getWatchDescriptor :: WatchDescriptor
}
instance Show FileStructure where
show a = show $ "[Node: " ++ getName a ++
" children: " ++ (show $ getChildren a) ++ "]"
data FileEvent = FileEvent {
getPath :: FilePath,
getEvent :: Event
} deriving (Show)
logName :: String
logName = "Sync"
l :: String -> IO ()
l msg = debugM logName msg
| kevinm416/sync | Sync/Common.hs | bsd-3-clause | 861 | 0 | 10 | 190 | 246 | 141 | 105 | 26 | 1 |
{-# LANGUAGE CPP, BangPatterns, ViewPatterns, FlexibleInstances, TypeOperators, FlexibleContexts, TypeSynonymInstances #-}
{-# LANGUAGE MultiParamTypeClasses, PatternGuards #-}
#if __GLASGOW_HASKELL__ >= 700
{-# OPTIONS -fllvm #-}
#endif
module Data.TrieMap.RadixTrie.Search (insertEdge) where
import Control.Monad.Unpack
import Control.Monad.Option
import Data.TrieMap.RadixTrie.Base
import Data.TrieMap.RadixTrie.Zipper ()
import Data.Vector.Generic (length)
import Prelude hiding (lookup, length)
#define V(f) f (VVector) (k)
#define U(f) f (PVector) (Word)
#define EDGE(args) (!(eView -> Edge args))
instance TrieKey k => Searchable (TrieMap (VVector k)) (VVector k) where
{-# INLINE search #-}
search ks (Radix m) nomatch0 match0 = case m of
Nothing -> nomatch $~ singleLoc ks
Just e -> searchEdgeC ks e nomatch match
where nomatch = unpack (nomatch0 . Hole)
match a = unpack (match0 a . Hole)
singleZip ks = Hole (singleLoc ks)
singleton ks a = Radix (Just (singletonEdge ks a))
lookup ks (Radix m) = maybeToOption m >>= lookupEdge ks
insertWith f ks a (Radix (Just e)) = Radix (Just (insertEdge f ks a e))
insertWith _ ks a (Radix Nothing) = singleton ks a
instance Searchable (TrieMap (PVector Word)) (PVector Word) where
{-# INLINE search #-}
search ks (WRadix m) nomatch0 match0 = case m of
Nothing -> nomatch $~ singleLoc ks
Just e -> searchEdgeC ks e nomatch match
where nomatch = unpack (nomatch0 . WHole)
match a = unpack (match0 a . WHole)
singleZip ks = WHole (singleLoc ks)
singleton ks a = WRadix (Just (singletonEdge ks a))
lookup ks (WRadix m) = maybeToOption m >>= lookupEdge ks
insertWith f ks a (WRadix (Just e)) = WRadix (Just (insertEdge f ks a e))
insertWith _ ks a (WRadix Nothing) = singleton ks a
{-# SPECIALIZE lookupEdge ::
TrieKey k => V() -> V(Edge) a -> Option a,
U() -> U(Edge) a -> Option a #-}
lookupEdge :: (Eq k, Label v k) => v k -> Edge v k a -> Option a
lookupEdge ks e = option $ \ no yes -> let
lookupE !ks !EDGE(_ ls !v ts) = if kLen < lLen then no else matchSlice matcher matches ks ls where
!kLen = length ks
!lLen = length ls
matcher k l z
| k == l = z
| otherwise = no
matches _ _
| kLen == lLen = maybe no yes v
| (_, k, ks') <- splitSlice lLen ks
= runOption (lookup k ts) no (lookupE ks')
in lookupE ks e
{-# SPECIALIZE INLINE searchEdgeC ::
TrieKey k => V() -> V(Edge) a -> (V(EdgeLoc) a :~> r) -> (a -> V(EdgeLoc) a :~> r) -> r,
U() -> U(Edge) a -> (U(EdgeLoc) a :~> r) -> (a -> U(EdgeLoc) a :~> r) -> r #-}
searchEdgeC :: (Eq k, Label v k, Unpackable (EdgeLoc v k a)) =>
v k -> Edge v k a -> (EdgeLoc v k a :~> r) -> (a -> EdgeLoc v k a :~> r) -> r
searchEdgeC ks0 e nomatch match = searchE ks0 e root where
searchE !ks e@EDGE(_ !ls !v ts) path = iMatchSlice matcher matches ks ls where
matcher i k l z =
runOption (unifierM k l (dropEdge (i+1) e)) z
(\ tHole -> nomatch $~ loc (dropSlice (i+1) ks) empty (deep path (takeSlice i ls) Nothing tHole))
matches kLen lLen = case compare kLen lLen of
LT -> let lPre = takeSlice kLen ls; l = ls !$ kLen; e' = dropEdge (kLen + 1) e in
nomatch $~ loc lPre (singleton l e') path
EQ -> maybe nomatch match v $~ loc ls ts path
GT -> let
{-# INLINE kk #-}
kk = ks !$ lLen
ks' = dropSlice (lLen + 1) ks
nomatch' tHole = nomatch $~ loc ks' empty (deep path ls v tHole)
match' e' tHole = searchE ks' e' (deep path ls v tHole)
in search kk ts nomatch' match'
{-# SPECIALIZE insertEdge ::
(TrieKey k, Sized a) => (a -> a) -> V() -> a -> V(Edge) a -> V(Edge) a,
Sized a => (a -> a) -> U() -> a -> U(Edge) a -> U(Edge) a #-}
insertEdge :: (Label v k, Sized a) => (a -> a) -> v k -> a -> Edge v k a -> Edge v k a
insertEdge f ks0 a e = insertE ks0 e where
!sza = getSize a
insertE !ks eL@EDGE(szL ls !v ts) = iMatchSlice matcher matches ks ls where
!szV = szL - sizeM ts
matcher !i k l z = runOption (unifyM k eK' l eL') z (edge (takeSlice i ls) Nothing)
where eK' = edge' sza (dropSlice (i+1) ks) (Just a) empty
eL' = dropEdge (i+1) eL
matches kLen lLen = case compare kLen lLen of
LT -> (edge' (sza + szL) ks (Just a) (singleton l eL'))
where l = ls !$ kLen; eL' = dropEdge (kLen+1) eL
EQ -> (edge ls (Just (maybe a f v)) ts)
GT -> edge' sz' ls v ts' where
ks' = dropSlice (lLen + 1) ks
k = ks !$ lLen
ts' = insertWith (insertE ks') k (edge' sza ks' (Just a) empty) ts
sz' = sizeM ts' + szV
| lowasser/TrieMap | Data/TrieMap/RadixTrie/Search.hs | bsd-3-clause | 4,571 | 44 | 17 | 1,136 | 1,724 | 871 | 853 | -1 | -1 |
module Persistent.CRUDSpec where
import Database.Persist
import Database.Persist.Sql
import Persistent.CRUD
import Test.Hspec
spec :: Spec
spec =
describe "createUser" $
it "returns the created user id" $ do
let email = "[email protected]"
firstName = "Foo"
lastName = "Bar"
numberOfUsers <- withDB $ do
createUser email firstName lastName
countUsers
numberOfUsers `shouldBe` (1 :: Int)
withTestDB :: SqlPersistM a -> IO a
withTestDB q =
withDB $ do
runMigration migrateAll
transactionSave
r <- q
transactionUndo
return r
| stackbuilders/persistent-crud | test/Persistent/CRUDSpec.hs | bsd-3-clause | 648 | 0 | 12 | 206 | 161 | 80 | 81 | 24 | 1 |
-- | Compilation of pattern matching.
module Malgo.Desugar.Match (match, PatMatrix, patMatrix) where
import Control.Lens (At (at), Prism', has, over, (?=), _1)
import qualified Data.List as List
import qualified Data.List.NonEmpty as NonEmpty
import qualified Data.Map.Strict as Map
import Data.Traversable (for)
import Koriel.Core.Syntax
import qualified Koriel.Core.Syntax as Core
import Koriel.Core.Type
import qualified Koriel.Core.Type as Core
import Koriel.Id
import Koriel.MonadUniq
import Koriel.Pretty
import Malgo.Desugar.DsEnv
import Malgo.Desugar.Type (dsType, unfoldType)
import Malgo.Desugar.Unboxed (dsUnboxed)
import Malgo.Prelude hiding (group)
import Malgo.Syntax
import Malgo.Syntax.Extension
import Malgo.TypeRep
import qualified Malgo.TypeRep as Malgo
-- TODO: add more comments based on
-- "The Implementation of Functional Programming Languages".
-- Each clause's pattern row is viewed as a matrix, transposed, and passed
-- to 'match', which decomposes the patterns.
-- For example, { f Nil -> f empty | f (Cons x xs) -> f x } is viewed as
-- [ [f, Nil], [f, Cons x xs] ]
-- and transposed into [ [f, f], [Nil, Cons x xs] ].
newtype PatMatrix = PatMatrix
  { -- | The transposed matrix of patterns
innerList :: [[Pat (Malgo 'Refine)]]
}
deriving stock (Show)
deriving newtype (Pretty)
patMatrix :: [[Pat (Malgo 'Refine)]] -> PatMatrix
patMatrix xss = PatMatrix $ transpose xss
headCol :: PatMatrix -> Maybe [Pat (Malgo 'Refine)]
headCol PatMatrix {innerList = []} = Nothing
headCol PatMatrix {innerList = x : _} = Just x
tailCol :: PatMatrix -> PatMatrix
tailCol PatMatrix {innerList = []} = PatMatrix []
tailCol PatMatrix {innerList = _ : xs} = PatMatrix xs
-- consCol :: [Pat (Malgo 'Refine)] -> PatMatrix -> PatMatrix
-- consCol ps PatMatrix {..} = PatMatrix (ps : innerList)
splitCol :: PatMatrix -> (Maybe [Pat (Malgo 'Refine)], PatMatrix)
splitCol mat = (headCol mat, tailCol mat)
-- Decompose the pattern match and translate it into switch-case style branching.
match ::
(MonadState DsEnv m, MonadIO m, MonadReader env m, MonadFail m, HasUniqSupply env) =>
  -- | scrutinees (the values being matched)
[Id Core.Type] ->
  -- | patterns (as a transposed matrix)
PatMatrix ->
  -- | right-hand sides
[m (Core.Exp (Id Core.Type))] ->
-- | fail
Core.Exp (Id Core.Type) ->
m (Core.Exp (Id Core.Type))
match (scrutinee : restScrutinee) pat@(splitCol -> (Just heads, tails)) es err
-- Variable Rule
  -- when every leading pattern is a variable
| all (has _VarP) heads = do
      -- For each variable pattern v, replace every occurrence of v in the expression with the scrutinee.
match
restScrutinee
tails
( zipWith
( \case
(VarP _ v) -> \e -> nameEnv . at v ?= scrutinee >> e
_ -> error "All elements of heads must be VarP"
)
heads
es
)
err
-- Constructor Rule
  -- when every leading pattern is a value constructor
| all (has _ConP) heads = do
let patType = Malgo.typeOf $ List.head heads
-- unless (Malgo._TyApp `has` patType || Malgo._TyCon `has` patType) $
-- errorDoc $ "Not valid type:" <+> pPrint patType
      -- determine the set of constructors from the type
let (con, ts) = case Malgo.viewTyConApp patType of
Just (Malgo.TyCon con, ts) -> (con, ts)
_ -> error "patType must be TyApp or TyCon"
valueConstructors <- lookupValueConstructors con ts
      -- generate a C.Case for each constructor
cases <- for valueConstructors \(conName, Forall _ conType) -> do
paramTypes <- traverse dsType $ fst $ splitTyArr conType
let coreCon = Core.Con (Data $ idToText conName) paramTypes
params <- traverse (newInternalId "$p") paramTypes
let (pat', es') = group conName pat es
Unpack coreCon params <$> match (params <> restScrutinee) pat' es' err
unfoldedType <- unfoldType patType
pure $ Match (Cast unfoldedType $ Core.Var scrutinee) $ NonEmpty.fromList cases
  -- when every leading pattern is a record pattern
| all (has _RecordP) heads = do
let patType = Malgo.typeOf $ List.head heads
SumT [con@(Core.Con Core.Tuple ts)] <- dsType patType
params <- traverse (newInternalId "$p") ts
cases <- do
(pat', es') <- groupRecord pat es
one . Unpack con params <$> match (params <> restScrutinee) pat' es' err
pure $ Match (Atom $ Core.Var scrutinee) cases
  -- when every leading pattern is a tuple pattern
| all (has _TupleP) heads = do
let patType = Malgo.typeOf $ List.head heads
SumT [con@(Core.Con Core.Tuple ts)] <- dsType patType
params <- traverse (newInternalId "$p") ts
cases <- do
let (pat', es') = groupTuple pat es
one . Unpack con params <$> match (params <> restScrutinee) pat' es' err
pure $ Match (Atom $ Core.Var scrutinee) cases
  -- when every leading pattern is an unboxed value
| all (has _UnboxedP) heads = do
let cs =
map
( \case
UnboxedP _ x -> dsUnboxed x
_ -> error "All elements of heads must be UnboxedP"
)
heads
cases <- traverse (\c -> Switch c <$> match restScrutinee tails es err) cs
      -- To guarantee exhaustiveness of the pattern match,
      -- add a default case `_ -> err`.
hole <- newInternalId "$_" (Core.typeOf scrutinee)
pure $ Match (Atom $ Core.Var scrutinee) $ NonEmpty.fromList (cases <> [Core.Bind hole err])
-- The Mixture Rule
  -- when several kinds of patterns are mixed
| otherwise =
do
let ((pat', pat''), (es', es'')) = partition pat es
err' <- match (scrutinee : restScrutinee) pat'' es'' err
match (scrutinee : restScrutinee) pat' es' err'
match [] (PatMatrix []) (e : _) _ = e
match _ (PatMatrix []) [] err = pure err
match scrutinees pat es err = do
errorDoc $ "match" <+> pPrint scrutinees <+> pPrint pat <+> pPrint (length es) <+> pPrint err
-- Split the pattern rows so that a rule other than the Mixture Rule applies:
-- [ [Cons A xs]
-- , [Cons x xs]
-- , [Nil] ]
-- ->
-- ( [ [Cons A xs]
-- , [Cons x xs] ]
-- , [ [Nil] ])
partition ::
PatMatrix ->
[m (Core.Exp (Id Core.Type))] ->
( (PatMatrix, PatMatrix),
([m (Core.Exp (Id Core.Type))], [m (Core.Exp (Id Core.Type))])
)
partition (splitCol -> (Just heads@(VarP {} : _), PatMatrix tails)) es = partitionOn _VarP heads tails es
partition (splitCol -> (Just heads@(ConP {} : _), PatMatrix tails)) es = partitionOn _ConP heads tails es
partition (splitCol -> (Just heads@(TupleP {} : _), PatMatrix tails)) es = partitionOn _TupleP heads tails es
partition (splitCol -> (Just heads@(RecordP {} : _), PatMatrix tails)) es = partitionOn _RecordP heads tails es
partition (splitCol -> (Just heads@(UnboxedP {} : _), PatMatrix tails)) es = partitionOn _UnboxedP heads tails es
partition _ _ = error "All patterns are covered"
partitionOn ::
Prism' (Pat (Malgo 'Refine)) b ->
[Pat (Malgo 'Refine)] ->
[[Pat (Malgo 'Refine)]] ->
[a] ->
((PatMatrix, PatMatrix), ([a], [a]))
partitionOn prism heads tails es =
( (PatMatrix $ onHeads : onTails, PatMatrix $ otherHeads : otherTails),
List.splitAt (length onHeads) es
)
where
-- onHeads : onTails => pattern that row starts with prism
-- otherHeads : otherTails => pattern row that starts without prism
(onHeads, otherHeads) = List.span (has prism) heads
(onTails, otherTails) = unzip $ map (List.splitAt (length onHeads)) tails
-- Generate the pattern matrix obtained by expanding the argument patterns ps of constructor gcon.
group ::
XId (Malgo 'Refine) ->
PatMatrix ->
[m (Core.Exp (Id Core.Type))] ->
(PatMatrix, [m (Core.Exp (Id Core.Type))])
group gcon (PatMatrix (transpose -> pss)) es = over _1 patMatrix $ unzip $ mapMaybe (aux gcon) (zip pss es)
where
aux gcon (ConP _ gcon' ps : pss, e)
| gcon == gcon' = Just (ps <> pss, e)
| otherwise = Nothing
aux _ (p : _, _) = errorDoc $ "Invalid pattern:" <+> pPrint p
aux _ ([], _) = error "ps must be not empty"
groupTuple :: PatMatrix -> [m (Core.Exp (Id Core.Type))] -> (PatMatrix, [m (Core.Exp (Id Core.Type))])
groupTuple (PatMatrix (transpose -> pss)) es = over _1 patMatrix $ unzip $ zipWith aux pss es
where
aux (TupleP _ ps : pss) e = (ps <> pss, e)
aux (p : _) _ = errorDoc $ "Invalid pattern:" <+> pPrint p
aux [] _ = error "ps must be not empty"
groupRecord :: (MonadReader env m, MonadIO m, HasUniqSupply env) => PatMatrix -> [m (Core.Exp (Id Core.Type))] -> m (PatMatrix, [m (Core.Exp (Id Core.Type))])
groupRecord (PatMatrix pss) es = over _1 patMatrix . unzip <$> zipWithM aux pss es
where
aux (RecordP x ps : pss) e = do
ps' <- extendRecordP x $ map (first removePrefix) ps
pure (ps' <> pss, e)
aux (p : _) _ = errorDoc $ "Invalid pattern:" <+> pPrint p
aux [] _ = error "ps must be not empty"
extendRecordP (Annotated (Malgo.TyRecord ktsMap) pos) ps = do
let kts = Map.toList ktsMap
for kts \(key, ty) ->
case List.lookup key ps of
Nothing -> VarP (Annotated ty pos) <$> newInternalId "$_p" ()
Just p -> pure p
extendRecordP _ _ = error "typeOf x must be TyRecord"
| takoeight0821/malgo | src/Malgo/Desugar/Match.hs | bsd-3-clause | 9,274 | 0 | 19 | 1,923 | 3,130 | 1,608 | 1,522 | -1 | -1 |
module Main where
import Control.Applicative
import Control.Arrow
import Control.Monad
import Data.Attoparsec.Char8 hiding (take)
import qualified Data.Attoparsec.Char8 as AC (take)
import Data.Char (ord)
import Data.List (foldl')
import Data.ByteString.Char8 (pack, unpack)
import Data.ByteString (ByteString)
int :: Parser Integer
int = toInteger . c2d <$> digit
where
c2d x = ord x - ord '0'
int2 :: Parser Integer
int2 = liftM2 ((*10)>>>(+)) int int
type Digit = Int
integer :: Digit -> Parser Integer
integer d = foldl' (liftM2 ((*10)>>>(+))) zero $ take d $ repeat int
where
zero = return 0
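-- For example (illustrative, using attoparsec's parseOnly):
--   parseOnly (integer 3) (pack "123") == Right 123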
integer' :: Digit -> Parser Integer
integer' d = read . unpack <$> AC.take d
test :: Int -> [Result Integer]
test n = map (\(l,bs) -> parse (integer l) bs) (genData n)
test' :: Int -> [Result Integer]
test' n = map (\(l,bs) -> parse (integer' l) bs) (genData n)
genData :: Int -> [(Int, ByteString)]
genData n = take n $ map (length &&& pack) $ splits primes stream
where
stream = cycle ['0'..'9']
primes = cycle [2,3,5,7,11,13,17,19,23,29,31]
splits :: [Int] -> String -> [String]
splits [] _ = []
splits (n:ns) s = h:splits ns t
where
(h, t) = splitAt n s
main :: IO ()
main = print $ test 100000
| cutsea110/tsr-test | Devel.hs | bsd-3-clause | 1,238 | 0 | 12 | 248 | 596 | 326 | 270 | 35 | 1 |
module Scheme.DataType.Error.Try where
import DeepControl.Monad.Except
-- for Chaitin's Omega function
data TryError = OUTOFDATA
| OUTOFTIME
| PARSEErr String
| OTHER String
instance Error TryError where
strMsg s = OTHER s
instance Show TryError where
show OUTOFDATA = "out-of-data"
show OUTOFTIME = "out-of-time"
show (PARSEErr s) = "failed to parse: " ++ s
show (OTHER s) = show s
| ocean0yohsuke/Scheme | src/Scheme/DataType/Error/Try.hs | bsd-3-clause | 460 | 0 | 8 | 136 | 113 | 61 | 52 | 13 | 0 |
--
-- Module : Granulepos
-- Copyright : (c) Conrad Parker 2006
-- License : BSD-style
-- Maintainer : [email protected]
-- Stability : experimental
-- Portability : portable
module Codec.Container.Ogg.Granulepos (
Granulepos (..),
gpPack,
gpUnpack
) where
import Data.Word (Word64)
------------------------------------------------------------
-- Types
--
newtype Granulepos = Granulepos (Maybe Word64)
deriving Eq
------------------------------------------------------------
-- Granulepos functions
--
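-- For example (illustrative): gpUnpack (gpPack 123) == 123, while
-- gpPack 0xffffffffffffffff == Granulepos Nothing.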
gpPack :: Word64 -> Granulepos
gpPack 0xffffffffffffffff = Granulepos Nothing
gpPack gp = Granulepos (Just gp)
gpUnpack :: Granulepos -> Word64
gpUnpack (Granulepos (Nothing)) = -1
gpUnpack (Granulepos (Just gp)) = gp
instance Show Granulepos where
show (Granulepos (Nothing)) = "-1"
show (Granulepos (Just gp)) = show gp
| kfish/hogg | Codec/Container/Ogg/Granulepos.hs | bsd-3-clause | 865 | 0 | 10 | 139 | 196 | 112 | 84 | 16 | 1 |
module Graphics.UI.Gtk.WebKit.WebSettings where
{-
-- * Description
-- | WebKitWebSettings can be applied to a WebKitWebView to control the text encoding, color,
-- font sizes, printing mode, script support, loading of images and various other things.
-- * Types
WebSettings,
WebSettingsClass,
EditingBehavior,
-- * Constructors
webSettingsNew,
-- * Methods
webSettingsCopy,
webSettingsGetUserAgent,
-- * Attributes
-- ** Family
webSettingsCursiveFontFamily,
webSettingsDefaultFontFamily,
webSettingsFantasyFontFamily,
webSettingsMonospaceFontFamily,
webSettingsSansFontFamily,
webSettingsSerifFontFamily,
-- ** FontSize
webSettingsDefaultFontSize,
webSettingsDefaultMonospaceFontSize,
webSettingsMinimumFontSize,
webSettingsMinimumLogicalFontSize,
-- ** Image
webSettingsAutoLoadImages,
webSettingsAutoShrinkImages,
-- ** Encoding
webSettingsDefaultEncoding,
-- ** Other
webSettingsEditingBehavior,
webSettingsEnableCaretBrowsing,
webSettingsEnableDeveloperExtras,
webSettingsEnableHtml5Database,
webSettingsEnableHtml5LocalStorage,
webSettingsEnableOfflineWebApplicationCache,
webSettingsEnablePlugins,
webSettingsEnablePrivateBrowsing,
webSettingsEnableScripts,
webSettingsEnableSpellChecking,
webSettingsEnableUniversalAccessFromFileUris,
webSettingsEnableXssAuditor,
webSettingsEnableSiteSpecificQuirks,
#if WEBKIT_CHECK_VERSION (1,1,16)
webSettingsEnableDomPaste,
#endif
#if WEBKIT_CHECK_VERSION (1,1,18)
webSettingsEnableDefaultContextMenu,
webSettingsEnablePageCache,
#endif
#if WEBKIT_CHECK_VERSION (1,1,23)
webSettingsEnableSpatialNavigation,
#endif
webSettingsEnforce96Dpi,
webSettingsJSCanOpenWindowAuto,
webSettingsPrintBackgrounds,
webSettingsResizableTextAreas,
webSettingsSpellCheckingLang,
#if WEBKIT_CHECK_VERSION (1,1,17)
webSettingsTabKeyCyclesThroughElements,
#endif
webSettingsUserAgent,
webSettingsUserStylesheetUri,
webSettingsZoomStep,
) where
import Control.Monad (liftM)
import System.Glib.FFI
import System.Glib.UTFString
import System.Glib.GList
import System.Glib.GError
import System.Glib.Properties
import System.Glib.Attributes
import Graphics.UI.Gtk.Gdk.Events
{#import Graphics.UI.Gtk.Abstract.Object#} (makeNewObject)
{#import Graphics.UI.Gtk.WebKit.Types#}
{#import System.Glib.GObject#}
{#context lib="webkit" prefix ="webkit"#}
{#enum EditingBehavior {underscoreToCase}#}
------------------
-- Constructors
-- | Create a new 'WebSettings' instance.
--
-- A 'WebSettings' can be applied to a 'WebView'
-- to control the text encoding, color, font size, printing mode,
-- script support, loading of images and various other things.
webSettingsNew :: IO WebSettings
webSettingsNew =
wrapNewGObject mkWebSettings $ {#call web_settings_new#}
-- | Copy an existing 'WebSettings' instance.
webSettingsCopy ::
WebSettingsClass self => self
-> IO WebSettings
webSettingsCopy websettings =
constructNewGObject mkWebSettings $ {#call web_settings_copy#} (toWebSettings websettings)
-- | Return the User-Agent string currently used.
webSettingsGetUserAgent ::
(WebSettingsClass self, GlibString string) => self
-> IO (Maybe string) -- ^ User-Agent string or @Nothing@ in case failed.
webSettingsGetUserAgent websettings =
{#call web_settings_get_user_agent#} (toWebSettings websettings) >>= maybePeek peekUTFString
-- | Load images automatically
--
-- Default value: True
webSettingsAutoLoadImages :: (WebSettingsClass self) => Attr self Bool
webSettingsAutoLoadImages = newAttrFromBoolProperty "auto-load-images"
-- | Automatically shrink standalone images to fit
--
-- Default value: True
webSettingsAutoShrinkImages :: (WebSettingsClass self) => Attr self Bool
webSettingsAutoShrinkImages = newAttrFromBoolProperty "auto-shrink-images"
-- | The default Cursive font family used to display text
--
-- Default value "serif"
webSettingsCursiveFontFamily :: (WebSettingsClass self, GlibString string) => Attr self string
webSettingsCursiveFontFamily = newAttrFromStringProperty "cursive-font-family"
-- | The default encoding used to display text
--
-- Default value "iso-8859-1"
webSettingsDefaultEncoding :: (WebSettingsClass self, GlibString string) => Attr self string
webSettingsDefaultEncoding = newAttrFromStringProperty "default-encoding"
-- | The default font family used to display text
--
-- Default value: "sans-serif"
webSettingsDefaultFontFamily :: (WebSettingsClass self, GlibString string) => Attr self string
webSettingsDefaultFontFamily = newAttrFromStringProperty "default-font-family"
-- | The default font size used to display text
--
-- Default value: >=5
webSettingsDefaultFontSize :: (WebSettingsClass self) => Attr self Int
webSettingsDefaultFontSize = newAttrFromIntProperty "default-font-size"
-- | The default font size used to display monospace text
--
-- Allowed values: >= 5
--
-- Default value: 10
webSettingsDefaultMonospaceFontSize :: (WebSettingsClass self) => Attr self Int
webSettingsDefaultMonospaceFontSize = newAttrFromIntProperty "default-monospace-font-size"
-- | This settings controls various editing behaviors
webSettingsEditingBehavior :: (WebSettingsClass self) => Attr self EditingBehavior
webSettingsEditingBehavior = newAttrFromEnumProperty "editing-behavior"
{#call pure unsafe webkit_editing_behavior_get_type#}
-- | Whether to enable caret browsing mode.
webSettingsEnableCaretBrowsing :: (WebSettingsClass self) => Attr self Bool
webSettingsEnableCaretBrowsing = newAttrFromBoolProperty "enable-caret-browsing"
-- | Whether developer extensions should be enabled.
--
-- This enables, for now, the 'WebInspector'
webSettingsEnableDeveloperExtras :: (WebSettingsClass self) => Attr self Bool
webSettingsEnableDeveloperExtras = newAttrFromBoolProperty "enable-developer-extras"
#if WEBKIT_CHECK_VERSION (1,1,16)
-- | Whether to enable DOM paste. If set to 'True', document.execCommand("Paste") will correctly execute
-- and paste content of the clipboard.
--
-- Default value: 'False'
--
-- * Since 1.1.16
webSettingsEnableDomPaste :: (WebSettingsClass self) => Attr self Bool
webSettingsEnableDomPaste = newAttrFromBoolProperty "enable-dom-paste"
#endif
-- | Whether to enable HTML5 client-side SQL database support.
webSettingsEnableHtml5Database :: (WebSettingsClass self) => Attr self Bool
webSettingsEnableHtml5Database = newAttrFromBoolProperty "enable-html5-database"
-- | Whether to enable HTML5 localStorage support.
webSettingsEnableHtml5LocalStorage :: (WebSettingsClass self) => Attr self Bool
webSettingsEnableHtml5LocalStorage = newAttrFromBoolProperty "enable-html5-local-storage"
-- | Whether to enable HTML5 offline web application cache support.
webSettingsEnableOfflineWebApplicationCache :: (WebSettingsClass self) => Attr self Bool
webSettingsEnableOfflineWebApplicationCache = newAttrFromBoolProperty "enable-offline-web-application-cache"
-- | Enable embedded plugin objects.
webSettingsEnablePlugins :: (WebSettingsClass self) => Attr self Bool
webSettingsEnablePlugins = newAttrFromBoolProperty "enable-plugins"
-- | Whether to enable private browsing mode.
webSettingsEnablePrivateBrowsing :: (WebSettingsClass self) => Attr self Bool
webSettingsEnablePrivateBrowsing = newAttrFromBoolProperty "enable-private-browsing"
-- | Enable embedded scripting languages
webSettingsEnableScripts :: (WebSettingsClass self) => Attr self Bool
webSettingsEnableScripts = newAttrFromBoolProperty "enable-scripts"
-- | Whether to enable spell checking while typing.
webSettingsEnableSpellChecking :: (WebSettingsClass self) => Attr self Bool
webSettingsEnableSpellChecking = newAttrFromBoolProperty "enable-spell-checking"
-- | Whether to allow files loaded through file:// URLs universal access to all pages.
webSettingsEnableUniversalAccessFromFileUris :: (WebSettingsClass self) => Attr self Bool
webSettingsEnableUniversalAccessFromFileUris = newAttrFromBoolProperty "enable-universal-access-from-file-uris"
-- | Whether to enable the XSS Auditor.
--
-- This feature filters some kinds of reflective XSS attacks on vulnerable web sites.
webSettingsEnableXssAuditor :: (WebSettingsClass self) => Attr self Bool
webSettingsEnableXssAuditor = newAttrFromBoolProperty "enable-xss-auditor"
-- | Enforce a resolution of 96 DPI.
webSettingsEnforce96Dpi :: (WebSettingsClass self) => Attr self Bool
webSettingsEnforce96Dpi = newAttrFromBoolProperty "enforce-96-dpi"
-- | The default Fantasy font family used to display text
webSettingsFantasyFontFamily :: (WebSettingsClass self, GlibString string) => Attr self string
webSettingsFantasyFontFamily = newAttrFromStringProperty "fantasy-font-family"
-- | Whether JavaScript can open popup windows automatically without user intervention.
webSettingsJSCanOpenWindowAuto :: (WebSettingsClass self) => Attr self Bool
webSettingsJSCanOpenWindowAuto = newAttrFromBoolProperty "javascript-can-open-windows-automatically"
-- | The minimum font size used to display text.
--
-- Allowed values: >=1
--
-- Default value: 5
webSettingsMinimumFontSize :: (WebSettingsClass self) => Attr self Int
webSettingsMinimumFontSize = newAttrFromIntProperty "minimum-font-size"
-- | The minimum logical font size used to display text
--
-- Allowed values: >=1
--
-- Default value: 5
webSettingsMinimumLogicalFontSize :: (WebSettingsClass self) => Attr self Int
webSettingsMinimumLogicalFontSize = newAttrFromIntProperty "minimum-logical-font-size"
-- | The default font family used to display monospace text.
--
-- Default value: "monospace"
webSettingsMonospaceFontFamily :: (WebSettingsClass self, GlibString string) => Attr self string
webSettingsMonospaceFontFamily = newAttrFromStringProperty "monospace-font-family"
-- | Whether background images should be printed
--
-- Default value: True
webSettingsPrintBackgrounds :: (WebSettingsClass self) => Attr self Bool
webSettingsPrintBackgrounds = newAttrFromBoolProperty "print-backgrounds"
-- | Whether text areas are resizable
--
-- Default value : True
webSettingsResizableTextAreas :: (WebSettingsClass self) => Attr self Bool
webSettingsResizableTextAreas = newAttrFromBoolProperty "resizable-text-areas"
-- | The default Sans Serif font family used to display text
--
-- Default value "sans-serif"
webSettingsSansFontFamily :: (WebSettingsClass self, GlibString string) => Attr self string
webSettingsSansFontFamily = newAttrFromStringProperty "sans-serif-font-family"
-- | The default Serif font family used to display text
--
-- Default value: "serif"
webSettingsSerifFontFamily :: (WebSettingsClass self, GlibString string) => Attr self string
webSettingsSerifFontFamily = newAttrFromStringProperty "serif-font-family"
-- | The languages to be used for spell checking, separated by commas
--
-- The locale string typically is in the form lang_COUNTRY,
-- where lang is an ISO-639 language code, and COUNTRY is an ISO-3166 country code.
-- For instance, sv_FI for Swedish as written in Finland or pt_BR for Portuguese as written in Brazil.
--
-- If no value is specified then the value returned by gtk_get_default_language will be used.
--
-- Default value: @Nothing@
webSettingsSpellCheckingLang :: (WebSettingsClass self, GlibString string) => Attr self (Maybe string)
webSettingsSpellCheckingLang = newAttrFromMaybeStringProperty "spell-checking-languages"
#if WEBKIT_CHECK_VERSION (1,1,17)
-- | Whether the tab key cycles through elements on the page.
--
-- If flag is 'True', pressing the tab key will focus the next element in the @webView@. If flag is 'False',
-- the @webView@ will interpret tab key presses as normal key presses. If the selected element is
-- editable, the tab key will cause the insertion of a tab character.
--
-- Default value: 'True'
--
-- * Since 1.1.17
webSettingsTabKeyCyclesThroughElements :: (WebSettingsClass self) => Attr self Bool
webSettingsTabKeyCyclesThroughElements = newAttrFromBoolProperty "tab-key-cycles-through-elements"
#endif
#if WEBKIT_CHECK_VERSION (1,1,18)
-- | Whether right-clicks should be handled automatically to create, and display the context
-- menu. Turning this off will make WebKitGTK+ not emit the populate-popup signal. Notice that the
-- default button press event handler may still handle right clicks for other reasons, such as in-page
-- context menus, or right-clicks that are handled by the page itself.
--
-- Default value: 'True'
--
-- * Since 1.1.18
webSettingsEnableDefaultContextMenu :: (WebSettingsClass self) => Attr self Bool
webSettingsEnableDefaultContextMenu = newAttrFromBoolProperty "enable-default-context-menu"
-- | Enable or disable the page cache. Disabling the page cache is generally only useful for special
-- circumstances like low-memory scenarios or special purpose applications like static HTML
-- viewers. This setting only controls the Page Cache, this cache is different than the disk-based or
-- memory-based traditional resource caches, its point is to make going back and forth between pages
-- much faster. For details about the different types of caches and their purposes see:
-- http://webkit.org/ blog/427/webkit-page-cache-i-the-basics/
--
-- Default value: 'False'
--
-- * Since 1.1.18
webSettingsEnablePageCache :: (WebSettingsClass self) => Attr self Bool
webSettingsEnablePageCache = newAttrFromBoolProperty "enable-page-cache"
#endif
-- | The User-Agent string used by WebKit
--
-- This will return a default User-Agent string if a custom string wasn't provided by the application.
-- Setting this property to a NULL value or an empty string will result in
-- the User-Agent string being reset to the default value.
--
-- Default value: \"Mozilla/5.0 (X11; U; Linux x86_64; c) AppleWebKit/531.2+ (KHTML, like Gecko) Safari/531.2+\"
webSettingsUserAgent :: (WebSettingsClass self, GlibString string) => Attr self string
webSettingsUserAgent = newAttrFromStringProperty "user-agent"
-- | The URI of a stylesheet that is applied to every page.
--
-- Default value: @Nothing@
webSettingsUserStylesheetUri :: (WebSettingsClass self, GlibString string) => Attr self (Maybe string)
webSettingsUserStylesheetUri = newAttrFromMaybeStringProperty "user-stylesheet-uri"
-- | The value by which the zoom level is changed when zooming in or out
--
-- Allowed values: >= 0
--
-- Default value: 0.1
webSettingsZoomStep :: (WebSettingsClass self) => Attr self Float
webSettingsZoomStep = newAttrFromFloatProperty "zoom-step"
-- | Enables the site-specific compatibility workarounds.
--
-- Default value: False
webSettingsEnableSiteSpecificQuirks :: WebSettingsClass self => Attr self Bool
webSettingsEnableSiteSpecificQuirks = newAttrFromBoolProperty "enable-site-specific-quirks"
#if WEBKIT_CHECK_VERSION (1,1,23)
-- | Whether to enable the Spatial Navigation. This feature consists in the ability to navigate between
-- focusable elements in a Web page, such as hyperlinks and form controls, by using Left, Right, Up and
-- Down arrow keys. For example, if an user presses the Right key, heuristics determine whether there
-- is an element he might be trying to reach towards the right, and if there are multiple elements,
-- which element he probably wants.
--
-- Default value: 'False'
--
-- * Since 1.1.23
webSettingsEnableSpatialNavigation :: WebSettingsClass self => Attr self Bool
webSettingsEnableSpatialNavigation = newAttrFromBoolProperty "enable-spatial-navigation"
#endif
-}
| mightybyte/reflex-dom-stubs | src/Graphics/UI/Gtk/WebKit/WebSettings.hs | bsd-3-clause | 15,359 | 0 | 3 | 1,909 | 10 | 8 | 2 | 1 | 0 |
import Test.HUnit
import Text.Parsec.Error (ParseError, errorMessages, messageString)
import qualified SvgParser as SVG
import SvgParser (SVG)
import Attributes (Attribute(..))
parseFile :: String -> IO (Either ParseError SVG)
parseFile filename = do
file <- readFile filename
return $ SVG.parse file
cmpToFile :: String -> SVG -> Test
cmpToFile filename svg = TestCase $ do
res <- parseFile filename
case res of
Left err -> assertFailure $ show err
Right svgFromFile -> assertEqual "Simple square" svg (SVG.clean svgFromFile)
square :: SVG
square = SVG.Element "svg"
[ Width 300
, Height 300
, ViewBox (0, 0, 300, 300)
]
[ SVG.SelfClosingTag "path" [D "m 50,50 200,0 0,200 -200,0 z"]
]
simpleSquare :: Test
simpleSquare = cmpToFile "test/svg/square_simple.svg" square
tests :: Test
tests = TestList
[ simpleSquare
]
main :: IO Counts
main = runTestTT tests
| elaye/svg-parser | test/Spec.hs | bsd-3-clause | 872 | 0 | 14 | 167 | 285 | 148 | 137 | 26 | 2 |
{-# LANGUAGE NoMonomorphismRestriction, OverloadedStrings #-}
module Conifer.Types where
import Data.Maybe
import Data.Tree(Tree(..))
import Diagrams.Coordinates
import Diagrams.Prelude -- hiding (rotationAbout, direction)
import Diagrams.ThreeD.Types
import Diagrams.ThreeD.Vector
import Control.Monad (mzero)
import Data.Aeson
import qualified Data.Aeson.Types as DAT
import qualified Data.Attoparsec as P
import qualified Data.ByteString.Lazy.Char8 as B
import qualified Data.HashMap.Strict as HM
import qualified Data.String as S
-- The Tree Data Structure
--
-- A Tree is the standard tree container represented by a Node constructor,
-- with a polymorphic payload. A leaf is a Node with no children.
--
-- The payload is TreeInfo parameterized on location type, and containing
-- the location, the girth at its origin (the location of which is implicit),
-- the girth at its location, and its age.
data NodeType = TrunkNode | BranchNode deriving (Show)
type Age = Double
type Girth = Double
type GirthSpec = Double
type SpecInfo a = (a, GirthSpec, GirthSpec, Age, NodeType)
type TreeInfo a = (a, Girth, Girth, Age, NodeType)
-- We specialize the types for the phases of tree development.
-- The tree grows as type TreeSpec3, in which the nodes have
-- length and direction, but unknown girth. After the tree has
-- been pruned, its trunk and branch girths can be calculated
-- and saved as type Tree3 (nodes in 3D). It can then be projected
-- to Tree2 (nodes in 2D) before being flattened to a list of
-- primitive drawing elements.
type TreeSpec3 = Tree (SpecInfo (P3 Double, V3 Double))
type Tree3 = Tree (TreeInfo (P3 Double, V3 Double))
type Tree2 = Tree (TreeInfo (P2 Double, V2 Double))
-- The tree is ultimately converted to context-free drawing instructions
-- which when carried out produce diagrams.
--
-- Trunk is a section of trunk or branch between points p0 and p1,
-- with girth g0 at p0 and g1 at p1.
-- Tip is the tip of a tree or branch, between points p0 and p1.
-- Needles indicates decoration with needles between points p0 and p1.
-- Trunk and Tip carry age as a hint for when needles should be drawn.
data TreePrim = Trunk { p0::P2 Double, p1::P2 Double, g0::Double, g1::Double, age::Double }
| Tip { p0::P2 Double, p1::P2 Double, age::Double }
| Needles { p0::P2 Double, p1::P2 Double }
-- Specifying a Conifer
--
-- Our ideal tree will be completely determined by its "genes", the various
-- parameters in TreeParams. The age of the tree is roughly the number of recursive
-- steps in its growth—each year corresponds to another level of branching. As we are
-- modeling a conifer, its structure is a main trunk that adds some number of whorls
-- of branches each year and another length of trunk, meanwhile adding another level
-- of branching to existing branches.
--
-- One major concession to arbitrary aesthetics is the list of trunk branch angles,
-- which led to a fuller and less regular look, important for the original application
-- of this code. A more realistic approach would be to model random deviations from
-- the regular growth.
data TreeParams = TreeParams {
tpTrunkLengthIncrementPerYear :: Double
, tpTrunkBranchLengthRatio :: Double
, tpTrunkBranchAngles :: [Double]
, tpTrunkGirth :: Double
, tpWhorlsPerYear :: Int
, tpWhorlSize :: Int
, tpBranchGirth :: Double
, tpBranchBranchLengthRatio :: Double
, tpBranchBranchLengthRatio2 :: Double
, tpBranchBranchAngle :: Angle Double
} deriving (Show, Eq)
instance Default TreeParams where
def = TreeParams {
tpTrunkLengthIncrementPerYear = 0.9
, tpTrunkBranchLengthRatio = 0.7
, tpTrunkBranchAngles = [tau / 6]
, tpTrunkGirth = 1.0
, tpWhorlsPerYear = 1
, tpWhorlSize = 6
, tpBranchGirth = 1.0
, tpBranchBranchLengthRatio = 0.8
, tpBranchBranchLengthRatio2 = 0.8
, tpBranchBranchAngle = 1 / 6 @@ turn
}
-- The mutable state during a tree's growth consists of its age, the rotational phase of the next
-- whorl, and the next trunk branch angle to use.
data AgeParams = AgeParams {
apAge :: Age
, apTrunkBranchAngleIndex :: Int
, apWhorlPhase :: Double
} deriving (Show, Eq)
-- A tree is unfolded from a seed.
type Seed = (SpecInfo (P3 Double, V3 Double), TreeParams, AgeParams)
-- The tree can be optionally decorated with needles, in which case the
-- needles can be customized in various ways.
data NeedleParams = NeedleParams {
needleLength :: Double
, needleAngle :: Angle Double
, needleIncr :: Double
}
instance Default NeedleParams where
def = NeedleParams {
needleLength = 0.05
, needleAngle = 1 / 10 @@ turn
, needleIncr = 0.05
}
-- The UserData type represents the data that can be fed via stdin to configure a tree.
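-- An illustrative JSON document accepted by getUserDataFromJSON below; every
-- field is optional and the values shown are arbitrary:
--
--   {"age": 3, "needles": true, "udWhorlsPerYear": 9, "udWhorlSize": 7}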
data UserData = UD {
udAge :: Maybe Double
, udNeedles :: Maybe Bool
, udTrunkLengthIncrementPerYear :: Maybe Double
, udTrunkBranchLengthRatio :: Maybe Double
, udTrunkBranchAngles :: Maybe [Double]
, udTrunkGirth :: Maybe Double
, udWhorlsPerYear :: Maybe Int
, udWhorlSize :: Maybe Int
, udBranchGirth :: Maybe Double
, udBranchBranchLengthRatio :: Maybe Double
, udBranchBranchLengthRatio2 :: Maybe Double
-- , udBranchBranchAngle :: Maybe (Angle Double)
} deriving (Show, Eq)
instance ToJSON UserData where
toJSON ud = Object $ HM.fromList $ filter ((/= Null) . snd) [
("age", toJSON $ udAge ud)
, ("needles", toJSON $ udNeedles ud)
, ("udTrunkLengthIncrementPerYear", toJSON $ udTrunkLengthIncrementPerYear ud)
, ("udTrunkBranchLengthRatio", toJSON $ udTrunkBranchLengthRatio ud)
, ("udTrunkBranchAngles", toJSON $ udTrunkBranchAngles ud)
, ("udTrunkGirth", toJSON $ udTrunkGirth ud)
, ("udWhorlsPerYear", toJSON $ udWhorlsPerYear ud)
, ("udWhorlSize", toJSON $ udWhorlSize ud)
, ("udBranchGirth", toJSON $ udBranchGirth ud)
, ("udBranchBranchLengthRatio", toJSON $ udBranchBranchLengthRatio ud)
, ("udBranchBranchLengthRatio2", toJSON $ udBranchBranchLengthRatio2 ud)
-- , ("udBranchBranchAngle", toJSON $ udBranchBranchAngle ud)
]
-- sample data
ud = UD {
udAge = Just 3
, udNeedles = Just False
, udTrunkLengthIncrementPerYear = Just 1.4
, udTrunkBranchLengthRatio = Just 0.6
, udTrunkBranchAngles = Just [0.698, 0.898, 1.31 , 0.967]
, udTrunkGirth = Just 5.0
, udWhorlsPerYear = Just 9
, udWhorlSize = Just 7
, udBranchGirth = Just 1.0
, udBranchBranchLengthRatio = Just 1.0
, udBranchBranchLengthRatio2 = Just 1.0
-- , udBranchBranchAngle :: Angle Double
}
instance FromJSON UserData where
parseJSON (Object v) = UD
<$> v .:? "age"
<*> v .:? "needles"
<*> v .:? "udTrunkLengthIncrementPerYear"
<*> v .:? "udTrunkBranchLengthRatio"
<*> v .:? "udTrunkBranchAngles"
<*> v .:? "udTrunkGirth"
<*> v .:? "udWhorlsPerYear"
<*> v .:? "udWhorlSize"
<*> v .:? "udBranchGirth"
<*> v .:? "udBranchBranchLengthRatio"
<*> v .:? "udBranchBranchLengthRatio2"
-- <*> v .:? "udBranchBranchAngle"
parseJSON _ = mzero
decodeWith :: (Value -> DAT.Parser b) -> String -> Either String b
decodeWith p s = do
value <- P.eitherResult $ (P.parse json . S.fromString) s
DAT.parseEither p value
getUserDataFromJSON = decode . B.pack
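-- Hypothetical usage sketch (not part of the original module): decoding a
-- minimal JSON document into a 'UserData'; keys that are absent stay 'Nothing'.
exampleUserData :: Maybe UserData
exampleUserData = getUserDataFromJSON "{\"age\": 3, \"needles\": true}"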
argsFromInput ud tp ap = (tp', ap', n)
where tp' = TreeParams
(upd tpTrunkLengthIncrementPerYear udTrunkLengthIncrementPerYear ud tp)
(upd tpTrunkBranchLengthRatio udTrunkBranchLengthRatio ud tp)
(upd tpTrunkBranchAngles udTrunkBranchAngles ud tp)
(upd tpTrunkGirth udTrunkGirth ud tp)
(upd tpWhorlsPerYear udWhorlsPerYear ud tp)
(upd tpWhorlSize udWhorlSize ud tp)
(upd tpBranchGirth udBranchGirth ud tp)
(upd tpBranchBranchLengthRatio udBranchBranchLengthRatio ud tp)
(upd tpBranchBranchLengthRatio2 udBranchBranchLengthRatio2 ud tp)
(tpBranchBranchAngle tp)
ap' = AgeParams
(upd apAge udAge ud ap)
(apTrunkBranchAngleIndex ap)
(apWhorlPhase ap)
n = maybe False id (udNeedles ud)
upd f_tp f_ud ud tp = maybe (f_tp tp) id (f_ud ud)
| bobgru/conifer | src/Conifer/Types.hs | bsd-3-clause | 9,328 | 0 | 27 | 2,825 | 1,644 | 950 | 694 | 141 | 1 |
module MoreDigits where
import Data.Number.IReal
import Data.Number.IReal.IReal
import Data.Number.IReal.IntegerInterval
import LinAlg
import Newton
import Integrals
import Erf
{-
This file contains solutions to some of the problems from the MoreDigits friendly competition,
held at LORIA, Nancy, France, in July 2006. See http://rnc7.loria.fr/competition.html.
We use the example parameter sets rather than the actual competition sets, since the former
have published correct solutions against which we can check our results.
Timings are from running GHCi 7.8.3 on a 2.5 GHz MacBook Pro.
Each of the 16 problems has a simpler and a harder version. The results with our package are as follows (see below for improved results):
- We can solve both versions of problems 2, 3, 6, 7, 8, 9, 12 and 14.
- We can solve the simpler versions of problems 1, 11, 13 and 16, but the harder versions are out of reach due to excessive memory or time requirements
- Problems 4 and 5 concern two special functions, the zeta and gamma functions. We have not implemented these and, consequently, cannot solve these problems.
- Problem 10 is a linear algebra problem requiring inverting a matrix (of size 80x80 for the simpler problem). Our simple-minded linear algebra module cannot invert matrices larger than ca 40x40.
- Problem 15 is an integral with a heavily oscillating integrand. We can get the correct result for the simpler problem using Clenshaw-Curtis quadrature, but with a shaky error analysis based on noting that the results for 512 and 1024 points agree to the required number of decimals, and thus the common result is probably correct. We do not consider that a satisfactory solution.
Addendum March 14, 2015: See file MoreDigitsRounded.hs for solutions to the harder versions of problems 10, 13, 15 and 16 using the new module Data.Number.IReal.Rounded.
-}
-- Auxiliary stream of pseudo-random integers used in several problems below
chi :: Integer -> [Integer]
chi s = tail (iterate f s)
where f s = (69069 * s + 3) `mod` 2^31
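-- Illustrative check (not part of the original file): chi is a plain linear
-- congruential sequence, so its first element is a single step of the map.
_chiFirstStep :: Integer -> Bool
_chiFirstStep s = head (chi s) == (69069 * s + 3) `mod` 2^31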
p1 n a b c = exp (log (a/b)/c) ? n
{-
p1 15000 123456 10000 1000
1.002516460709654427070669458074954176353693511207522646674439914525351574088850747808768859
...
90067298469713697479049799670230992994517233320216647880388126823764450957720301326122793745235802
(1.84 secs, 944380208 bytes)
We have no specialized implementation of nth roots, so the harder problem is far beyond reach. An implementation of nthRoot along the lines of sqrt would probably handle this problem well.
-}
p2 n a b = exp (cos (a/b)) ? n
{-
p2 5000 6 7
1.924372742668343802802699839922863404917063482409596698496497263030972263567957417927207928
...
7064785022769449560946036292536382
(0.08 secs, 37847656 bytes)
p2 50000 2348 11
2.677658027419916799656791778743205279006831584447909394138911371168661727256596447351001419-
...
0028465996312865188159383181798097287773457362
(11.63 secs, 5915475144 bytes)
-}
p3 n a b c = acos (a/c) + asin (b/c) ? n
{-
p3 5000 2923 2813 3000
1.442910413467789456239386067859780011674672727154695225153220712260067164227074953642684611
...
8818481631797890652501951269818691
(0.27 secs, 187639840 bytes)
p3 40000 3922 813 4000
0.402482572546625665922717515202191441139775563729442129892515278209805324753500764115577856
...
53391820989810762240599096232837234036181276719563105086718718647336476260
(23.17 secs, 8288882608 bytes)
-}
{-
We do not attempt to solve p4 and p5 since we have not implemented the special functions zeta and gamma.
-}
p6 n a b = erf(sin(a/b)) ? n
{-
p6 30000 1512 1000
0.8419822444250350722500465556032049076535282226505613076319708107454641865199854581422396
...
97379590474161895111388647662559
(5.09 secs, 1769452000 bytes)
-}
p7 n s a = bsum (map (recip . fromIntegral) (take a (chi s))) ? n
{-
p7 15 12324 20000
0.000089400791092
(0.29 secs, 121306240 bytes)
p7 15 12314 2000000
0.016451150554244
(29.21 secs, 11332463760 bytes)
Note that the latter is *not* the harder problem; that has 100 times more terms!!
On the other hand, the required precision for the harder problem is only five significant digits,
so we can actually work in type Double to solve the problem! Compiling and running
main = print (sum (take 200000000 (map (recip . fromIntegral) (chi 12314))))
we get the output 1.9378131134366972 in ca 16 seconds. (This seems a bit too much for ghci)
-}
p8 n s a b = bsum [prec (n + 10) $ abs (sin (fromInteger k/b)) | k <- take a (chi s) ] ? n
{-
p8 16 12344 5000 2
3178.8937977675151612
(1.17 secs, 604932260 bytes)
p8 150 12384 100000 10
63837.783124646381213503757555288914770713872985612438616744139740933585253689694540737815815402040378950007180540637644340700133080182949505340280227608713
(8.51 secs, 9719354792 bytes)
-}
p9 n a b = bsum [scale 1 (-k) | (k,x) <- zip [1..ceiling (logBase 2 10 * fromIntegral n)] (tail (iterate f 0.5)), x < (0.5 :: IReal)] ? n
where f x = prec 20000 (c*x*(1-x))
c = a/b
{-
p9 100 161 43
0.2893154707647130135806131784714035265898339804693509562693961960506866365030222558951473454014980657
(0.30 secs, 68561352 bytes)
p9 7000 15 4
0.2893697060801703747946860518794294747419158436765556621286276182515676860828551
...
304596295681017400188911416039082717
(20.55 secs, 4806753488 bytes)
Remark: This is not entirely satisfactory. The successive values in the logistic sequence are narrow intervals (starting from width 10^(-20000)) and it is conceivable that one such interval includes 0.5, while the exact value would not. But since we got the correct answer, we were lucky... (But, of course, the probability for failure is negligible, if we believe that values are randomly distributed).
But also the problem itself is dubious, since it involves the < relation, which is not computable. Of course, the organizers made sure that no x_n is exactly 0.5, but anyhow...
-}
p10 n s a = bsum (map (bsum . map abs) inv)
where cs = map fromInteger (chi s)
mat = take a (group a cs)
group a xs = take a xs : group a (drop a xs)
inv = inverse mat
{- Can only be used for n up to ca 40, i.e. not even close to solving the simple problem (n=80). -}
p11 :: IReal -> IReal -> Int -> (Integer,Int)
p11 a b c = (last as, length (filter (==1) as))
where as = take (c+1) (cf (sqrt(sqrt(a/b))))
cf x = r:cf (prec 10000 (recip s))
where (r,s) = properFraction x
{-
p11 82 13 5246
(10532,2132)
(2.67 secs, 610580736 bytes)
-}
p12 n s a = f 0 0 (chi s) ? n
where f k y (c:cs)
|k == a = y
|otherwise = f (k+1) (prec 20 $ sin (y+fromInteger c)) cs
{-
p12 11 24905 1000
-0.95250313722
(0.29 secs, 207889240 bytes)
p12 11 14905 200000
-0.80338154922
(11.55 secs, 9981154672 bytes)
-}
p13 n s a b = bsum (zipWith3 (\a b c -> a*b/c) as bs cs) ?? (n+1)
where as = take (n `div` 2) (map (\x -> fromInteger x - scale 1 30) (chi s))
bs = tail (iterate (* sqrt (a/b)) 1)
cs = map fromInteger (scanl (*) 1 [2..])
{-
p13 2000 102348 9999 1001
1.3148975627779447163107569531450272105470501991442659409013069882493811911013720
...
14096037221926209702149402897608454262629229624730552707920e10
(23.76 secs, 8757132408 bytes)
-}
p14 n a b = head (allZeros (n+1) f (0 -+- (val b*pi/4))) ? n
where f x = exp (-x/a) - tan (x/b)
{-
p14 1000 10 10
5.313908566521572046202664406047153136830749994680350179440416642864202502440058660714165198
...
1993742013577685970083189334559355797301908569071064365753611935955962832815667242
(0.06 secs, 43362384 bytes)
p14 20000 1234 4321
1372.607407915898039275620096163136526889749503325903743299297616743666337996084985896175294
...
89660209714061591467119432351273335633380
(14.95 secs, 5790678848 bytes)
-}
p15 n a b c = integral 8 n (\x -> sin (a * cos (b*x+c))) (0 +- 1) ?? n
{-
The attempt above can only solve the trivial problem, taking almost 3 seconds. The one below, from Clenshaw-Curtis, solves the simple problem, but the error analysis is non-trivial.
cctest can easily be modified to a solution that indicates that 512 pts are sufficient, but it's not a proof.
quad (\x -> sin (a * cos (b*x+c))) (cpss!!9) (wss!!9) ?? 20
1.01356447296047236253e-2
(10.16 secs, 3480716500 bytes)
-}
p16 n a b c = deriv c f (recip (sqrt a)) ?? n
where f x = sqrt (sin x + recip (sqrt b))
{-
p16 20 3210 5432 30
-1.19844645066450855152e74
(1.20 secs, 811510936 bytes)
-}
| sydow/ireal | applications/MoreDigits.hs | bsd-3-clause | 8,375 | 0 | 15 | 1,435 | 1,289 | 668 | 621 | 43 | 1 |
{-# LANGUAGE RecordWildCards #-}
module TestPrelude (
module Control.Concurrent.STM
, module Control.Monad
, module Data.Vector
, module Network.DHT.Kademlia.Bucket
, module Network.DHT.Kademlia.Def
, module Test.Hspec
, module TestEq
, addNodeSimple
, newEnv
, sendNoop
, defaultNode
, fullKBucket
, leftKBucket
, rightKBucket
) where
import Control.Concurrent
import Control.Concurrent.STM
import Control.Monad
import Data.Vector ((!), (//))
import Network.DHT.Kademlia.Bucket
import Network.DHT.Kademlia.Def hiding (thisNode)
import Network.Socket hiding (send)
import Test.Hspec
import TestEq
import qualified Data.HashTable.IO as H
import qualified Data.Vector as V
-- | Node ids in, node ids out
addNodeSimple :: Double -> [Double] -> IO [[Double]]
addNodeSimple thisId otherIds = do
rt <- atomically $ defaultRoutingTable Nothing
forM_ otherIds $ \i -> addNode thisNode rt defaultNode {nodeId = i}
rt2 <- stripSTM rt
return $ map (map (nodeId . fst) . V.toList . kContent) $ V.toList rt2
where
thisNode = defaultNode {nodeId = thisId}
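-- A minimal sketch (hypothetical, not part of the original module) of how
-- addNodeSimple might be exercised from a spec:
addNodeSimpleSpec :: Spec
addNodeSimpleSpec = describe "addNodeSimple" $
  it "returns one id list per k-bucket" $ do
    buckets <- addNodeSimple 0.0 [1.0, 2.0, 3.0]
    buckets `shouldSatisfy` (not . null)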
newEnv :: IO KademliaEnv
newEnv = do
sock <- socket AF_INET Datagram defaultProtocol
mvStoreHT <- H.new >>= newMVar
dataStore <- defaultDataStore
pingREQs <- atomically $ newTVar V.empty
routingTable <- atomically $ defaultRoutingTable Nothing
return KademliaEnv{..}
where
logDebug _ = return ()
logInfo _ = return ()
logWarn _ = return ()
logError _ = return ()
sendNoop :: RPC -> IO ()
sendNoop _ = return ()
defaultNode = Node 0 $ SockAddrUnix ""
fullKBucket = KBucket {
kContent = fullContent
}
leftKBucket = KBucket {
kContent = fullContent
}
rightKBucket = KBucket {
kContent = V.fromList []
}
fullContent = V.generate systemK genF where
genF i = (defaultNode {nodeId = fromIntegral i}, LastSeen 0)
| phylake/kademlia | test/TestPrelude.hs | bsd-3-clause | 1,901 | 0 | 15 | 410 | 581 | 320 | 261 | -1 | -1 |
{-# LANGUAGE KindSignatures #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TemplateHaskell #-}
module LambdaCms.Core.Handler.User
( getUserAdminIndexR
, getUserAdminNewR
, postUserAdminNewR
, getUserAdminEditR
, patchUserAdminEditR
, deleteUserAdminEditR
, chpassUserAdminEditR
, rqpassUserAdminEditR
, deactivateUserAdminEditR
, activateUserAdminEditR
, getUserAdminActivateR
, postUserAdminActivateR
) where
import LambdaCms.Core.Import
import LambdaCms.Core.Message (CoreMessage)
import qualified LambdaCms.Core.Message as Msg
import Yesod (Route)
import Yesod.Auth (Creds (..), requireAuthId,
setCreds)
import Yesod.Auth.Email (saltPass)
import Control.Arrow ((&&&))
import Data.Maybe (fromJust, fromMaybe, isJust)
import qualified Data.Set as S
import qualified Data.Text as T (breakOn, length, pack,
takeWhile)
import Data.Time.Clock
import Data.Time.Format.Human
import Network.Mail.Mime
import Text.Blaze.Html.Renderer.Text (renderHtml)
-- | Data type used by the change password form.
data ComparePassword = ComparePassword { originalPassword :: Text
, _confirmPassword :: Text
} deriving (Show, Eq)
-- | Form by which account settings are changed.
accountSettingsForm :: LambdaCmsAdmin master
=> User
-> S.Set (Roles master)
-> Maybe CoreMessage
-> Html
-> MForm (HandlerT master IO)
(FormResult (User, [Roles master]), WidgetT master IO ())
accountSettingsForm user roles mlabel extra = do
maRoles <- lift mayAssignRoles
-- User fields
(unameRes, unameView) <- mreq textField (bfs Msg.Username) (Just $ userName user)
(emailRes, emailView) <- mreq emailField (bfs Msg.EmailAddress) (Just $ userEmail user)
-- Roles field
(rolesRes, mrolesView) <- if maRoles
then do
(rolesRes', rolesView) <- mreq (checkboxesField roleList)
"Not used"
(Just $ S.toList roles)
return (rolesRes', Just rolesView)
else return (FormSuccess $ S.toList roles, Nothing)
let userRes = (\un ue -> user { userName = un, userEmail = ue })
<$> unameRes
<*> emailRes
formRes = (,) <$> userRes <*> rolesRes
widget = $(widgetFile "user/settings-form")
return (formRes, widget)
where
roleList = optionsPairs $ map ((T.pack . show) &&& id) [minBound .. maxBound]
-- | Webform for changing a user's password.
userChangePasswordForm :: Maybe Text -> Maybe CoreMessage -> CoreForm ComparePassword
userChangePasswordForm original submit =
renderBootstrap3 BootstrapBasicForm $ ComparePassword
<$> areq validatePasswordField (withName "original-pw" $ bfs Msg.Password) Nothing
<*> areq comparePasswordField (bfs Msg.Confirm) Nothing
<* bootstrapSubmit (BootstrapSubmit (fromMaybe Msg.Submit submit) " btn-success " [])
where
validatePasswordField = check validatePassword passwordField
comparePasswordField = check comparePasswords passwordField
validatePassword pw
| T.length pw >= 8 = Right pw
| otherwise = Left Msg.PasswordTooShort
comparePasswords pw
| pw == fromMaybe "" original = Right pw
| otherwise = Left Msg.PasswordMismatch
-- | Helper to create a user with an email address.
generateUserWithEmail :: Text -> IO User
generateUserWithEmail e = do
uuid <- generateUUID
token <- generateActivationToken
timeNow <- getCurrentTime
return User { userIdent = uuid
, userName = fst $ T.breakOn "@" e
, userPassword = Nothing
, userEmail = e
, userActive = False
, userToken = Just token
, userCreatedAt = timeNow
, userLastLogin = Nothing
, userDeletedAt = Nothing
}
-- | Helper to create an empty user.
emptyUser :: IO User
emptyUser = generateUserWithEmail ""
-- | Validate an activation token.
validateUserToken :: User -> Text -> Maybe Bool
validateUserToken user token = case userToken user of
Just t
| t == token -> Just True -- tokens match
| otherwise -> Just False -- tokens don't match
  Nothing        -> Nothing    -- there is no token (account already activated)
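-- A tiny sketch (hypothetical, not part of the original module) spelling out
-- the three possible outcomes above; it only rewrites the token field of a
-- user record supplied by the caller.
_validateUserTokenExamples :: User -> Bool
_validateUserTokenExamples u =
     validateUserToken u { userToken = Just "abc" } "abc" == Just True
  && validateUserToken u { userToken = Just "abc" } "xyz" == Just False
  && validateUserToken u { userToken = Nothing } "abc" == Nothing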
-- | Send an email to the user with a link containing the activation token.
sendAccountActivationToken :: Entity User -> CoreHandler ()
sendAccountActivationToken (Entity userId user) = case userToken user of
Just token ->
lift $ sendMailToUser user "Account activation"
$(hamletFile "templates/mail/activation-text.hamlet")
$(hamletFile "templates/mail/activation-html.hamlet")
Nothing -> error "No activation token found"
-- | Send an email to the user with a link containing the reset token.
sendAccountResetToken :: Entity User -> CoreHandler ()
sendAccountResetToken (Entity userId user) = case userToken user of
Just token ->
lift $ sendMailToUser user "Account password reset"
$(hamletFile "templates/mail/reset-text.hamlet")
$(hamletFile "templates/mail/reset-html.hamlet")
Nothing -> error "No reset token found"
-- | Function for sending mail to the user. How the mail is actually sent is
-- left to the implementation of the `lambdaCmsSendMail` function in the "base"
-- application.
sendMailToUser :: LambdaCmsAdmin master
=> User
-> Text
-> ((Route master -> [(Text, Text)] -> Text) -> Html)
-> ((Route master -> [(Text, Text)] -> Text) -> Html)
-> HandlerT master IO ()
sendMailToUser user subj ttemp htemp = do
text <- getRenderedTemplate ttemp
html <- getRenderedTemplate htemp
mail <- liftIO $ simpleMail
(Address (Just $ userName user) (userEmail user))
(Address (Just "LambdaCms") "[email protected]")
subj
text
html
[]
lambdaCmsSendMail mail
where
getRenderedTemplate template = do
markup <- withUrlRenderer template
return $ renderHtml markup
-- | User overview.
getUserAdminIndexR :: CoreHandler Html
getUserAdminIndexR = do
timeNow <- liftIO getCurrentTime
lift $ do
can <- getCan
(users' :: [Entity User]) <- runDB $ selectList [UserDeletedAt ==. Nothing] []
users <- mapM (\user -> do
ur <- getUserRoles $ entityKey user
return (user, S.toList ur)
) users'
hrtLocale <- lambdaCmsHumanTimeLocale
adminLayout $ do
setTitleI Msg.UserIndex
$(widgetFile "user/index")
-- | Create a new user, show the form.
getUserAdminNewR :: CoreHandler Html
getUserAdminNewR = do
eu <- liftIO emptyUser
lift $ do
can <- getCan
drs <- defaultRoles
(formWidget, enctype) <- generateFormPost $
accountSettingsForm eu drs (Just Msg.Create)
adminLayout $ do
setTitleI Msg.NewUser
$(widgetFile "user/new")
-- | Create a new user, handle a posted form.
postUserAdminNewR :: CoreHandler Html
postUserAdminNewR = do
eu <- liftIO emptyUser
drs <- lift defaultRoles
((formResult, formWidget), enctype) <- lift . runFormPost $
accountSettingsForm eu drs (Just Msg.Create)
case formResult of
FormSuccess (user, roles) -> do
userId <- lift $ runDB $ insert user
lift $ setUserRoles userId (S.fromList roles)
sendAccountActivationToken (Entity userId user)
lift $ logUser user >>= logAction
lift $ setMessageI Msg.SuccessCreate
redirectUltDest $ UserAdminR UserAdminIndexR
_ -> lift $ do
can <- getCan
adminLayout $ do
setTitleI Msg.NewUser
$(widgetFile "user/new")
-- | Show the forms to edit an existing user.
getUserAdminEditR :: UserId -> CoreHandler Html
getUserAdminEditR userId = do
timeNow <- liftIO getCurrentTime
lift $ do
authId <- requireAuthId
can <- getCan
user <- runDB $ get404 userId
urs <- getUserRoles userId
hrtLocale <- lambdaCmsHumanTimeLocale
(formWidget, enctype) <- generateFormPost $
accountSettingsForm user urs (Just Msg.Save) -- user form
(pwFormWidget, pwEnctype) <- generateFormPost $
userChangePasswordForm Nothing (Just Msg.Change) -- user password form
adminLayout $ do
setTitleI . Msg.EditUser $ userName user
$(widgetFile "user/edit")
-- | Change a user's main properties.
patchUserAdminEditR :: UserId -> CoreHandler Html
patchUserAdminEditR userId = do
(user, timeNow, hrtLocale, urs) <- updateHelper userId
(pwFormWidget, pwEnctype) <- lift . generateFormPost $
userChangePasswordForm Nothing (Just Msg.Change)
((formResult, formWidget), enctype) <- lift . runFormPost $
accountSettingsForm user urs (Just Msg.Save)
case formResult of
FormSuccess (updatedUser, updatedRoles) -> do
_ <- lift $ runDB $ update userId [ UserName =. userName updatedUser
, UserEmail =. userEmail updatedUser
]
lift $ setUserRoles userId (S.fromList updatedRoles)
lift $ logUser user >>= logAction
lift $ setMessageI Msg.SuccessReplace
redirect $ UserAdminR $ UserAdminEditR userId
_ -> lift $ do
authId <- requireAuthId
can <- getCan
adminLayout $ do
setTitleI . Msg.EditUser $ userName user
$(widgetFile "user/edit")
-- | Change a user's password.
--
-- Since 0.3.1.0
-- Store hashed passwords using `saltPass` from `Yesod.Auth.Email`.
chpassUserAdminEditR :: UserId -> CoreHandler Html
chpassUserAdminEditR userId = do
authId <- lift requireAuthId
if userId == authId
then do
(user, timeNow, hrtLocale, urs) <- updateHelper userId
(formWidget, enctype) <- lift . generateFormPost $
accountSettingsForm user urs (Just Msg.Save)
opw <- lookupPostParam "original-pw"
((formResult, pwFormWidget), pwEnctype) <- lift . runFormPost $
userChangePasswordForm opw (Just Msg.Change)
case formResult of
FormSuccess f -> do
          {- It is not yet clear what the best way to store passwords is,
             because different plug-ins may store them differently; but
             unmodified plain-text passwords are insecure in any case.
             The only default plug-in from `Yesod.Auth` which stores
             passwords internally is `Yesod.Auth.Email`, and since it
             hashes passwords with the `saltPass` function, it was decided
             to default to this approach for now.
          -}
saltedPassword <- liftIO . saltPass $ originalPassword f
_ <- lift . runDB $
update userId [ UserPassword =. Just saltedPassword ]
lift $ logUser user >>= logAction
lift $ setMessageI Msg.SuccessChgPwd
redirect $ UserAdminR $ UserAdminEditR userId
_ -> lift $ do
can <- getCan
adminLayout $ do
setTitleI . Msg.EditUser $ userName user
$(widgetFile "user/edit")
    else error "Can't change this user's password"
-- | Helper function to get data required for some DB update operations in
-- handlers. Removes code duplication.
updateHelper :: forall (t :: (* -> *) -> * -> *) site.
( MonadTrans t, MonadIO (t (HandlerT site IO))
, LambdaCmsAdmin site )
=> Key User
-> t (HandlerT site IO)
(User, UTCTime, HumanTimeLocale, S.Set (Roles site))
updateHelper userId = do
user <- lift . runDB $ get404 userId
timeNow <- liftIO getCurrentTime
hrtLocale <- lift lambdaCmsHumanTimeLocale
roles <- lift $ getUserRoles userId
return (user, timeNow, hrtLocale, roles)
-- | Request a user's password to be reset.
rqpassUserAdminEditR :: UserId -> CoreHandler Html
rqpassUserAdminEditR userId = do
user' <- lift . runDB $ get404 userId
token <- liftIO generateActivationToken
let user = user'
{ userToken = Just token
, userPassword = Nothing
, userActive = False
}
_ <- lift . runDB $ replace userId user
_ <- sendAccountResetToken (Entity userId user)
lift $ logUser user >>= logAction
lift $ setMessageI Msg.PasswordResetTokenSend
redirectUltDest . UserAdminR $ UserAdminEditR userId
-- | Deactivate a user.
deactivateUserAdminEditR :: UserId -> CoreHandler Html
deactivateUserAdminEditR userId = do
user' <- lift . runDB $ get404 userId
case userToken user' of
Nothing -> do
let user = user' { userActive = False }
_ <- lift . runDB $ replace userId user
lift $ logUser user >>= logAction
lift $ setMessageI Msg.UserDeactivated
_ -> lift $ setMessageI Msg.UserStillPending
redirectUltDest . UserAdminR $ UserAdminEditR userId
-- | Activate a user.
activateUserAdminEditR :: UserId -> CoreHandler Html
activateUserAdminEditR userId = do
user' <- lift . runDB $ get404 userId
case userToken user' of
Nothing -> do
let user = user' { userActive = True }
_ <- lift . runDB $ replace userId user
lift $ logUser user >>= logAction
lift $ setMessageI Msg.UserActivated
_ -> lift $ setMessageI Msg.UserStillPending
redirectUltDest . UserAdminR $ UserAdminEditR userId
-- | Delete an existing user.
-- TODO: Don\'t /actually/ delete the DB record!
deleteUserAdminEditR :: UserId -> CoreHandler Html
deleteUserAdminEditR userId = do
lift $ do
user' <- runDB $ get404 userId
timeNow <- liftIO getCurrentTime
uuid <- liftIO generateUUID
let random = T.takeWhile (/= '-') uuid
let user = user'
{ userEmail = random <> "@@@" <> userEmail user'
, userToken = Nothing
, userActive = False
, userDeletedAt = Just timeNow
}
_ <- runDB $ replace userId user
logAction =<< logUser user
setMessageI Msg.SuccessDelete
redirectUltDest $ UserAdminR UserAdminIndexR
-- | Activate an account via an emailed activation link.
getUserAdminActivateR :: UserId -> Text -> CoreHandler Html
getUserAdminActivateR userId token = do
user <- lift . runDB $ get404 userId
case validateUserToken user token of
Just True -> do
(pwFormWidget, pwEnctype) <- lift . generateFormPost $
userChangePasswordForm Nothing (Just Msg.Save)
lift . adminAuthLayout $ do
setTitle . toHtml $ userName user
$(widgetFile "user/activate")
Just False -> lift . adminAuthLayout $ do
setTitleI Msg.TokenMismatch
$(widgetFile "user/tokenmismatch")
Nothing -> lift . adminAuthLayout $ do
setTitleI Msg.AccountAlreadyActivated
$(widgetFile "user/account-already-activated")
-- | Process a password change by password-reset-link email.
postUserAdminActivateR :: UserId -> Text -> CoreHandler Html
postUserAdminActivateR userId token = do
user <- lift . runDB $ get404 userId
case validateUserToken user token of
Just True -> do
opw <- lookupPostParam "original-pw"
((formResult, pwFormWidget), pwEnctype) <- lift . runFormPost $
userChangePasswordForm opw (Just Msg.Save)
case formResult of
FormSuccess f -> do
_ <- lift . runDB $
update userId [ UserPassword =. Just (originalPassword f)
, UserToken =. Nothing
, UserActive =. True
]
lift $ setMessageI Msg.ActivationSuccess
lift . setCreds False $
Creds "lambdacms-token-activation" (userEmail user) []
redirect AdminHomeR
_ -> lift . adminAuthLayout $ do
setTitle . toHtml $ userName user
$(widgetFile "user/activate")
Just False -> lift . adminAuthLayout $ do
setTitleI Msg.TokenMismatch
$(widgetFile "user/tokenmismatch")
Nothing -> lift . adminAuthLayout $ do
setTitleI Msg.AccountAlreadyActivated
$(widgetFile "user/account-already-activated")
| geraldus/lambdacms | lambdacms-core/LambdaCms/Core/Handler/User.hs | mit | 17,686 | 0 | 24 | 5,809 | 4,109 | 2,014 | 2,095 | 344 | 4 |
{-# LANGUAGE OverloadedStrings #-}
module Gpg.GenRevoke where
import Control.Monad
import Data.Text (Text)
import qualified Data.Text as Text
import Gpg.Run
data RevocationReason = NoReason
| Compromised
| Superseeded
| NoLongerUsed
deriving (Eq, Show, Enum)
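-- Drives @gpg --gen-revoke@, answering its status prompts: confirm generation,
-- send the numeric reason code, send the free-text explanation line by line
-- followed by an empty line, confirm the reason, then supply the passphrase.
-- Note that the target is currently hard-coded as "foobar"; the 'key'
-- argument is not yet used.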
genRevoke reason reasonText key = do
runGPG ["--gen-revoke", "foobar"] $ do
expectAndSend (StatusGetBool, "gen_revoke.okay") "y"
let reasonCode = Text.pack . show $ fromEnum reason
expectAndSend (StatusGetLine, "ask_revocation_reason.code") reasonCode
forM_ (Text.lines reasonText) $ \line -> do
expect StatusGetLine "ask_revocation_reason.text"
send line
expectAndSend (StatusGetLine, "ask_revocation_reason.text") ""
expectAndSend (StatusGetBool, "ask_revocation_reason.okay") "y"
getPassphrase
getPassphrase
| Philonous/pontarius-gpg | src/Gpg/GenRevoke.hs | mit | 983 | 0 | 15 | 296 | 215 | 111 | 104 | 23 | 1 |
module AppTooManyArguments1 where
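-- Intentionally ill-typed test input: 'fac n 1' applies the one-argument
-- function 'fac' to two arguments, the situation targeted by the
-- "too many arguments" type-error heuristic.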
fac :: Int -> Int
fac 0 = 1
fac n = n * (fac n 1)
| roberth/uu-helium | test/typeerrors/Heuristics/AppTooManyArguments1.hs | gpl-3.0 | 85 | 0 | 7 | 21 | 41 | 22 | 19 | 4 | 1 |
module Pos.Chain.Delegation.Proof
( DlgProof
, mkDlgProof
) where
import Pos.Crypto (Hash, hash)
import Pos.Chain.Delegation.Payload
-- | Proof of delegation payload.
type DlgProof = Hash DlgPayload
-- | Creates 'DlgProof' out of delegation payload.
mkDlgProof :: DlgPayload -> DlgProof
mkDlgProof = hash
| input-output-hk/cardano-sl | chain/src/Pos/Chain/Delegation/Proof.hs | apache-2.0 | 350 | 0 | 5 | 86 | 61 | 39 | 22 | 8 | 1 |
module Sansa.Commands.AddTorrent
( addTorrentCmd
) where
import Sansa.CommandsCommon
import Sansa.Commands.CommonOpts
import Aria2.Commands (addTorrent)
import Aria2.Types
import System.Directory
import Data.Maybe
import qualified Data.ByteString as B
import qualified Data.ByteString.Base64 as Base64
import Text.PrettyPrint.ANSI.Leijen hiding ((<>),(<$>))
doc :: Doc
doc = text "Add a local \".torrent\" file for download" <> line
<$$> text "If you want to add a torrent file from a remote url, see"
<+> text "'sansa add' and its --follow-torrent option." <> line
<$$> text "If - is used instead of the filename, read the torrent from stdin."
addTorrentCmd :: Command
addTorrentCmd = info (helper <*> addTorrentOpts)
( fullDesc
<> headerDoc (Just doc)
<> progDesc "Add local .torrent file"
)
addTorrentOpts :: Parser (CmdAction ())
addTorrentOpts = addTAction <$> commonDlOpts <*> strArgument (metavar "FILE")
addTAction :: DlOptions -> FilePath -> CmdAction ()
addTAction opts path = do
file <- liftIO $ readF path
cwd <- flip fromMaybe (optDir opts) <$> liftIO getCurrentDirectory
let opts' = opts { optDir = Just cwd }
GID gid <- runAria2 $ addTorrent (Base64.encode file) [] opts'
liftIO $ putStrLn $ "Queued download with id: " ++ show gid
where readF "-" = B.getContents
readF name = B.readFile name
| rootzlevel/sansa | src/Sansa/Commands/AddTorrent.hs | bsd-2-clause | 1,434 | 0 | 12 | 320 | 371 | 195 | 176 | 32 | 2 |
{-# LANGUAGE CPP #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-|
This module exports the 'Config' datatype, which you can use to configure the
Snap HTTP server.
-}
module Snap.Http.Server.Config
( Config
, ConfigBackend(..)
, ConfigLog(..)
, emptyConfig
, defaultConfig
, commandLineConfig
, completeConfig
, optDescrs
, getAccessLog
, getBackend
, getBind
, getCompression
, getDefaultTimeout
, getErrorHandler
, getErrorLog
, getHostname
, getLocale
, getOther
, getPort
, getProxyType
, getSSLBind
, getSSLCert
, getSSLKey
, getSSLPort
, getVerbose
, setAccessLog
, setBackend
, setBind
, setCompression
, setDefaultTimeout
, setErrorHandler
, setErrorLog
, setHostname
, setLocale
, setOther
, setPort
, setProxyType
, setSSLBind
, setSSLCert
, setSSLKey
, setSSLPort
, setVerbose
) where
------------------------------------------------------------------------------
import Blaze.ByteString.Builder
import Blaze.ByteString.Builder.Char8
import Control.Exception (SomeException)
import Control.Monad
import qualified Data.ByteString.Char8 as B
import Data.ByteString (ByteString)
import Data.Char
import Data.Function
import Data.List
import Data.Maybe
import Data.Monoid
import qualified Data.Text as T
import qualified Data.Text.Encoding as T
import Data.Typeable
import Prelude hiding (catch)
import Snap.Core
import Snap.Iteratee ((>==>), enumBuilder)
import Snap.Internal.Debug (debug)
import Snap.Util.Proxy
import System.Console.GetOpt
import System.Environment hiding (getEnv)
#ifndef PORTABLE
import System.Posix.Env
#endif
import System.Exit
import System.IO
------------------------------------------------------------------------------
import Snap.Internal.Http.Server (requestErrorMessage)
------------------------------------------------------------------------------
-- | This datatype allows you to override which backend (either simple or
-- libev) to use. Most users will not want to set this, preferring to rely on
-- the compile-time default.
--
-- Note that if you specify the libev backend and have not compiled in support
-- for it, your server will fail at runtime.
data ConfigBackend = ConfigSimpleBackend
| ConfigLibEvBackend
deriving (Show, Eq)
------------------------------------------------------------------------------
-- | Data type representing the configuration of a logging target
data ConfigLog = ConfigNoLog -- ^ no logging
| ConfigFileLog FilePath -- ^ log to text file
| ConfigIoLog (ByteString -> IO ()) -- ^ log custom IO handler
instance Show ConfigLog where
show ConfigNoLog = "ConfigNoLog"
show (ConfigFileLog f) = "ConfigFileLog " ++ show f
show (ConfigIoLog _) = "ConfigIoLog"
------------------------------------------------------------------------------
-- | A record type which represents partial configurations (for 'httpServe')
-- by wrapping all of its fields in a 'Maybe'. Values of this type are usually
-- constructed via its 'Monoid' instance by doing something like:
--
-- > setPort 1234 mempty
--
-- Any fields which are unspecified in the 'Config' passed to 'httpServe' (and
-- this is the norm) are filled in with default values from 'defaultConfig'.
data Config m a = Config
{ hostname :: Maybe ByteString
, accessLog :: Maybe ConfigLog
, errorLog :: Maybe ConfigLog
, locale :: Maybe String
, port :: Maybe Int
, bind :: Maybe ByteString
, sslport :: Maybe Int
, sslbind :: Maybe ByteString
, sslcert :: Maybe FilePath
, sslkey :: Maybe FilePath
, compression :: Maybe Bool
, verbose :: Maybe Bool
, errorHandler :: Maybe (SomeException -> m ())
, defaultTimeout :: Maybe Int
, other :: Maybe a
, backend :: Maybe ConfigBackend
, proxyType :: Maybe ProxyType
}
instance Show (Config m a) where
show c = unlines [ "Config:"
, "hostname: " ++ _hostname
, "accessLog: " ++ _accessLog
, "errorLog: " ++ _errorLog
, "locale: " ++ _locale
, "port: " ++ _port
, "bind: " ++ _bind
, "sslport: " ++ _sslport
, "sslbind: " ++ _sslbind
, "sslcert: " ++ _sslcert
, "sslkey: " ++ _sslkey
, "compression: " ++ _compression
, "verbose: " ++ _verbose
, "defaultTimeout: " ++ _defaultTimeout
, "backend: " ++ _backend
, "proxyType: " ++ _proxyType
]
where
_hostname = show $ hostname c
_accessLog = show $ accessLog c
_errorLog = show $ errorLog c
_locale = show $ locale c
_port = show $ port c
_bind = show $ bind c
_sslport = show $ sslport c
_sslbind = show $ sslbind c
_sslcert = show $ sslcert c
_sslkey = show $ sslkey c
_compression = show $ compression c
_verbose = show $ verbose c
_defaultTimeout = show $ defaultTimeout c
_backend = show $ backend c
_proxyType = show $ proxyType c
------------------------------------------------------------------------------
-- | Returns a completely empty 'Config'. Equivalent to 'mempty' from
-- 'Config''s 'Monoid' instance.
emptyConfig :: Config m a
emptyConfig = mempty
------------------------------------------------------------------------------
instance Monoid (Config m a) where
mempty = Config
{ hostname = Nothing
, accessLog = Nothing
, errorLog = Nothing
, locale = Nothing
, port = Nothing
, bind = Nothing
, sslport = Nothing
, sslbind = Nothing
, sslcert = Nothing
, sslkey = Nothing
, compression = Nothing
, verbose = Nothing
, errorHandler = Nothing
, defaultTimeout = Nothing
, other = Nothing
, backend = Nothing
, proxyType = Nothing
}
a `mappend` b = Config
{ hostname = ov hostname a b
, accessLog = ov accessLog a b
, errorLog = ov errorLog a b
, locale = ov locale a b
, port = ov port a b
, bind = ov bind a b
, sslport = ov sslport a b
, sslbind = ov sslbind a b
, sslcert = ov sslcert a b
, sslkey = ov sslkey a b
, compression = ov compression a b
, verbose = ov verbose a b
, errorHandler = ov errorHandler a b
, defaultTimeout = ov defaultTimeout a b
, other = ov other a b
, backend = ov backend a b
, proxyType = ov proxyType a b
}
where
ov f x y = getLast $! (mappend `on` (Last . f)) x y
------------------------------------------------------------------------------
-- | The 'Typeable1' instance is here so 'Config' values can be
-- dynamically loaded with Hint.
configTyCon :: TyCon
configTyCon = mkTyCon "Snap.Http.Server.Config.Config"
{-# NOINLINE configTyCon #-}
instance (Typeable1 m) => Typeable1 (Config m) where
typeOf1 _ = mkTyConApp configTyCon [typeOf1 (undefined :: m ())]
------------------------------------------------------------------------------
-- | These are the default values for the options
defaultConfig :: MonadSnap m => Config m a
defaultConfig = mempty
{ hostname = Just "localhost"
, accessLog = Just $ ConfigFileLog "log/access.log"
, errorLog = Just $ ConfigFileLog "log/error.log"
, locale = Just "en_US"
, compression = Just True
, verbose = Just True
, errorHandler = Just defaultErrorHandler
, bind = Just "0.0.0.0"
, sslbind = Just "0.0.0.0"
, sslcert = Just "cert.pem"
, sslkey = Just "key.pem"
, defaultTimeout = Just 60
}
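------------------------------------------------------------------------------
-- A small sketch (not part of the original module): partial configurations
-- are normally built by chaining setters over 'mempty'; unspecified fields
-- are later filled in from 'defaultConfig'. The name 'exampleConfig' is
-- illustrative only.
exampleConfig :: Config m a
exampleConfig = setPort 8080
              . setCompression False
              . setAccessLog (ConfigFileLog "log/access.log")
              $ mempty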
------------------------------------------------------------------------------
-- | The hostname of the HTTP server. This field has the same format as an HTTP
-- @Host@ header; if a @Host@ header came in with the request, we use that,
-- otherwise we default to this value specified in the configuration.
getHostname :: Config m a -> Maybe ByteString
getHostname = hostname
-- | Path to the access log
getAccessLog :: Config m a -> Maybe ConfigLog
getAccessLog = accessLog
-- | Path to the error log
getErrorLog :: Config m a -> Maybe ConfigLog
getErrorLog = errorLog
-- | Gets the locale to use. Locales are used on Unix only, to set the
-- @LANG@\/@LC_ALL@\/etc. environment variable. For instance if you set the
-- locale to \"@en_US@\", we'll set the relevant environment variables to
-- \"@en_US.UTF-8@\".
getLocale :: Config m a -> Maybe String
getLocale = locale
-- | Returns the port to listen on (for http)
getPort :: Config m a -> Maybe Int
getPort = port
-- | Returns the address to bind to (for http)
getBind :: Config m a -> Maybe ByteString
getBind = bind
-- | Returns the port to listen on (for https)
getSSLPort :: Config m a -> Maybe Int
getSSLPort = sslport
-- | Returns the address to bind to (for https)
getSSLBind :: Config m a -> Maybe ByteString
getSSLBind = sslbind
-- | Path to the SSL certificate file
getSSLCert :: Config m a -> Maybe FilePath
getSSLCert = sslcert
-- | Path to the SSL key file
getSSLKey :: Config m a -> Maybe FilePath
getSSLKey = sslkey
-- | If set and set to True, compression is turned on when applicable
getCompression :: Config m a -> Maybe Bool
getCompression = compression
-- | Whether to write server status updates to stderr
getVerbose :: Config m a -> Maybe Bool
getVerbose = verbose
-- | A MonadSnap action to handle 500 errors
getErrorHandler :: Config m a -> Maybe (SomeException -> m ())
getErrorHandler = errorHandler
getDefaultTimeout :: Config m a -> Maybe Int
getDefaultTimeout = defaultTimeout
getOther :: Config m a -> Maybe a
getOther = other
getBackend :: Config m a -> Maybe ConfigBackend
getBackend = backend
getProxyType :: Config m a -> Maybe ProxyType
getProxyType = proxyType
------------------------------------------------------------------------------
setHostname :: ByteString -> Config m a -> Config m a
setHostname x c = c { hostname = Just x }
setAccessLog :: ConfigLog -> Config m a -> Config m a
setAccessLog x c = c { accessLog = Just x }
setErrorLog :: ConfigLog -> Config m a -> Config m a
setErrorLog x c = c { errorLog = Just x }
setLocale :: String -> Config m a -> Config m a
setLocale x c = c { locale = Just x }
setPort :: Int -> Config m a -> Config m a
setPort x c = c { port = Just x }
setBind :: ByteString -> Config m a -> Config m a
setBind x c = c { bind = Just x }
setSSLPort :: Int -> Config m a -> Config m a
setSSLPort x c = c { sslport = Just x }
setSSLBind :: ByteString -> Config m a -> Config m a
setSSLBind x c = c { sslbind = Just x }
setSSLCert :: FilePath -> Config m a -> Config m a
setSSLCert x c = c { sslcert = Just x }
setSSLKey :: FilePath -> Config m a -> Config m a
setSSLKey x c = c { sslkey = Just x }
setCompression :: Bool -> Config m a -> Config m a
setCompression x c = c { compression = Just x }
setVerbose :: Bool -> Config m a -> Config m a
setVerbose x c = c { verbose = Just x }
setErrorHandler :: (SomeException -> m ()) -> Config m a -> Config m a
setErrorHandler x c = c { errorHandler = Just x }
setDefaultTimeout :: Int -> Config m a -> Config m a
setDefaultTimeout x c = c { defaultTimeout = Just x }
setOther :: a -> Config m a -> Config m a
setOther x c = c { other = Just x }
setBackend :: ConfigBackend -> Config m a -> Config m a
setBackend x c = c { backend = Just x }
setProxyType :: ProxyType -> Config m a -> Config m a
setProxyType x c = c { proxyType = Just x }
------------------------------------------------------------------------------
completeConfig :: (MonadSnap m) => Config m a -> IO (Config m a)
completeConfig config = do
when noPort $ hPutStrLn stderr
"no port specified, defaulting to port 8000"
return $! cfg `mappend` cfg'
where
cfg = defaultConfig `mappend` config
sslVals = map ($ cfg) [ isJust . getSSLPort
, isJust . getSSLBind
, isJust . getSSLKey
, isJust . getSSLCert ]
sslValid = and sslVals
noPort = isNothing (getPort cfg) && not sslValid
cfg' = emptyConfig { port = if noPort then Just 8000 else Nothing }
------------------------------------------------------------------------------
bsFromString :: String -> ByteString
bsFromString = T.encodeUtf8 . T.pack
------------------------------------------------------------------------------
toString :: ByteString -> String
toString = T.unpack . T.decodeUtf8
------------------------------------------------------------------------------
-- | Returns a description of the snap command line options suitable for use
-- with "System.Console.GetOpt".
optDescrs :: MonadSnap m =>
Config m a -- ^ the configuration defaults.
-> [OptDescr (Maybe (Config m a))]
optDescrs defaults =
[ Option [] ["hostname"]
(ReqArg (Just . setConfig setHostname . bsFromString) "NAME")
$ "local hostname" ++ defaultC getHostname
, Option ['b'] ["address"]
(ReqArg (\s -> Just $ mempty { bind = Just $ bsFromString s })
"ADDRESS")
$ "address to bind to" ++ defaultO bind
, Option ['p'] ["port"]
(ReqArg (\s -> Just $ mempty { port = Just $ read s}) "PORT")
$ "port to listen on" ++ defaultO port
, Option [] ["ssl-address"]
(ReqArg (\s -> Just $ mempty { sslbind = Just $ bsFromString s })
"ADDRESS")
$ "ssl address to bind to" ++ defaultO sslbind
, Option [] ["ssl-port"]
(ReqArg (\s -> Just $ mempty { sslport = Just $ read s}) "PORT")
$ "ssl port to listen on" ++ defaultO sslport
, Option [] ["ssl-cert"]
(ReqArg (\s -> Just $ mempty { sslcert = Just s}) "PATH")
$ "path to ssl certificate in PEM format" ++ defaultO sslcert
, Option [] ["ssl-key"]
(ReqArg (\s -> Just $ mempty { sslkey = Just s}) "PATH")
$ "path to ssl private key in PEM format" ++ defaultO sslkey
, Option [] ["access-log"]
(ReqArg (Just . setConfig setAccessLog . ConfigFileLog) "PATH")
$ "access log" ++ (defaultC $ getAccessLog)
, Option [] ["error-log"]
(ReqArg (Just . setConfig setErrorLog . ConfigFileLog) "PATH")
$ "error log" ++ (defaultC $ getErrorLog)
, Option [] ["no-access-log"]
(NoArg $ Just $ setConfig setAccessLog ConfigNoLog)
$ "don't have an access log"
, Option [] ["no-error-log"]
(NoArg $ Just $ setConfig setErrorLog ConfigNoLog)
$ "don't have an error log"
, Option ['c'] ["compression"]
(NoArg $ Just $ setConfig setCompression True)
$ "use gzip compression on responses"
, Option ['t'] ["timeout"]
(ReqArg (\t -> Just $ mempty {
defaultTimeout = Just $ read t
}) "SECS")
$ "set default timeout in seconds"
, Option [] ["no-compression"]
(NoArg $ Just $ setConfig setCompression False)
$ "serve responses uncompressed"
, Option ['v'] ["verbose"]
(NoArg $ Just $ setConfig setVerbose True)
$ "print server status updates to stderr"
, Option ['q'] ["quiet"]
(NoArg $ Just $ setConfig setVerbose False)
$ "do not print anything to stderr"
, Option [] ["proxy"]
(ReqArg (\t -> Just $ setConfig setProxyType $ read t)
"X_Forwarded_For")
$ concat [ "Set --proxy=X_Forwarded_For if your snap application "
, "is behind an HTTP reverse proxy to ensure that "
, "rqRemoteAddr is set properly."]
, Option ['h'] ["help"]
(NoArg Nothing)
$ "display this help and exit"
]
where
setConfig f c = f c mempty
conf = defaultConfig `mappend` defaults
defaultC f = maybe "" ((", default " ++) . show) $ f conf
defaultO f = maybe ", default off" ((", default " ++) . show) $ f conf
------------------------------------------------------------------------------
defaultErrorHandler :: MonadSnap m => SomeException -> m ()
defaultErrorHandler e = do
debug "Snap.Http.Server.Config errorHandler:"
req <- getRequest
let sm = smsg req
debug $ toString sm
logError sm
finishWith $ setContentType "text/plain; charset=utf-8"
. setContentLength (fromIntegral $ B.length msg)
. setResponseStatus 500 "Internal Server Error"
. modifyResponseBody
(>==> enumBuilder (fromByteString msg))
$ emptyResponse
where
smsg req = toByteString $ requestErrorMessage req e
msg = toByteString msgB
msgB = mconcat [
fromByteString "A web handler threw an exception. Details:\n"
, fromShow e
]
------------------------------------------------------------------------------
-- | Returns a 'Config' obtained from parsing the options specified on the
-- command-line.
--
-- On Unix systems, the locale is read from the @LANG@ environment variable.
commandLineConfig :: MonadSnap m
=> Config m a
-- ^ default configuration. This is combined with
-- 'defaultConfig' to obtain default values to use if the
-- given parameter is specified on the command line. Usually
-- it is fine to use 'emptyConfig' here.
-> IO (Config m a)
commandLineConfig defaults = do
args <- getArgs
prog <- getProgName
let opts = optDescrs defaults
result <- either (usage prog opts)
return
(case getOpt Permute opts args of
(f, _, [] ) -> maybe (Left []) Right $
fmap mconcat $ sequence f
(_, _, errs) -> Left errs)
#ifndef PORTABLE
lang <- getEnv "LANG"
completeConfig $ mconcat [defaults,
mempty {locale = fmap upToUtf8 lang},
result]
#else
completeConfig $ mconcat [defaults, result]
#endif
where
usage prog opts errs = do
let hdr = "Usage:\n " ++ prog ++ " [OPTION...]\n\nOptions:"
let msg = concat errs ++ usageInfo hdr opts
hPutStrLn stderr msg
exitFailure
#ifndef PORTABLE
upToUtf8 = takeWhile $ \c -> isAlpha c || '_' == c
#endif
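------------------------------------------------------------------------------
-- A minimal sketch (hypothetical, not part of the original module) of how a
-- caller might obtain a completed configuration; starting the server with it
-- is left to the calling application.
exampleCommandLineConfig :: MonadSnap m => IO (Config m ())
exampleCommandLineConfig = commandLineConfig (setPort 8000 emptyConfig)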
| beni55/snap-server | src/Snap/Http/Server/Config.hs | bsd-3-clause | 20,343 | 0 | 18 | 6,732 | 4,472 | 2,402 | 2,070 | 385 | 2 |
{-
(c) The University of Glasgow 2006
(c) The GRASP/AQUA Project, Glasgow University, 1992-1998
\section[Demand]{@Demand@: A decoupled implementation of a demand domain}
-}
{-# LANGUAGE CPP, FlexibleInstances, TypeSynonymInstances #-}
module Demand (
StrDmd, UseDmd(..), Count(..),
countOnce, countMany, -- cardinality
Demand, CleanDemand, getStrDmd, getUseDmd,
mkProdDmd, mkOnceUsedDmd, mkManyUsedDmd, mkHeadStrict, oneifyDmd,
toCleanDmd,
absDmd, topDmd, botDmd, seqDmd,
lubDmd, bothDmd,
lazyApply1Dmd, lazyApply2Dmd, strictApply1Dmd,
catchArgDmd,
isTopDmd, isAbsDmd, isSeqDmd,
peelUseCall, cleanUseDmd_maybe, strictenDmd, bothCleanDmd,
addCaseBndrDmd,
DmdType(..), dmdTypeDepth, lubDmdType, bothDmdType,
nopDmdType, botDmdType, mkDmdType,
addDemand, removeDmdTyArgs,
BothDmdArg, mkBothDmdArg, toBothDmdArg,
DmdEnv, emptyDmdEnv,
peelFV, findIdDemand,
DmdResult, CPRResult,
isBotRes, isTopRes,
topRes, botRes, exnRes, cprProdRes,
vanillaCprProdRes, cprSumRes,
appIsBottom, isBottomingSig, pprIfaceStrictSig,
trimCPRInfo, returnsCPR_maybe,
StrictSig(..), mkStrictSig, mkClosedStrictSig, nopSig, botSig, cprProdSig,
isNopSig, splitStrictSig, increaseStrictSigArity,
seqDemand, seqDemandList, seqDmdType, seqStrictSig,
evalDmd, cleanEvalDmd, cleanEvalProdDmd, isStrictDmd,
splitDmdTy, splitFVs,
deferAfterIO,
postProcessUnsat, postProcessDmdType,
splitProdDmd_maybe, peelCallDmd, mkCallDmd,
dmdTransformSig, dmdTransformDataConSig, dmdTransformDictSelSig,
argOneShots, argsOneShots,
trimToType, TypeShape(..),
useCount, isUsedOnce, reuseEnv,
killUsageDemand, killUsageSig, zapUsageDemand,
strictifyDictDmd
) where
#include "HsVersions.h"
import DynFlags
import Outputable
import Var ( Var )
import VarEnv
import UniqFM
import Util
import BasicTypes
import Binary
import Maybes ( orElse )
import Type ( Type, isUnliftedType )
import TyCon ( isNewTyCon, isClassTyCon )
import DataCon ( splitDataProductType_maybe )
{-
************************************************************************
* *
Joint domain for Strictness and Absence
* *
************************************************************************
-}
data JointDmd s u = JD { sd :: s, ud :: u }
deriving ( Eq, Show )
getStrDmd :: JointDmd s u -> s
getStrDmd = sd
getUseDmd :: JointDmd s u -> u
getUseDmd = ud
-- Pretty-printing
instance (Outputable s, Outputable u) => Outputable (JointDmd s u) where
ppr (JD {sd = s, ud = u}) = angleBrackets (ppr s <> char ',' <> ppr u)
-- Well-formedness preserving constructors for the joint domain
mkJointDmd :: s -> u -> JointDmd s u
mkJointDmd s u = JD { sd = s, ud = u }
mkJointDmds :: [s] -> [u] -> [JointDmd s u]
mkJointDmds ss as = zipWithEqual "mkJointDmds" mkJointDmd ss as
{-
************************************************************************
* *
Strictness domain
* *
************************************************************************
Lazy
|
ExnStr x -
|
HeadStr
/ \
SCall SProd
\ /
HyperStr
Note [Exceptions and strictness]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Exceptions need rather careful treatment, especially because of 'catch'.
See Trac #10712.
There are two main pieces.
* The Termination type includes ThrowsExn, meaning "under the given
demand this expression either diverges or throws an exception".
This is relatively uncontroversial. The primops raise# and
raiseIO# both return ThrowsExn; nothing else does.
* An ArgStr has an ExnStr flag to say how to process the Termination
result of the argument. If the ExnStr flag is ExnStr, we squash
ThrowsExn to topRes. (This is done in postProcessDmdResult.)
Here is the key example
catch# (\s -> throwIO exn s) blah
We analyse the argument (\s -> raiseIO# exn s) with demand
Str ExnStr (SCall HeadStr)
i.e. with the ExnStr flag set.
- First we analyse the argument with the "clean-demand" (SCall
HeadStr), getting a DmdResult of ThrowsExn from the saturated
application of raiseIO#.
- Then we apply the post-processing for the shell, squashing the
ThrowsExn to topRes.
This also applies uniformly to free variables. Consider
let r = \st -> raiseIO# blah st
in catch# (\s -> ...(r s')..) handler st
If we give the first argument of catch a strict signature, we'll get
a demand 'C(S)' for 'r'; that is, 'r' is definitely called with one
argument, which indeed it is. But when we post-process the free-var
demands on catch#'s argument (in postProcessDmdEnv), we'll give 'r'
a demand of (Str ExnStr (SCall HeadStr)); and if we feed that into r's
RHS (which would be reasonable) we'll squash the exception just as if
we'd inlined 'r'.
-}
-- Vanilla strictness domain
data StrDmd
= HyperStr -- Hyper-strict
-- Bottom of the lattice
-- Note [HyperStr and Use demands]
| SCall StrDmd -- Call demand
-- Used only for values of function type
| SProd [ArgStr] -- Product
-- Used only for values of product type
-- Invariant: not all components are HyperStr (use HyperStr)
-- not all components are Lazy (use HeadStr)
| HeadStr -- Head-Strict
-- A polymorphic demand: used for values of all types,
-- including a type variable
deriving ( Eq, Show )
type ArgStr = Str StrDmd
data Str s = Lazy -- Lazy
-- Top of the lattice
| Str ExnStr s
deriving ( Eq, Show )
data ExnStr -- See Note [Exceptions and strictness]
= VanStr -- "Vanilla" case, ordinary strictness
| ExnStr -- (Str ExnStr d) means be strict like 'd' but then degrade
-- the Termination info ThrowsExn to Dunno
deriving( Eq, Show )
-- Well-formedness preserving constructors for the Strictness domain
strBot, strTop :: ArgStr
strBot = Str VanStr HyperStr
strTop = Lazy
mkSCall :: StrDmd -> StrDmd
mkSCall HyperStr = HyperStr
mkSCall s = SCall s
mkSProd :: [ArgStr] -> StrDmd
mkSProd sx
| any isHyperStr sx = HyperStr
| all isLazy sx = HeadStr
| otherwise = SProd sx
isLazy :: ArgStr -> Bool
isLazy Lazy = True
isLazy (Str {}) = False
isHyperStr :: ArgStr -> Bool
isHyperStr (Str _ HyperStr) = True
isHyperStr _ = False
-- Pretty-printing
instance Outputable StrDmd where
ppr HyperStr = char 'B'
ppr (SCall s) = char 'C' <> parens (ppr s)
ppr HeadStr = char 'S'
ppr (SProd sx) = char 'S' <> parens (hcat (map ppr sx))
instance Outputable ArgStr where
ppr (Str x s) = (case x of VanStr -> empty; ExnStr -> char 'x')
<> ppr s
ppr Lazy = char 'L'
lubArgStr :: ArgStr -> ArgStr -> ArgStr
lubArgStr Lazy _ = Lazy
lubArgStr _ Lazy = Lazy
lubArgStr (Str x1 s1) (Str x2 s2) = Str (x1 `lubExnStr` x2) (s1 `lubStr` s2)
lubExnStr :: ExnStr -> ExnStr -> ExnStr
lubExnStr VanStr VanStr = VanStr
lubExnStr _ _ = ExnStr -- ExnStr is lazier
lubStr :: StrDmd -> StrDmd -> StrDmd
lubStr HyperStr s = s
lubStr (SCall s1) HyperStr = SCall s1
lubStr (SCall _) HeadStr = HeadStr
lubStr (SCall s1) (SCall s2) = SCall (s1 `lubStr` s2)
lubStr (SCall _) (SProd _) = HeadStr
lubStr (SProd sx) HyperStr = SProd sx
lubStr (SProd _) HeadStr = HeadStr
lubStr (SProd s1) (SProd s2)
| length s1 == length s2 = mkSProd (zipWith lubArgStr s1 s2)
| otherwise = HeadStr
lubStr (SProd _) (SCall _) = HeadStr
lubStr HeadStr _ = HeadStr
bothArgStr :: ArgStr -> ArgStr -> ArgStr
bothArgStr Lazy s = s
bothArgStr s Lazy = s
bothArgStr (Str x1 s1) (Str x2 s2) = Str (x1 `bothExnStr` x2) (s1 `bothStr` s2)
bothExnStr :: ExnStr -> ExnStr -> ExnStr
bothExnStr ExnStr ExnStr = ExnStr
bothExnStr _ _ = VanStr
bothStr :: StrDmd -> StrDmd -> StrDmd
bothStr HyperStr _ = HyperStr
bothStr HeadStr s = s
bothStr (SCall _) HyperStr = HyperStr
bothStr (SCall s1) HeadStr = SCall s1
bothStr (SCall s1) (SCall s2) = SCall (s1 `bothStr` s2)
bothStr (SCall _) (SProd _) = HyperStr -- Weird
bothStr (SProd _) HyperStr = HyperStr
bothStr (SProd s1) HeadStr = SProd s1
bothStr (SProd s1) (SProd s2)
| length s1 == length s2 = mkSProd (zipWith bothArgStr s1 s2)
| otherwise = HyperStr -- Weird
bothStr (SProd _) (SCall _) = HyperStr
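-- Illustrative sketch (not part of the original module): head-strictness
-- absorbs product strictness under 'lubStr' but preserves it under 'bothStr'.
_illustrateStrLattice :: Bool
_illustrateStrLattice =
     lubStr HeadStr (SProd [strBot, strTop]) == HeadStr
  && bothStr HeadStr (SProd [strBot, strTop]) == SProd [strBot, strTop]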
-- utility functions to deal with memory leaks
seqStrDmd :: StrDmd -> ()
seqStrDmd (SProd ds) = seqStrDmdList ds
seqStrDmd (SCall s) = s `seq` ()
seqStrDmd _ = ()
seqStrDmdList :: [ArgStr] -> ()
seqStrDmdList [] = ()
seqStrDmdList (d:ds) = seqArgStr d `seq` seqStrDmdList ds
seqArgStr :: ArgStr -> ()
seqArgStr Lazy = ()
seqArgStr (Str x s) = x `seq` seqStrDmd s
-- Splitting polymorphic demands
splitArgStrProdDmd :: Int -> ArgStr -> Maybe [ArgStr]
splitArgStrProdDmd n Lazy = Just (replicate n Lazy)
splitArgStrProdDmd n (Str _ s) = splitStrProdDmd n s
splitStrProdDmd :: Int -> StrDmd -> Maybe [ArgStr]
splitStrProdDmd n HyperStr = Just (replicate n strBot)
splitStrProdDmd n HeadStr = Just (replicate n strTop)
splitStrProdDmd n (SProd ds) = ASSERT( ds `lengthIs` n) Just ds
splitStrProdDmd _ (SCall {}) = Nothing
-- This can happen when the programmer uses unsafeCoerce,
-- and we don't then want to crash the compiler (Trac #9208)
{-
************************************************************************
* *
Absence domain
* *
************************************************************************
Used
/ \
UCall UProd
\ /
UHead
|
Count x -
|
Abs
-}
-- Domain for genuine usage
data UseDmd
= UCall Count UseDmd -- Call demand for absence
-- Used only for values of function type
| UProd [ArgUse] -- Product
-- Used only for values of product type
-- See Note [Don't optimise UProd(Used) to Used]
-- [Invariant] Not all components are Abs
-- (in that case, use UHead)
| UHead -- May be used; but its sub-components are
-- definitely *not* used. Roughly U(AAA)
-- Eg the usage of x in x `seq` e
-- A polymorphic demand: used for values of all types,
-- including a type variable
-- Since (UCall _ Abs) is ill-typed, UHead doesn't
-- make sense for lambdas
| Used -- May be used; and its sub-components may be used
-- Top of the lattice
deriving ( Eq, Show )
-- Extended usage demand for absence and counting
type ArgUse = Use UseDmd
data Use u
= Abs -- Definitely unused
-- Bottom of the lattice
| Use Count u -- May be used with some cardinality
deriving ( Eq, Show )
-- Abstract counting of usages
data Count = One | Many
deriving ( Eq, Show )
-- Pretty-printing
instance Outputable ArgUse where
ppr Abs = char 'A'
ppr (Use Many a) = ppr a
ppr (Use One a) = char '1' <> char '*' <> ppr a
instance Outputable UseDmd where
ppr Used = char 'U'
ppr (UCall c a) = char 'C' <> ppr c <> parens (ppr a)
ppr UHead = char 'H'
ppr (UProd as) = char 'U' <> parens (hcat (punctuate (char ',') (map ppr as)))
instance Outputable Count where
ppr One = char '1'
ppr Many = text ""
-- Well-formedness preserving constructors for the Absence domain
countOnce, countMany :: Count
countOnce = One
countMany = Many
useBot, useTop :: ArgUse
useBot = Abs
useTop = Use Many Used
mkUCall :: Count -> UseDmd -> UseDmd
--mkUCall c Used = Used c
mkUCall c a = UCall c a
mkUProd :: [ArgUse] -> UseDmd
mkUProd ux
| all (== Abs) ux = UHead
| otherwise = UProd ux
lubCount :: Count -> Count -> Count
lubCount _ Many = Many
lubCount Many _ = Many
lubCount x _ = x
lubArgUse :: ArgUse -> ArgUse -> ArgUse
lubArgUse Abs x = x
lubArgUse x Abs = x
lubArgUse (Use c1 a1) (Use c2 a2) = Use (lubCount c1 c2) (lubUse a1 a2)
lubUse :: UseDmd -> UseDmd -> UseDmd
lubUse UHead u = u
lubUse (UCall c u) UHead = UCall c u
lubUse (UCall c1 u1) (UCall c2 u2) = UCall (lubCount c1 c2) (lubUse u1 u2)
lubUse (UCall _ _) _ = Used
lubUse (UProd ux) UHead = UProd ux
lubUse (UProd ux1) (UProd ux2)
| length ux1 == length ux2 = UProd $ zipWith lubArgUse ux1 ux2
| otherwise = Used
lubUse (UProd {}) (UCall {}) = Used
-- lubUse (UProd {}) Used = Used
lubUse (UProd ux) Used = UProd (map (`lubArgUse` useTop) ux)
lubUse Used (UProd ux) = UProd (map (`lubArgUse` useTop) ux)
lubUse Used _ = Used -- Note [Used should win]
-- `both` is different from `lub` in its treatment of counting; if
-- `both` is computed for two uses, the result always has
-- cardinality `Many` (except for the inner demands of UCall demand -- [TODO] explain).
-- Also, x `bothUse` x /= x (for anything but Abs).
bothArgUse :: ArgUse -> ArgUse -> ArgUse
bothArgUse Abs x = x
bothArgUse x Abs = x
bothArgUse (Use _ a1) (Use _ a2) = Use Many (bothUse a1 a2)
bothUse :: UseDmd -> UseDmd -> UseDmd
bothUse UHead u = u
bothUse (UCall c u) UHead = UCall c u
-- Exciting special treatment of inner demand for call demands:
-- use `lubUse` instead of `bothUse`!
bothUse (UCall _ u1) (UCall _ u2) = UCall Many (u1 `lubUse` u2)
bothUse (UCall {}) _ = Used
bothUse (UProd ux) UHead = UProd ux
bothUse (UProd ux1) (UProd ux2)
| length ux1 == length ux2 = UProd $ zipWith bothArgUse ux1 ux2
| otherwise = Used
bothUse (UProd {}) (UCall {}) = Used
-- bothUse (UProd {}) Used = Used -- Note [Used should win]
bothUse Used (UProd ux) = UProd (map (`bothArgUse` useTop) ux)
bothUse (UProd ux) Used = UProd (map (`bothArgUse` useTop) ux)
bothUse Used _ = Used -- Note [Used should win]
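-- A small illustrative example (not from the original source; it assumes the
-- definitions above): `lub` keeps a one-shot use one-shot, while `both` always
-- bumps the cardinality to Many, because both occurrences together use the
-- value more than once:
--
--   lubArgUse  (Use One Used) (Use One Used)  ==  Use One  Used
--   bothArgUse (Use One Used) (Use One Used)  ==  Use Many Used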
peelUseCall :: UseDmd -> Maybe (Count, UseDmd)
peelUseCall (UCall c u) = Just (c,u)
peelUseCall _ = Nothing
addCaseBndrDmd :: Demand -- On the case binder
-> [Demand] -- On the components of the constructor
-> [Demand] -- Final demands for the components of the constructor
-- See Note [Demand on case-alternative binders]
addCaseBndrDmd (JD { sd = ms, ud = mu }) alt_dmds
= case mu of
Abs -> alt_dmds
Use _ u -> zipWith bothDmd alt_dmds (mkJointDmds ss us)
where
Just ss = splitArgStrProdDmd arity ms -- Guaranteed not to be a call
Just us = splitUseProdDmd arity u -- Ditto
where
arity = length alt_dmds
{- Note [Demand on case-alternative binders]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
The demand on a binder in a case alternative comes
(a) From the demand on the binder itself
(b) From the demand on the case binder
Forgetting (b) led directly to Trac #10148.
Example. Source code:
f x@(p,_) = if p then foo x else True
foo (p,True) = True
foo (p,q) = foo (q,p)
After strictness analysis:
f = \ (x_an1 [Dmd=<S(SL),1*U(U,1*U)>] :: (Bool, Bool)) ->
case x_an1
of wild_X7 [Dmd=<L,1*U(1*U,1*U)>]
{ (p_an2 [Dmd=<S,1*U>], ds_dnz [Dmd=<L,A>]) ->
case p_an2 of _ {
False -> GHC.Types.True;
True -> foo wild_X7 }
It's true that ds_dnz is *itself* absent, but the use of wild_X7 means
that it is very much alive and demanded. See Trac #10148 for how the
consequences play out.
This is needed even for non-product types, in case the case-binder
is used but the components of the case alternative are not.
Note [Don't optimise UProd(Used) to Used]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
These two UseDmds:
UProd [Used, Used] and Used
are semantically equivalent, but we do not turn the former into
the latter, for a regrettably subtle reason. Suppose we did.
then
f (x,y) = (y,x)
would get
StrDmd = Str = SProd [Lazy, Lazy]
UseDmd = Used = UProd [Used, Used]
But the joint demand of <Str, Used> doesn't convey any clue
that there is a product involved, and so the worthSplittingFun
will not fire. (We'd need to use the type as well to make it fire.)
Moreover, consider
g h p@(_,_) = h p
This too would get <Str, Used>, but this time there really isn't any
point in w/w since the components of the pair are not used at all.
So the solution is: don't aggressively collapse UProd [Used,Used] to
Used; instead leave it as-is. In effect we are using the UseDmd to do a
little bit of boxity analysis. Not very nice.
Note [Used should win]
~~~~~~~~~~~~~~~~~~~~~~
Both in lubUse and bothUse we want (Used `both` UProd us) to be Used.
Why? Because Used carries the implication the whole thing is used,
box and all, so we don't want to w/w it. If we use it both boxed and
unboxed, then we are definitely using the box, and so we are quite
likely to pay a reboxing cost. So we make Used win here.
Example is in the Buffer argument of GHC.IO.Handle.Internals.writeCharBuffer
Baseline: (A) Not making Used win (UProd wins)
Compare with: (B) making Used win for lub and both
Min -0.3% -5.6% -10.7% -11.0% -33.3%
Max +0.3% +45.6% +11.5% +11.5% +6.9%
Geometric Mean -0.0% +0.5% +0.3% +0.2% -0.8%
Baseline: (B) Making Used win for both lub and both
Compare with: (C) making Used win for both, but UProd win for lub
Min -0.1% -0.3% -7.9% -8.0% -6.5%
Max +0.1% +1.0% +21.0% +21.0% +0.5%
Geometric Mean +0.0% +0.0% -0.0% -0.1% -0.1%
-}
-- If a demand is used multiple times (i.e. reused), then any use-once demand
-- mentioned there that is not protected by a UCall can happen many times.
markReusedDmd :: ArgUse -> ArgUse
markReusedDmd Abs = Abs
markReusedDmd (Use _ a) = Use Many (markReused a)
markReused :: UseDmd -> UseDmd
markReused (UCall _ u) = UCall Many u -- No need to recurse here
markReused (UProd ux) = UProd (map markReusedDmd ux)
markReused u = u
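-- An illustrative sketch (not part of the original source): marking a demand
-- as reused turns use-once into use-many everywhere except underneath a call,
-- whose body is deliberately left untouched:
--
--   markReusedDmd (Use One (UProd [Use One Used]))
--     == Use Many (UProd [Use Many Used])
--   markReused (UCall One (UProd [Use One Used]))
--     == UCall Many (UProd [Use One Used])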
isUsedMU :: ArgUse -> Bool
-- True <=> markReusedDmd d = d
isUsedMU Abs = True
isUsedMU (Use One _) = False
isUsedMU (Use Many u) = isUsedU u
isUsedU :: UseDmd -> Bool
-- True <=> markReused d = d
isUsedU Used = True
isUsedU UHead = True
isUsedU (UProd us) = all isUsedMU us
isUsedU (UCall One _) = False
isUsedU (UCall Many _) = True -- No need to recurse
-- Squashing usage demands
seqUseDmd :: UseDmd -> ()
seqUseDmd (UProd ds) = seqArgUseList ds
seqUseDmd (UCall c d) = c `seq` seqUseDmd d
seqUseDmd _ = ()
seqArgUseList :: [ArgUse] -> ()
seqArgUseList [] = ()
seqArgUseList (d:ds) = seqArgUse d `seq` seqArgUseList ds
seqArgUse :: ArgUse -> ()
seqArgUse (Use c u) = c `seq` seqUseDmd u
seqArgUse _ = ()
-- Splitting polymorphic Maybe-Used demands
splitUseProdDmd :: Int -> UseDmd -> Maybe [ArgUse]
splitUseProdDmd n Used = Just (replicate n useTop)
splitUseProdDmd n UHead = Just (replicate n Abs)
splitUseProdDmd n (UProd ds) = ASSERT2( ds `lengthIs` n, text "splitUseProdDmd" $$ ppr n $$ ppr ds )
Just ds
splitUseProdDmd _ (UCall _ _) = Nothing
-- This can happen when the programmer uses unsafeCoerce,
-- and we don't then want to crash the compiler (Trac #9208)
useCount :: Use u -> Count
useCount Abs = One
useCount (Use One _) = One
useCount _ = Many
{-
************************************************************************
* *
Clean demand for Strictness and Usage
* *
************************************************************************
This domain differs from JointDemand in the sense that pure absence
is taken away, i.e., we deal *only* with non-absent demands.
Note [Strict demands]
~~~~~~~~~~~~~~~~~~~~~
isStrictDmd returns True only for demands that are
both strict
and used
In particular, it is False for <HyperStr, Abs>, which can and does
arise in, say (Trac #7319)
f x = raise# <some exception>
Then 'x' is not used, so f gets strictness <HyperStr,Abs> -> .
Now the w/w generates
fx = let x <HyperStr,Abs> = absentError "unused"
in raise <some exception>
At this point we really don't want to convert to
fx = case absentError "unused" of x -> raise <some exception>
Since the program is going to diverge, this swaps one error for another,
but it's really a bad idea to *ever* evaluate an absent argument.
In Trac #7319 we get
T7319.exe: Oops! Entered absent arg w_s1Hd{v} [lid] [base:GHC.Base.String{tc 36u}]
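For example (an illustrative sketch, not part of the original note, written
with the constructors defined in this module):
isStrictDmd (JD { sd = Str VanStr HyperStr, ud = Abs }) == False -- strict but absent
isStrictDmd (JD { sd = Lazy, ud = Use One Used }) == False -- used but lazy
isStrictDmd (JD { sd = Str VanStr HeadStr, ud = Use One Used }) == True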
Note [Dealing with call demands]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Call demands are constructed and deconstructed coherently for
strictness and absence. For instance, the strictness signature for the
following function
f :: (Int -> (Int, Int)) -> (Int, Bool)
f g = (snd (g 3), True)
should be: <L,C(U(AU))>m
-}
type CleanDemand = JointDmd StrDmd UseDmd
-- A demand that is at least head-strict
bothCleanDmd :: CleanDemand -> CleanDemand -> CleanDemand
bothCleanDmd (JD { sd = s1, ud = a1}) (JD { sd = s2, ud = a2})
= JD { sd = s1 `bothStr` s2, ud = a1 `bothUse` a2 }
mkHeadStrict :: CleanDemand -> CleanDemand
mkHeadStrict cd = cd { sd = HeadStr }
mkOnceUsedDmd, mkManyUsedDmd :: CleanDemand -> Demand
mkOnceUsedDmd (JD {sd = s,ud = a}) = JD { sd = Str VanStr s, ud = Use One a }
mkManyUsedDmd (JD {sd = s,ud = a}) = JD { sd = Str VanStr s, ud = Use Many a }
evalDmd :: Demand
-- Evaluated strictly, and used arbitrarily deeply
evalDmd = JD { sd = Str VanStr HeadStr, ud = useTop }
mkProdDmd :: [Demand] -> CleanDemand
mkProdDmd dx
= JD { sd = mkSProd $ map getStrDmd dx
, ud = mkUProd $ map getUseDmd dx }
mkCallDmd :: CleanDemand -> CleanDemand
mkCallDmd (JD {sd = d, ud = u})
= JD { sd = mkSCall d, ud = mkUCall One u }
cleanEvalDmd :: CleanDemand
cleanEvalDmd = JD { sd = HeadStr, ud = Used }
cleanEvalProdDmd :: Arity -> CleanDemand
cleanEvalProdDmd n = JD { sd = HeadStr, ud = UProd (replicate n useTop) }
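-- An illustrative example (not from the original source; mkSCall is defined
-- earlier in this module): wrapping the clean "eval" demand in a call demand
-- gives the demand placed on a function that is called once and whose result
-- is evaluated:
--
--   mkCallDmd cleanEvalDmd == JD { sd = SCall HeadStr, ud = UCall One Used }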
{-
************************************************************************
* *
Demand: combining strictness and usage
* *
************************************************************************
-}
type Demand = JointDmd ArgStr ArgUse
lubDmd :: Demand -> Demand -> Demand
lubDmd (JD {sd = s1, ud = a1}) (JD {sd = s2, ud = a2})
= JD { sd = s1 `lubArgStr` s2
, ud = a1 `lubArgUse` a2 }
bothDmd :: Demand -> Demand -> Demand
bothDmd (JD {sd = s1, ud = a1}) (JD {sd = s2, ud = a2})
= JD { sd = s1 `bothArgStr` s2
, ud = a1 `bothArgUse` a2 }
lazyApply1Dmd, lazyApply2Dmd, strictApply1Dmd, catchArgDmd :: Demand
strictApply1Dmd = JD { sd = Str VanStr (SCall HeadStr)
, ud = Use Many (UCall One Used) }
-- First argument of catch#:
-- uses its arg once, applies it once
-- and catches exceptions (the ExnStr) part
catchArgDmd = JD { sd = Str ExnStr (SCall HeadStr)
, ud = Use One (UCall One Used) }
lazyApply1Dmd = JD { sd = Lazy
, ud = Use One (UCall One Used) }
-- Second argument of catch#:
-- uses its arg at most once, applies it once
-- but is lazy (might not be called at all)
lazyApply2Dmd = JD { sd = Lazy
, ud = Use One (UCall One (UCall One Used)) }
absDmd :: Demand
absDmd = JD { sd = Lazy, ud = Abs }
topDmd :: Demand
topDmd = JD { sd = Lazy, ud = useTop }
botDmd :: Demand
botDmd = JD { sd = strBot, ud = useBot }
seqDmd :: Demand
seqDmd = JD { sd = Str VanStr HeadStr, ud = Use One UHead }
oneifyDmd :: Demand -> Demand
oneifyDmd (JD { sd = s, ud = Use _ a }) = JD { sd = s, ud = Use One a }
oneifyDmd jd = jd
isTopDmd :: Demand -> Bool
-- Used to suppress pretty-printing of an uninformative demand
isTopDmd (JD {sd = Lazy, ud = Use Many Used}) = True
isTopDmd _ = False
isAbsDmd :: Demand -> Bool
isAbsDmd (JD {ud = Abs}) = True -- The strictness part can be HyperStr
isAbsDmd _ = False -- for a bottom demand
isSeqDmd :: Demand -> Bool
isSeqDmd (JD {sd = Str VanStr HeadStr, ud = Use _ UHead}) = True
isSeqDmd _ = False
isUsedOnce :: Demand -> Bool
isUsedOnce (JD { ud = a }) = case useCount a of
One -> True
Many -> False
-- More utility functions for strictness
seqDemand :: Demand -> ()
seqDemand (JD {sd = s, ud = u}) = seqArgStr s `seq` seqArgUse u
seqDemandList :: [Demand] -> ()
seqDemandList [] = ()
seqDemandList (d:ds) = seqDemand d `seq` seqDemandList ds
isStrictDmd :: Demand -> Bool
-- See Note [Strict demands]
isStrictDmd (JD {ud = Abs}) = False
isStrictDmd (JD {sd = Lazy}) = False
isStrictDmd _ = True
isWeakDmd :: Demand -> Bool
isWeakDmd (JD {sd = s, ud = a}) = isLazy s && isUsedMU a
cleanUseDmd_maybe :: Demand -> Maybe UseDmd
cleanUseDmd_maybe (JD { ud = Use _ u }) = Just u
cleanUseDmd_maybe _ = Nothing
splitFVs :: Bool -- Thunk
-> DmdEnv -> (DmdEnv, DmdEnv)
splitFVs is_thunk rhs_fvs
| is_thunk = foldUFM_Directly add (emptyVarEnv, emptyVarEnv) rhs_fvs
| otherwise = partitionVarEnv isWeakDmd rhs_fvs
where
add uniq dmd@(JD { sd = s, ud = u }) (lazy_fv, sig_fv)
| Lazy <- s = (addToUFM_Directly lazy_fv uniq dmd, sig_fv)
| otherwise = ( addToUFM_Directly lazy_fv uniq (JD { sd = Lazy, ud = u })
, addToUFM_Directly sig_fv uniq (JD { sd = s, ud = Abs }) )
data TypeShape = TsFun TypeShape
| TsProd [TypeShape]
| TsUnk
instance Outputable TypeShape where
ppr TsUnk = text "TsUnk"
ppr (TsFun ts) = text "TsFun" <> parens (ppr ts)
ppr (TsProd tss) = parens (hsep $ punctuate comma $ map ppr tss)
trimToType :: Demand -> TypeShape -> Demand
-- See Note [Trimming a demand to a type]
trimToType (JD { sd = ms, ud = mu }) ts
= JD (go_ms ms ts) (go_mu mu ts)
where
go_ms :: ArgStr -> TypeShape -> ArgStr
go_ms Lazy _ = Lazy
go_ms (Str x s) ts = Str x (go_s s ts)
go_s :: StrDmd -> TypeShape -> StrDmd
go_s HyperStr _ = HyperStr
go_s (SCall s) (TsFun ts) = SCall (go_s s ts)
go_s (SProd mss) (TsProd tss)
| equalLength mss tss = SProd (zipWith go_ms mss tss)
go_s _ _ = HeadStr
go_mu :: ArgUse -> TypeShape -> ArgUse
go_mu Abs _ = Abs
go_mu (Use c u) ts = Use c (go_u u ts)
go_u :: UseDmd -> TypeShape -> UseDmd
go_u UHead _ = UHead
go_u (UCall c u) (TsFun ts) = UCall c (go_u u ts)
go_u (UProd mus) (TsProd tss)
| equalLength mus tss = UProd (zipWith go_mu mus tss)
go_u _ _ = Used
{-
Note [Trimming a demand to a type]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider this:
f :: a -> Bool
f x = case ... of
A g1 -> case (x |> g1) of (p,q) -> ...
B -> error "urk"
where A,B are the constructors of a GADT. We'll get a U(U,U) demand
on x from the A branch, but that's a stupid demand for x itself, which
has type 'a'. Indeed we get ASSERTs going off (notably in
splitUseProdDmd, Trac #8569).
Bottom line: we really don't want to have a binder whose demand is more
deeply-nested than its type. There are various ways to tackle this.
When processing (x |> g1), we could "trim" the incoming demand U(U,U)
to match x's type. But I'm currently doing so just at the moment when
we pin a demand on a binder, in DmdAnal.findBndrDmd.
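A small worked example (illustrative only, not from the original note):
trimming a product demand against an unknown type shape forgets the nested
structure but keeps the outer strictness and use:
trimToType (JD { sd = Str VanStr (SProd [Lazy, Lazy])
, ud = Use One (UProd [useTop, useTop]) }) TsUnk
== JD { sd = Str VanStr HeadStr, ud = Use One Used }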
Note [Threshold demands]
~~~~~~~~~~~~~~~~~~~~~~~~
Threshold usage demand is generated to figure out if
cardinality-instrumented demands of a binding's free variables should
be unleashed. See also [Aggregated demand for cardinality].
Note [Replicating polymorphic demands]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Some demands can be considered as polymorphic. Generally, it is
applicable to such beasts as tops and bottoms, as well as Head-Used and
Head-strict demands. For instance,
S ~ S(L, ..., L)
Also, when top or bottom occurs as a result demand, it in fact
can be expanded to saturate a callee's arity.
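A sketch of how this expansion shows up in code (illustrative; strTop and
useTop are the top elements defined earlier in this module):
splitStrProdDmd 3 HeadStr == Just [strTop, strTop, strTop]
splitUseProdDmd 3 Used == Just [useTop, useTop, useTop]
i.e. a head-strict (or top-used) demand on a product behaves as if it had
been written out component-wise.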
-}
splitProdDmd_maybe :: Demand -> Maybe [Demand]
-- Split a product into its components, iff there is any
-- useful information to be extracted thereby
-- The demand is not necessarily strict!
splitProdDmd_maybe (JD { sd = s, ud = u })
= case (s,u) of
(Str _ (SProd sx), Use _ u) | Just ux <- splitUseProdDmd (length sx) u
-> Just (mkJointDmds sx ux)
(Str _ s, Use _ (UProd ux)) | Just sx <- splitStrProdDmd (length ux) s
-> Just (mkJointDmds sx ux)
(Lazy, Use _ (UProd ux)) -> Just (mkJointDmds (replicate (length ux) Lazy) ux)
_ -> Nothing
{-
************************************************************************
* *
Demand results
* *
************************************************************************
DmdResult: Dunno CPRResult
/
ThrowsExn
/
Diverges
CPRResult: NoCPR
/ \
RetProd RetSum ConTag
Product constructors return (Dunno RetProd)
In a fixpoint iteration, start from Diverges
We have lubs, but not glbs; but that is ok.
-}
------------------------------------------------------------------------
-- Constructed Product Result
------------------------------------------------------------------------
data Termination r
= Diverges -- Definitely diverges
| ThrowsExn -- Definitely throws an exception or diverges
| Dunno r -- Might diverge or converge
deriving( Eq, Show )
type DmdResult = Termination CPRResult
data CPRResult = NoCPR -- Top of the lattice
| RetProd -- Returns a constructor from a product type
| RetSum ConTag -- Returns a constructor from a data type
deriving( Eq, Show )
lubCPR :: CPRResult -> CPRResult -> CPRResult
lubCPR (RetSum t1) (RetSum t2)
| t1 == t2 = RetSum t1
lubCPR RetProd RetProd = RetProd
lubCPR _ _ = NoCPR
lubDmdResult :: DmdResult -> DmdResult -> DmdResult
lubDmdResult Diverges r = r
lubDmdResult ThrowsExn Diverges = ThrowsExn
lubDmdResult ThrowsExn r = r
lubDmdResult (Dunno c1) Diverges = Dunno c1
lubDmdResult (Dunno c1) ThrowsExn = Dunno c1
lubDmdResult (Dunno c1) (Dunno c2) = Dunno (c1 `lubCPR` c2)
-- This needs to commute with defaultDmd, i.e.
-- defaultDmd (r1 `lubDmdResult` r2) = defaultDmd r1 `lubDmd` defaultDmd r2
-- (See Note [Default demand on free variables] for why)
bothDmdResult :: DmdResult -> Termination () -> DmdResult
-- See Note [Asymmetry of 'both' for DmdType and DmdResult]
bothDmdResult _ Diverges = Diverges
bothDmdResult r ThrowsExn = case r of { Diverges -> r; _ -> ThrowsExn }
bothDmdResult r (Dunno {}) = r
-- This needs to commute with defaultDmd, i.e.
-- defaultDmd (r1 `bothDmdResult` r2) = defaultDmd r1 `bothDmd` defaultDmd r2
-- (See Note [Default demand on free variables] for why)
instance Outputable r => Outputable (Termination r) where
ppr Diverges = char 'b'
ppr ThrowsExn = char 'x'
ppr (Dunno c) = ppr c
instance Outputable CPRResult where
ppr NoCPR = empty
ppr (RetSum n) = char 'm' <> int n
ppr RetProd = char 'm'
seqDmdResult :: DmdResult -> ()
seqDmdResult Diverges = ()
seqDmdResult ThrowsExn = ()
seqDmdResult (Dunno c) = seqCPRResult c
seqCPRResult :: CPRResult -> ()
seqCPRResult NoCPR = ()
seqCPRResult (RetSum n) = n `seq` ()
seqCPRResult RetProd = ()
------------------------------------------------------------------------
-- Combined demand result --
------------------------------------------------------------------------
-- [cprRes] lets us switch off CPR analysis
-- by making sure that everything uses TopRes
topRes, exnRes, botRes :: DmdResult
topRes = Dunno NoCPR
exnRes = ThrowsExn
botRes = Diverges
cprSumRes :: ConTag -> DmdResult
cprSumRes tag = Dunno $ RetSum tag
cprProdRes :: [DmdType] -> DmdResult
cprProdRes _arg_tys = Dunno $ RetProd
vanillaCprProdRes :: Arity -> DmdResult
vanillaCprProdRes _arity = Dunno $ RetProd
isTopRes :: DmdResult -> Bool
isTopRes (Dunno NoCPR) = True
isTopRes _ = False
isBotRes :: DmdResult -> Bool
-- True if the result diverges or throws an exception
isBotRes Diverges = True
isBotRes ThrowsExn = True
isBotRes (Dunno {}) = False
trimCPRInfo :: Bool -> Bool -> DmdResult -> DmdResult
trimCPRInfo trim_all trim_sums res
= trimR res
where
trimR (Dunno c) = Dunno (trimC c)
trimR res = res
trimC (RetSum n) | trim_all || trim_sums = NoCPR
| otherwise = RetSum n
trimC RetProd | trim_all = NoCPR
| otherwise = RetProd
trimC NoCPR = NoCPR
returnsCPR_maybe :: DmdResult -> Maybe ConTag
returnsCPR_maybe (Dunno c) = retCPR_maybe c
returnsCPR_maybe _ = Nothing
retCPR_maybe :: CPRResult -> Maybe ConTag
retCPR_maybe (RetSum t) = Just t
retCPR_maybe RetProd = Just fIRST_TAG
retCPR_maybe NoCPR = Nothing
-- See Notes [Default demand on free variables]
-- and [defaultDmd vs. resTypeArgDmd]
defaultDmd :: Termination r -> Demand
defaultDmd (Dunno {}) = absDmd
defaultDmd _ = botDmd -- Diverges or ThrowsExn
resTypeArgDmd :: Termination r -> Demand
-- TopRes and BotRes are polymorphic, so that
-- BotRes === (Bot -> BotRes) === ...
-- TopRes === (Top -> TopRes) === ...
-- This function makes that concrete
-- Also see Note [defaultDmd vs. resTypeArgDmd]
resTypeArgDmd (Dunno _) = topDmd
resTypeArgDmd _ = botDmd -- Diverges or ThrowsExn
{-
Note [defaultDmd and resTypeArgDmd]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
These functions are similar: They express the demand on something not
explicitly mentioned in the environment resp. the argument list. Yet they are
different:
* Variables not mentioned in the free variables environment are definitely
unused, so we can use absDmd there.
* Further arguments *can* be used, of course. Hence topDmd is used.
Note [Worthy functions for Worker-Wrapper split]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
For non-bottoming functions a worker-wrapper transformation takes into
account several possibilities to decide if the function is worth
splitting:
1. The result is of product type and the function is strict in some
(or even all) of its arguments. The check that the argument is used is
more of a sanity check, since strictness implies usage. Example:
f :: (Int, Int) -> Int
f p = (case p of (a,b) -> a) + 1
should be split into
f :: (Int, Int) -> Int
f p = case p of (a,b) -> $wf a
$wf :: Int -> Int
$wf a = a + 1
2. Sometimes it also makes sense to perform a WW split if the
strictness analysis cannot say for sure if the function is strict in
components of its argument. Then we reason according to the inferred
usage information: if the function uses its product argument's
components, the WW split can be beneficial. Example:
g :: Bool -> (Int, Int) -> Int
g c p = case p of (a,b) ->
if c then a else b
The function g is strict in its argument p and lazy in its
components. However, both components are used in the RHS. The idea is
that since some of the components (both in this case) are used in the
right-hand side, the product must presumably be taken apart.
Therefore, the WW transform splits the function g to
g :: Bool -> (Int, Int) -> Int
g c p = case p of (a,b) -> $wg c a b
$wg :: Bool -> Int -> Int -> Int
$wg c a b = if c then a else b
3. If an argument is absent, it would be silly to pass it to a
function, hence the worker with reduced arity is generated.
Note [Worker-wrapper for bottoming functions]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
We used not to split if the result is bottom.
[Justification: there's no efficiency to be gained.]
But it's sometimes bad not to make a wrapper. Consider
fw = \x# -> let x = I# x# in case e of
p1 -> error_fn x
p2 -> error_fn x
p3 -> the real stuff
The re-boxing code won't go away unless error_fn gets a wrapper too.
[We don't do reboxing now, but in general it's better to pass an
unboxed thing to f, and have it reboxed in the error cases....]
However we *don't* want to do this when the argument is not actually
taken apart in the function at all. Otherwise we risk decomposing a
massive tuple which is barely used. Example:
f :: ((Int,Int) -> String) -> (Int,Int) -> a
f g pr = error (g pr)
main = print (f fst (1, error "no"))
Here, f does not take 'pr' apart, and it's stupid to do so.
Imagine that it had millions of fields. This actually happened
in GHC itself where the tuple was DynFlags
************************************************************************
* *
Demand environments and types
* *
************************************************************************
-}
type DmdEnv = VarEnv Demand -- See Note [Default demand on free variables]
data DmdType = DmdType
DmdEnv -- Demand on explicitly-mentioned
-- free variables
[Demand] -- Demand on arguments
DmdResult -- See [Nature of result demand]
{-
Note [Nature of result demand]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
A DmdResult contains information about termination (currently distinguishing
definite divergence and no information; it is possible to include definite
convergence here), and CPR information about the result.
The semantics of this depends on whether we are looking at a DmdType, i.e. the
demand put on by an expression _under a specific incoming demand_ on its
environment, or at a StrictSig describing a demand transformer.
For a
* DmdType, the termination information is true given the demand it was
generated with, while for
* a StrictSig it holds after applying enough arguments.
The CPR information, though, is valid after the number of arguments mentioned
in the type is given. Therefore, when forgetting the demand on arguments, as in
dmdAnalRhs, this needs to be considered (via removeDmdTyArgs).
Consider
b2 x y = x `seq` y `seq` error (show x)
this has a strictness signature of
<S><S>b
meaning that "b2 `seq` ()" and "b2 1 `seq` ()" might well terminate, but
for "b2 1 2 `seq` ()" we get definite divergence.
For comparison,
b1 x = x `seq` error (show x)
has a strictness signature of
<S>b
and "b1 1 `seq` ()" is known to terminate.
Now consider a function h with signature "<C(S)>", and the expression
e1 = h b1
now h puts a demand of <C(S)> onto its argument, and the demand transformer
turns it into
<S>b
Now the DmdResult "b" does apply to us, even though "b1 `seq` ()" does not
diverge, and we do not see anything being passed to b1.
Note [Asymmetry of 'both' for DmdType and DmdResult]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
'both' for DmdTypes is *asymmetrical*, because there is only one
result! For example, given (e1 e2), we get a DmdType dt1 for e1, use
its arg demand to analyse e2 giving dt2, and then do (dt1 `bothType` dt2).
Similarly with
case e of { p -> rhs }
we get dt_scrut from the scrutinee and dt_rhs from the RHS, and then
compute (dt_rhs `bothType` dt_scrut).
We
1. combine the information on the free variables,
2. take the demand on arguments from the first argument
3. combine the termination results, but
4. take CPR info from the first argument.
3 and 4 are implemented in bothDmdResult.
-}
-- Equality needed for fixpoints in DmdAnal
instance Eq DmdType where
(==) (DmdType fv1 ds1 res1)
(DmdType fv2 ds2 res2) = ufmToList fv1 == ufmToList fv2
&& ds1 == ds2 && res1 == res2
lubDmdType :: DmdType -> DmdType -> DmdType
lubDmdType d1 d2
= DmdType lub_fv lub_ds lub_res
where
n = max (dmdTypeDepth d1) (dmdTypeDepth d2)
(DmdType fv1 ds1 r1) = ensureArgs n d1
(DmdType fv2 ds2 r2) = ensureArgs n d2
lub_fv = plusVarEnv_CD lubDmd fv1 (defaultDmd r1) fv2 (defaultDmd r2)
lub_ds = zipWithEqual "lubDmdType" lubDmd ds1 ds2
lub_res = lubDmdResult r1 r2
{-
Note [The need for BothDmdArg]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Previously, the right argument to bothDmdType, as well as the return value of
dmdAnalStar via postProcessDmdType, was a DmdType. But bothDmdType only needs
to know about the free variables and termination information, but nothing about
the demand put on arguments, nor cpr information. So we make that explicit by
only passing the relevant information.
-}
type BothDmdArg = (DmdEnv, Termination ())
mkBothDmdArg :: DmdEnv -> BothDmdArg
mkBothDmdArg env = (env, Dunno ())
toBothDmdArg :: DmdType -> BothDmdArg
toBothDmdArg (DmdType fv _ r) = (fv, go r)
where
go (Dunno {}) = Dunno ()
go ThrowsExn = ThrowsExn
go Diverges = Diverges
bothDmdType :: DmdType -> BothDmdArg -> DmdType
bothDmdType (DmdType fv1 ds1 r1) (fv2, t2)
-- See Note [Asymmetry of 'both' for DmdType and DmdResult]
-- 'both' takes the argument/result info from its *first* arg,
-- using its second arg just for its free-var info.
= DmdType (plusVarEnv_CD bothDmd fv1 (defaultDmd r1) fv2 (defaultDmd t2))
ds1
(r1 `bothDmdResult` t2)
instance Outputable DmdType where
ppr (DmdType fv ds res)
= hsep [text "DmdType",
hcat (map ppr ds) <> ppr res,
if null fv_elts then empty
else braces (fsep (map pp_elt fv_elts))]
where
pp_elt (uniq, dmd) = ppr uniq <> text "->" <> ppr dmd
fv_elts = ufmToList fv
emptyDmdEnv :: VarEnv Demand
emptyDmdEnv = emptyVarEnv
-- nopDmdType is the demand of doing nothing
-- (lazy, absent, no CPR information, no termination information).
-- Note that it is ''not'' the top of the lattice (which would be "may use everything"),
-- so it is (no longer) called topDmd
nopDmdType, botDmdType :: DmdType
nopDmdType = DmdType emptyDmdEnv [] topRes
botDmdType = DmdType emptyDmdEnv [] botRes
cprProdDmdType :: Arity -> DmdType
cprProdDmdType arity
= DmdType emptyDmdEnv [] (vanillaCprProdRes arity)
isNopDmdType :: DmdType -> Bool
isNopDmdType (DmdType env [] res)
| isTopRes res && isEmptyVarEnv env = True
isNopDmdType _ = False
mkDmdType :: DmdEnv -> [Demand] -> DmdResult -> DmdType
mkDmdType fv ds res = DmdType fv ds res
dmdTypeDepth :: DmdType -> Arity
dmdTypeDepth (DmdType _ ds _) = length ds
-- Remove any demand on arguments. This is used in dmdAnalRhs on the body
removeDmdTyArgs :: DmdType -> DmdType
removeDmdTyArgs = ensureArgs 0
-- This makes sure we can use the demand type with n arguments,
-- It extends the argument list with the correct resTypeArgDmd
-- It also adjusts the DmdResult: Divergence survives additional arguments,
-- CPR information does not (and definite convergence also would not).
ensureArgs :: Arity -> DmdType -> DmdType
ensureArgs n d | n == depth = d
| otherwise = DmdType fv ds' r'
where depth = dmdTypeDepth d
DmdType fv ds r = d
ds' = take n (ds ++ repeat (resTypeArgDmd r))
r' = case r of -- See [Nature of result demand]
Dunno _ -> topRes
_ -> r
seqDmdType :: DmdType -> ()
seqDmdType (DmdType env ds res) =
seqDmdEnv env `seq` seqDemandList ds `seq` seqDmdResult res `seq` ()
seqDmdEnv :: DmdEnv -> ()
seqDmdEnv env = seqDemandList (varEnvElts env)
splitDmdTy :: DmdType -> (Demand, DmdType)
-- Split off one function argument
-- We already have a suitable demand on all
-- free vars, so no need to add more!
splitDmdTy (DmdType fv (dmd:dmds) res_ty) = (dmd, DmdType fv dmds res_ty)
splitDmdTy ty@(DmdType _ [] res_ty) = (resTypeArgDmd res_ty, ty)
-- When e is evaluated after executing an IO action, and d is e's demand, then
-- what of this demand should we consider, given that the IO action can cleanly
-- exit?
-- * We have to kill all strictness demands (i.e. lub with a lazy demand)
-- * We can keep demand information (i.e. lub with an absent demand)
-- * We have to kill definite divergence
-- * We can keep CPR information.
-- See Note [IO hack in the demand analyser] in DmdAnal
deferAfterIO :: DmdType -> DmdType
deferAfterIO d@(DmdType _ _ res) =
case d `lubDmdType` nopDmdType of
DmdType fv ds _ -> DmdType fv ds (defer_res res)
where
defer_res r@(Dunno {}) = r
defer_res _ = topRes -- Diverges and ThrowsExn
strictenDmd :: Demand -> CleanDemand
strictenDmd (JD { sd = s, ud = u})
= JD { sd = poke_s s, ud = poke_u u }
where
poke_s Lazy = HeadStr
poke_s (Str _ s) = s
poke_u Abs = UHead
poke_u (Use _ u) = u
-- Deferring and peeling
type DmdShell -- Describes the "outer shell"
-- of a Demand
= JointDmd (Str ()) (Use ())
toCleanDmd :: Demand -> Type -> (DmdShell, CleanDemand)
-- Splits a Demand into its "shell" and the inner "clean demand"
toCleanDmd (JD { sd = s, ud = u }) expr_ty
= (JD { sd = ss, ud = us }, JD { sd = s', ud = u' })
-- See Note [Analyzing with lazy demand and lambdas]
where
(ss, s') = case s of
Str x s' -> (Str x (), s')
Lazy | is_unlifted -> (Str VanStr (), HeadStr)
| otherwise -> (Lazy, HeadStr)
(us, u') = case u of
Use c u' -> (Use c (), u')
Abs | is_unlifted -> (Use One (), Used)
| otherwise -> (Abs, Used)
is_unlifted = isUnliftedType expr_ty
-- See Note [Analysing with absent demand]
-- This is used in dmdAnalStar when post-processing
-- a function's argument demand. So we only care about what it
-- does to free variables, and whether it terminates.
-- see Note [The need for BothDmdArg]
postProcessDmdType :: DmdShell -> DmdType -> BothDmdArg
postProcessDmdType du@(JD { sd = ss }) (DmdType fv _ res_ty)
= (postProcessDmdEnv du fv, term_info)
where
term_info = case postProcessDmdResult ss res_ty of
Dunno _ -> Dunno ()
ThrowsExn -> ThrowsExn
Diverges -> Diverges
postProcessDmdResult :: Str () -> DmdResult -> DmdResult
postProcessDmdResult Lazy _ = topRes
postProcessDmdResult (Str ExnStr _) ThrowsExn = topRes -- Key point!
postProcessDmdResult _ res = res
postProcessDmdEnv :: DmdShell -> DmdEnv -> DmdEnv
postProcessDmdEnv ds@(JD { sd = ss, ud = us }) env
| Abs <- us = emptyDmdEnv
| Str _ _ <- ss
, Use One _ <- us = env -- Shell is a no-op
| otherwise = mapVarEnv (postProcessDmd ds) env
-- For the Absent case just discard all usage information
-- We only processed the thing at all to analyse the body
-- See Note [Always analyse in virgin pass]
reuseEnv :: DmdEnv -> DmdEnv
reuseEnv = mapVarEnv (postProcessDmd
(JD { sd = Str VanStr (), ud = Use Many () }))
postProcessUnsat :: DmdShell -> DmdType -> DmdType
postProcessUnsat ds@(JD { sd = ss }) (DmdType fv args res_ty)
= DmdType (postProcessDmdEnv ds fv)
(map (postProcessDmd ds) args)
(postProcessDmdResult ss res_ty)
postProcessDmd :: DmdShell -> Demand -> Demand
postProcessDmd (JD { sd = ss, ud = us }) (JD { sd = s, ud = a})
= JD { sd = s', ud = a' }
where
s' = case ss of
Lazy -> Lazy
Str ExnStr _ -> markExnStr s
Str VanStr _ -> s
a' = case us of
Abs -> Abs
Use Many _ -> markReusedDmd a
Use One _ -> a
markExnStr :: ArgStr -> ArgStr
markExnStr (Str VanStr s) = Str ExnStr s
markExnStr s = s
-- Peels one call level from the demand, and also returns
-- whether it was unsaturated (separately for strictness and usage)
peelCallDmd :: CleanDemand -> (CleanDemand, DmdShell)
-- Exploiting the fact that
-- on the strictness side C(B) = B
-- and on the usage side C(U) = U
peelCallDmd (JD {sd = s, ud = u})
= (JD { sd = s', ud = u' }, JD { sd = ss, ud = us })
where
(s', ss) = case s of
SCall s' -> (s', Str VanStr ())
HyperStr -> (HyperStr, Str VanStr ())
_ -> (HeadStr, Lazy)
(u', us) = case u of
UCall c u' -> (u', Use c ())
_ -> (Used, Use Many ())
-- The _ cases for usage include UHead, which seems a bit wrong
-- because the body isn't used at all!
-- c.f. the Abs case in toCleanDmd
-- Peels multiple nestings of calls from the clean demand and also returns
-- whether it was unsaturated (separately for strictness and usage);
-- see Note [Demands from unsaturated function calls]
peelManyCalls :: Int -> CleanDemand -> DmdShell
peelManyCalls n (JD { sd = str, ud = abs })
= JD { sd = go_str n str, ud = go_abs n abs }
where
go_str :: Int -> StrDmd -> Str () -- True <=> unsaturated, defer
go_str 0 _ = Str VanStr ()
go_str _ HyperStr = Str VanStr () -- == go_str (n-1) HyperStr, as HyperStr = Call(HyperStr)
go_str n (SCall d') = go_str (n-1) d'
go_str _ _ = Lazy
go_abs :: Int -> UseDmd -> Use () -- Many <=> unsaturated, or at least
go_abs 0 _ = Use One () -- one UCall Many in the demand
go_abs n (UCall One d') = go_abs (n-1) d'
go_abs _ _ = Use Many ()
{-
Note [Demands from unsaturated function calls]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider a demand transformer d1 -> d2 -> r for f.
If a sufficiently detailed demand is fed into this transformer,
e.g <C(C(S)), C1(C1(S))> arising from "f x1 x2" in a strict, use-once context,
then d1 and d2 is precisely the demand unleashed onto x1 and x2 (similar for
the free variable environment) and furthermore the result information r is the
one we want to use.
An anonymous lambda is also an unsaturated function call (needs one argument,
none given), so this applies to that case as well.
But the demand fed into f might be less than <C(C(S)), C1(C1(S))>. There are a few cases:
* Not enough demand on the strictness side:
- In that case, we need to zap all strictness in the demand on arguments and
free variables.
- Furthermore, we remove CPR information. It could be left, but given that the incoming
demand is not enough to guarantee evaluation that far, we just do not bother.
- And finally termination information: If r says that f diverges for sure,
then this holds when the demand guarantees that two arguments are going to
be passed. If the demand is lower, we may just as well converge.
If we were tracking definite convegence, than that would still hold under
a weaker demand than expected by the demand transformer.
* Not enough demand from the usage side: The missing usage can be expanded
using UCall Many, therefore this is subsumed by the third case:
* At least one of the uses has a cardinality of Many.
- Even if f puts a One demand on any of its argument or free variables, if
we call f multiple times, we may evaluate this argument or free variable
multiple times. So forget about any occurrence of "One" in the demand.
In dmdTransformSig, we call peelManyCalls to find out if we are in any of these
cases, and then call postProcessUnsat to reduce the demand appropriately.
Similarly, dmdTransformDictSelSig and dmdAnal, when analyzing a Lambda, use
peelCallDmd, which peels only one level, but also returns the demand put on the
body of the function.
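A worked sketch (illustrative only, assuming the definitions above): for a
transformer expecting two calls,
peelManyCalls 2 (JD { sd = SCall HeadStr, ud = UCall One Used })
== JD { sd = Lazy, ud = Use Many () }
i.e. the incoming demand guarantees only one call, so the strictness side is
deferred (Lazy) and the usage side is treated as Many. By contrast,
peelManyCalls 1 (JD { sd = SCall HeadStr, ud = UCall One Used })
== JD { sd = Str VanStr (), ud = Use One () }
which postProcessUnsat then treats as a no-op shell.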
-}
peelFV :: DmdType -> Var -> (DmdType, Demand)
peelFV (DmdType fv ds res) id = -- pprTrace "rfv" (ppr id <+> ppr dmd $$ ppr fv)
(DmdType fv' ds res, dmd)
where
fv' = fv `delVarEnv` id
-- See Note [Default demand on free variables]
dmd = lookupVarEnv fv id `orElse` defaultDmd res
addDemand :: Demand -> DmdType -> DmdType
addDemand dmd (DmdType fv ds res) = DmdType fv (dmd:ds) res
findIdDemand :: DmdType -> Var -> Demand
findIdDemand (DmdType fv _ res) id
= lookupVarEnv fv id `orElse` defaultDmd res
{-
Note [Default demand on free variables]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
If the variable is not mentioned in the environment of a demand type,
its demand is taken to be a result demand of the type.
For the strictness component,
if the result demand is a Diverges, then we use HyperStr
else we use Lazy
For the usage component, we use Absent.
So we use either absDmd or botDmd.
Also note the equations for lubDmdResult (resp. bothDmdResult) noted there.
Note [Always analyse in virgin pass]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Tricky point: make sure that we analyse in the 'virgin' pass. Consider
rec { f acc x True = f (...rec { g y = ...g... }...)
f acc x False = acc }
In the virgin pass for 'f' we'll give 'f' a very strict (bottom) type.
That might mean that we analyse the sub-expression containing the
E = "...rec g..." stuff in a bottom demand. Suppose we *didn't analyse*
E, but just returned botType.
Then in the *next* (non-virgin) iteration for 'f', we might analyse E
in a weaker demand, and that will trigger doing a fixpoint iteration
for g. But *because it's not the virgin pass* we won't start g's
iteration at bottom. Disaster. (This happened in $sfibToList' of
nofib/spectral/fibheaps.)
So in the virgin pass we make sure that we do analyse the expression
at least once, to initialise its signatures.
Note [Analyzing with lazy demand and lambdas]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
The insight for analyzing lambdas follows from the fact that for
strictness S = C(L). This polymorphic expansion is critical for
cardinality analysis of the following example:
{-# NOINLINE build #-}
build g = (g (:) [], g (:) [])
h c z = build (\x ->
let z1 = z ++ z
in if c
then \y -> x (y ++ z1)
else \y -> x (z1 ++ y))
One can see that `build` assigns to `g` demand <L,C(C1(U))>.
Therefore, when analyzing the lambda `(\x -> ...)`, we
expect each lambda \y -> ... to be annotated as a "one-shot"
one. Therefore (\x -> \y -> x (y ++ z)) should be analyzed with a
demand <C(C(..), C(C1(U))>.
This is achieved by, first, converting the lazy demand L into the
strict S by the second clause of the analysis.
Note [Analysing with absent demand]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Suppose we analyse an expression with demand <L,A>. The "A" means
"absent", so this expression will never be needed. What should happen?
There are several wrinkles:
* We *do* want to analyse the expression regardless.
Reason: Note [Always analyse in virgin pass]
But we can post-process the results to ignore all the usage
demands coming back. This is done by postProcessDmdType.
* But in the case of an *unlifted type* we must be extra careful,
because unlifted values are evaluated even if they are not used.
Example (see Trac #9254):
f :: (() -> (# Int#, () #)) -> ()
-- Strictness signature is
-- <C(S(LS)), 1*C1(U(A,1*U()))>
-- I.e. calls k, but discards first component of result
f k = case k () of (# _, r #) -> r
g :: Int -> ()
g y = f (\n -> (# case y of I# y2 -> y2, n #))
Here f's strictness signature says (correctly) that it calls its
argument function and ignores the first component of its result.
This is correct in the sense that it'd be fine to (say) modify the
function so that it always returned 0# in the first component.
But in function g, we *will* evaluate the 'case y of ...', because
it has type Int#. So 'y' will be evaluated. So we must record this
usage of 'y', else 'g' will say 'y' is absent, and will w/w so that
'y' is bound to an aBSENT_ERROR thunk.
An alternative would be to replace the 'case y of ...' with (say) 0#,
but I have not tried that. It's not a common situation, but it is
not theoretical: unsafePerformIO's implementation is very very like
'f' above.
************************************************************************
* *
Demand signatures
* *
************************************************************************
In a let-bound Id we record its strictness info.
In principle, this strictness info is a demand transformer, mapping
a demand on the Id into a DmdType, which gives
a) the free vars of the Id's value
b) the Id's arguments
c) an indication of the result of applying
the Id to its arguments
However, in fact we store in the Id an extremely emasculated demand
transformer, namely
a single DmdType
(Nevertheless we dignify StrictSig as a distinct type.)
This DmdType gives the demands unleashed by the Id when it is applied
to as many arguments as are given by the arg demands in the DmdType.
Also see Note [Nature of result demand] for the meaning of a DmdResult in a
strictness signature.
If an Id is applied to less arguments than its arity, it means that
the demand on the function at a call site is weaker than the vanilla
call demand, used for signature inference. Therefore we place a top
demand on all arguments. Otherwise, the demand is specified by Id's
signature.
For example, the demand transformer described by the demand signature
StrictSig (DmdType {x -> <S,1*U>} <L,A><L,U(U,U)>m)
says that when the function is applied to two arguments, it
unleashes demand <S,1*U> on the free var x, <L,A> on the first arg,
and <L,U(U,U)> on the second, then returning a constructor.
If this same function is applied to one arg, all we can say is that it
uses x with <L,U>, and its arg with demand <L,U>.
-}
newtype StrictSig = StrictSig DmdType
deriving( Eq )
instance Outputable StrictSig where
ppr (StrictSig ty) = ppr ty
-- Used for printing top-level strictness pragmas in interface files
pprIfaceStrictSig :: StrictSig -> SDoc
pprIfaceStrictSig (StrictSig (DmdType _ dmds res))
= hcat (map ppr dmds) <> ppr res
mkStrictSig :: DmdType -> StrictSig
mkStrictSig dmd_ty = StrictSig dmd_ty
mkClosedStrictSig :: [Demand] -> DmdResult -> StrictSig
mkClosedStrictSig ds res = mkStrictSig (DmdType emptyDmdEnv ds res)
splitStrictSig :: StrictSig -> ([Demand], DmdResult)
splitStrictSig (StrictSig (DmdType _ dmds res)) = (dmds, res)
increaseStrictSigArity :: Int -> StrictSig -> StrictSig
-- Add extra arguments to a strictness signature
increaseStrictSigArity arity_increase (StrictSig (DmdType env dmds res))
= StrictSig (DmdType env (replicate arity_increase topDmd ++ dmds) res)
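-- An illustrative example (not from the original source): adding one extra
-- argument puts a top demand in front of the existing argument demands:
--
--   increaseStrictSigArity 1 (mkClosedStrictSig [evalDmd] topRes)
--     == StrictSig (DmdType emptyDmdEnv [topDmd, evalDmd] topRes)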
isNopSig :: StrictSig -> Bool
isNopSig (StrictSig ty) = isNopDmdType ty
isBottomingSig :: StrictSig -> Bool
-- True if the signature diverges or throws an exception
isBottomingSig (StrictSig (DmdType _ _ res)) = isBotRes res
nopSig, botSig :: StrictSig
nopSig = StrictSig nopDmdType
botSig = StrictSig botDmdType
cprProdSig :: Arity -> StrictSig
cprProdSig arity = StrictSig (cprProdDmdType arity)
seqStrictSig :: StrictSig -> ()
seqStrictSig (StrictSig ty) = seqDmdType ty
dmdTransformSig :: StrictSig -> CleanDemand -> DmdType
-- (dmdTransformSig fun_sig dmd) considers a call to a function whose
-- signature is fun_sig, with demand dmd. We return the demand
-- that the function places on its context (eg its args)
dmdTransformSig (StrictSig dmd_ty@(DmdType _ arg_ds _)) cd
= postProcessUnsat (peelManyCalls (length arg_ds) cd) dmd_ty
-- see Note [Demands from unsaturated function calls]
dmdTransformDataConSig :: Arity -> StrictSig -> CleanDemand -> DmdType
-- Same as dmdTransformSig but for a data constructor (worker),
-- which has a special kind of demand transformer.
-- If the constructor is saturated, we feed the demand on
-- the result into the constructor arguments.
dmdTransformDataConSig arity (StrictSig (DmdType _ _ con_res))
(JD { sd = str, ud = abs })
| Just str_dmds <- go_str arity str
, Just abs_dmds <- go_abs arity abs
= DmdType emptyDmdEnv (mkJointDmds str_dmds abs_dmds) con_res
-- Must remember whether it's a product, hence con_res, not TopRes
| otherwise -- Not saturated
= nopDmdType
where
go_str 0 dmd = splitStrProdDmd arity dmd
go_str n (SCall s') = go_str (n-1) s'
go_str n HyperStr = go_str (n-1) HyperStr
go_str _ _ = Nothing
go_abs 0 dmd = splitUseProdDmd arity dmd
go_abs n (UCall One u') = go_abs (n-1) u'
go_abs _ _ = Nothing
dmdTransformDictSelSig :: StrictSig -> CleanDemand -> DmdType
-- Like dmdTransformDataConSig, we have a special demand transformer
-- for dictionary selectors. If the selector is saturated (ie has one
-- argument: the dictionary), we feed the demand on the result into
-- the indicated dictionary component.
dmdTransformDictSelSig (StrictSig (DmdType _ [dict_dmd] _)) cd
| (cd',defer_use) <- peelCallDmd cd
, Just jds <- splitProdDmd_maybe dict_dmd
= postProcessUnsat defer_use $
DmdType emptyDmdEnv [mkOnceUsedDmd $ mkProdDmd $ map (enhance cd') jds] topRes
| otherwise
= nopDmdType -- See Note [Demand transformer for a dictionary selector]
where
enhance cd old | isAbsDmd old = old
| otherwise = mkOnceUsedDmd cd -- This is the one!
dmdTransformDictSelSig _ _ = panic "dmdTransformDictSelSig: no args"
{-
Note [Demand transformer for a dictionary selector]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
If we evaluate (op dict-expr) under demand 'd', then we can push the demand 'd'
into the appropriate field of the dictionary. What *is* the appropriate field?
We just look at the strictness signature of the class op, which will be
something like: U(AAASAAAAA). Then replace the 'S' by the demand 'd'.
For single-method classes, which are represented by newtypes, the signature
of 'op' won't look like U(...), so the splitProdDmd_maybe will fail.
That's fine: if we are doing strictness analysis we are also doing inlining,
so we'll have inlined 'op' into a cast. So we can bale out in a conservative
way, returning nopDmdType.
It is (just.. Trac #8329) possible to be running strictness analysis *without*
having inlined class ops from single-method classes. Suppose you are using
ghc --make; and the first module has a local -O0 flag. So you may load a class
without interface pragmas, ie (currently) without an unfolding for the class
ops. Now if a subsequent module in the --make sweep has a local -O flag
you might do strictness analysis, but there is no inlining for the class op.
This is weird, so I'm not worried about whether this optimises brilliantly; but
it should not fall over.
-}
argsOneShots :: StrictSig -> Arity -> [[OneShotInfo]]
-- See Note [Computing one-shot info, and ProbOneShot]
argsOneShots (StrictSig (DmdType _ arg_ds _)) n_val_args
= go arg_ds
where
unsaturated_call = arg_ds `lengthExceeds` n_val_args
good_one_shot
| unsaturated_call = ProbOneShot
| otherwise = OneShotLam
go [] = []
go (arg_d : arg_ds) = argOneShots good_one_shot arg_d `cons` go arg_ds
-- Avoid list tail like [ [], [], [] ]
cons [] [] = []
cons a as = a:as
argOneShots :: OneShotInfo -> Demand -> [OneShotInfo]
argOneShots one_shot_info (JD { ud = usg })
= case usg of
Use _ arg_usg -> go arg_usg
_ -> []
where
go (UCall One u) = one_shot_info : go u
go (UCall Many u) = NoOneShotInfo : go u
go _ = []
{- Note [Computing one-shot info, and ProbOneShot]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider a call
f (\pqr. e1) (\xyz. e2) e3
where f has usage signature
C1(C(C1(U))) C1(U) U
Then argsOneShots returns a [[OneShotInfo]] of
[[OneShot,NoOneShotInfo,OneShot], [OneShot]]
The occurrence analyser propagates this one-shot info to the
binders \pqr and \xyz; see Note [Use one-shot information] in OccurAnal.
But suppose f was not saturated, so the call looks like
f (\pqr. e1) (\xyz. e2)
Then in principle this partial application might be shared, and
the (\pqr.e1) abstraction might be called more than once. So
we can't mark them OneShot. But instead we return
[[ProbOneShot,NoOneShotInfo,ProbOneShot], [ProbOneShot]]
The occurrence analyser propagates this to the \pqr and \xyz
binders.
How is it used? Well, it's quite likely that the partial application
of f is not shared, so the float-out pass (in SetLevels.lvlLamBndrs)
does not float MFEs out of a ProbOneShot lambda. That currently is
the only way that ProbOneShot is used.
-}
-- appIsBottom returns true if an application to n args
-- would diverge or throw an exception
-- See Note [Unsaturated applications]
appIsBottom :: StrictSig -> Int -> Bool
appIsBottom (StrictSig (DmdType _ ds res)) n
| isBotRes res = not $ lengthExceeds ds n
appIsBottom _ _ = False
{-
Note [Unsaturated applications]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
If a function having bottom as its demand result is applied to fewer
arguments than its syntactic arity, we cannot say for sure
that it is going to diverge. This is the reason why we use the
function appIsBottom, which, given a strictness signature and a number
of arguments, says conservatively if the function is going to diverge
or not.
Zap absence or one-shot information, under control of flags
Note [Killing usage information]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
The flags -fkill-one-shot and -fkill-absence let you switch off the generation
of absence or one-shot information altogether. This is only used for performance
tests, to see how important they are.
-}
zapUsageDemand :: Demand -> Demand
-- Remove the usage info, but not the strictness info, from the demand
zapUsageDemand = kill_usage (True, True)
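-- An illustrative example (not from the original source): the strictness
-- component is kept, while one-shot and absence information is forgotten:
--
--   zapUsageDemand (JD { sd = Str VanStr HeadStr, ud = Use One (UCall One Used) })
--     == JD { sd = Str VanStr HeadStr, ud = Use Many (UCall Many Used) }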
killUsageDemand :: DynFlags -> Demand -> Demand
-- See Note [Killing usage information]
killUsageDemand dflags dmd
| Just kfs <- killFlags dflags = kill_usage kfs dmd
| otherwise = dmd
killUsageSig :: DynFlags -> StrictSig -> StrictSig
-- See Note [Killing usage information]
killUsageSig dflags sig@(StrictSig (DmdType env ds r))
| Just kfs <- killFlags dflags = StrictSig (DmdType env (map (kill_usage kfs) ds) r)
| otherwise = sig
type KillFlags = (Bool, Bool)
killFlags :: DynFlags -> Maybe KillFlags
-- See Note [Killing usage information]
killFlags dflags
| not kill_abs && not kill_one_shot = Nothing
| otherwise = Just (kill_abs, kill_one_shot)
where
kill_abs = gopt Opt_KillAbsence dflags
kill_one_shot = gopt Opt_KillOneShot dflags
kill_usage :: KillFlags -> Demand -> Demand
kill_usage kfs (JD {sd = s, ud = u}) = JD {sd = s, ud = zap_musg kfs u}
zap_musg :: KillFlags -> ArgUse -> ArgUse
zap_musg (kill_abs, _) Abs
| kill_abs = useTop
| otherwise = Abs
zap_musg kfs (Use c u) = Use (zap_count kfs c) (zap_usg kfs u)
zap_count :: KillFlags -> Count -> Count
zap_count (_, kill_one_shot) c
| kill_one_shot = Many
| otherwise = c
zap_usg :: KillFlags -> UseDmd -> UseDmd
zap_usg kfs (UCall c u) = UCall (zap_count kfs c) (zap_usg kfs u)
zap_usg kfs (UProd us) = UProd (map (zap_musg kfs) us)
zap_usg _ u = u
-- If the argument is a used non-newtype dictionary, give it strict
-- demand. Also split the product type & demand and recur in order to
-- similarly strictify the argument's contained used non-newtype
-- superclass dictionaries. We use the demand as our recursive measure
-- to guarantee termination.
strictifyDictDmd :: Type -> Demand -> Demand
strictifyDictDmd ty dmd = case getUseDmd dmd of
Use n _ |
Just (tycon, _arg_tys, _data_con, inst_con_arg_tys)
<- splitDataProductType_maybe ty,
not (isNewTyCon tycon), isClassTyCon tycon -- is a non-newtype dictionary
-> seqDmd `bothDmd` -- main idea: ensure it's strict
case splitProdDmd_maybe dmd of
-- superclass cycles should not be a problem, since the demand we are
-- consuming would also have to be infinite in order for us to diverge
Nothing -> dmd -- no components have interesting demand, so stop
-- looking for superclass dicts
Just dmds
| all (not . isAbsDmd) dmds -> evalDmd
-- abstract to strict w/ arbitrary component use, since this
-- smells like reboxing; results in CBV boxed
--
-- TODO revisit this if we ever do boxity analysis
| otherwise -> case mkProdDmd $ zipWith strictifyDictDmd inst_con_arg_tys dmds of
JD {sd = s,ud = a} -> JD (Str VanStr s) (Use n a)
-- TODO could optimize with an aborting variant of zipWith since
-- the superclass dicts are always a prefix
_ -> dmd -- unused or not a dictionary
{-
Note [HyperStr and Use demands]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
The information "HyperStr" needs to be in the strictness signature, and not in
the demand signature, because we still want to know about the demand on things. Consider
f (x,y) True = error (show x)
f (x,y) False = x+1
The signature of f should be <S(SL),1*U(1*U(U),A)><S,1*U>m. If we were not
distinguishing the uses on x and y in the True case, we could either not figure
out how deeply we can unpack x, or that we do not have to pass y.
************************************************************************
* *
Serialisation
* *
************************************************************************
-}
instance Binary StrDmd where
put_ bh HyperStr = do putByte bh 0
put_ bh HeadStr = do putByte bh 1
put_ bh (SCall s) = do putByte bh 2
put_ bh s
put_ bh (SProd sx) = do putByte bh 3
put_ bh sx
get bh = do
h <- getByte bh
case h of
0 -> do return HyperStr
1 -> do return HeadStr
2 -> do s <- get bh
return (SCall s)
_ -> do sx <- get bh
return (SProd sx)
instance Binary ExnStr where
put_ bh VanStr = putByte bh 0
put_ bh ExnStr = putByte bh 1
get bh = do h <- getByte bh
return (case h of
0 -> VanStr
_ -> ExnStr)
instance Binary ArgStr where
put_ bh Lazy = do
putByte bh 0
put_ bh (Str x s) = do
putByte bh 1
put_ bh x
put_ bh s
get bh = do
h <- getByte bh
case h of
0 -> return Lazy
_ -> do x <- get bh
s <- get bh
return $ Str x s
instance Binary Count where
put_ bh One = do putByte bh 0
put_ bh Many = do putByte bh 1
get bh = do h <- getByte bh
case h of
0 -> return One
_ -> return Many
instance Binary ArgUse where
put_ bh Abs = do
putByte bh 0
put_ bh (Use c u) = do
putByte bh 1
put_ bh c
put_ bh u
get bh = do
h <- getByte bh
case h of
0 -> return Abs
_ -> do c <- get bh
u <- get bh
return $ Use c u
instance Binary UseDmd where
put_ bh Used = do
putByte bh 0
put_ bh UHead = do
putByte bh 1
put_ bh (UCall c u) = do
putByte bh 2
put_ bh c
put_ bh u
put_ bh (UProd ux) = do
putByte bh 3
put_ bh ux
get bh = do
h <- getByte bh
case h of
0 -> return $ Used
1 -> return $ UHead
2 -> do c <- get bh
u <- get bh
return (UCall c u)
_ -> do ux <- get bh
return (UProd ux)
instance (Binary s, Binary u) => Binary (JointDmd s u) where
put_ bh (JD { sd = x, ud = y }) = do put_ bh x; put_ bh y
get bh = do
x <- get bh
y <- get bh
return $ JD { sd = x, ud = y }
instance Binary StrictSig where
put_ bh (StrictSig aa) = do
put_ bh aa
get bh = do
aa <- get bh
return (StrictSig aa)
instance Binary DmdType where
-- Ignore DmdEnv when spitting out the DmdType
put_ bh (DmdType _ ds dr)
= do put_ bh ds
put_ bh dr
get bh
= do ds <- get bh
dr <- get bh
return (DmdType emptyDmdEnv ds dr)
instance Binary DmdResult where
put_ bh (Dunno c) = do { putByte bh 0; put_ bh c }
put_ bh ThrowsExn = putByte bh 1
put_ bh Diverges = putByte bh 2
get bh = do { h <- getByte bh
; case h of
0 -> do { c <- get bh; return (Dunno c) }
1 -> return ThrowsExn
_ -> return Diverges }
instance Binary CPRResult where
put_ bh (RetSum n) = do { putByte bh 0; put_ bh n }
put_ bh RetProd = putByte bh 1
put_ bh NoCPR = putByte bh 2
get bh = do
h <- getByte bh
case h of
0 -> do { n <- get bh; return (RetSum n) }
1 -> return RetProd
_ -> return NoCPR
| nushio3/ghc | compiler/basicTypes/Demand.hs | bsd-3-clause | 75,989 | 5 | 19 | 21,278 | 13,795 | 7,210 | 6,585 | 909 | 9 |
{-# LANGUAGE Trustworthy #-}
{-# LANGUAGE NoImplicitPrelude #-}
-----------------------------------------------------------------------------
-- |
-- Module : Control.Monad.Fix
-- Copyright : (c) Andy Gill 2001,
-- (c) Oregon Graduate Institute of Science and Technology, 2002
-- License : BSD-style (see the file libraries/base/LICENSE)
-- Maintainer : [email protected]
-- Stability : experimental
-- Portability : portable
--
-- Monadic fixpoints.
--
-- For a detailed discussion, see Levent Erkok's thesis,
-- /Value Recursion in Monadic Computations/, Oregon Graduate Institute, 2002.
--
-----------------------------------------------------------------------------
module Control.Monad.Fix (
MonadFix(mfix),
fix
) where
import Data.Either
import Data.Function ( fix )
import Data.Maybe
import Data.Monoid ( Dual(..), Sum(..), Product(..)
, First(..), Last(..), Alt(..) )
import GHC.Base ( Monad, errorWithoutStackTrace, (.) )
import GHC.List ( head, tail )
import GHC.ST
import System.IO
-- | Monads having fixed points with a \'knot-tying\' semantics.
-- Instances of 'MonadFix' should satisfy the following laws:
--
-- [/purity/]
-- @'mfix' ('return' . h) = 'return' ('fix' h)@
--
-- [/left shrinking/ (or /tightening/)]
-- @'mfix' (\\x -> a >>= \\y -> f x y) = a >>= \\y -> 'mfix' (\\x -> f x y)@
--
-- [/sliding/]
-- @'mfix' ('Control.Monad.liftM' h . f) = 'Control.Monad.liftM' h ('mfix' (f . h))@,
-- for strict @h@.
--
-- [/nesting/]
-- @'mfix' (\\x -> 'mfix' (\\y -> f x y)) = 'mfix' (\\x -> f x x)@
--
-- This class is used in the translation of the recursive @do@ notation
-- supported by GHC and Hugs.
class (Monad m) => MonadFix m where
-- | The fixed point of a monadic computation.
-- @'mfix' f@ executes the action @f@ only once, with the eventual
-- output fed back as the input. Hence @f@ should not be strict,
-- for then @'mfix' f@ would diverge.
mfix :: (a -> m a) -> m a
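-- An illustrative example (not part of the original module): with the 'Maybe'
-- instance below, the eventual result is fed back in as the argument, so a
-- cyclic value can be tied inside the monad:
--
-- > fmap (take 3) (mfix (\xs -> Just (1 : xs)))  ==  Just [1,1,1]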
-- Instances of MonadFix for Prelude monads
instance MonadFix Maybe where
mfix f = let a = f (unJust a) in a
where unJust (Just x) = x
unJust Nothing = errorWithoutStackTrace "mfix Maybe: Nothing"
instance MonadFix [] where
mfix f = case fix (f . head) of
[] -> []
(x:_) -> x : mfix (tail . f)
instance MonadFix IO where
mfix = fixIO
instance MonadFix ((->) r) where
mfix f = \ r -> let a = f a r in a
instance MonadFix (Either e) where
mfix f = let a = f (unRight a) in a
where unRight (Right x) = x
unRight (Left _) = errorWithoutStackTrace "mfix Either: Left"
instance MonadFix (ST s) where
mfix = fixST
-- Instances of Data.Monoid wrappers
instance MonadFix Dual where
mfix f = Dual (fix (getDual . f))
instance MonadFix Sum where
mfix f = Sum (fix (getSum . f))
instance MonadFix Product where
mfix f = Product (fix (getProduct . f))
instance MonadFix First where
mfix f = First (mfix (getFirst . f))
instance MonadFix Last where
mfix f = Last (mfix (getLast . f))
instance MonadFix f => MonadFix (Alt f) where
mfix f = Alt (mfix (getAlt . f))
| nushio3/ghc | libraries/base/Control/Monad/Fix.hs | bsd-3-clause | 3,278 | 3 | 12 | 829 | 705 | 390 | 315 | 49 | 0 |
{-# LANGUAGE OverloadedStrings, FlexibleContexts #-}
module Blockchain.BlockChain (
nextDifficulty,
addBlock,
addBlocks,
addTransaction,
addTransactions,
getBestBlock,
getBestBlockHash,
getGenesisBlockHash,
runCodeForTransaction
) where
import Control.Monad
import Control.Monad.IfElse
import Control.Monad.IO.Class
import Control.Monad.Trans
import Control.Monad.Trans.Either
import Control.Monad.State hiding (state)
import Data.Binary hiding (get)
import Data.Bits
import qualified Data.ByteString as B
import qualified Data.ByteString.Base16 as B16
import qualified Data.ByteString.Char8 as BC
import qualified Data.ByteString.Lazy as BL
import Data.Functor
import Data.List
import Data.Maybe
import Data.Time
import Data.Time.Clock
import Data.Time.Clock.POSIX
import Text.PrettyPrint.ANSI.Leijen hiding ((<$>))
import Text.Printf
import qualified Blockchain.Colors as CL
import Blockchain.Context
import Blockchain.Data.Address
import Blockchain.Data.AddressStateDB
import Blockchain.Data.BlockDB
import Blockchain.Data.Code
import Blockchain.Data.DataDefs
import Blockchain.Data.DiffDB
import Blockchain.Data.GenesisBlock
import Blockchain.Data.RLP
import Blockchain.Data.Transaction
import Blockchain.Data.TransactionResult
import Blockchain.Database.MerklePatricia
import Blockchain.DB.CodeDB
import Blockchain.DB.ModifyStateDB
import Blockchain.DBM
import Blockchain.Constants
import Blockchain.ExtDBs
import Blockchain.Format
import Blockchain.Mining
import Blockchain.SHA
import Blockchain.Util
import Blockchain.VM
import Blockchain.VM.Code
import Blockchain.VM.OpcodePrices
import Blockchain.VM.VMState
--import Debug.Trace
{-
initializeBlockChain::ContextM ()
initializeBlockChain = do
let bytes = rlpSerialize $ rlpEncode genesisBlock
blockDBPut (BL.toStrict $ encode $ blockHash $ genesisBlock) bytes
detailsDBPut "best" (BL.toStrict $ encode $ blockHash genesisBlock)
-}
nextDifficulty::Integer->UTCTime->UTCTime->Integer
nextDifficulty oldDifficulty oldTime newTime = max nextDiff' minimumDifficulty
where
nextDiff' =
if round (utcTimeToPOSIXSeconds newTime) >=
(round (utcTimeToPOSIXSeconds oldTime) + 8::Integer)
then oldDifficulty - oldDifficulty `shiftR` 11
else oldDifficulty + oldDifficulty `shiftR` 11
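-- Illustrative arithmetic only (not from the original source): with
-- oldDifficulty = 4096 the adjustment step is 4096 `shiftR` 11 = 2, so the
-- difficulty falls to 4094 when the child block arrives 8 or more seconds
-- after its parent and rises to 4098 otherwise (but never below
-- minimumDifficulty).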
nextGasLimit::Integer->Integer->Integer
nextGasLimit oldGasLimit oldGasUsed = max (max 125000 3141592) ((oldGasLimit * 1023 + oldGasUsed *6 `quot` 5) `quot` 1024)
nextGasLimitDelta::Integer->Integer
nextGasLimitDelta oldGasLimit = oldGasLimit `div` 1024
minGasLimit::Integer
minGasLimit = 125000
checkUnclesHash::Block->Bool
checkUnclesHash b = blockDataUnclesHash (blockBlockData b) == hash (rlpSerialize $ RLPArray (rlpEncode <$> blockBlockUncles b))
--data BlockValidityError = BlockDifficultyWrong Integer Integer | BlockNumberWrong Integer Integer | BlockGasLimitWrong Integer Integer | BlockNonceWrong | BlockUnclesHashWrong
{-
instance Format BlockValidityError where
--format BlockOK = "Block is valid"
format (BlockDifficultyWrong d expected) = "Block difficulty is wrong, is '" ++ show d ++ "', expected '" ++ show expected ++ "'"
-}
verifyStateRootExists::Block->ContextM Bool
verifyStateRootExists b = do
val <- lift $ stateDBGet (BL.toStrict $ encode $ blockDataStateRoot $ blockBlockData b)
case val of
Nothing -> return False
Just _ -> return True
checkParentChildValidity::(Monad m)=>Block->Block->m ()
checkParentChildValidity Block{blockBlockData=c} Block{blockBlockData=p} = do
unless (blockDataDifficulty c == nextDifficulty (blockDataDifficulty p) (blockDataTimestamp p) (blockDataTimestamp c))
$ fail $ "Block difficulty is wrong: got '" ++ show (blockDataDifficulty c) ++ "', expected '" ++ show (nextDifficulty (blockDataDifficulty p) (blockDataTimestamp p) (blockDataTimestamp c)) ++ "'"
unless (blockDataNumber c == blockDataNumber p + 1)
$ fail $ "Block number is wrong: got '" ++ show (blockDataNumber c) ++ ", expected '" ++ show (blockDataNumber p + 1) ++ "'"
unless (blockDataGasLimit c <= blockDataGasLimit p + nextGasLimitDelta (blockDataGasLimit p))
$ fail $ "Block gasLimit is too high: got '" ++ show (blockDataGasLimit c) ++ "', should be less than '" ++ show (blockDataGasLimit p + nextGasLimitDelta (blockDataGasLimit p)) ++ "'"
unless (blockDataGasLimit c >= blockDataGasLimit p - nextGasLimitDelta (blockDataGasLimit p))
$ fail $ "Block gasLimit is too low: got '" ++ show (blockDataGasLimit c) ++ "', should be less than '" ++ show (blockDataGasLimit p - nextGasLimitDelta (blockDataGasLimit p)) ++ "'"
unless (blockDataGasLimit c >= minGasLimit)
$ fail $ "Block gasLimit is lower than minGasLimit: got '" ++ show (blockDataGasLimit c) ++ "'"
return ()
checkValidity::Monad m=>Block->ContextM (m ())
checkValidity b = do
maybeParentBlock <- lift $ getBlock (blockDataParentHash $ blockBlockData b)
case maybeParentBlock of
Just parentBlock -> do
checkParentChildValidity b parentBlock
nIsValid <- nonceIsValid' b
--unless nIsValid $ fail $ "Block nonce is wrong: " ++ format b
unless (checkUnclesHash b) $ fail "Block unclesHash is wrong"
stateRootExists <- verifyStateRootExists b
unless stateRootExists $ fail ("Block stateRoot does not exist: " ++ show (pretty $ blockDataStateRoot $ blockBlockData b))
return $ return ()
Nothing -> fail ("Parent Block does not exist: " ++ show (pretty $ blockDataParentHash $ blockBlockData b))
{-
coinbase=prvKey2Address prvKey,
stateRoot = SHA 0x9b109189563315bfeb13d4bfd841b129ff3fd5c85f228a8d9d8563b4dde8432e,
transactionsTrie = 0,
-}
runCodeForTransaction::Block->Integer->Address->Address->Transaction->ContextM (Either VMException B.ByteString, VMState)
runCodeForTransaction b availableGas tAddr newAddress ut | isContractCreationTX ut = do
whenM isDebugEnabled $ liftIO $ putStrLn "runCodeForTransaction: ContractCreationTX"
(result, vmState) <-
create b 0 tAddr tAddr (transactionValue ut) (transactionGasPrice ut) availableGas newAddress (transactionInit ut)
return (const B.empty <$> result, vmState)
runCodeForTransaction b availableGas tAddr owner ut | isMessageTX ut = do
whenM isDebugEnabled $ liftIO $ putStrLn $ "runCodeForTransaction: MessageTX caller: " ++ show (pretty $ tAddr) ++ ", address: " ++ show (pretty $ transactionTo ut)
call b 0 owner owner tAddr
(fromIntegral $ transactionValue ut) (fromIntegral $ transactionGasPrice ut)
(transactionData ut) (fromIntegral availableGas) tAddr
addBlocks::Bool->[Block]->ContextM ()
addBlocks isBeingCreated blocks =
forM_ blocks $ \block -> do
before <- liftIO $ getPOSIXTime
addBlock isBeingCreated block
after <- liftIO $ getPOSIXTime
liftIO $ putStrLn $ "#### Block insertion time = " ++ printf "%.4f" (realToFrac $ after - before::Double) ++ "s"
isNonceValid::Transaction->ContextM Bool
isNonceValid t = do
case whoSignedThisTransaction t of
Nothing -> return False --no nonce would work
Just tAddr -> do
addressState <- lift $ getAddressState tAddr
return $ addressStateNonce addressState == transactionNonce t
codeOrDataLength::Transaction->Int
codeOrDataLength t | isMessageTX t = B.length $ transactionData t
codeOrDataLength t | isContractCreationTX t = codeLength $ transactionInit t
zeroBytesLength::Transaction->Int
zeroBytesLength t | isMessageTX t = length $ filter (==0) $ B.unpack $ transactionData t
zeroBytesLength t | isContractCreationTX t = length $ filter (==0) $ B.unpack codeBytes
where
Code codeBytes = transactionInit t
intrinsicGas::Transaction->Integer
intrinsicGas t = gTXDATAZERO * zeroLen + gTXDATANONZERO * (fromIntegral (codeOrDataLength t) - zeroLen) + gTX
where
zeroLen = fromIntegral $ zeroBytesLength t
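-- A hypothetical worked example (comment only, values not from the original
-- source): a message transaction carrying 10 data bytes of which 3 are zero
-- is charged 3*gTXDATAZERO + 7*gTXDATANONZERO + gTX.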
--intrinsicGas t@ContractCreationTX{} = 5 * (fromIntegral (codeOrDataLength t)) + 500
addTransaction::Block->Integer->Transaction->EitherT String ContextM (VMState, Integer)
addTransaction b remainingBlockGas t = do
let maybeAddr = whoSignedThisTransaction t
case maybeAddr of
Just x -> return ()
Nothing -> left "malformed signature"
let Just tAddr = maybeAddr
nonceValid <- lift $ isNonceValid t
let intrinsicGas' = intrinsicGas t
whenM (lift isDebugEnabled) $
liftIO $ do
putStrLn $ "bytes cost: " ++ show (gTXDATAZERO * (fromIntegral $ zeroBytesLength t) + gTXDATANONZERO * (fromIntegral (codeOrDataLength t) - (fromIntegral $ zeroBytesLength t)))
putStrLn $ "transaction cost: " ++ show gTX
putStrLn $ "intrinsicGas: " ++ show (intrinsicGas')
addressState <- lift $ lift $ getAddressState tAddr
when (transactionGasLimit t * transactionGasPrice t + transactionValue t > addressStateBalance addressState) $ left "sender doesn't have high enough balance"
when (intrinsicGas' > transactionGasLimit t) $ left "intrinsic gas higher than transaction gas limit"
when (transactionGasLimit t > remainingBlockGas) $ left "block gas has run out"
when (not nonceValid) $ left "nonce incorrect"
let availableGas = transactionGasLimit t - intrinsicGas'
theAddress <-
if isContractCreationTX t
then lift $ getNewAddress tAddr
else do
lift $ incrementNonce tAddr
return $ transactionTo t
success <- lift $ addToBalance tAddr (-transactionGasLimit t * transactionGasPrice t)
whenM (lift isDebugEnabled) $ liftIO $ putStrLn "running code"
if success
then do
(result, newVMState') <- lift $ runCodeForTransaction b (transactionGasLimit t - intrinsicGas') tAddr theAddress t
lift $ addToBalance (blockDataCoinbase $ blockBlockData b) (transactionGasLimit t * transactionGasPrice t)
case result of
Left e -> do
whenM (lift isDebugEnabled) $ liftIO $ putStrLn $ CL.red $ show e
return (newVMState'{vmException = Just e}, remainingBlockGas - transactionGasLimit t)
Right x -> do
let realRefund =
min (refund newVMState') ((transactionGasLimit t - vmGasRemaining newVMState') `div` 2)
success <- lift $ pay "VM refund fees" (blockDataCoinbase $ blockBlockData b) tAddr ((realRefund + vmGasRemaining newVMState') * transactionGasPrice t)
when (not success) $ error "oops, refund was too much"
whenM (lift isDebugEnabled) $ liftIO $ putStrLn $ "Removing accounts in suicideList: " ++ intercalate ", " (show . pretty <$> suicideList newVMState')
forM_ (suicideList newVMState') $ lift . lift . deleteAddressState
return (newVMState', remainingBlockGas - (transactionGasLimit t - realRefund - vmGasRemaining newVMState'))
else do
lift $ addToBalance (blockDataCoinbase $ blockBlockData b) (intrinsicGas' * transactionGasPrice t)
addressState <- lift $ lift $ getAddressState tAddr
liftIO $ putStrLn $ "Insufficient funds to run the VM: need " ++ show (availableGas*transactionGasPrice t) ++ ", have " ++ show (addressStateBalance addressState)
return (VMState{vmException=Just InsufficientFunds, vmGasRemaining=0, refund=0, debugCallCreates=Nothing, suicideList=[], logs=[], returnVal=Nothing}, remainingBlockGas)
printTransactionMessage::Transaction->ContextM ()
printTransactionMessage t = do
case whoSignedThisTransaction t of
Just tAddr -> do
nonce <- lift $ fmap addressStateNonce $ getAddressState tAddr
liftIO $ putStrLn $ CL.magenta " =========================================================================="
liftIO $ putStrLn $ CL.magenta " | Adding transaction signed by: " ++ show (pretty tAddr) ++ CL.magenta " |"
liftIO $ putStrLn $ CL.magenta " | " ++
(
if isMessageTX t
then "MessageTX to " ++ show (pretty $ transactionTo t) ++ " "
else "Create Contract " ++ show (pretty $ getNewAddress_unsafe tAddr nonce)
) ++ CL.magenta " |"
_ -> liftIO $ putStrLn $ CL.red $ "Malformed Signature!"
formatAddress::Address->String
formatAddress (Address x) = BC.unpack $ B16.encode $ B.pack $ word160ToBytes x
addTransactions::Block->Integer->[Transaction]->ContextM ()
addTransactions _ _ [] = return ()
addTransactions b blockGas (t:rest) = do
printTransactionMessage t
before <- liftIO $ getPOSIXTime
stateRootBefore <- lift $ getStateRoot
result <- runEitherT $ addTransaction b blockGas t
let (resultString, response) =
case result of
Left err -> (err, "")
Right (state, _) -> ("Success!", BC.unpack $ B16.encode $ fromMaybe "" $ returnVal state)
after <- liftIO $ getPOSIXTime
stateRootAfter <- lift $ getStateRoot
mpdb <- fmap stateDB $ lift get
addrDiff <- lift $ addrDbDiff mpdb stateRootBefore stateRootAfter
detailsString <- getDebugMsg
lift $ putTransactionResult $
TransactionResult {
transactionResultBlockHash=blockHash b,
transactionResultTransactionHash=transactionHash t,
transactionResultMessage=resultString,
transactionResultResponse=response,
transactionResultTrace=detailsString,
transactionResultGasUsed=0,
transactionResultEtherUsed=0,
transactionResultContractsCreated=intercalate "," $ map formatAddress [x|CreateAddr x _ <- addrDiff],
transactionResultContractsDeleted=intercalate "," $ map formatAddress [x|DeleteAddr x <- addrDiff],
transactionResultTime=realToFrac $ after - before::Double,
transactionResultNewStorage="",
transactionResultDeletedStorage=""
}
clearDebugMsg
liftIO $ putStrLn $ CL.magenta " |" ++ " t = " ++ printf "%.2f" (realToFrac $ after - before::Double) ++ "s " ++ CL.magenta "|"
liftIO $ putStrLn $ CL.magenta " =========================================================================="
remainingBlockGas <-
case result of
Left e -> do
liftIO $ putStrLn $ CL.red "Insertion of transaction failed! " ++ e
return blockGas
Right (_, g') -> return g'
addTransactions b remainingBlockGas rest
addBlock::Bool->Block->ContextM ()
addBlock isBeingCreated b@Block{blockBlockData=bd, blockBlockUncles=uncles} = do
liftIO $ putStrLn $ "Inserting block #" ++ show (blockDataNumber bd) ++ " (" ++ show (pretty $ blockHash b) ++ ")."
maybeParent <- lift $ getBlock $ blockDataParentHash bd
case maybeParent of
Nothing ->
liftIO $ putStrLn $ "Missing parent block in addBlock: " ++ show (pretty $ blockDataParentHash bd) ++ "\n" ++
"Block will not be added now, but will be requested and added later"
Just parentBlock -> do
lift $ setStateRoot $ blockDataStateRoot $ blockBlockData parentBlock
let rewardBase = 1500 * finney
addToBalance (blockDataCoinbase bd) rewardBase
forM_ uncles $ \uncle -> do
addToBalance (blockDataCoinbase bd) (rewardBase `quot` 32)
addToBalance
(blockDataCoinbase uncle)
((rewardBase*(8+blockDataNumber uncle - blockDataNumber bd )) `quot` 8)
let transactions = blockReceiptTransactions b
addTransactions b (blockDataGasLimit $ blockBlockData b) transactions
dbs <- lift get
b' <-
if isBeingCreated
then return b{blockBlockData = (blockBlockData b){blockDataStateRoot=stateRoot $ stateDB dbs}}
else do
when ((blockDataStateRoot (blockBlockData b) /= stateRoot (stateDB dbs))) $ do
liftIO $ putStrLn $ "newStateRoot: " ++ show (pretty $ stateRoot $ stateDB dbs)
error $ "stateRoot mismatch!! New stateRoot doesn't match block stateRoot: " ++ show (pretty $ blockDataStateRoot $ blockBlockData b)
return b
valid <- checkValidity b'
case valid of
Right () -> return ()
Left err -> error err
-- let bytes = rlpSerialize $ rlpEncode b
blkDataId <- lift $ putBlock b'
replaceBestIfBetter (blkDataId, b')
getBestBlockHash::ContextM SHA
getBestBlockHash = do
maybeBestHash <- lift $ detailsDBGet "best"
case maybeBestHash of
Nothing -> do
bhSHA <- getGenesisBlockHash
lift $ detailsDBPut "best" $ BL.toStrict $ encode bhSHA
return bhSHA
Just bestHash -> return $ decode $ BL.fromStrict $ bestHash
getGenesisBlockHash::ContextM SHA
getGenesisBlockHash = do
maybeGenesisHash <- lift $ detailsDBGet "genesis"
case maybeGenesisHash of
Nothing -> do
bhSHA <- blockHash <$> initializeGenesisBlock
lift $ detailsDBPut "genesis" $ BL.toStrict $ encode bhSHA
return bhSHA
Just bestHash -> return $ decode $ BL.fromStrict $ bestHash
getBestBlock::ContextM Block
getBestBlock = do
bestBlockHash <- getBestBlockHash
bestBlock <- lift $ getBlock bestBlockHash
return $ fromMaybe (error $ "Missing block in database: " ++ show (pretty bestBlockHash)) bestBlock
replaceBestIfBetter::(BlockDataRefId, Block)->ContextM ()
replaceBestIfBetter (blkDataId, b) = do
best <- getBestBlock
if blockDataNumber (blockBlockData best) >= n
then return ()
else do
lift $ detailsDBPut "best" (BL.toStrict $ encode $ blockHash b)
let oldStateRoot = blockDataStateRoot (blockBlockData best)
newStateRoot = blockDataStateRoot (blockBlockData b)
lift $ sqlDiff blkDataId n oldStateRoot newStateRoot
where n = blockDataNumber (blockBlockData b)
| jamshidh/ethereum-vm | src/Blockchain/BlockChain.hs | bsd-3-clause | 17,599 | 8 | 24 | 3,542 | 4,824 | 2,376 | 2,448 | 311 | 5 |
{-# LANGUAGE KindSignatures, TupleSections, GADTs,
GeneralizedNewtypeDeriving, InstanceSigs, OverloadedStrings,
ExistentialQuantification, FlexibleInstances #-}
module Graphics.Storyboard.Behavior where
import Control.Applicative
import Control.Concurrent.STM
import qualified Graphics.Blank as Blank
import Graphics.Blank hiding (Event)
import Graphics.Storyboard.Types
-----------------------------------------------------------------
-- The assumption is that the history timestamps are the same
-- as the main timestamp.
data TheBehaviorEnv = TheBehaviorEnv
{ theTimer :: Historic Double
, theEvent :: Historic (Maybe Blank.Event)
, theTimestamp :: Timestamp
-- , theBehaviorCavity :: Cavity Double
}
defaultBehaviorEnv :: TheBehaviorEnv
defaultBehaviorEnv = TheBehaviorEnv
{ theTimer = (0,0,0)
, theEvent = (Nothing,0,Nothing)
, theTimestamp = 0
-- , theBehaviorCavity
}
nextBehaviorEnv :: Double -> Maybe Blank.Event -> TheBehaviorEnv -> TheBehaviorEnv
nextBehaviorEnv t e env = TheBehaviorEnv
{ theTimer = consHistoric t $ theTimer env
, theEvent = consHistoric e $ theEvent env
, theTimestamp = theTimestamp env + 1
}
type Timestamp = Int
type Historic a = (a,Timestamp,a)
data Behavior :: * -> * where
Behavior :: (Cavity Double -> TheBehaviorEnv -> STM a)
-> Behavior a
TimerB :: Behavior Double
EventB :: Behavior (Maybe Blank.Event)
CavityB :: Behavior (Cavity Double)
PureB :: a -> Behavior a
timerB :: Behavior Double
timerB = TimerB
eventB :: Behavior (Maybe Blank.Event)
eventB = EventB
cavityB :: Behavior (Cavity Double)
cavityB = CavityB
evalBehavior :: Cavity Double -> TheBehaviorEnv -> Behavior a -> STM a
evalBehavior cavity env (Behavior fn) = fn cavity env
evalBehavior _ env TimerB = return $ evalHistoric env (theTimer env)
evalBehavior _ env EventB = return $ evalHistoric env (theEvent env)
evalBehavior cavity _ CavityB = return $ cavity
evalBehavior _ _ (PureB a) = return a
translateBehavior :: Coord Double -> Behavior (Canvas a) -> Behavior (Canvas a)
translateBehavior (x,y) b = (\ m -> saveRestore $ do { translate (x,y) ; m }
) <$> b
evalHistoric :: TheBehaviorEnv -> Historic a -> a
evalHistoric env (new,clk,old)
| clk - 1 == theTimestamp env = old
| clk == theTimestamp env = new
| otherwise = error "not enough history for behaviour"
consHistoric :: a -> Historic a -> Historic a
consHistoric a2 (a1,t,_) = (a2,t+1,a1)
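-- Illustrative only (not part of the original source): consing pushes the
-- current newest value into the "old" slot and bumps the timestamp,
--
-- > consHistoric 5 (3,7,1) == (5,8,3)
--
-- after which evalHistoric returns 3 at environment timestamp 7 and 5 at
-- timestamp 8.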
instance Functor Behavior where
fmap f b = pure f <*> b
instance Applicative Behavior where
pure = PureB
PureB f <*> PureB x = PureB $ f x
f <*> x = Behavior $ \ cav env -> evalBehavior cav env f <*> evalBehavior cav env x
sample :: STM a -> STM (Behavior a)
sample m = do
b <- m
var <- newTVar (b,0,b)
return $ Behavior $ \ _ env -> do
history@(_,clk,_) <- readTVar var
if clk + 1 == theTimestamp env
then do
a <- m
writeTVar var $ consHistoric a $ history
return a
else return $ evalHistoric env history
switch :: (a -> b -> b) -> b -> Behavior a -> STM (Behavior b)
switch f b bah = do
var <- newTVar (b,0,b)
return $ Behavior $ \ cav env -> do
history@(new,clk,_) <- readTVar var
if clk + 1 == theTimestamp env
then do
a <- evalBehavior cav env bah
let newest = f a new
writeTVar var $ consHistoric newest $ history
return newest
else return $ evalHistoric env history
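-- A hedged usage sketch (the fold below is chosen purely for illustration):
-- accumulating every sample of a behavior, e.g. counting observed events:
--
-- > switch (\ e n -> maybe n (const (n + 1)) e) 0 eventB :: STM (Behavior Int)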
instance Show (Behavior a) where
show _ = "Behavior{}"
data Movie p = forall b . Movie
{ movieBehavior :: Behavior b
, movieSnapshot :: b -> p
, movieStop :: b -> Bool
}
class Playing movie where
wrapMovie :: movie picture -> Movie picture
instance Playing ((->) Double) where
wrapMovie f = Movie timerB f (const False)
| tonymorris/story-board | src/Graphics/Storyboard/Behavior.hs | bsd-3-clause | 3,968 | 0 | 17 | 1,001 | 1,318 | 679 | 639 | 94 | 2 |
{-# OPTIONS_GHC -fno-warn-missing-signatures #-}
-----------------------------------------------------------------------------
-- |
-- Module : XMonad.Config.Kde
-- Copyright : (c) Spencer Janssen <[email protected]>
-- License : BSD
--
-- Maintainer : Spencer Janssen <[email protected]>
-- Stability : unstable
-- Portability : unportable
--
-- This module provides a config suitable for use with the KDE desktop
-- environment.
module XMonad.Config.Kde (
-- * Usage
-- $usage
kdeConfig,
kde4Config,
desktopLayoutModifiers
) where
import XMonad
import XMonad.Config.Desktop
import qualified Data.Map as M
-- $usage
-- To use this module, start with the following @~\/.xmonad\/xmonad.hs@:
--
-- > import XMonad
-- > import XMonad.Config.Kde
-- >
-- > main = xmonad kdeConfig
--
-- For KDE 4, replace 'kdeConfig' with 'kde4Config'
--
-- For examples of how to further customize @kdeConfig@ see "XMonad.Config.Desktop".
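--
-- A further customization sketch (illustrative only; 'modMask' is a standard
-- 'XConfig' field):
--
-- > main = xmonad kde4Config { modMask = mod4Mask }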
kdeConfig = desktopConfig
{ terminal = "konsole"
, keys = kdeKeys <+> keys desktopConfig }
kde4Config = desktopConfig
{ terminal = "konsole"
, keys = kde4Keys <+> keys desktopConfig }
kdeKeys (XConfig {modMask = modm}) = M.fromList $
[ ((modm, xK_p), spawn "dcop kdesktop default popupExecuteCommand")
, ((modm .|. shiftMask, xK_q), spawn "dcop kdesktop default logout")
]
kde4Keys (XConfig {modMask = modm}) = M.fromList $
[ ((modm, xK_p), spawn "krunner")
, ((modm .|. shiftMask, xK_q), spawn "dbus-send --print-reply --dest=org.kde.ksmserver /KSMServer org.kde.KSMServerInterface.logout int32:1 int32:0 int32:1")
]
| f1u77y/xmonad-contrib | XMonad/Config/Kde.hs | bsd-3-clause | 1,683 | 0 | 9 | 342 | 245 | 156 | 89 | 20 | 1 |
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
module Distribution.Solver.Types.Settings
( ReorderGoals(..)
, IndependentGoals(..)
, AvoidReinstalls(..)
, ShadowPkgs(..)
, StrongFlags(..)
, EnableBackjumping(..)
) where
import Distribution.Simple.Setup ( BooleanFlag(..) )
import Distribution.Compat.Binary (Binary(..))
import GHC.Generics (Generic)
newtype ReorderGoals = ReorderGoals Bool
deriving (BooleanFlag, Eq, Generic, Show)
newtype IndependentGoals = IndependentGoals Bool
deriving (BooleanFlag, Eq, Generic, Show)
newtype AvoidReinstalls = AvoidReinstalls Bool
deriving (BooleanFlag, Eq, Generic, Show)
newtype ShadowPkgs = ShadowPkgs Bool
deriving (BooleanFlag, Eq, Generic, Show)
newtype StrongFlags = StrongFlags Bool
deriving (BooleanFlag, Eq, Generic, Show)
newtype EnableBackjumping = EnableBackjumping Bool
deriving (BooleanFlag, Eq, Generic, Show)
instance Binary ReorderGoals
instance Binary IndependentGoals
instance Binary AvoidReinstalls
instance Binary ShadowPkgs
instance Binary StrongFlags
| bennofs/cabal | cabal-install/Distribution/Solver/Types/Settings.hs | bsd-3-clause | 1,091 | 0 | 6 | 158 | 291 | 170 | 121 | 29 | 0 |
module PackageTests.BuildDeps.TargetSpecificDeps3.Check where
import Test.HUnit
import PackageTests.PackageTester
import System.FilePath
import Data.List
import qualified Control.Exception as E
import Text.Regex.Posix
suite :: FilePath -> Test
suite ghcPath = TestCase $ do
let spec = PackageSpec ("PackageTests" </> "BuildDeps" </> "TargetSpecificDeps3") []
result <- cabal_build spec ghcPath
do
assertEqual "cabal build should fail - see test-log.txt" False (successful result)
assertBool "error should be in lemon.hs" $
"lemon.hs:" `isInfixOf` outputText result
assertBool "error should be \"Could not find module `System.Time\"" $
(intercalate " " $ lines $ outputText result)
=~ "Could not find module.*System.Time"
`E.catch` \exc -> do
putStrLn $ "Cabal result was "++show result
E.throwIO (exc :: E.SomeException)
| jwiegley/ghc-release | libraries/Cabal/cabal/tests/PackageTests/BuildDeps/TargetSpecificDeps3/Check.hs | gpl-3.0 | 918 | 0 | 16 | 202 | 215 | 110 | 105 | 21 | 1 |
-- | module for programming little dust clouds around nikki
module Sorts.Nikki.Dust where
import Data.Map (lookup)
import Graphics.Qt
import Utils
import Base
import Sorts.Nikki.Types
import Sorts.Nikki.Configuration
-- | create nikki's dust
addDustClouds :: Seconds -> Nikki -> IO Nikki
addDustClouds _ n = return n
-- addDustClouds now nikki = do
-- p <- getPosition $ chipmunk nikki
-- return nikki{state = newState p}
-- where
-- newState p = (state nikki){dustClouds = dustClouds' p}
-- dustClouds' p = newClouds p ++ removeOldClouds (dustClouds $ state nikki)
-- newClouds p = [newCloud p direction_]
--
-- newCloud v _ =
-- DustCloud now $ vector2position v
-- -- newCloud (Vector x y) HRight =
-- -- DustCloud now (Position x y +~ Position (fromUber (13 / 2)) (fromUber (24 / 2)) +~ cloudRenderCorrection)
-- cloudRenderCorrection = Position (- fromUber (5 / 2)) (- fromUber (5 / 2))
-- direction_ = direction $ state nikki
--
-- removeOldClouds = filter (\ c -> now - creationTime c < 4 * cloudCreationTime)
renderDustClouds :: Ptr QPainter -> Offset Double -> Seconds -> NSort -> [DustCloud] -> IO ()
renderDustClouds ptr offset now sort clouds =
fmapM_ render clouds
where
render cloud = do
let mPixmap = case Data.Map.lookup "dust" (pixmaps sort) of
                    Just pixmapList ->
                        pickLimitedAnimationFrame pixmapList cloudFrameTimes (now - creationTime cloud)
                    -- no "dust" pixmaps loaded: nothing to render
                    Nothing -> Nothing
case mPixmap of
Just pixmap ->
renderPixmap ptr offset (cloudPosition cloud) Nothing pixmap
Nothing -> return ()
| nikki-and-the-robots/nikki | src/Sorts/Nikki/Dust.hs | lgpl-3.0 | 1,652 | 1 | 20 | 412 | 256 | 138 | 118 | 20 | 2 |
module Main where
double x = x + x
| frankiesardo/seven-languages-in-seven-weeks | src/main/haskell/day1/double.hs | apache-2.0 | 40 | 0 | 5 | 14 | 16 | 9 | 7 | 2 | 1 |
<?xml version="1.0" encoding="UTF-8"?><!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<helpset version="2.0" xml:lang="fa-IR">
<title>Front-End Scanner | ZAP Extension</title>
<maps>
<homeID>top</homeID>
<mapref location="map.jhm"/>
</maps>
<view>
<name>TOC</name>
<label>Contents</label>
<type>org.zaproxy.zap.extension.help.ZapTocView</type>
<data>toc.xml</data>
</view>
<view>
<name>Index</name>
<label>Index</label>
<type>javax.help.IndexView</type>
<data>index.xml</data>
</view>
<view>
<name>Search</name>
<label>Search</label>
<type>javax.help.SearchView</type>
<data engine="com.sun.java.help.search.DefaultSearchEngine">
JavaHelpSearch
</data>
</view>
<view>
<name>Favorites</name>
<label>Favorites</label>
<type>javax.help.FavoritesView</type>
</view>
</helpset> | thc202/zap-extensions | addOns/frontendscanner/src/main/javahelp/org/zaproxy/zap/extension/frontendscanner/resources/help_fa_IR/helpset_fa_IR.hs | apache-2.0 | 978 | 78 | 67 | 159 | 417 | 211 | 206 | -1 | -1 |
{-# LANGUAGE MagicHash, UnboxedTuples #-}
-- | FFI and hmatrix helpers.
--
-- Sample usage, to upload a perspective matrix to a shader.
--
-- @ glUniformMatrix4fv 0 1 (fromIntegral gl_TRUE) \`appMatrix\` perspective 0.01 100 (pi\/2) (4\/3)
-- @
--
module Data.Packed.Foreign
( app
, appVector, appVectorLen
, appMatrix, appMatrixLen, appMatrixRaw, appMatrixRawLen
, unsafeMatrixToVector, unsafeMatrixToForeignPtr
) where
import Data.Packed.Internal
import qualified Data.Vector.Storable as S
import Foreign (Ptr, ForeignPtr, Storable)
import Foreign.C.Types (CInt)
import GHC.Base (IO(..), realWorld#)
{-# INLINE unsafeInlinePerformIO #-}
-- | If we use unsafePerformIO, it may not get inlined, so in a function that returns IO (which is the case for all safe uses of app* in this module), there would be
-- unnecessary calls to unsafePerformIO or its internals.
unsafeInlinePerformIO :: IO a -> a
unsafeInlinePerformIO (IO f) = case f realWorld# of
(# _, x #) -> x
{-# INLINE app #-}
-- | Only useful since it is left associated with a precedence of 1, unlike 'Prelude.$', which is right associative.
-- e.g.
--
-- @
-- someFunction
-- \`appMatrixLen\` m
-- \`appVectorLen\` v
-- \`app\` other
-- \`app\` arguments
-- \`app\` go here
-- @
--
-- One could also write:
--
-- @
-- (someFunction
-- \`appMatrixLen\` m
-- \`appVectorLen\` v)
-- other
-- arguments
-- (go here)
-- @
--
app :: (a -> b) -> a -> b
app f = f
{-# INLINE appVector #-}
appVector :: Storable a => (Ptr a -> b) -> Vector a -> b
appVector f x = unsafeInlinePerformIO (S.unsafeWith x (return . f))
{-# INLINE appVectorLen #-}
appVectorLen :: Storable a => (CInt -> Ptr a -> b) -> Vector a -> b
appVectorLen f x = unsafeInlinePerformIO (S.unsafeWith x (return . f (fromIntegral (S.length x))))
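-- A hypothetical usage sketch (the foreign import below is invented for
-- illustration; it is not part of this module): a C function expecting a
-- length and a pointer can be applied directly to a 'Vector':
--
-- > foreign import ccall unsafe "vector_sum"
-- >     c_vector_sum :: CInt -> Ptr Double -> IO Double
-- >
-- > sumV :: Vector Double -> IO Double
-- > sumV v = c_vector_sum `appVectorLen` v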
{-# INLINE appMatrix #-}
appMatrix :: Element a => (Ptr a -> b) -> Matrix a -> b
appMatrix f x = unsafeInlinePerformIO (S.unsafeWith (flatten x) (return . f))
{-# INLINE appMatrixLen #-}
appMatrixLen :: Element a => (CInt -> CInt -> Ptr a -> b) -> Matrix a -> b
appMatrixLen f x = unsafeInlinePerformIO (S.unsafeWith (flatten x) (return . f r c))
where
r = fromIntegral (rows x)
c = fromIntegral (cols x)
{-# INLINE appMatrixRaw #-}
appMatrixRaw :: Storable a => (Ptr a -> b) -> Matrix a -> b
appMatrixRaw f x = unsafeInlinePerformIO (S.unsafeWith (xdat x) (return . f))
{-# INLINE appMatrixRawLen #-}
appMatrixRawLen :: Element a => (CInt -> CInt -> Ptr a -> b) -> Matrix a -> b
appMatrixRawLen f x = unsafeInlinePerformIO (S.unsafeWith (xdat x) (return . f r c))
where
r = fromIntegral (rows x)
c = fromIntegral (cols x)
infixl 1 `app`
infixl 1 `appVector`
infixl 1 `appMatrix`
infixl 1 `appMatrixRaw`
{-# INLINE unsafeMatrixToVector #-}
-- | This will disregard the order of the matrix, and simply return it as-is.
-- If the order of the matrix is RowMajor, this function is identical to 'flatten'.
unsafeMatrixToVector :: Matrix a -> Vector a
unsafeMatrixToVector = xdat
{-# INLINE unsafeMatrixToForeignPtr #-}
unsafeMatrixToForeignPtr :: Storable a => Matrix a -> (ForeignPtr a, Int)
unsafeMatrixToForeignPtr m = S.unsafeToForeignPtr0 (xdat m)
| mightymoose/liquidhaskell | benchmarks/hmatrix-0.15.0.1/lib/Data/Packed/Foreign.hs | bsd-3-clause | 3,201 | 0 | 15 | 619 | 814 | 447 | 367 | 50 | 1 |
module BadExprArg where
{-@ type ListN a N = {v:[a] | len v = N} @-}
{-@ foo :: ListN 0 0 @-}
foo :: [a]
foo = undefined
| mightymoose/liquidhaskell | tests/crash/BadExprArg.hs | bsd-3-clause | 123 | 0 | 5 | 31 | 19 | 13 | 6 | 3 | 1 |
{-# LANGUAGE ImplicitParams, RankNTypes #-}
-- Trac #1445
module Bug where
f :: () -> (?p :: ()) => () -> ()
f _ _ = ()
g :: (?p :: ()) => ()
g = f () ()
| forked-upstream-packages-for-ghcjs/ghc | testsuite/tests/typecheck/should_compile/tc230.hs | bsd-3-clause | 158 | 0 | 8 | 43 | 80 | 46 | 34 | 6 | 1 |